From e6f35368bb0e12c61aa127ce5d0fc459f7e56e6a Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Fri, 26 Apr 2024 11:39:01 -0700 Subject: [PATCH 0001/1133] mv all under prelude for cherry-picking --- .buckconfig => prelude/.buckconfig | 0 .gitignore => prelude/.gitignore | 0 BUCK => prelude/BUCK | 0 CHANGELOG.md => prelude/CHANGELOG.md | 0 CODE_OF_CONDUCT.md => prelude/CODE_OF_CONDUCT.md | 0 CONTRIBUTING.md => prelude/CONTRIBUTING.md | 0 LICENSE-APACHE => prelude/LICENSE-APACHE | 0 LICENSE-MIT => prelude/LICENSE-MIT | 0 README.md => prelude/README.md | 0 {abi => prelude/abi}/BUCK.v2 | 0 {abi => prelude/abi}/constraints/BUCK.v2 | 0 alias.bzl => prelude/alias.bzl | 0 {android => prelude/android}/aapt2_link.bzl | 0 {android => prelude/android}/android.bzl | 0 {android => prelude/android}/android_aar.bzl | 0 {android => prelude/android}/android_apk.bzl | 0 {android => prelude/android}/android_binary.bzl | 0 .../android_binary_native_library_rules.bzl | 0 .../android}/android_binary_resources_rules.bzl | 0 .../android}/android_build_config.bzl | 0 {android => prelude/android}/android_bundle.bzl | 0 .../android}/android_instrumentation_apk.bzl | 0 .../android}/android_instrumentation_test.bzl | 0 {android => prelude/android}/android_library.bzl | 0 {android => prelude/android}/android_manifest.bzl | 0 .../android}/android_prebuilt_aar.bzl | 0 {android => prelude/android}/android_providers.bzl | 0 {android => prelude/android}/android_resource.bzl | 0 {android => prelude/android}/android_toolchain.bzl | 0 {android => prelude/android}/apk_genrule.bzl | 0 .../android}/build_only_native_code.bzl | 0 {android => prelude/android}/configuration.bzl | 0 {android => prelude/android}/constraints/BUCK.v2 | 0 {android => prelude/android}/cpu_filters.bzl | 0 {android => prelude/android}/dex_rules.bzl | 0 {android => prelude/android}/exopackage.bzl | 0 {android => prelude/android}/gen_aidl.bzl | 0 {android => prelude/android}/min_sdk_version.bzl | 0 
.../android}/prebuilt_native_library.bzl | 0 .../android}/preprocess_java_classes.bzl | 0 {android => prelude/android}/proguard.bzl | 0 {android => prelude/android}/r_dot_java.bzl | 0 {android => prelude/android}/robolectric_test.bzl | 0 {android => prelude/android}/tools/BUCK.v2 | 0 .../buck_generated/AppWithoutResourcesStub.java | 0 .../android}/tools/combine_native_library_dirs.py | 0 {android => prelude/android}/tools/filter_dex.py | 0 .../android}/tools/filter_extra_resources.py | 0 .../tools/filter_prebuilt_native_library_dir.py | 0 .../android}/tools/merge_sequence.py | 0 .../tools/native_libs_as_assets_metadata.py | 0 {android => prelude/android}/tools/unpack_aar.py | 0 {android => prelude/android}/util.bzl | 0 {android => prelude/android}/voltron.bzl | 0 {apple => prelude/apple}/apple_asset_catalog.bzl | 0 .../apple_asset_catalog_compilation_options.bzl | 0 .../apple}/apple_asset_catalog_types.bzl | 0 {apple => prelude/apple}/apple_binary.bzl | 0 .../apple}/apple_buck2_compatibility.bzl | 0 {apple => prelude/apple}/apple_bundle.bzl | 0 {apple => prelude/apple}/apple_bundle_attrs.bzl | 0 {apple => prelude/apple}/apple_bundle_config.bzl | 0 .../apple}/apple_bundle_destination.bzl | 0 {apple => prelude/apple}/apple_bundle_part.bzl | 0 {apple => prelude/apple}/apple_bundle_resources.bzl | 0 {apple => prelude/apple}/apple_bundle_types.bzl | 0 {apple => prelude/apple}/apple_bundle_utility.bzl | 0 .../apple}/apple_code_signing_types.bzl | 0 {apple => prelude/apple}/apple_core_data.bzl | 0 {apple => prelude/apple}/apple_core_data_types.bzl | 0 {apple => prelude/apple}/apple_dsym.bzl | 0 {apple => prelude/apple}/apple_dsym_config.bzl | 0 {apple => prelude/apple}/apple_entitlements.bzl | 0 .../apple}/apple_framework_versions.bzl | 0 {apple => prelude/apple}/apple_frameworks.bzl | 0 {apple => prelude/apple}/apple_genrule_deps.bzl | 0 {apple => prelude/apple}/apple_info_plist.bzl | 0 .../apple_info_plist_substitutions_parsing.bzl | 0 {apple => 
prelude/apple}/apple_library.bzl | 0 {apple => prelude/apple}/apple_macro_layer.bzl | 0 {apple => prelude/apple}/apple_modular_utility.bzl | 0 {apple => prelude/apple}/apple_package.bzl | 0 {apple => prelude/apple}/apple_package_config.bzl | 0 {apple => prelude/apple}/apple_resource.bzl | 0 {apple => prelude/apple}/apple_resource_bundle.bzl | 0 {apple => prelude/apple}/apple_resource_types.bzl | 0 {apple => prelude/apple}/apple_resource_utility.bzl | 0 {apple => prelude/apple}/apple_rules_impl.bzl | 0 .../apple}/apple_rules_impl_utility.bzl | 0 {apple => prelude/apple}/apple_sdk.bzl | 0 {apple => prelude/apple}/apple_sdk_metadata.bzl | 0 {apple => prelude/apple}/apple_stripping.bzl | 0 {apple => prelude/apple}/apple_swift_stdlib.bzl | 0 .../apple}/apple_target_sdk_version.bzl | 0 {apple => prelude/apple}/apple_test.bzl | 0 {apple => prelude/apple}/apple_toolchain.bzl | 0 {apple => prelude/apple}/apple_toolchain_types.bzl | 0 .../apple}/apple_universal_binaries.bzl | 0 .../apple}/apple_universal_executable.bzl | 0 {apple => prelude/apple}/apple_utility.bzl | 0 {apple => prelude/apple}/debug.bzl | 0 {apple => prelude/apple}/modulemap.bzl | 0 .../apple}/prebuilt_apple_framework.bzl | 0 {apple => prelude/apple}/resource_groups.bzl | 0 {apple => prelude/apple}/scene_kit_assets.bzl | 0 {apple => prelude/apple}/scene_kit_assets_types.bzl | 0 .../apple}/swift/apple_sdk_clang_module.bzl | 0 .../apple}/swift/apple_sdk_modules_utility.bzl | 0 .../apple}/swift/apple_sdk_swift_module.bzl | 0 .../apple}/swift/swift_compilation.bzl | 0 .../apple}/swift/swift_debug_info_utils.bzl | 0 .../apple}/swift/swift_incremental_support.bzl | 0 {apple => prelude/apple}/swift/swift_module_map.bzl | 0 .../apple}/swift/swift_pcm_compilation.bzl | 0 .../apple}/swift/swift_pcm_compilation_types.bzl | 0 {apple => prelude/apple}/swift/swift_runtime.bzl | 0 .../apple}/swift/swift_sdk_pcm_compilation.bzl | 0 .../swift/swift_sdk_swiftinterface_compilation.bzl | 0 {apple => 
prelude/apple}/swift/swift_toolchain.bzl | 0 .../apple}/swift/swift_toolchain_macro_layer.bzl | 0 .../apple}/swift/swift_toolchain_types.bzl | 0 {apple => prelude/apple}/swift/swift_types.bzl | 0 {apple => prelude/apple}/tools/BUCK.v2 | 0 {apple => prelude/apple}/tools/bundling/BUCK.v2 | 0 .../apple}/tools/bundling/action_metadata.py | 0 .../apple}/tools/bundling/action_metadata_test.py | 0 .../apple}/tools/bundling/assemble_bundle.py | 0 .../apple}/tools/bundling/assemble_bundle_types.py | 0 .../apple}/tools/bundling/incremental_state.py | 0 .../apple}/tools/bundling/incremental_state_test.py | 0 .../apple}/tools/bundling/incremental_utils.py | 0 .../apple}/tools/bundling/incremental_utils_test.py | 0 {apple => prelude/apple}/tools/bundling/main.py | 0 .../apple}/tools/bundling/swift_support.py | 0 .../newer_version_action_metadata.json | 0 .../newer_version_incremental_state.json | 0 .../tools/bundling/test_resources/the.broken_json | 0 .../test_resources/valid_action_metadata.json | 0 .../test_resources/valid_incremental_state.json | 0 {apple => prelude/apple}/tools/code_signing/BUCK.v2 | 0 .../apple}/tools/code_signing/app_id.py | 0 .../apple}/tools/code_signing/app_id_test.py | 0 .../apple}/tools/code_signing/apple_platform.py | 0 .../apple}/tools/code_signing/codesign_bundle.py | 0 .../tools/code_signing/codesign_command_factory.py | 0 .../apple}/tools/code_signing/fast_adhoc.py | 0 .../apple}/tools/code_signing/identity.py | 0 .../apple}/tools/code_signing/identity_test.py | 0 .../tools/code_signing/info_plist_metadata.py | 0 .../tools/code_signing/info_plist_metadata_test.py | 0 .../list_codesign_identities_command_factory.py | 0 {apple => prelude/apple}/tools/code_signing/main.py | 0 .../prepare_code_signing_entitlements.py | 0 .../prepare_code_signing_entitlements_test.py | 0 .../apple}/tools/code_signing/prepare_info_plist.py | 0 .../tools/code_signing/prepare_info_plist_test.py | 0 .../provisioning_profile_diagnostics.py | 0 
.../code_signing/provisioning_profile_metadata.py | 0 .../provisioning_profile_metadata_test.py | 0 .../code_signing/provisioning_profile_selection.py | 0 .../provisioning_profile_selection_test.py | 0 .../read_provisioning_profile_command_factory.py | 0 .../code_signing/test_resources/Entitlements.plist | 0 .../test_resources/qualified_sample.mobileprovision | 0 .../test_resources/sample.mobileprovision | 0 {apple => prelude/apple}/tools/defs.bzl | 0 {apple => prelude/apple}/tools/dry_codesign_tool.py | 0 .../apple}/tools/info_plist_processor/BUCK.v2 | 0 .../apple}/tools/info_plist_processor/main.py | 0 .../apple}/tools/info_plist_processor/preprocess.py | 0 .../tools/info_plist_processor/preprocess_test.py | 0 .../apple}/tools/info_plist_processor/process.py | 0 .../tools/info_plist_processor/process_test.py | 0 {apple => prelude/apple}/tools/ipa_package_maker.py | 0 {apple => prelude/apple}/tools/make_modulemap.py | 0 .../apple}/tools/make_swift_comp_db.py | 0 {apple => prelude/apple}/tools/make_vfsoverlay.py | 0 {apple => prelude/apple}/tools/plistlib_utils.py | 0 .../apple}/tools/re_compatibility_utils/BUCK | 0 .../apple}/tools/re_compatibility_utils/writable.py | 0 .../apple}/tools/selective_debugging/BUCK.v2 | 0 .../apple}/tools/selective_debugging/macho.py | 0 .../tools/selective_debugging/macho_parser.py | 0 .../apple}/tools/selective_debugging/main.py | 0 .../apple}/tools/selective_debugging/scrubber.py | 0 .../tools/selective_debugging/scrubber_test.py | 0 .../apple}/tools/selective_debugging/spec.py | 0 .../apple}/tools/selective_debugging/spec_test.py | 0 .../selective_debugging/test_resources/HelloWorld | Bin .../test_resources/focused_spec.json | 0 .../test_resources/focused_targets.json | 0 .../test_resources/focused_targets_empty.json | 0 .../apple}/tools/selective_debugging/utils.py | 0 .../tools/split_arch_combine_dsym_bundles_tool.py | 0 {apple => prelude/apple}/tools/swift_exec.sh | 0 .../apple}/tools/swift_objc_header_postprocess.py | 0 
.../apple}/user/apple_resource_bundle.bzl | 0 .../apple}/user/apple_resource_transition.bzl | 0 .../apple}/user/apple_selected_debug_path_file.bzl | 0 .../apple}/user/apple_selective_debugging.bzl | 0 {apple => prelude/apple}/user/apple_simulators.bzl | 0 .../apple}/user/apple_toolchain_override.bzl | 0 {apple => prelude/apple}/user/apple_tools.bzl | 0 .../apple}/user/apple_watchos_bundle.bzl | 0 .../apple}/user/cpu_split_transition.bzl | 0 .../apple}/user/resource_group_map.bzl | 0 {apple => prelude/apple}/user/watch_transition.bzl | 0 {apple => prelude/apple}/xcode.bzl | 0 {apple => prelude/apple}/xcode_postbuild_script.bzl | 0 {apple => prelude/apple}/xcode_prebuild_script.bzl | 0 {apple => prelude/apple}/xctest_swift_support.bzl | 0 artifact_tset.bzl => prelude/artifact_tset.bzl | 0 artifacts.bzl => prelude/artifacts.bzl | 0 asserts.bzl => prelude/asserts.bzl | 0 attributes.bzl => prelude/attributes.bzl | 0 build_mode.bzl => prelude/build_mode.bzl | 0 builtin.bzl => prelude/builtin.bzl | 0 cache_mode.bzl => prelude/cache_mode.bzl | 0 command_alias.bzl => prelude/command_alias.bzl | 0 .../configurations}/rules.bzl | 0 {configurations => prelude/configurations}/util.bzl | 0 {cpu => prelude/cpu}/BUCK.v2 | 0 {cpu => prelude/cpu}/constraints/BUCK.v2 | 0 {csharp => prelude/csharp}/csharp.bzl | 0 {csharp => prelude/csharp}/csharp_providers.bzl | 0 {csharp => prelude/csharp}/toolchain.bzl | 0 {cxx => prelude/cxx}/anon_link.bzl | 0 {cxx => prelude/cxx}/archive.bzl | 0 {cxx => prelude/cxx}/argsfiles.bzl | 0 {cxx => prelude/cxx}/attr_selection.bzl | 0 {cxx => prelude/cxx}/bitcode.bzl | 0 {cxx => prelude/cxx}/comp_db.bzl | 0 {cxx => prelude/cxx}/compile.bzl | 0 {cxx => prelude/cxx}/compiler.bzl | 0 {cxx => prelude/cxx}/cxx.bzl | 0 {cxx => prelude/cxx}/cxx_bolt.bzl | 0 {cxx => prelude/cxx}/cxx_context.bzl | 0 {cxx => prelude/cxx}/cxx_executable.bzl | 0 {cxx => prelude/cxx}/cxx_library.bzl | 0 {cxx => prelude/cxx}/cxx_library_utility.bzl | 0 {cxx => 
prelude/cxx}/cxx_link_utility.bzl | 0 {cxx => prelude/cxx}/cxx_sources.bzl | 0 {cxx => prelude/cxx}/cxx_toolchain.bzl | 0 {cxx => prelude/cxx}/cxx_toolchain_macro_layer.bzl | 0 {cxx => prelude/cxx}/cxx_toolchain_types.bzl | 0 {cxx => prelude/cxx}/cxx_types.bzl | 0 {cxx => prelude/cxx}/debug.bzl | 0 {cxx => prelude/cxx}/dist_lto/README.md | 0 {cxx => prelude/cxx}/dist_lto/dist_lto.bzl | 0 {cxx => prelude/cxx}/dist_lto/tools.bzl | 0 {cxx => prelude/cxx}/dist_lto/tools/BUCK.v2 | 0 {cxx => prelude/cxx}/dist_lto/tools/__init__.py | 0 .../cxx}/dist_lto/tools/dist_lto_copy.py | 0 {cxx => prelude/cxx}/dist_lto/tools/dist_lto_opt.py | 0 .../cxx}/dist_lto/tools/dist_lto_planner.py | 0 .../cxx}/dist_lto/tools/dist_lto_prepare.py | 0 .../cxx}/dist_lto/tools/tests/test_dist_lto_opt.py | 0 {cxx => prelude/cxx}/dwp.bzl | 0 {cxx => prelude/cxx}/groups.bzl | 0 {cxx => prelude/cxx}/headers.bzl | 0 {cxx => prelude/cxx}/link.bzl | 0 {cxx => prelude/cxx}/link_groups.bzl | 0 {cxx => prelude/cxx}/link_types.bzl | 0 {cxx => prelude/cxx}/linker.bzl | 0 {cxx => prelude/cxx}/omnibus.bzl | 0 {cxx => prelude/cxx}/platform.bzl | 0 {cxx => prelude/cxx}/prebuilt_cxx_library_group.bzl | 0 {cxx => prelude/cxx}/preprocessor.bzl | 0 {cxx => prelude/cxx}/shared_library_interface.bzl | 0 {cxx => prelude/cxx}/symbols.bzl | 0 {cxx => prelude/cxx}/tools/BUCK.v2 | 0 {cxx => prelude/cxx}/tools/defs.bzl | 0 {cxx => prelude/cxx}/tools/dep_file_processor.py | 0 {cxx => prelude/cxx}/tools/dep_file_utils.py | 0 {cxx => prelude/cxx}/tools/hmap_wrapper.py | 0 {cxx => prelude/cxx}/tools/linker_wrapper.py | 0 {cxx => prelude/cxx}/tools/make_comp_db.py | 0 {cxx => prelude/cxx}/tools/makefile_to_dep_file.py | 0 .../cxx}/tools/show_headers_to_dep_file.py | 0 .../cxx}/tools/show_includes_to_dep_file.py | 0 .../cxx}/user/cxx_toolchain_override.bzl | 0 {cxx => prelude/cxx}/user/link_group_map.bzl | 0 {cxx => prelude/cxx}/xcode.bzl | 0 {debugging => prelude/debugging}/common.bzl | 0 {debugging => 
prelude/debugging}/ensure_dwp.bzl | 0 {debugging => prelude/debugging}/fdb.bxl | 0 .../debugging}/inspect_dbg_exec.bzl | 0 .../debugging}/inspect_default.bzl | 0 {debugging => prelude/debugging}/inspect_java.bzl | 0 {debugging => prelude/debugging}/labels.bzl | 0 {debugging => prelude/debugging}/types.bzl | 0 {decls => prelude/decls}/android_common.bzl | 0 {decls => prelude/decls}/android_rules.bzl | 0 {decls => prelude/decls}/apple_common.bzl | 0 {decls => prelude/decls}/common.bzl | 0 {decls => prelude/decls}/core_rules.bzl | 0 {decls => prelude/decls}/cxx_common.bzl | 0 {decls => prelude/decls}/cxx_rules.bzl | 0 {decls => prelude/decls}/d_common.bzl | 0 {decls => prelude/decls}/d_rules.bzl | 0 {decls => prelude/decls}/dotnet_rules.bzl | 0 {decls => prelude/decls}/erlang_rules.bzl | 0 {decls => prelude/decls}/genrule_common.bzl | 0 {decls => prelude/decls}/git_rules.bzl | 0 {decls => prelude/decls}/go_common.bzl | 0 {decls => prelude/decls}/go_rules.bzl | 0 {decls => prelude/decls}/groovy_rules.bzl | 0 {decls => prelude/decls}/halide_rules.bzl | 0 {decls => prelude/decls}/haskell_common.bzl | 0 {decls => prelude/decls}/haskell_rules.bzl | 0 {decls => prelude/decls}/ios_rules.bzl | 0 {decls => prelude/decls}/java_rules.bzl | 0 {decls => prelude/decls}/js_rules.bzl | 0 {decls => prelude/decls}/jvm_common.bzl | 0 {decls => prelude/decls}/kotlin_rules.bzl | 0 {decls => prelude/decls}/lua_common.bzl | 0 {decls => prelude/decls}/lua_rules.bzl | 0 {decls => prelude/decls}/native_common.bzl | 0 {decls => prelude/decls}/ocaml_common.bzl | 0 {decls => prelude/decls}/ocaml_rules.bzl | 0 {decls => prelude/decls}/python_common.bzl | 0 {decls => prelude/decls}/python_rules.bzl | 0 {decls => prelude/decls}/re_test_common.bzl | 0 {decls => prelude/decls}/remote_common.bzl | 0 {decls => prelude/decls}/rust_common.bzl | 0 {decls => prelude/decls}/rust_rules.bzl | 0 {decls => prelude/decls}/scala_rules.bzl | 0 {decls => prelude/decls}/shell_rules.bzl | 0 {decls => 
prelude/decls}/toolchains_common.bzl | 0 {decls => prelude/decls}/uncategorized_rules.bzl | 0 {dist => prelude/dist}/dist_info.bzl | 0 {docs => prelude/docs}/rules.bzl | 0 {erlang => prelude/erlang}/applications/BUCK.v2 | 0 {erlang => prelude/erlang}/common_test/.elp.toml | 0 .../erlang}/common_test/common/BUCK.v2 | 0 .../common/include/artifact_annotations.hrl | 0 .../common_test/common/include/buck_ct_records.hrl | 0 .../common_test/common/include/tpx_records.hrl | 0 .../common_test/common/src/artifact_annotations.erl | 0 .../common_test/common/src/bounded_buffer.erl | 0 .../common_test/common/src/buck_ct_parser.erl | 0 .../common_test/common/src/buck_ct_provider.erl | 0 .../common_test/common/src/ct_error_printer.erl | 0 .../common_test/common/src/execution_logs.erl | 0 .../erlang}/common_test/common/src/io_buffer.erl | 0 .../common/src/test_artifact_directory.erl | 0 .../erlang}/common_test/common/src/test_logger.erl | 0 .../erlang}/common_test/cth_hooks/BUCK.v2 | 0 .../erlang}/common_test/cth_hooks/src/cth_tpx.erl | 0 .../common_test/cth_hooks/src/cth_tpx_role.erl | 0 .../common_test/cth_hooks/src/cth_tpx_server.erl | 0 .../common_test/cth_hooks/src/cth_tpx_test_tree.erl | 0 .../common_test/cth_hooks/src/method_ids.hrl | 0 .../erlang}/common_test/test_binary/BUCK.v2 | 0 .../common_test/test_binary/src/json_interfacer.erl | 0 .../test_binary/src/junit_interfacer.erl | 0 .../common_test/test_binary/src/list_test.erl | 0 .../test_binary/src/listing_interfacer.erl | 0 .../common_test/test_binary/src/test_binary.erl | 0 .../common_test/test_binary/src/test_runner.erl | 0 .../erlang}/common_test/test_cli_lib/BUCK.v2 | 0 .../erlang}/common_test/test_cli_lib/src/test.erl | 0 .../erlang}/common_test/test_exec/BUCK.v2 | 0 .../erlang}/common_test/test_exec/src/ct_daemon.erl | 0 .../common_test/test_exec/src/ct_daemon_core.erl | 0 .../common_test/test_exec/src/ct_daemon_hooks.erl | 0 .../common_test/test_exec/src/ct_daemon_logger.erl | 0 
.../common_test/test_exec/src/ct_daemon_node.erl | 0 .../common_test/test_exec/src/ct_daemon_printer.erl | 0 .../common_test/test_exec/src/ct_daemon_runner.erl | 0 .../common_test/test_exec/src/ct_executor.erl | 0 .../erlang}/common_test/test_exec/src/ct_runner.erl | 0 .../common_test/test_exec/src/epmd_manager.erl | 0 .../common_test/test_exec/src/test_exec.app.src | 0 .../erlang}/common_test/test_exec/src/test_exec.erl | 0 .../common_test/test_exec/src/test_exec_sup.erl | 0 {erlang => prelude/erlang}/erlang.bzl | 0 {erlang => prelude/erlang}/erlang_application.bzl | 0 .../erlang}/erlang_application_includes.bzl | 0 {erlang => prelude/erlang}/erlang_build.bzl | 0 {erlang => prelude/erlang}/erlang_dependencies.bzl | 0 {erlang => prelude/erlang}/erlang_escript.bzl | 0 {erlang => prelude/erlang}/erlang_info.bzl | 0 {erlang => prelude/erlang}/erlang_ls.config | 0 .../erlang}/erlang_otp_application.bzl | 0 {erlang => prelude/erlang}/erlang_release.bzl | 0 {erlang => prelude/erlang}/erlang_shell.bzl | 0 {erlang => prelude/erlang}/erlang_tests.bzl | 0 {erlang => prelude/erlang}/erlang_toolchain.bzl | 0 {erlang => prelude/erlang}/erlang_utils.bzl | 0 {erlang => prelude/erlang}/shell/BUCK.v2 | 0 {erlang => prelude/erlang}/shell/shell.bxl | 0 .../erlang}/shell/src/shell_buck2_utils.erl | 0 .../erlang}/shell/src/user_default.erl | 0 {erlang => prelude/erlang}/toolchain/BUCK.v2 | 0 .../erlang}/toolchain/app_src_builder.escript | 0 .../erlang}/toolchain/boot_script_builder.escript | 0 .../erlang}/toolchain/dependency_analyzer.escript | 0 .../erlang}/toolchain/dependency_finalizer.escript | 0 .../erlang}/toolchain/edoc_cli.escript | 0 .../erlang}/toolchain/edoc_doclet_chunks.erl | 0 .../erlang}/toolchain/edoc_report.erl | 0 .../erlang}/toolchain/erlang_ls.config | 0 .../erlang}/toolchain/erlc_trampoline.sh | 0 .../erlang}/toolchain/escript_builder.escript | 0 .../erlang}/toolchain/include_erts.escript | 0 .../toolchain/release_variables_builder.escript | 0 
.../erlang}/toolchain/transform_project_root.erl | 0 export_exe.bzl => prelude/export_exe.bzl | 0 export_file.bzl => prelude/export_file.bzl | 0 filegroup.bzl => prelude/filegroup.bzl | 0 genrule.bzl => prelude/genrule.bzl | 0 .../genrule_local_labels.bzl | 0 .../genrule_toolchain.bzl | 0 genrule_types.bzl => prelude/genrule_types.bzl | 0 {git => prelude/git}/git_fetch.bzl | 0 {git => prelude/git}/tools/BUCK.v2 | 0 {git => prelude/git}/tools/git_fetch.py | 0 {go => prelude/go}/cgo_library.bzl | 0 {go => prelude/go}/compile.bzl | 0 {go => prelude/go}/coverage.bzl | 0 {go => prelude/go}/go_binary.bzl | 0 {go => prelude/go}/go_exported_library.bzl | 0 {go => prelude/go}/go_library.bzl | 0 {go => prelude/go}/go_test.bzl | 0 {go => prelude/go}/link.bzl | 0 {go => prelude/go}/packages.bzl | 0 {go => prelude/go}/toolchain.bzl | 0 {go => prelude/go}/tools/BUCK.v2 | 0 {go => prelude/go}/tools/cgo_wrapper.py | 0 {go => prelude/go}/tools/compile_wrapper.py | 0 {go => prelude/go}/tools/cover_srcs.py | 0 {go => prelude/go}/tools/filter_srcs.py | 0 {go => prelude/go}/tools/testmaingen.go | 0 {haskell => prelude/haskell}/compile.bzl | 0 {haskell => prelude/haskell}/haskell.bzl | 0 {haskell => prelude/haskell}/haskell_ghci.bzl | 0 {haskell => prelude/haskell}/haskell_haddock.bzl | 0 {haskell => prelude/haskell}/haskell_ide.bzl | 0 {haskell => prelude/haskell}/ide/README.md | 0 {haskell => prelude/haskell}/ide/hie.yaml | 0 {haskell => prelude/haskell}/ide/ide.bxl | 0 {haskell => prelude/haskell}/link_info.bzl | 0 {haskell => prelude/haskell}/toolchain.bzl | 0 {haskell => prelude/haskell}/tools/BUCK.v2 | 0 .../haskell}/tools/script_template_processor.py | 0 {haskell => prelude/haskell}/util.bzl | 0 .../http_archive}/exec_deps.bzl | 0 .../http_archive}/http_archive.bzl | 0 .../http_archive}/tools/BUCK.v2 | 0 .../http_archive}/tools/create_exclusion_list.py | 0 http_file.bzl => prelude/http_file.bzl | 0 .../ide_integrations}/xcode.bzl | 0 is_buck2.bzl => prelude/is_buck2.bzl | 0 
.../is_buck2_internal.bzl | 0 .../is_full_meta_repo.bzl | 0 {java => prelude/java}/class_to_srcs.bzl | 0 {java => prelude/java}/dex.bzl | 0 {java => prelude/java}/dex_toolchain.bzl | 0 {java => prelude/java}/gwt_binary.bzl | 0 {java => prelude/java}/jar_genrule.bzl | 0 {java => prelude/java}/java.bzl | 0 {java => prelude/java}/java_binary.bzl | 0 {java => prelude/java}/java_library.bzl | 0 {java => prelude/java}/java_providers.bzl | 0 {java => prelude/java}/java_resources.bzl | 0 {java => prelude/java}/java_test.bzl | 0 {java => prelude/java}/java_toolchain.bzl | 0 {java => prelude/java}/javacd_jar_creator.bzl | 0 {java => prelude/java}/keystore.bzl | 0 .../java}/plugins/java_annotation_processor.bzl | 0 {java => prelude/java}/plugins/java_plugin.bzl | 0 {java => prelude/java}/prebuilt_jar.bzl | 0 {java => prelude/java}/tools/BUCK.v2 | 0 {java => prelude/java}/tools/compile_and_package.py | 0 {java => prelude/java}/tools/fat_jar.py | 0 .../java}/tools/gen_class_to_source_map.py | 0 {java => prelude/java}/tools/list_class_names.py | 0 .../java}/tools/merge_class_to_source_maps.py | 0 {java => prelude/java}/tools/utils.py | 0 {java => prelude/java}/utils/java_more_utils.bzl | 0 {java => prelude/java}/utils/java_utils.bzl | 0 {js => prelude/js}/js.bzl | 0 {js => prelude/js}/js_bundle.bzl | 0 {js => prelude/js}/js_bundle_genrule.bzl | 0 {js => prelude/js}/js_library.bzl | 0 {js => prelude/js}/js_providers.bzl | 0 {js => prelude/js}/js_utils.bzl | 0 {julia => prelude/julia}/julia.bzl | 0 {julia => prelude/julia}/julia_binary.bzl | 0 {julia => prelude/julia}/julia_info.bzl | 0 {julia => prelude/julia}/julia_library.bzl | 0 {julia => prelude/julia}/julia_test.bzl | 0 {julia => prelude/julia}/julia_toolchain.bzl | 0 {julia => prelude/julia}/tools/BUCK.v2 | 0 {julia => prelude/julia}/tools/parse_julia_cmd.py | 0 {jvm => prelude/jvm}/cd_jar_creator_util.bzl | 0 {jvm => prelude/jvm}/nullsafe.bzl | 0 {kotlin => prelude/kotlin}/kotlin.bzl | 0 {kotlin => 
prelude/kotlin}/kotlin_library.bzl | 0 {kotlin => prelude/kotlin}/kotlin_test.bzl | 0 {kotlin => prelude/kotlin}/kotlin_toolchain.bzl | 0 {kotlin => prelude/kotlin}/kotlin_utils.bzl | 0 {kotlin => prelude/kotlin}/kotlincd_jar_creator.bzl | 0 .../kotlin}/tools/compile_kotlin/BUCK.v2 | 0 .../kotlin}/tools/compile_kotlin/compile_kotlin.py | 0 {kotlin => prelude/kotlin}/tools/defs.bzl | 0 .../kotlin}/tools/kapt_base64_encoder/BUCK.v2 | 0 .../com/facebook/kapt/KaptBase64Encoder.java | 0 .../linking}/execution_preference.bzl | 0 {linking => prelude/linking}/link_groups.bzl | 0 {linking => prelude/linking}/link_info.bzl | 0 {linking => prelude/linking}/linkable_graph.bzl | 0 {linking => prelude/linking}/linkables.bzl | 0 {linking => prelude/linking}/lto.bzl | 0 {linking => prelude/linking}/shared_libraries.bzl | 0 {linking => prelude/linking}/strip.bzl | 0 local_only.bzl => prelude/local_only.bzl | 0 {lua => prelude/lua}/cxx_lua_extension.bzl | 0 {lua => prelude/lua}/lua_binary.bzl | 0 {lua => prelude/lua}/lua_library.bzl | 0 native.bzl => prelude/native.bzl | 0 {ocaml => prelude/ocaml}/attrs.bzl | 0 {ocaml => prelude/ocaml}/makefile.bzl | 0 {ocaml => prelude/ocaml}/ocaml.bzl | 0 {ocaml => prelude/ocaml}/ocaml_toolchain_types.bzl | 0 {os => prelude/os}/BUCK.v2 | 0 {os => prelude/os}/constraints/BUCK.v2 | 0 {os_lookup => prelude/os_lookup}/defs.bzl | 0 {os_lookup => prelude/os_lookup}/targets/BUCK.v2 | 0 paths.bzl => prelude/paths.bzl | 0 {platforms => prelude/platforms}/BUCK | 0 {platforms => prelude/platforms}/BUCK.v2 | 0 {platforms => prelude/platforms}/apple/arch.bzl | 0 {platforms => prelude/platforms}/apple/sdk.bzl | 0 {platforms => prelude/platforms}/defs.bzl | 0 {playground => prelude/playground}/test.bxl | 0 prelude.bzl => prelude/prelude.bzl | 0 .../pull_request_template.md | 0 {python => prelude/python}/compile.bzl | 0 {python => prelude/python}/cxx_python_extension.bzl | 0 {python => prelude/python}/interface.bzl | 0 {python => 
prelude/python}/make_py_package.bzl | 0 {python => prelude/python}/manifest.bzl | 0 {python => prelude/python}/native_python_util.bzl | 0 {python => prelude/python}/needed_coverage.bzl | 0 .../python}/prebuilt_python_library.bzl | 0 {python => prelude/python}/python.bzl | 0 {python => prelude/python}/python_binary.bzl | 0 {python => prelude/python}/python_library.bzl | 0 .../python}/python_needed_coverage_test.bzl | 0 {python => prelude/python}/python_test.bzl | 0 {python => prelude/python}/runtime/BUCK.v2 | 0 .../python}/runtime/__par__/bootstrap.py | 0 {python => prelude/python}/source_db.bzl | 0 {python => prelude/python}/sourcedb/build.bxl | 0 {python => prelude/python}/sourcedb/classic.bxl | 0 .../python}/sourcedb/code_navigation.bxl | 0 {python => prelude/python}/sourcedb/merge.bxl | 0 {python => prelude/python}/sourcedb/query.bxl | 0 {python => prelude/python}/toolchain.bzl | 0 {python => prelude/python}/tools/BUCK.v2 | 0 {python => prelude/python}/tools/__test_main__.py | 0 {python => prelude/python}/tools/compile.py | 0 .../python}/tools/create_manifest_for_source_dir.py | 0 {python => prelude/python}/tools/embedded_main.cpp | 0 {python => prelude/python}/tools/extract.py | 0 .../python}/tools/fail_with_message.py | 0 .../python}/tools/generate_static_extension_info.py | 0 {python => prelude/python}/tools/make_par/BUCK | 0 .../python}/tools/make_par/__run_lpar_main__.py | 0 .../tools/make_par/_lpar_bootstrap.sh.template | 0 .../python}/tools/make_par/sitecustomize.py | 0 .../python}/tools/make_py_package_inplace.py | 0 .../tools/make_py_package_manifest_module.py | 0 .../python}/tools/make_py_package_modules.py | 0 {python => prelude/python}/tools/make_source_db.py | 0 .../python}/tools/make_source_db_no_deps.py | 0 {python => prelude/python}/tools/parse_imports.py | 0 {python => prelude/python}/tools/py38stdlib.py | 0 {python => prelude/python}/tools/run_inplace.py.in | 0 .../python}/tools/run_inplace_lite.py.in | 0 
.../python}/tools/sourcedb_merger/BUCK.v2 | 0 .../python}/tools/sourcedb_merger/inputs.py | 0 .../python}/tools/sourcedb_merger/legacy_merge.py | 0 .../python}/tools/sourcedb_merger/legacy_outputs.py | 0 .../python}/tools/sourcedb_merger/merge.py | 0 .../python}/tools/sourcedb_merger/outputs.py | 0 .../python}/tools/sourcedb_merger/tests/__init__.py | 0 .../tools/sourcedb_merger/tests/inputs_test.py | 0 .../sourcedb_merger/tests/legacy_output_test.py | 0 .../python}/tools/sourcedb_merger/tests/main.sh | 0 .../tools/sourcedb_merger/tests/outputs_test.py | 0 .../python}/tools/static_extension_finder.py | 0 .../python}/tools/static_extension_utils.cpp | 0 .../python}/tools/traverse_dep_manifest.py | 0 .../python_bootstrap}/python_bootstrap.bzl | 0 .../python_bootstrap}/tools/BUCK.v2 | 0 .../python_bootstrap}/tools/win_python_wrapper.bat | 0 remote_file.bzl => prelude/remote_file.bzl | 0 resources.bzl => prelude/resources.bzl | 0 rules.bzl => prelude/rules.bzl | 0 rules_impl.bzl => prelude/rules_impl.bzl | 0 {rust => prelude/rust}/build.bzl | 0 {rust => prelude/rust}/build_params.bzl | 0 {rust => prelude/rust}/cargo_buildscript.bzl | 0 {rust => prelude/rust}/cargo_package.bzl | 0 {rust => prelude/rust}/context.bzl | 0 {rust => prelude/rust}/extern.bzl | 0 {rust => prelude/rust}/failure_filter.bzl | 0 {rust => prelude/rust}/link_info.bzl | 0 {rust => prelude/rust}/proc_macro_alias.bzl | 0 {rust => prelude/rust}/resources.bzl | 0 {rust => prelude/rust}/rust-analyzer/check.bxl | 0 .../rust}/rust-analyzer/resolve_deps.bxl | 0 {rust => prelude/rust}/rust_binary.bzl | 0 {rust => prelude/rust}/rust_common.bzl | 0 {rust => prelude/rust}/rust_library.bzl | 0 {rust => prelude/rust}/rust_toolchain.bzl | 0 {rust => prelude/rust}/targets.bzl | 0 {rust => prelude/rust}/tools/BUCK.v2 | 0 {rust => prelude/rust}/tools/attrs.bzl | 0 {rust => prelude/rust}/tools/buildscript_run.py | 0 {rust => prelude/rust}/tools/concat.py | 0 .../rust}/tools/failure_filter_action.py | 0 {rust => 
prelude/rust}/tools/rustc_action.py | 0 .../rust}/tools/rustdoc_test_with_resources.py | 0 {rust => prelude/rust}/tools/tool_rules.bzl | 0 .../rust}/tools/transitive_dependency_symlinks.py | 0 {rust => prelude/rust}/with_workspace.bzl | 0 sh_binary.bzl => prelude/sh_binary.bzl | 0 sh_test.bzl => prelude/sh_test.bzl | 0 {test => prelude/test}/inject_test_run_info.bzl | 0 {test => prelude/test}/tools/BUCK.v2 | 0 {test => prelude/test}/tools/inject_test_env.py | 0 test_suite.bzl => prelude/test_suite.bzl | 0 {tests => prelude/tests}/re_utils.bzl | 0 .../tests}/remote_test_execution_toolchain.bzl | 0 {tests => prelude/tests}/tpx_re_legacy.bzl | 0 .../third-party}/hmaptool/BUCK.v2 | 0 .../third-party}/hmaptool/METADATA.bzl | 0 .../third-party}/hmaptool/README.md | 0 .../third-party}/hmaptool/hmaptool | 0 .../apple/xcode_version_checker/.gitignore | 0 .../toolchains}/apple/xcode_version_checker/BUCK.v2 | 0 .../apple/xcode_version_checker/Makefile | 0 .../toolchains}/apple/xcode_version_checker/README | 0 .../apple/xcode_version_checker/defs.bzl | 0 .../xcode_version_checker/src/xcode_exec_tester.m | 0 .../src/xcode_version_checker.m | 0 .../src/xcode_version_checks.h | 0 .../src/xcode_version_checks.m | 0 .../src/xcode_version_tester.m | 0 .../test/Xcode_14.2.0_14C18_fb_version.plist | 0 .../xcode_version_checker/xcode_version_checker | Bin {toolchains => prelude/toolchains}/conan/BUCK.v2 | 0 .../toolchains}/conan/buckler/conanfile.py | 0 .../toolchains}/conan/conan_common.py | 0 .../toolchains}/conan/conan_generate.py | 0 .../toolchains}/conan/conan_init.py | 0 .../toolchains}/conan/conan_lock.py | 0 .../toolchains}/conan/conan_package.py | 0 .../toolchains}/conan/conan_package_extract.py | 0 .../toolchains}/conan/conan_update.py | 0 {toolchains => prelude/toolchains}/conan/defs.bzl | 0 .../toolchains}/conan/lock_generate.py | 0 {toolchains => prelude/toolchains}/csharp.bzl | 0 {toolchains => prelude/toolchains}/cxx.bzl | 0 {toolchains => 
prelude/toolchains}/cxx/zig/BUCK.v2 | 0 {toolchains => prelude/toolchains}/cxx/zig/defs.bzl | 0 .../toolchains}/cxx/zig/releases.bzl | 0 {toolchains => prelude/toolchains}/demo.bzl | 0 .../toolchains}/execution_host.bzl | 0 {toolchains => prelude/toolchains}/genrule.bzl | 0 {toolchains => prelude/toolchains}/go.bzl | 0 {toolchains => prelude/toolchains}/haskell.bzl | 0 {toolchains => prelude/toolchains}/msvc/BUCK.v2 | 0 .../toolchains}/msvc/run_msvc_tool.py | 0 {toolchains => prelude/toolchains}/msvc/tools.bzl | 0 {toolchains => prelude/toolchains}/msvc/vswhere.py | 0 {toolchains => prelude/toolchains}/ocaml.bzl | 0 {toolchains => prelude/toolchains}/python.bzl | 0 .../toolchains}/remote_test_execution.bzl | 0 {toolchains => prelude/toolchains}/rust.bzl | 0 .../transitions}/constraint_overrides.bzl | 0 {user => prelude/user}/all.bzl | 0 {user => prelude/user}/cxx_headers_bundle.bzl | 0 {user => prelude/user}/extract_archive.bzl | 0 {user => prelude/user}/rule_spec.bzl | 0 {user => prelude/user}/write_file.bzl | 0 {utils => prelude/utils}/arglike.bzl | 0 {utils => prelude/utils}/buckconfig.bzl | 0 {utils => prelude/utils}/build_target_pattern.bzl | 0 {utils => prelude/utils}/cmd_script.bzl | 0 {utils => prelude/utils}/dicts.bzl | 0 {utils => prelude/utils}/expect.bzl | 0 {utils => prelude/utils}/graph_utils.bzl | 0 {utils => prelude/utils}/host.bzl | 0 {utils => prelude/utils}/lazy.bzl | 0 {utils => prelude/utils}/pick.bzl | 0 {utils => prelude/utils}/platform_flavors_util.bzl | 0 {utils => prelude/utils}/selects.bzl | 0 {utils => prelude/utils}/set.bzl | 0 {utils => prelude/utils}/strings.bzl | 0 {utils => prelude/utils}/type_defs.bzl | 0 {utils => prelude/utils}/utils.bzl | 0 {windows => prelude/windows}/tools/BUCK.v2 | 0 .../windows}/tools/msvc_hermetic_exec.bat | 0 worker_tool.bzl => prelude/worker_tool.bzl | 0 {zip_file => prelude/zip_file}/tools/BUCK.v2 | 0 {zip_file => prelude/zip_file}/tools/unzip.py | 0 {zip_file => prelude/zip_file}/zip_file.bzl | 0 
.../zip_file}/zip_file_toolchain.bzl | 0 713 files changed, 0 insertions(+), 0 deletions(-) rename .buckconfig => prelude/.buckconfig (100%) rename .gitignore => prelude/.gitignore (100%) rename BUCK => prelude/BUCK (100%) rename CHANGELOG.md => prelude/CHANGELOG.md (100%) rename CODE_OF_CONDUCT.md => prelude/CODE_OF_CONDUCT.md (100%) rename CONTRIBUTING.md => prelude/CONTRIBUTING.md (100%) rename LICENSE-APACHE => prelude/LICENSE-APACHE (100%) rename LICENSE-MIT => prelude/LICENSE-MIT (100%) rename README.md => prelude/README.md (100%) rename {abi => prelude/abi}/BUCK.v2 (100%) rename {abi => prelude/abi}/constraints/BUCK.v2 (100%) rename alias.bzl => prelude/alias.bzl (100%) rename {android => prelude/android}/aapt2_link.bzl (100%) rename {android => prelude/android}/android.bzl (100%) rename {android => prelude/android}/android_aar.bzl (100%) rename {android => prelude/android}/android_apk.bzl (100%) rename {android => prelude/android}/android_binary.bzl (100%) rename {android => prelude/android}/android_binary_native_library_rules.bzl (100%) rename {android => prelude/android}/android_binary_resources_rules.bzl (100%) rename {android => prelude/android}/android_build_config.bzl (100%) rename {android => prelude/android}/android_bundle.bzl (100%) rename {android => prelude/android}/android_instrumentation_apk.bzl (100%) rename {android => prelude/android}/android_instrumentation_test.bzl (100%) rename {android => prelude/android}/android_library.bzl (100%) rename {android => prelude/android}/android_manifest.bzl (100%) rename {android => prelude/android}/android_prebuilt_aar.bzl (100%) rename {android => prelude/android}/android_providers.bzl (100%) rename {android => prelude/android}/android_resource.bzl (100%) rename {android => prelude/android}/android_toolchain.bzl (100%) rename {android => prelude/android}/apk_genrule.bzl (100%) rename {android => prelude/android}/build_only_native_code.bzl (100%) rename {android => prelude/android}/configuration.bzl (100%) 
rename {android => prelude/android}/constraints/BUCK.v2 (100%) rename {android => prelude/android}/cpu_filters.bzl (100%) rename {android => prelude/android}/dex_rules.bzl (100%) rename {android => prelude/android}/exopackage.bzl (100%) rename {android => prelude/android}/gen_aidl.bzl (100%) rename {android => prelude/android}/min_sdk_version.bzl (100%) rename {android => prelude/android}/prebuilt_native_library.bzl (100%) rename {android => prelude/android}/preprocess_java_classes.bzl (100%) rename {android => prelude/android}/proguard.bzl (100%) rename {android => prelude/android}/r_dot_java.bzl (100%) rename {android => prelude/android}/robolectric_test.bzl (100%) rename {android => prelude/android}/tools/BUCK.v2 (100%) rename {android => prelude/android}/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java (100%) rename {android => prelude/android}/tools/combine_native_library_dirs.py (100%) rename {android => prelude/android}/tools/filter_dex.py (100%) rename {android => prelude/android}/tools/filter_extra_resources.py (100%) rename {android => prelude/android}/tools/filter_prebuilt_native_library_dir.py (100%) rename {android => prelude/android}/tools/merge_sequence.py (100%) rename {android => prelude/android}/tools/native_libs_as_assets_metadata.py (100%) rename {android => prelude/android}/tools/unpack_aar.py (100%) rename {android => prelude/android}/util.bzl (100%) rename {android => prelude/android}/voltron.bzl (100%) rename {apple => prelude/apple}/apple_asset_catalog.bzl (100%) rename {apple => prelude/apple}/apple_asset_catalog_compilation_options.bzl (100%) rename {apple => prelude/apple}/apple_asset_catalog_types.bzl (100%) rename {apple => prelude/apple}/apple_binary.bzl (100%) rename {apple => prelude/apple}/apple_buck2_compatibility.bzl (100%) rename {apple => prelude/apple}/apple_bundle.bzl (100%) rename {apple => prelude/apple}/apple_bundle_attrs.bzl (100%) rename {apple => prelude/apple}/apple_bundle_config.bzl (100%) rename {apple 
=> prelude/apple}/apple_bundle_destination.bzl (100%) rename {apple => prelude/apple}/apple_bundle_part.bzl (100%) rename {apple => prelude/apple}/apple_bundle_resources.bzl (100%) rename {apple => prelude/apple}/apple_bundle_types.bzl (100%) rename {apple => prelude/apple}/apple_bundle_utility.bzl (100%) rename {apple => prelude/apple}/apple_code_signing_types.bzl (100%) rename {apple => prelude/apple}/apple_core_data.bzl (100%) rename {apple => prelude/apple}/apple_core_data_types.bzl (100%) rename {apple => prelude/apple}/apple_dsym.bzl (100%) rename {apple => prelude/apple}/apple_dsym_config.bzl (100%) rename {apple => prelude/apple}/apple_entitlements.bzl (100%) rename {apple => prelude/apple}/apple_framework_versions.bzl (100%) rename {apple => prelude/apple}/apple_frameworks.bzl (100%) rename {apple => prelude/apple}/apple_genrule_deps.bzl (100%) rename {apple => prelude/apple}/apple_info_plist.bzl (100%) rename {apple => prelude/apple}/apple_info_plist_substitutions_parsing.bzl (100%) rename {apple => prelude/apple}/apple_library.bzl (100%) rename {apple => prelude/apple}/apple_macro_layer.bzl (100%) rename {apple => prelude/apple}/apple_modular_utility.bzl (100%) rename {apple => prelude/apple}/apple_package.bzl (100%) rename {apple => prelude/apple}/apple_package_config.bzl (100%) rename {apple => prelude/apple}/apple_resource.bzl (100%) rename {apple => prelude/apple}/apple_resource_bundle.bzl (100%) rename {apple => prelude/apple}/apple_resource_types.bzl (100%) rename {apple => prelude/apple}/apple_resource_utility.bzl (100%) rename {apple => prelude/apple}/apple_rules_impl.bzl (100%) rename {apple => prelude/apple}/apple_rules_impl_utility.bzl (100%) rename {apple => prelude/apple}/apple_sdk.bzl (100%) rename {apple => prelude/apple}/apple_sdk_metadata.bzl (100%) rename {apple => prelude/apple}/apple_stripping.bzl (100%) rename {apple => prelude/apple}/apple_swift_stdlib.bzl (100%) rename {apple => prelude/apple}/apple_target_sdk_version.bzl (100%) 
rename {apple => prelude/apple}/apple_test.bzl (100%) rename {apple => prelude/apple}/apple_toolchain.bzl (100%) rename {apple => prelude/apple}/apple_toolchain_types.bzl (100%) rename {apple => prelude/apple}/apple_universal_binaries.bzl (100%) rename {apple => prelude/apple}/apple_universal_executable.bzl (100%) rename {apple => prelude/apple}/apple_utility.bzl (100%) rename {apple => prelude/apple}/debug.bzl (100%) rename {apple => prelude/apple}/modulemap.bzl (100%) rename {apple => prelude/apple}/prebuilt_apple_framework.bzl (100%) rename {apple => prelude/apple}/resource_groups.bzl (100%) rename {apple => prelude/apple}/scene_kit_assets.bzl (100%) rename {apple => prelude/apple}/scene_kit_assets_types.bzl (100%) rename {apple => prelude/apple}/swift/apple_sdk_clang_module.bzl (100%) rename {apple => prelude/apple}/swift/apple_sdk_modules_utility.bzl (100%) rename {apple => prelude/apple}/swift/apple_sdk_swift_module.bzl (100%) rename {apple => prelude/apple}/swift/swift_compilation.bzl (100%) rename {apple => prelude/apple}/swift/swift_debug_info_utils.bzl (100%) rename {apple => prelude/apple}/swift/swift_incremental_support.bzl (100%) rename {apple => prelude/apple}/swift/swift_module_map.bzl (100%) rename {apple => prelude/apple}/swift/swift_pcm_compilation.bzl (100%) rename {apple => prelude/apple}/swift/swift_pcm_compilation_types.bzl (100%) rename {apple => prelude/apple}/swift/swift_runtime.bzl (100%) rename {apple => prelude/apple}/swift/swift_sdk_pcm_compilation.bzl (100%) rename {apple => prelude/apple}/swift/swift_sdk_swiftinterface_compilation.bzl (100%) rename {apple => prelude/apple}/swift/swift_toolchain.bzl (100%) rename {apple => prelude/apple}/swift/swift_toolchain_macro_layer.bzl (100%) rename {apple => prelude/apple}/swift/swift_toolchain_types.bzl (100%) rename {apple => prelude/apple}/swift/swift_types.bzl (100%) rename {apple => prelude/apple}/tools/BUCK.v2 (100%) rename {apple => prelude/apple}/tools/bundling/BUCK.v2 (100%) rename 
{apple => prelude/apple}/tools/bundling/action_metadata.py (100%) rename {apple => prelude/apple}/tools/bundling/action_metadata_test.py (100%) rename {apple => prelude/apple}/tools/bundling/assemble_bundle.py (100%) rename {apple => prelude/apple}/tools/bundling/assemble_bundle_types.py (100%) rename {apple => prelude/apple}/tools/bundling/incremental_state.py (100%) rename {apple => prelude/apple}/tools/bundling/incremental_state_test.py (100%) rename {apple => prelude/apple}/tools/bundling/incremental_utils.py (100%) rename {apple => prelude/apple}/tools/bundling/incremental_utils_test.py (100%) rename {apple => prelude/apple}/tools/bundling/main.py (100%) rename {apple => prelude/apple}/tools/bundling/swift_support.py (100%) rename {apple => prelude/apple}/tools/bundling/test_resources/newer_version_action_metadata.json (100%) rename {apple => prelude/apple}/tools/bundling/test_resources/newer_version_incremental_state.json (100%) rename {apple => prelude/apple}/tools/bundling/test_resources/the.broken_json (100%) rename {apple => prelude/apple}/tools/bundling/test_resources/valid_action_metadata.json (100%) rename {apple => prelude/apple}/tools/bundling/test_resources/valid_incremental_state.json (100%) rename {apple => prelude/apple}/tools/code_signing/BUCK.v2 (100%) rename {apple => prelude/apple}/tools/code_signing/app_id.py (100%) rename {apple => prelude/apple}/tools/code_signing/app_id_test.py (100%) rename {apple => prelude/apple}/tools/code_signing/apple_platform.py (100%) rename {apple => prelude/apple}/tools/code_signing/codesign_bundle.py (100%) rename {apple => prelude/apple}/tools/code_signing/codesign_command_factory.py (100%) rename {apple => prelude/apple}/tools/code_signing/fast_adhoc.py (100%) rename {apple => prelude/apple}/tools/code_signing/identity.py (100%) rename {apple => prelude/apple}/tools/code_signing/identity_test.py (100%) rename {apple => prelude/apple}/tools/code_signing/info_plist_metadata.py (100%) rename {apple => 
prelude/apple}/tools/code_signing/info_plist_metadata_test.py (100%) rename {apple => prelude/apple}/tools/code_signing/list_codesign_identities_command_factory.py (100%) rename {apple => prelude/apple}/tools/code_signing/main.py (100%) rename {apple => prelude/apple}/tools/code_signing/prepare_code_signing_entitlements.py (100%) rename {apple => prelude/apple}/tools/code_signing/prepare_code_signing_entitlements_test.py (100%) rename {apple => prelude/apple}/tools/code_signing/prepare_info_plist.py (100%) rename {apple => prelude/apple}/tools/code_signing/prepare_info_plist_test.py (100%) rename {apple => prelude/apple}/tools/code_signing/provisioning_profile_diagnostics.py (100%) rename {apple => prelude/apple}/tools/code_signing/provisioning_profile_metadata.py (100%) rename {apple => prelude/apple}/tools/code_signing/provisioning_profile_metadata_test.py (100%) rename {apple => prelude/apple}/tools/code_signing/provisioning_profile_selection.py (100%) rename {apple => prelude/apple}/tools/code_signing/provisioning_profile_selection_test.py (100%) rename {apple => prelude/apple}/tools/code_signing/read_provisioning_profile_command_factory.py (100%) rename {apple => prelude/apple}/tools/code_signing/test_resources/Entitlements.plist (100%) rename {apple => prelude/apple}/tools/code_signing/test_resources/qualified_sample.mobileprovision (100%) rename {apple => prelude/apple}/tools/code_signing/test_resources/sample.mobileprovision (100%) rename {apple => prelude/apple}/tools/defs.bzl (100%) rename {apple => prelude/apple}/tools/dry_codesign_tool.py (100%) rename {apple => prelude/apple}/tools/info_plist_processor/BUCK.v2 (100%) rename {apple => prelude/apple}/tools/info_plist_processor/main.py (100%) rename {apple => prelude/apple}/tools/info_plist_processor/preprocess.py (100%) rename {apple => prelude/apple}/tools/info_plist_processor/preprocess_test.py (100%) rename {apple => prelude/apple}/tools/info_plist_processor/process.py (100%) rename {apple => 
prelude/apple}/tools/info_plist_processor/process_test.py (100%) rename {apple => prelude/apple}/tools/ipa_package_maker.py (100%) rename {apple => prelude/apple}/tools/make_modulemap.py (100%) rename {apple => prelude/apple}/tools/make_swift_comp_db.py (100%) rename {apple => prelude/apple}/tools/make_vfsoverlay.py (100%) rename {apple => prelude/apple}/tools/plistlib_utils.py (100%) rename {apple => prelude/apple}/tools/re_compatibility_utils/BUCK (100%) rename {apple => prelude/apple}/tools/re_compatibility_utils/writable.py (100%) rename {apple => prelude/apple}/tools/selective_debugging/BUCK.v2 (100%) rename {apple => prelude/apple}/tools/selective_debugging/macho.py (100%) rename {apple => prelude/apple}/tools/selective_debugging/macho_parser.py (100%) rename {apple => prelude/apple}/tools/selective_debugging/main.py (100%) rename {apple => prelude/apple}/tools/selective_debugging/scrubber.py (100%) rename {apple => prelude/apple}/tools/selective_debugging/scrubber_test.py (100%) rename {apple => prelude/apple}/tools/selective_debugging/spec.py (100%) rename {apple => prelude/apple}/tools/selective_debugging/spec_test.py (100%) rename {apple => prelude/apple}/tools/selective_debugging/test_resources/HelloWorld (100%) rename {apple => prelude/apple}/tools/selective_debugging/test_resources/focused_spec.json (100%) rename {apple => prelude/apple}/tools/selective_debugging/test_resources/focused_targets.json (100%) rename {apple => prelude/apple}/tools/selective_debugging/test_resources/focused_targets_empty.json (100%) rename {apple => prelude/apple}/tools/selective_debugging/utils.py (100%) rename {apple => prelude/apple}/tools/split_arch_combine_dsym_bundles_tool.py (100%) rename {apple => prelude/apple}/tools/swift_exec.sh (100%) rename {apple => prelude/apple}/tools/swift_objc_header_postprocess.py (100%) rename {apple => prelude/apple}/user/apple_resource_bundle.bzl (100%) rename {apple => prelude/apple}/user/apple_resource_transition.bzl (100%) rename 
{apple => prelude/apple}/user/apple_selected_debug_path_file.bzl (100%) rename {apple => prelude/apple}/user/apple_selective_debugging.bzl (100%) rename {apple => prelude/apple}/user/apple_simulators.bzl (100%) rename {apple => prelude/apple}/user/apple_toolchain_override.bzl (100%) rename {apple => prelude/apple}/user/apple_tools.bzl (100%) rename {apple => prelude/apple}/user/apple_watchos_bundle.bzl (100%) rename {apple => prelude/apple}/user/cpu_split_transition.bzl (100%) rename {apple => prelude/apple}/user/resource_group_map.bzl (100%) rename {apple => prelude/apple}/user/watch_transition.bzl (100%) rename {apple => prelude/apple}/xcode.bzl (100%) rename {apple => prelude/apple}/xcode_postbuild_script.bzl (100%) rename {apple => prelude/apple}/xcode_prebuild_script.bzl (100%) rename {apple => prelude/apple}/xctest_swift_support.bzl (100%) rename artifact_tset.bzl => prelude/artifact_tset.bzl (100%) rename artifacts.bzl => prelude/artifacts.bzl (100%) rename asserts.bzl => prelude/asserts.bzl (100%) rename attributes.bzl => prelude/attributes.bzl (100%) rename build_mode.bzl => prelude/build_mode.bzl (100%) rename builtin.bzl => prelude/builtin.bzl (100%) rename cache_mode.bzl => prelude/cache_mode.bzl (100%) rename command_alias.bzl => prelude/command_alias.bzl (100%) rename {configurations => prelude/configurations}/rules.bzl (100%) rename {configurations => prelude/configurations}/util.bzl (100%) rename {cpu => prelude/cpu}/BUCK.v2 (100%) rename {cpu => prelude/cpu}/constraints/BUCK.v2 (100%) rename {csharp => prelude/csharp}/csharp.bzl (100%) rename {csharp => prelude/csharp}/csharp_providers.bzl (100%) rename {csharp => prelude/csharp}/toolchain.bzl (100%) rename {cxx => prelude/cxx}/anon_link.bzl (100%) rename {cxx => prelude/cxx}/archive.bzl (100%) rename {cxx => prelude/cxx}/argsfiles.bzl (100%) rename {cxx => prelude/cxx}/attr_selection.bzl (100%) rename {cxx => prelude/cxx}/bitcode.bzl (100%) rename {cxx => prelude/cxx}/comp_db.bzl (100%) rename 
{cxx => prelude/cxx}/compile.bzl (100%) rename {cxx => prelude/cxx}/compiler.bzl (100%) rename {cxx => prelude/cxx}/cxx.bzl (100%) rename {cxx => prelude/cxx}/cxx_bolt.bzl (100%) rename {cxx => prelude/cxx}/cxx_context.bzl (100%) rename {cxx => prelude/cxx}/cxx_executable.bzl (100%) rename {cxx => prelude/cxx}/cxx_library.bzl (100%) rename {cxx => prelude/cxx}/cxx_library_utility.bzl (100%) rename {cxx => prelude/cxx}/cxx_link_utility.bzl (100%) rename {cxx => prelude/cxx}/cxx_sources.bzl (100%) rename {cxx => prelude/cxx}/cxx_toolchain.bzl (100%) rename {cxx => prelude/cxx}/cxx_toolchain_macro_layer.bzl (100%) rename {cxx => prelude/cxx}/cxx_toolchain_types.bzl (100%) rename {cxx => prelude/cxx}/cxx_types.bzl (100%) rename {cxx => prelude/cxx}/debug.bzl (100%) rename {cxx => prelude/cxx}/dist_lto/README.md (100%) rename {cxx => prelude/cxx}/dist_lto/dist_lto.bzl (100%) rename {cxx => prelude/cxx}/dist_lto/tools.bzl (100%) rename {cxx => prelude/cxx}/dist_lto/tools/BUCK.v2 (100%) rename {cxx => prelude/cxx}/dist_lto/tools/__init__.py (100%) rename {cxx => prelude/cxx}/dist_lto/tools/dist_lto_copy.py (100%) rename {cxx => prelude/cxx}/dist_lto/tools/dist_lto_opt.py (100%) rename {cxx => prelude/cxx}/dist_lto/tools/dist_lto_planner.py (100%) rename {cxx => prelude/cxx}/dist_lto/tools/dist_lto_prepare.py (100%) rename {cxx => prelude/cxx}/dist_lto/tools/tests/test_dist_lto_opt.py (100%) rename {cxx => prelude/cxx}/dwp.bzl (100%) rename {cxx => prelude/cxx}/groups.bzl (100%) rename {cxx => prelude/cxx}/headers.bzl (100%) rename {cxx => prelude/cxx}/link.bzl (100%) rename {cxx => prelude/cxx}/link_groups.bzl (100%) rename {cxx => prelude/cxx}/link_types.bzl (100%) rename {cxx => prelude/cxx}/linker.bzl (100%) rename {cxx => prelude/cxx}/omnibus.bzl (100%) rename {cxx => prelude/cxx}/platform.bzl (100%) rename {cxx => prelude/cxx}/prebuilt_cxx_library_group.bzl (100%) rename {cxx => prelude/cxx}/preprocessor.bzl (100%) rename {cxx => 
prelude/cxx}/shared_library_interface.bzl (100%) rename {cxx => prelude/cxx}/symbols.bzl (100%) rename {cxx => prelude/cxx}/tools/BUCK.v2 (100%) rename {cxx => prelude/cxx}/tools/defs.bzl (100%) rename {cxx => prelude/cxx}/tools/dep_file_processor.py (100%) rename {cxx => prelude/cxx}/tools/dep_file_utils.py (100%) rename {cxx => prelude/cxx}/tools/hmap_wrapper.py (100%) rename {cxx => prelude/cxx}/tools/linker_wrapper.py (100%) rename {cxx => prelude/cxx}/tools/make_comp_db.py (100%) rename {cxx => prelude/cxx}/tools/makefile_to_dep_file.py (100%) rename {cxx => prelude/cxx}/tools/show_headers_to_dep_file.py (100%) rename {cxx => prelude/cxx}/tools/show_includes_to_dep_file.py (100%) rename {cxx => prelude/cxx}/user/cxx_toolchain_override.bzl (100%) rename {cxx => prelude/cxx}/user/link_group_map.bzl (100%) rename {cxx => prelude/cxx}/xcode.bzl (100%) rename {debugging => prelude/debugging}/common.bzl (100%) rename {debugging => prelude/debugging}/ensure_dwp.bzl (100%) rename {debugging => prelude/debugging}/fdb.bxl (100%) rename {debugging => prelude/debugging}/inspect_dbg_exec.bzl (100%) rename {debugging => prelude/debugging}/inspect_default.bzl (100%) rename {debugging => prelude/debugging}/inspect_java.bzl (100%) rename {debugging => prelude/debugging}/labels.bzl (100%) rename {debugging => prelude/debugging}/types.bzl (100%) rename {decls => prelude/decls}/android_common.bzl (100%) rename {decls => prelude/decls}/android_rules.bzl (100%) rename {decls => prelude/decls}/apple_common.bzl (100%) rename {decls => prelude/decls}/common.bzl (100%) rename {decls => prelude/decls}/core_rules.bzl (100%) rename {decls => prelude/decls}/cxx_common.bzl (100%) rename {decls => prelude/decls}/cxx_rules.bzl (100%) rename {decls => prelude/decls}/d_common.bzl (100%) rename {decls => prelude/decls}/d_rules.bzl (100%) rename {decls => prelude/decls}/dotnet_rules.bzl (100%) rename {decls => prelude/decls}/erlang_rules.bzl (100%) rename {decls => 
prelude/decls}/genrule_common.bzl (100%) rename {decls => prelude/decls}/git_rules.bzl (100%) rename {decls => prelude/decls}/go_common.bzl (100%) rename {decls => prelude/decls}/go_rules.bzl (100%) rename {decls => prelude/decls}/groovy_rules.bzl (100%) rename {decls => prelude/decls}/halide_rules.bzl (100%) rename {decls => prelude/decls}/haskell_common.bzl (100%) rename {decls => prelude/decls}/haskell_rules.bzl (100%) rename {decls => prelude/decls}/ios_rules.bzl (100%) rename {decls => prelude/decls}/java_rules.bzl (100%) rename {decls => prelude/decls}/js_rules.bzl (100%) rename {decls => prelude/decls}/jvm_common.bzl (100%) rename {decls => prelude/decls}/kotlin_rules.bzl (100%) rename {decls => prelude/decls}/lua_common.bzl (100%) rename {decls => prelude/decls}/lua_rules.bzl (100%) rename {decls => prelude/decls}/native_common.bzl (100%) rename {decls => prelude/decls}/ocaml_common.bzl (100%) rename {decls => prelude/decls}/ocaml_rules.bzl (100%) rename {decls => prelude/decls}/python_common.bzl (100%) rename {decls => prelude/decls}/python_rules.bzl (100%) rename {decls => prelude/decls}/re_test_common.bzl (100%) rename {decls => prelude/decls}/remote_common.bzl (100%) rename {decls => prelude/decls}/rust_common.bzl (100%) rename {decls => prelude/decls}/rust_rules.bzl (100%) rename {decls => prelude/decls}/scala_rules.bzl (100%) rename {decls => prelude/decls}/shell_rules.bzl (100%) rename {decls => prelude/decls}/toolchains_common.bzl (100%) rename {decls => prelude/decls}/uncategorized_rules.bzl (100%) rename {dist => prelude/dist}/dist_info.bzl (100%) rename {docs => prelude/docs}/rules.bzl (100%) rename {erlang => prelude/erlang}/applications/BUCK.v2 (100%) rename {erlang => prelude/erlang}/common_test/.elp.toml (100%) rename {erlang => prelude/erlang}/common_test/common/BUCK.v2 (100%) rename {erlang => prelude/erlang}/common_test/common/include/artifact_annotations.hrl (100%) rename {erlang => 
prelude/erlang}/common_test/common/include/buck_ct_records.hrl (100%) rename {erlang => prelude/erlang}/common_test/common/include/tpx_records.hrl (100%) rename {erlang => prelude/erlang}/common_test/common/src/artifact_annotations.erl (100%) rename {erlang => prelude/erlang}/common_test/common/src/bounded_buffer.erl (100%) rename {erlang => prelude/erlang}/common_test/common/src/buck_ct_parser.erl (100%) rename {erlang => prelude/erlang}/common_test/common/src/buck_ct_provider.erl (100%) rename {erlang => prelude/erlang}/common_test/common/src/ct_error_printer.erl (100%) rename {erlang => prelude/erlang}/common_test/common/src/execution_logs.erl (100%) rename {erlang => prelude/erlang}/common_test/common/src/io_buffer.erl (100%) rename {erlang => prelude/erlang}/common_test/common/src/test_artifact_directory.erl (100%) rename {erlang => prelude/erlang}/common_test/common/src/test_logger.erl (100%) rename {erlang => prelude/erlang}/common_test/cth_hooks/BUCK.v2 (100%) rename {erlang => prelude/erlang}/common_test/cth_hooks/src/cth_tpx.erl (100%) rename {erlang => prelude/erlang}/common_test/cth_hooks/src/cth_tpx_role.erl (100%) rename {erlang => prelude/erlang}/common_test/cth_hooks/src/cth_tpx_server.erl (100%) rename {erlang => prelude/erlang}/common_test/cth_hooks/src/cth_tpx_test_tree.erl (100%) rename {erlang => prelude/erlang}/common_test/cth_hooks/src/method_ids.hrl (100%) rename {erlang => prelude/erlang}/common_test/test_binary/BUCK.v2 (100%) rename {erlang => prelude/erlang}/common_test/test_binary/src/json_interfacer.erl (100%) rename {erlang => prelude/erlang}/common_test/test_binary/src/junit_interfacer.erl (100%) rename {erlang => prelude/erlang}/common_test/test_binary/src/list_test.erl (100%) rename {erlang => prelude/erlang}/common_test/test_binary/src/listing_interfacer.erl (100%) rename {erlang => prelude/erlang}/common_test/test_binary/src/test_binary.erl (100%) rename {erlang => prelude/erlang}/common_test/test_binary/src/test_runner.erl (100%) 
rename {erlang => prelude/erlang}/common_test/test_cli_lib/BUCK.v2 (100%) rename {erlang => prelude/erlang}/common_test/test_cli_lib/src/test.erl (100%) rename {erlang => prelude/erlang}/common_test/test_exec/BUCK.v2 (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/ct_daemon.erl (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/ct_daemon_core.erl (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/ct_daemon_hooks.erl (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/ct_daemon_logger.erl (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/ct_daemon_node.erl (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/ct_daemon_printer.erl (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/ct_daemon_runner.erl (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/ct_executor.erl (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/ct_runner.erl (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/epmd_manager.erl (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/test_exec.app.src (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/test_exec.erl (100%) rename {erlang => prelude/erlang}/common_test/test_exec/src/test_exec_sup.erl (100%) rename {erlang => prelude/erlang}/erlang.bzl (100%) rename {erlang => prelude/erlang}/erlang_application.bzl (100%) rename {erlang => prelude/erlang}/erlang_application_includes.bzl (100%) rename {erlang => prelude/erlang}/erlang_build.bzl (100%) rename {erlang => prelude/erlang}/erlang_dependencies.bzl (100%) rename {erlang => prelude/erlang}/erlang_escript.bzl (100%) rename {erlang => prelude/erlang}/erlang_info.bzl (100%) rename {erlang => prelude/erlang}/erlang_ls.config (100%) rename {erlang => prelude/erlang}/erlang_otp_application.bzl (100%) rename {erlang => prelude/erlang}/erlang_release.bzl (100%) rename {erlang => prelude/erlang}/erlang_shell.bzl 
(100%) rename {erlang => prelude/erlang}/erlang_tests.bzl (100%) rename {erlang => prelude/erlang}/erlang_toolchain.bzl (100%) rename {erlang => prelude/erlang}/erlang_utils.bzl (100%) rename {erlang => prelude/erlang}/shell/BUCK.v2 (100%) rename {erlang => prelude/erlang}/shell/shell.bxl (100%) rename {erlang => prelude/erlang}/shell/src/shell_buck2_utils.erl (100%) rename {erlang => prelude/erlang}/shell/src/user_default.erl (100%) rename {erlang => prelude/erlang}/toolchain/BUCK.v2 (100%) rename {erlang => prelude/erlang}/toolchain/app_src_builder.escript (100%) rename {erlang => prelude/erlang}/toolchain/boot_script_builder.escript (100%) rename {erlang => prelude/erlang}/toolchain/dependency_analyzer.escript (100%) rename {erlang => prelude/erlang}/toolchain/dependency_finalizer.escript (100%) rename {erlang => prelude/erlang}/toolchain/edoc_cli.escript (100%) rename {erlang => prelude/erlang}/toolchain/edoc_doclet_chunks.erl (100%) rename {erlang => prelude/erlang}/toolchain/edoc_report.erl (100%) rename {erlang => prelude/erlang}/toolchain/erlang_ls.config (100%) rename {erlang => prelude/erlang}/toolchain/erlc_trampoline.sh (100%) rename {erlang => prelude/erlang}/toolchain/escript_builder.escript (100%) rename {erlang => prelude/erlang}/toolchain/include_erts.escript (100%) rename {erlang => prelude/erlang}/toolchain/release_variables_builder.escript (100%) rename {erlang => prelude/erlang}/toolchain/transform_project_root.erl (100%) rename export_exe.bzl => prelude/export_exe.bzl (100%) rename export_file.bzl => prelude/export_file.bzl (100%) rename filegroup.bzl => prelude/filegroup.bzl (100%) rename genrule.bzl => prelude/genrule.bzl (100%) rename genrule_local_labels.bzl => prelude/genrule_local_labels.bzl (100%) rename genrule_toolchain.bzl => prelude/genrule_toolchain.bzl (100%) rename genrule_types.bzl => prelude/genrule_types.bzl (100%) rename {git => prelude/git}/git_fetch.bzl (100%) rename {git => prelude/git}/tools/BUCK.v2 (100%) rename {git => 
prelude/git}/tools/git_fetch.py (100%) rename {go => prelude/go}/cgo_library.bzl (100%) rename {go => prelude/go}/compile.bzl (100%) rename {go => prelude/go}/coverage.bzl (100%) rename {go => prelude/go}/go_binary.bzl (100%) rename {go => prelude/go}/go_exported_library.bzl (100%) rename {go => prelude/go}/go_library.bzl (100%) rename {go => prelude/go}/go_test.bzl (100%) rename {go => prelude/go}/link.bzl (100%) rename {go => prelude/go}/packages.bzl (100%) rename {go => prelude/go}/toolchain.bzl (100%) rename {go => prelude/go}/tools/BUCK.v2 (100%) rename {go => prelude/go}/tools/cgo_wrapper.py (100%) rename {go => prelude/go}/tools/compile_wrapper.py (100%) rename {go => prelude/go}/tools/cover_srcs.py (100%) rename {go => prelude/go}/tools/filter_srcs.py (100%) rename {go => prelude/go}/tools/testmaingen.go (100%) rename {haskell => prelude/haskell}/compile.bzl (100%) rename {haskell => prelude/haskell}/haskell.bzl (100%) rename {haskell => prelude/haskell}/haskell_ghci.bzl (100%) rename {haskell => prelude/haskell}/haskell_haddock.bzl (100%) rename {haskell => prelude/haskell}/haskell_ide.bzl (100%) rename {haskell => prelude/haskell}/ide/README.md (100%) rename {haskell => prelude/haskell}/ide/hie.yaml (100%) rename {haskell => prelude/haskell}/ide/ide.bxl (100%) rename {haskell => prelude/haskell}/link_info.bzl (100%) rename {haskell => prelude/haskell}/toolchain.bzl (100%) rename {haskell => prelude/haskell}/tools/BUCK.v2 (100%) rename {haskell => prelude/haskell}/tools/script_template_processor.py (100%) rename {haskell => prelude/haskell}/util.bzl (100%) rename {http_archive => prelude/http_archive}/exec_deps.bzl (100%) rename {http_archive => prelude/http_archive}/http_archive.bzl (100%) rename {http_archive => prelude/http_archive}/tools/BUCK.v2 (100%) rename {http_archive => prelude/http_archive}/tools/create_exclusion_list.py (100%) rename http_file.bzl => prelude/http_file.bzl (100%) rename {ide_integrations => prelude/ide_integrations}/xcode.bzl 
(100%) rename is_buck2.bzl => prelude/is_buck2.bzl (100%) rename is_buck2_internal.bzl => prelude/is_buck2_internal.bzl (100%) rename is_full_meta_repo.bzl => prelude/is_full_meta_repo.bzl (100%) rename {java => prelude/java}/class_to_srcs.bzl (100%) rename {java => prelude/java}/dex.bzl (100%) rename {java => prelude/java}/dex_toolchain.bzl (100%) rename {java => prelude/java}/gwt_binary.bzl (100%) rename {java => prelude/java}/jar_genrule.bzl (100%) rename {java => prelude/java}/java.bzl (100%) rename {java => prelude/java}/java_binary.bzl (100%) rename {java => prelude/java}/java_library.bzl (100%) rename {java => prelude/java}/java_providers.bzl (100%) rename {java => prelude/java}/java_resources.bzl (100%) rename {java => prelude/java}/java_test.bzl (100%) rename {java => prelude/java}/java_toolchain.bzl (100%) rename {java => prelude/java}/javacd_jar_creator.bzl (100%) rename {java => prelude/java}/keystore.bzl (100%) rename {java => prelude/java}/plugins/java_annotation_processor.bzl (100%) rename {java => prelude/java}/plugins/java_plugin.bzl (100%) rename {java => prelude/java}/prebuilt_jar.bzl (100%) rename {java => prelude/java}/tools/BUCK.v2 (100%) rename {java => prelude/java}/tools/compile_and_package.py (100%) rename {java => prelude/java}/tools/fat_jar.py (100%) rename {java => prelude/java}/tools/gen_class_to_source_map.py (100%) rename {java => prelude/java}/tools/list_class_names.py (100%) rename {java => prelude/java}/tools/merge_class_to_source_maps.py (100%) rename {java => prelude/java}/tools/utils.py (100%) rename {java => prelude/java}/utils/java_more_utils.bzl (100%) rename {java => prelude/java}/utils/java_utils.bzl (100%) rename {js => prelude/js}/js.bzl (100%) rename {js => prelude/js}/js_bundle.bzl (100%) rename {js => prelude/js}/js_bundle_genrule.bzl (100%) rename {js => prelude/js}/js_library.bzl (100%) rename {js => prelude/js}/js_providers.bzl (100%) rename {js => prelude/js}/js_utils.bzl (100%) rename {julia => 
prelude/julia}/julia.bzl (100%) rename {julia => prelude/julia}/julia_binary.bzl (100%) rename {julia => prelude/julia}/julia_info.bzl (100%) rename {julia => prelude/julia}/julia_library.bzl (100%) rename {julia => prelude/julia}/julia_test.bzl (100%) rename {julia => prelude/julia}/julia_toolchain.bzl (100%) rename {julia => prelude/julia}/tools/BUCK.v2 (100%) rename {julia => prelude/julia}/tools/parse_julia_cmd.py (100%) rename {jvm => prelude/jvm}/cd_jar_creator_util.bzl (100%) rename {jvm => prelude/jvm}/nullsafe.bzl (100%) rename {kotlin => prelude/kotlin}/kotlin.bzl (100%) rename {kotlin => prelude/kotlin}/kotlin_library.bzl (100%) rename {kotlin => prelude/kotlin}/kotlin_test.bzl (100%) rename {kotlin => prelude/kotlin}/kotlin_toolchain.bzl (100%) rename {kotlin => prelude/kotlin}/kotlin_utils.bzl (100%) rename {kotlin => prelude/kotlin}/kotlincd_jar_creator.bzl (100%) rename {kotlin => prelude/kotlin}/tools/compile_kotlin/BUCK.v2 (100%) rename {kotlin => prelude/kotlin}/tools/compile_kotlin/compile_kotlin.py (100%) rename {kotlin => prelude/kotlin}/tools/defs.bzl (100%) rename {kotlin => prelude/kotlin}/tools/kapt_base64_encoder/BUCK.v2 (100%) rename {kotlin => prelude/kotlin}/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java (100%) rename {linking => prelude/linking}/execution_preference.bzl (100%) rename {linking => prelude/linking}/link_groups.bzl (100%) rename {linking => prelude/linking}/link_info.bzl (100%) rename {linking => prelude/linking}/linkable_graph.bzl (100%) rename {linking => prelude/linking}/linkables.bzl (100%) rename {linking => prelude/linking}/lto.bzl (100%) rename {linking => prelude/linking}/shared_libraries.bzl (100%) rename {linking => prelude/linking}/strip.bzl (100%) rename local_only.bzl => prelude/local_only.bzl (100%) rename {lua => prelude/lua}/cxx_lua_extension.bzl (100%) rename {lua => prelude/lua}/lua_binary.bzl (100%) rename {lua => prelude/lua}/lua_library.bzl (100%) rename native.bzl => 
prelude/native.bzl (100%) rename {ocaml => prelude/ocaml}/attrs.bzl (100%) rename {ocaml => prelude/ocaml}/makefile.bzl (100%) rename {ocaml => prelude/ocaml}/ocaml.bzl (100%) rename {ocaml => prelude/ocaml}/ocaml_toolchain_types.bzl (100%) rename {os => prelude/os}/BUCK.v2 (100%) rename {os => prelude/os}/constraints/BUCK.v2 (100%) rename {os_lookup => prelude/os_lookup}/defs.bzl (100%) rename {os_lookup => prelude/os_lookup}/targets/BUCK.v2 (100%) rename paths.bzl => prelude/paths.bzl (100%) rename {platforms => prelude/platforms}/BUCK (100%) rename {platforms => prelude/platforms}/BUCK.v2 (100%) rename {platforms => prelude/platforms}/apple/arch.bzl (100%) rename {platforms => prelude/platforms}/apple/sdk.bzl (100%) rename {platforms => prelude/platforms}/defs.bzl (100%) rename {playground => prelude/playground}/test.bxl (100%) rename prelude.bzl => prelude/prelude.bzl (100%) rename pull_request_template.md => prelude/pull_request_template.md (100%) rename {python => prelude/python}/compile.bzl (100%) rename {python => prelude/python}/cxx_python_extension.bzl (100%) rename {python => prelude/python}/interface.bzl (100%) rename {python => prelude/python}/make_py_package.bzl (100%) rename {python => prelude/python}/manifest.bzl (100%) rename {python => prelude/python}/native_python_util.bzl (100%) rename {python => prelude/python}/needed_coverage.bzl (100%) rename {python => prelude/python}/prebuilt_python_library.bzl (100%) rename {python => prelude/python}/python.bzl (100%) rename {python => prelude/python}/python_binary.bzl (100%) rename {python => prelude/python}/python_library.bzl (100%) rename {python => prelude/python}/python_needed_coverage_test.bzl (100%) rename {python => prelude/python}/python_test.bzl (100%) rename {python => prelude/python}/runtime/BUCK.v2 (100%) rename {python => prelude/python}/runtime/__par__/bootstrap.py (100%) rename {python => prelude/python}/source_db.bzl (100%) rename {python => prelude/python}/sourcedb/build.bxl (100%) rename 
{python => prelude/python}/sourcedb/classic.bxl (100%) rename {python => prelude/python}/sourcedb/code_navigation.bxl (100%) rename {python => prelude/python}/sourcedb/merge.bxl (100%) rename {python => prelude/python}/sourcedb/query.bxl (100%) rename {python => prelude/python}/toolchain.bzl (100%) rename {python => prelude/python}/tools/BUCK.v2 (100%) rename {python => prelude/python}/tools/__test_main__.py (100%) rename {python => prelude/python}/tools/compile.py (100%) rename {python => prelude/python}/tools/create_manifest_for_source_dir.py (100%) rename {python => prelude/python}/tools/embedded_main.cpp (100%) rename {python => prelude/python}/tools/extract.py (100%) rename {python => prelude/python}/tools/fail_with_message.py (100%) rename {python => prelude/python}/tools/generate_static_extension_info.py (100%) rename {python => prelude/python}/tools/make_par/BUCK (100%) rename {python => prelude/python}/tools/make_par/__run_lpar_main__.py (100%) rename {python => prelude/python}/tools/make_par/_lpar_bootstrap.sh.template (100%) rename {python => prelude/python}/tools/make_par/sitecustomize.py (100%) rename {python => prelude/python}/tools/make_py_package_inplace.py (100%) rename {python => prelude/python}/tools/make_py_package_manifest_module.py (100%) rename {python => prelude/python}/tools/make_py_package_modules.py (100%) rename {python => prelude/python}/tools/make_source_db.py (100%) rename {python => prelude/python}/tools/make_source_db_no_deps.py (100%) rename {python => prelude/python}/tools/parse_imports.py (100%) rename {python => prelude/python}/tools/py38stdlib.py (100%) rename {python => prelude/python}/tools/run_inplace.py.in (100%) rename {python => prelude/python}/tools/run_inplace_lite.py.in (100%) rename {python => prelude/python}/tools/sourcedb_merger/BUCK.v2 (100%) rename {python => prelude/python}/tools/sourcedb_merger/inputs.py (100%) rename {python => prelude/python}/tools/sourcedb_merger/legacy_merge.py (100%) rename {python => 
prelude/python}/tools/sourcedb_merger/legacy_outputs.py (100%) rename {python => prelude/python}/tools/sourcedb_merger/merge.py (100%) rename {python => prelude/python}/tools/sourcedb_merger/outputs.py (100%) rename {python => prelude/python}/tools/sourcedb_merger/tests/__init__.py (100%) rename {python => prelude/python}/tools/sourcedb_merger/tests/inputs_test.py (100%) rename {python => prelude/python}/tools/sourcedb_merger/tests/legacy_output_test.py (100%) rename {python => prelude/python}/tools/sourcedb_merger/tests/main.sh (100%) rename {python => prelude/python}/tools/sourcedb_merger/tests/outputs_test.py (100%) rename {python => prelude/python}/tools/static_extension_finder.py (100%) rename {python => prelude/python}/tools/static_extension_utils.cpp (100%) rename {python => prelude/python}/tools/traverse_dep_manifest.py (100%) rename {python_bootstrap => prelude/python_bootstrap}/python_bootstrap.bzl (100%) rename {python_bootstrap => prelude/python_bootstrap}/tools/BUCK.v2 (100%) rename {python_bootstrap => prelude/python_bootstrap}/tools/win_python_wrapper.bat (100%) rename remote_file.bzl => prelude/remote_file.bzl (100%) rename resources.bzl => prelude/resources.bzl (100%) rename rules.bzl => prelude/rules.bzl (100%) rename rules_impl.bzl => prelude/rules_impl.bzl (100%) rename {rust => prelude/rust}/build.bzl (100%) rename {rust => prelude/rust}/build_params.bzl (100%) rename {rust => prelude/rust}/cargo_buildscript.bzl (100%) rename {rust => prelude/rust}/cargo_package.bzl (100%) rename {rust => prelude/rust}/context.bzl (100%) rename {rust => prelude/rust}/extern.bzl (100%) rename {rust => prelude/rust}/failure_filter.bzl (100%) rename {rust => prelude/rust}/link_info.bzl (100%) rename {rust => prelude/rust}/proc_macro_alias.bzl (100%) rename {rust => prelude/rust}/resources.bzl (100%) rename {rust => prelude/rust}/rust-analyzer/check.bxl (100%) rename {rust => prelude/rust}/rust-analyzer/resolve_deps.bxl (100%) rename {rust => 
prelude/rust}/rust_binary.bzl (100%) rename {rust => prelude/rust}/rust_common.bzl (100%) rename {rust => prelude/rust}/rust_library.bzl (100%) rename {rust => prelude/rust}/rust_toolchain.bzl (100%) rename {rust => prelude/rust}/targets.bzl (100%) rename {rust => prelude/rust}/tools/BUCK.v2 (100%) rename {rust => prelude/rust}/tools/attrs.bzl (100%) rename {rust => prelude/rust}/tools/buildscript_run.py (100%) rename {rust => prelude/rust}/tools/concat.py (100%) rename {rust => prelude/rust}/tools/failure_filter_action.py (100%) rename {rust => prelude/rust}/tools/rustc_action.py (100%) rename {rust => prelude/rust}/tools/rustdoc_test_with_resources.py (100%) rename {rust => prelude/rust}/tools/tool_rules.bzl (100%) rename {rust => prelude/rust}/tools/transitive_dependency_symlinks.py (100%) rename {rust => prelude/rust}/with_workspace.bzl (100%) rename sh_binary.bzl => prelude/sh_binary.bzl (100%) rename sh_test.bzl => prelude/sh_test.bzl (100%) rename {test => prelude/test}/inject_test_run_info.bzl (100%) rename {test => prelude/test}/tools/BUCK.v2 (100%) rename {test => prelude/test}/tools/inject_test_env.py (100%) rename test_suite.bzl => prelude/test_suite.bzl (100%) rename {tests => prelude/tests}/re_utils.bzl (100%) rename {tests => prelude/tests}/remote_test_execution_toolchain.bzl (100%) rename {tests => prelude/tests}/tpx_re_legacy.bzl (100%) rename {third-party => prelude/third-party}/hmaptool/BUCK.v2 (100%) rename {third-party => prelude/third-party}/hmaptool/METADATA.bzl (100%) rename {third-party => prelude/third-party}/hmaptool/README.md (100%) rename {third-party => prelude/third-party}/hmaptool/hmaptool (100%) rename {toolchains => prelude/toolchains}/apple/xcode_version_checker/.gitignore (100%) rename {toolchains => prelude/toolchains}/apple/xcode_version_checker/BUCK.v2 (100%) rename {toolchains => prelude/toolchains}/apple/xcode_version_checker/Makefile (100%) rename {toolchains => prelude/toolchains}/apple/xcode_version_checker/README (100%) 
rename {toolchains => prelude/toolchains}/apple/xcode_version_checker/defs.bzl (100%) rename {toolchains => prelude/toolchains}/apple/xcode_version_checker/src/xcode_exec_tester.m (100%) rename {toolchains => prelude/toolchains}/apple/xcode_version_checker/src/xcode_version_checker.m (100%) rename {toolchains => prelude/toolchains}/apple/xcode_version_checker/src/xcode_version_checks.h (100%) rename {toolchains => prelude/toolchains}/apple/xcode_version_checker/src/xcode_version_checks.m (100%) rename {toolchains => prelude/toolchains}/apple/xcode_version_checker/src/xcode_version_tester.m (100%) rename {toolchains => prelude/toolchains}/apple/xcode_version_checker/test/Xcode_14.2.0_14C18_fb_version.plist (100%) rename {toolchains => prelude/toolchains}/apple/xcode_version_checker/xcode_version_checker (100%) rename {toolchains => prelude/toolchains}/conan/BUCK.v2 (100%) rename {toolchains => prelude/toolchains}/conan/buckler/conanfile.py (100%) rename {toolchains => prelude/toolchains}/conan/conan_common.py (100%) rename {toolchains => prelude/toolchains}/conan/conan_generate.py (100%) rename {toolchains => prelude/toolchains}/conan/conan_init.py (100%) rename {toolchains => prelude/toolchains}/conan/conan_lock.py (100%) rename {toolchains => prelude/toolchains}/conan/conan_package.py (100%) rename {toolchains => prelude/toolchains}/conan/conan_package_extract.py (100%) rename {toolchains => prelude/toolchains}/conan/conan_update.py (100%) rename {toolchains => prelude/toolchains}/conan/defs.bzl (100%) rename {toolchains => prelude/toolchains}/conan/lock_generate.py (100%) rename {toolchains => prelude/toolchains}/csharp.bzl (100%) rename {toolchains => prelude/toolchains}/cxx.bzl (100%) rename {toolchains => prelude/toolchains}/cxx/zig/BUCK.v2 (100%) rename {toolchains => prelude/toolchains}/cxx/zig/defs.bzl (100%) rename {toolchains => prelude/toolchains}/cxx/zig/releases.bzl (100%) rename {toolchains => prelude/toolchains}/demo.bzl (100%) rename {toolchains => 
prelude/toolchains}/execution_host.bzl (100%) rename {toolchains => prelude/toolchains}/genrule.bzl (100%) rename {toolchains => prelude/toolchains}/go.bzl (100%) rename {toolchains => prelude/toolchains}/haskell.bzl (100%) rename {toolchains => prelude/toolchains}/msvc/BUCK.v2 (100%) rename {toolchains => prelude/toolchains}/msvc/run_msvc_tool.py (100%) rename {toolchains => prelude/toolchains}/msvc/tools.bzl (100%) rename {toolchains => prelude/toolchains}/msvc/vswhere.py (100%) rename {toolchains => prelude/toolchains}/ocaml.bzl (100%) rename {toolchains => prelude/toolchains}/python.bzl (100%) rename {toolchains => prelude/toolchains}/remote_test_execution.bzl (100%) rename {toolchains => prelude/toolchains}/rust.bzl (100%) rename {transitions => prelude/transitions}/constraint_overrides.bzl (100%) rename {user => prelude/user}/all.bzl (100%) rename {user => prelude/user}/cxx_headers_bundle.bzl (100%) rename {user => prelude/user}/extract_archive.bzl (100%) rename {user => prelude/user}/rule_spec.bzl (100%) rename {user => prelude/user}/write_file.bzl (100%) rename {utils => prelude/utils}/arglike.bzl (100%) rename {utils => prelude/utils}/buckconfig.bzl (100%) rename {utils => prelude/utils}/build_target_pattern.bzl (100%) rename {utils => prelude/utils}/cmd_script.bzl (100%) rename {utils => prelude/utils}/dicts.bzl (100%) rename {utils => prelude/utils}/expect.bzl (100%) rename {utils => prelude/utils}/graph_utils.bzl (100%) rename {utils => prelude/utils}/host.bzl (100%) rename {utils => prelude/utils}/lazy.bzl (100%) rename {utils => prelude/utils}/pick.bzl (100%) rename {utils => prelude/utils}/platform_flavors_util.bzl (100%) rename {utils => prelude/utils}/selects.bzl (100%) rename {utils => prelude/utils}/set.bzl (100%) rename {utils => prelude/utils}/strings.bzl (100%) rename {utils => prelude/utils}/type_defs.bzl (100%) rename {utils => prelude/utils}/utils.bzl (100%) rename {windows => prelude/windows}/tools/BUCK.v2 (100%) rename {windows => 
prelude/windows}/tools/msvc_hermetic_exec.bat (100%) rename worker_tool.bzl => prelude/worker_tool.bzl (100%) rename {zip_file => prelude/zip_file}/tools/BUCK.v2 (100%) rename {zip_file => prelude/zip_file}/tools/unzip.py (100%) rename {zip_file => prelude/zip_file}/zip_file.bzl (100%) rename {zip_file => prelude/zip_file}/zip_file_toolchain.bzl (100%) diff --git a/.buckconfig b/prelude/.buckconfig similarity index 100% rename from .buckconfig rename to prelude/.buckconfig diff --git a/.gitignore b/prelude/.gitignore similarity index 100% rename from .gitignore rename to prelude/.gitignore diff --git a/BUCK b/prelude/BUCK similarity index 100% rename from BUCK rename to prelude/BUCK diff --git a/CHANGELOG.md b/prelude/CHANGELOG.md similarity index 100% rename from CHANGELOG.md rename to prelude/CHANGELOG.md diff --git a/CODE_OF_CONDUCT.md b/prelude/CODE_OF_CONDUCT.md similarity index 100% rename from CODE_OF_CONDUCT.md rename to prelude/CODE_OF_CONDUCT.md diff --git a/CONTRIBUTING.md b/prelude/CONTRIBUTING.md similarity index 100% rename from CONTRIBUTING.md rename to prelude/CONTRIBUTING.md diff --git a/LICENSE-APACHE b/prelude/LICENSE-APACHE similarity index 100% rename from LICENSE-APACHE rename to prelude/LICENSE-APACHE diff --git a/LICENSE-MIT b/prelude/LICENSE-MIT similarity index 100% rename from LICENSE-MIT rename to prelude/LICENSE-MIT diff --git a/README.md b/prelude/README.md similarity index 100% rename from README.md rename to prelude/README.md diff --git a/abi/BUCK.v2 b/prelude/abi/BUCK.v2 similarity index 100% rename from abi/BUCK.v2 rename to prelude/abi/BUCK.v2 diff --git a/abi/constraints/BUCK.v2 b/prelude/abi/constraints/BUCK.v2 similarity index 100% rename from abi/constraints/BUCK.v2 rename to prelude/abi/constraints/BUCK.v2 diff --git a/alias.bzl b/prelude/alias.bzl similarity index 100% rename from alias.bzl rename to prelude/alias.bzl diff --git a/android/aapt2_link.bzl b/prelude/android/aapt2_link.bzl similarity index 100% rename from 
android/aapt2_link.bzl rename to prelude/android/aapt2_link.bzl diff --git a/android/android.bzl b/prelude/android/android.bzl similarity index 100% rename from android/android.bzl rename to prelude/android/android.bzl diff --git a/android/android_aar.bzl b/prelude/android/android_aar.bzl similarity index 100% rename from android/android_aar.bzl rename to prelude/android/android_aar.bzl diff --git a/android/android_apk.bzl b/prelude/android/android_apk.bzl similarity index 100% rename from android/android_apk.bzl rename to prelude/android/android_apk.bzl diff --git a/android/android_binary.bzl b/prelude/android/android_binary.bzl similarity index 100% rename from android/android_binary.bzl rename to prelude/android/android_binary.bzl diff --git a/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl similarity index 100% rename from android/android_binary_native_library_rules.bzl rename to prelude/android/android_binary_native_library_rules.bzl diff --git a/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl similarity index 100% rename from android/android_binary_resources_rules.bzl rename to prelude/android/android_binary_resources_rules.bzl diff --git a/android/android_build_config.bzl b/prelude/android/android_build_config.bzl similarity index 100% rename from android/android_build_config.bzl rename to prelude/android/android_build_config.bzl diff --git a/android/android_bundle.bzl b/prelude/android/android_bundle.bzl similarity index 100% rename from android/android_bundle.bzl rename to prelude/android/android_bundle.bzl diff --git a/android/android_instrumentation_apk.bzl b/prelude/android/android_instrumentation_apk.bzl similarity index 100% rename from android/android_instrumentation_apk.bzl rename to prelude/android/android_instrumentation_apk.bzl diff --git a/android/android_instrumentation_test.bzl b/prelude/android/android_instrumentation_test.bzl similarity 
index 100% rename from android/android_instrumentation_test.bzl rename to prelude/android/android_instrumentation_test.bzl diff --git a/android/android_library.bzl b/prelude/android/android_library.bzl similarity index 100% rename from android/android_library.bzl rename to prelude/android/android_library.bzl diff --git a/android/android_manifest.bzl b/prelude/android/android_manifest.bzl similarity index 100% rename from android/android_manifest.bzl rename to prelude/android/android_manifest.bzl diff --git a/android/android_prebuilt_aar.bzl b/prelude/android/android_prebuilt_aar.bzl similarity index 100% rename from android/android_prebuilt_aar.bzl rename to prelude/android/android_prebuilt_aar.bzl diff --git a/android/android_providers.bzl b/prelude/android/android_providers.bzl similarity index 100% rename from android/android_providers.bzl rename to prelude/android/android_providers.bzl diff --git a/android/android_resource.bzl b/prelude/android/android_resource.bzl similarity index 100% rename from android/android_resource.bzl rename to prelude/android/android_resource.bzl diff --git a/android/android_toolchain.bzl b/prelude/android/android_toolchain.bzl similarity index 100% rename from android/android_toolchain.bzl rename to prelude/android/android_toolchain.bzl diff --git a/android/apk_genrule.bzl b/prelude/android/apk_genrule.bzl similarity index 100% rename from android/apk_genrule.bzl rename to prelude/android/apk_genrule.bzl diff --git a/android/build_only_native_code.bzl b/prelude/android/build_only_native_code.bzl similarity index 100% rename from android/build_only_native_code.bzl rename to prelude/android/build_only_native_code.bzl diff --git a/android/configuration.bzl b/prelude/android/configuration.bzl similarity index 100% rename from android/configuration.bzl rename to prelude/android/configuration.bzl diff --git a/android/constraints/BUCK.v2 b/prelude/android/constraints/BUCK.v2 similarity index 100% rename from android/constraints/BUCK.v2 
rename to prelude/android/constraints/BUCK.v2 diff --git a/android/cpu_filters.bzl b/prelude/android/cpu_filters.bzl similarity index 100% rename from android/cpu_filters.bzl rename to prelude/android/cpu_filters.bzl diff --git a/android/dex_rules.bzl b/prelude/android/dex_rules.bzl similarity index 100% rename from android/dex_rules.bzl rename to prelude/android/dex_rules.bzl diff --git a/android/exopackage.bzl b/prelude/android/exopackage.bzl similarity index 100% rename from android/exopackage.bzl rename to prelude/android/exopackage.bzl diff --git a/android/gen_aidl.bzl b/prelude/android/gen_aidl.bzl similarity index 100% rename from android/gen_aidl.bzl rename to prelude/android/gen_aidl.bzl diff --git a/android/min_sdk_version.bzl b/prelude/android/min_sdk_version.bzl similarity index 100% rename from android/min_sdk_version.bzl rename to prelude/android/min_sdk_version.bzl diff --git a/android/prebuilt_native_library.bzl b/prelude/android/prebuilt_native_library.bzl similarity index 100% rename from android/prebuilt_native_library.bzl rename to prelude/android/prebuilt_native_library.bzl diff --git a/android/preprocess_java_classes.bzl b/prelude/android/preprocess_java_classes.bzl similarity index 100% rename from android/preprocess_java_classes.bzl rename to prelude/android/preprocess_java_classes.bzl diff --git a/android/proguard.bzl b/prelude/android/proguard.bzl similarity index 100% rename from android/proguard.bzl rename to prelude/android/proguard.bzl diff --git a/android/r_dot_java.bzl b/prelude/android/r_dot_java.bzl similarity index 100% rename from android/r_dot_java.bzl rename to prelude/android/r_dot_java.bzl diff --git a/android/robolectric_test.bzl b/prelude/android/robolectric_test.bzl similarity index 100% rename from android/robolectric_test.bzl rename to prelude/android/robolectric_test.bzl diff --git a/android/tools/BUCK.v2 b/prelude/android/tools/BUCK.v2 similarity index 100% rename from android/tools/BUCK.v2 rename to 
prelude/android/tools/BUCK.v2 diff --git a/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java b/prelude/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java similarity index 100% rename from android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java rename to prelude/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java diff --git a/android/tools/combine_native_library_dirs.py b/prelude/android/tools/combine_native_library_dirs.py similarity index 100% rename from android/tools/combine_native_library_dirs.py rename to prelude/android/tools/combine_native_library_dirs.py diff --git a/android/tools/filter_dex.py b/prelude/android/tools/filter_dex.py similarity index 100% rename from android/tools/filter_dex.py rename to prelude/android/tools/filter_dex.py diff --git a/android/tools/filter_extra_resources.py b/prelude/android/tools/filter_extra_resources.py similarity index 100% rename from android/tools/filter_extra_resources.py rename to prelude/android/tools/filter_extra_resources.py diff --git a/android/tools/filter_prebuilt_native_library_dir.py b/prelude/android/tools/filter_prebuilt_native_library_dir.py similarity index 100% rename from android/tools/filter_prebuilt_native_library_dir.py rename to prelude/android/tools/filter_prebuilt_native_library_dir.py diff --git a/android/tools/merge_sequence.py b/prelude/android/tools/merge_sequence.py similarity index 100% rename from android/tools/merge_sequence.py rename to prelude/android/tools/merge_sequence.py diff --git a/android/tools/native_libs_as_assets_metadata.py b/prelude/android/tools/native_libs_as_assets_metadata.py similarity index 100% rename from android/tools/native_libs_as_assets_metadata.py rename to prelude/android/tools/native_libs_as_assets_metadata.py diff --git a/android/tools/unpack_aar.py b/prelude/android/tools/unpack_aar.py similarity index 100% rename from android/tools/unpack_aar.py rename to 
prelude/android/tools/unpack_aar.py diff --git a/android/util.bzl b/prelude/android/util.bzl similarity index 100% rename from android/util.bzl rename to prelude/android/util.bzl diff --git a/android/voltron.bzl b/prelude/android/voltron.bzl similarity index 100% rename from android/voltron.bzl rename to prelude/android/voltron.bzl diff --git a/apple/apple_asset_catalog.bzl b/prelude/apple/apple_asset_catalog.bzl similarity index 100% rename from apple/apple_asset_catalog.bzl rename to prelude/apple/apple_asset_catalog.bzl diff --git a/apple/apple_asset_catalog_compilation_options.bzl b/prelude/apple/apple_asset_catalog_compilation_options.bzl similarity index 100% rename from apple/apple_asset_catalog_compilation_options.bzl rename to prelude/apple/apple_asset_catalog_compilation_options.bzl diff --git a/apple/apple_asset_catalog_types.bzl b/prelude/apple/apple_asset_catalog_types.bzl similarity index 100% rename from apple/apple_asset_catalog_types.bzl rename to prelude/apple/apple_asset_catalog_types.bzl diff --git a/apple/apple_binary.bzl b/prelude/apple/apple_binary.bzl similarity index 100% rename from apple/apple_binary.bzl rename to prelude/apple/apple_binary.bzl diff --git a/apple/apple_buck2_compatibility.bzl b/prelude/apple/apple_buck2_compatibility.bzl similarity index 100% rename from apple/apple_buck2_compatibility.bzl rename to prelude/apple/apple_buck2_compatibility.bzl diff --git a/apple/apple_bundle.bzl b/prelude/apple/apple_bundle.bzl similarity index 100% rename from apple/apple_bundle.bzl rename to prelude/apple/apple_bundle.bzl diff --git a/apple/apple_bundle_attrs.bzl b/prelude/apple/apple_bundle_attrs.bzl similarity index 100% rename from apple/apple_bundle_attrs.bzl rename to prelude/apple/apple_bundle_attrs.bzl diff --git a/apple/apple_bundle_config.bzl b/prelude/apple/apple_bundle_config.bzl similarity index 100% rename from apple/apple_bundle_config.bzl rename to prelude/apple/apple_bundle_config.bzl diff --git 
a/apple/apple_bundle_destination.bzl b/prelude/apple/apple_bundle_destination.bzl similarity index 100% rename from apple/apple_bundle_destination.bzl rename to prelude/apple/apple_bundle_destination.bzl diff --git a/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl similarity index 100% rename from apple/apple_bundle_part.bzl rename to prelude/apple/apple_bundle_part.bzl diff --git a/apple/apple_bundle_resources.bzl b/prelude/apple/apple_bundle_resources.bzl similarity index 100% rename from apple/apple_bundle_resources.bzl rename to prelude/apple/apple_bundle_resources.bzl diff --git a/apple/apple_bundle_types.bzl b/prelude/apple/apple_bundle_types.bzl similarity index 100% rename from apple/apple_bundle_types.bzl rename to prelude/apple/apple_bundle_types.bzl diff --git a/apple/apple_bundle_utility.bzl b/prelude/apple/apple_bundle_utility.bzl similarity index 100% rename from apple/apple_bundle_utility.bzl rename to prelude/apple/apple_bundle_utility.bzl diff --git a/apple/apple_code_signing_types.bzl b/prelude/apple/apple_code_signing_types.bzl similarity index 100% rename from apple/apple_code_signing_types.bzl rename to prelude/apple/apple_code_signing_types.bzl diff --git a/apple/apple_core_data.bzl b/prelude/apple/apple_core_data.bzl similarity index 100% rename from apple/apple_core_data.bzl rename to prelude/apple/apple_core_data.bzl diff --git a/apple/apple_core_data_types.bzl b/prelude/apple/apple_core_data_types.bzl similarity index 100% rename from apple/apple_core_data_types.bzl rename to prelude/apple/apple_core_data_types.bzl diff --git a/apple/apple_dsym.bzl b/prelude/apple/apple_dsym.bzl similarity index 100% rename from apple/apple_dsym.bzl rename to prelude/apple/apple_dsym.bzl diff --git a/apple/apple_dsym_config.bzl b/prelude/apple/apple_dsym_config.bzl similarity index 100% rename from apple/apple_dsym_config.bzl rename to prelude/apple/apple_dsym_config.bzl diff --git a/apple/apple_entitlements.bzl 
b/prelude/apple/apple_entitlements.bzl similarity index 100% rename from apple/apple_entitlements.bzl rename to prelude/apple/apple_entitlements.bzl diff --git a/apple/apple_framework_versions.bzl b/prelude/apple/apple_framework_versions.bzl similarity index 100% rename from apple/apple_framework_versions.bzl rename to prelude/apple/apple_framework_versions.bzl diff --git a/apple/apple_frameworks.bzl b/prelude/apple/apple_frameworks.bzl similarity index 100% rename from apple/apple_frameworks.bzl rename to prelude/apple/apple_frameworks.bzl diff --git a/apple/apple_genrule_deps.bzl b/prelude/apple/apple_genrule_deps.bzl similarity index 100% rename from apple/apple_genrule_deps.bzl rename to prelude/apple/apple_genrule_deps.bzl diff --git a/apple/apple_info_plist.bzl b/prelude/apple/apple_info_plist.bzl similarity index 100% rename from apple/apple_info_plist.bzl rename to prelude/apple/apple_info_plist.bzl diff --git a/apple/apple_info_plist_substitutions_parsing.bzl b/prelude/apple/apple_info_plist_substitutions_parsing.bzl similarity index 100% rename from apple/apple_info_plist_substitutions_parsing.bzl rename to prelude/apple/apple_info_plist_substitutions_parsing.bzl diff --git a/apple/apple_library.bzl b/prelude/apple/apple_library.bzl similarity index 100% rename from apple/apple_library.bzl rename to prelude/apple/apple_library.bzl diff --git a/apple/apple_macro_layer.bzl b/prelude/apple/apple_macro_layer.bzl similarity index 100% rename from apple/apple_macro_layer.bzl rename to prelude/apple/apple_macro_layer.bzl diff --git a/apple/apple_modular_utility.bzl b/prelude/apple/apple_modular_utility.bzl similarity index 100% rename from apple/apple_modular_utility.bzl rename to prelude/apple/apple_modular_utility.bzl diff --git a/apple/apple_package.bzl b/prelude/apple/apple_package.bzl similarity index 100% rename from apple/apple_package.bzl rename to prelude/apple/apple_package.bzl diff --git a/apple/apple_package_config.bzl 
b/prelude/apple/apple_package_config.bzl similarity index 100% rename from apple/apple_package_config.bzl rename to prelude/apple/apple_package_config.bzl diff --git a/apple/apple_resource.bzl b/prelude/apple/apple_resource.bzl similarity index 100% rename from apple/apple_resource.bzl rename to prelude/apple/apple_resource.bzl diff --git a/apple/apple_resource_bundle.bzl b/prelude/apple/apple_resource_bundle.bzl similarity index 100% rename from apple/apple_resource_bundle.bzl rename to prelude/apple/apple_resource_bundle.bzl diff --git a/apple/apple_resource_types.bzl b/prelude/apple/apple_resource_types.bzl similarity index 100% rename from apple/apple_resource_types.bzl rename to prelude/apple/apple_resource_types.bzl diff --git a/apple/apple_resource_utility.bzl b/prelude/apple/apple_resource_utility.bzl similarity index 100% rename from apple/apple_resource_utility.bzl rename to prelude/apple/apple_resource_utility.bzl diff --git a/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl similarity index 100% rename from apple/apple_rules_impl.bzl rename to prelude/apple/apple_rules_impl.bzl diff --git a/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl similarity index 100% rename from apple/apple_rules_impl_utility.bzl rename to prelude/apple/apple_rules_impl_utility.bzl diff --git a/apple/apple_sdk.bzl b/prelude/apple/apple_sdk.bzl similarity index 100% rename from apple/apple_sdk.bzl rename to prelude/apple/apple_sdk.bzl diff --git a/apple/apple_sdk_metadata.bzl b/prelude/apple/apple_sdk_metadata.bzl similarity index 100% rename from apple/apple_sdk_metadata.bzl rename to prelude/apple/apple_sdk_metadata.bzl diff --git a/apple/apple_stripping.bzl b/prelude/apple/apple_stripping.bzl similarity index 100% rename from apple/apple_stripping.bzl rename to prelude/apple/apple_stripping.bzl diff --git a/apple/apple_swift_stdlib.bzl b/prelude/apple/apple_swift_stdlib.bzl similarity index 100% rename from 
apple/apple_swift_stdlib.bzl rename to prelude/apple/apple_swift_stdlib.bzl diff --git a/apple/apple_target_sdk_version.bzl b/prelude/apple/apple_target_sdk_version.bzl similarity index 100% rename from apple/apple_target_sdk_version.bzl rename to prelude/apple/apple_target_sdk_version.bzl diff --git a/apple/apple_test.bzl b/prelude/apple/apple_test.bzl similarity index 100% rename from apple/apple_test.bzl rename to prelude/apple/apple_test.bzl diff --git a/apple/apple_toolchain.bzl b/prelude/apple/apple_toolchain.bzl similarity index 100% rename from apple/apple_toolchain.bzl rename to prelude/apple/apple_toolchain.bzl diff --git a/apple/apple_toolchain_types.bzl b/prelude/apple/apple_toolchain_types.bzl similarity index 100% rename from apple/apple_toolchain_types.bzl rename to prelude/apple/apple_toolchain_types.bzl diff --git a/apple/apple_universal_binaries.bzl b/prelude/apple/apple_universal_binaries.bzl similarity index 100% rename from apple/apple_universal_binaries.bzl rename to prelude/apple/apple_universal_binaries.bzl diff --git a/apple/apple_universal_executable.bzl b/prelude/apple/apple_universal_executable.bzl similarity index 100% rename from apple/apple_universal_executable.bzl rename to prelude/apple/apple_universal_executable.bzl diff --git a/apple/apple_utility.bzl b/prelude/apple/apple_utility.bzl similarity index 100% rename from apple/apple_utility.bzl rename to prelude/apple/apple_utility.bzl diff --git a/apple/debug.bzl b/prelude/apple/debug.bzl similarity index 100% rename from apple/debug.bzl rename to prelude/apple/debug.bzl diff --git a/apple/modulemap.bzl b/prelude/apple/modulemap.bzl similarity index 100% rename from apple/modulemap.bzl rename to prelude/apple/modulemap.bzl diff --git a/apple/prebuilt_apple_framework.bzl b/prelude/apple/prebuilt_apple_framework.bzl similarity index 100% rename from apple/prebuilt_apple_framework.bzl rename to prelude/apple/prebuilt_apple_framework.bzl diff --git a/apple/resource_groups.bzl 
b/prelude/apple/resource_groups.bzl similarity index 100% rename from apple/resource_groups.bzl rename to prelude/apple/resource_groups.bzl diff --git a/apple/scene_kit_assets.bzl b/prelude/apple/scene_kit_assets.bzl similarity index 100% rename from apple/scene_kit_assets.bzl rename to prelude/apple/scene_kit_assets.bzl diff --git a/apple/scene_kit_assets_types.bzl b/prelude/apple/scene_kit_assets_types.bzl similarity index 100% rename from apple/scene_kit_assets_types.bzl rename to prelude/apple/scene_kit_assets_types.bzl diff --git a/apple/swift/apple_sdk_clang_module.bzl b/prelude/apple/swift/apple_sdk_clang_module.bzl similarity index 100% rename from apple/swift/apple_sdk_clang_module.bzl rename to prelude/apple/swift/apple_sdk_clang_module.bzl diff --git a/apple/swift/apple_sdk_modules_utility.bzl b/prelude/apple/swift/apple_sdk_modules_utility.bzl similarity index 100% rename from apple/swift/apple_sdk_modules_utility.bzl rename to prelude/apple/swift/apple_sdk_modules_utility.bzl diff --git a/apple/swift/apple_sdk_swift_module.bzl b/prelude/apple/swift/apple_sdk_swift_module.bzl similarity index 100% rename from apple/swift/apple_sdk_swift_module.bzl rename to prelude/apple/swift/apple_sdk_swift_module.bzl diff --git a/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl similarity index 100% rename from apple/swift/swift_compilation.bzl rename to prelude/apple/swift/swift_compilation.bzl diff --git a/apple/swift/swift_debug_info_utils.bzl b/prelude/apple/swift/swift_debug_info_utils.bzl similarity index 100% rename from apple/swift/swift_debug_info_utils.bzl rename to prelude/apple/swift/swift_debug_info_utils.bzl diff --git a/apple/swift/swift_incremental_support.bzl b/prelude/apple/swift/swift_incremental_support.bzl similarity index 100% rename from apple/swift/swift_incremental_support.bzl rename to prelude/apple/swift/swift_incremental_support.bzl diff --git a/apple/swift/swift_module_map.bzl 
b/prelude/apple/swift/swift_module_map.bzl similarity index 100% rename from apple/swift/swift_module_map.bzl rename to prelude/apple/swift/swift_module_map.bzl diff --git a/apple/swift/swift_pcm_compilation.bzl b/prelude/apple/swift/swift_pcm_compilation.bzl similarity index 100% rename from apple/swift/swift_pcm_compilation.bzl rename to prelude/apple/swift/swift_pcm_compilation.bzl diff --git a/apple/swift/swift_pcm_compilation_types.bzl b/prelude/apple/swift/swift_pcm_compilation_types.bzl similarity index 100% rename from apple/swift/swift_pcm_compilation_types.bzl rename to prelude/apple/swift/swift_pcm_compilation_types.bzl diff --git a/apple/swift/swift_runtime.bzl b/prelude/apple/swift/swift_runtime.bzl similarity index 100% rename from apple/swift/swift_runtime.bzl rename to prelude/apple/swift/swift_runtime.bzl diff --git a/apple/swift/swift_sdk_pcm_compilation.bzl b/prelude/apple/swift/swift_sdk_pcm_compilation.bzl similarity index 100% rename from apple/swift/swift_sdk_pcm_compilation.bzl rename to prelude/apple/swift/swift_sdk_pcm_compilation.bzl diff --git a/apple/swift/swift_sdk_swiftinterface_compilation.bzl b/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl similarity index 100% rename from apple/swift/swift_sdk_swiftinterface_compilation.bzl rename to prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl diff --git a/apple/swift/swift_toolchain.bzl b/prelude/apple/swift/swift_toolchain.bzl similarity index 100% rename from apple/swift/swift_toolchain.bzl rename to prelude/apple/swift/swift_toolchain.bzl diff --git a/apple/swift/swift_toolchain_macro_layer.bzl b/prelude/apple/swift/swift_toolchain_macro_layer.bzl similarity index 100% rename from apple/swift/swift_toolchain_macro_layer.bzl rename to prelude/apple/swift/swift_toolchain_macro_layer.bzl diff --git a/apple/swift/swift_toolchain_types.bzl b/prelude/apple/swift/swift_toolchain_types.bzl similarity index 100% rename from apple/swift/swift_toolchain_types.bzl rename to 
prelude/apple/swift/swift_toolchain_types.bzl diff --git a/apple/swift/swift_types.bzl b/prelude/apple/swift/swift_types.bzl similarity index 100% rename from apple/swift/swift_types.bzl rename to prelude/apple/swift/swift_types.bzl diff --git a/apple/tools/BUCK.v2 b/prelude/apple/tools/BUCK.v2 similarity index 100% rename from apple/tools/BUCK.v2 rename to prelude/apple/tools/BUCK.v2 diff --git a/apple/tools/bundling/BUCK.v2 b/prelude/apple/tools/bundling/BUCK.v2 similarity index 100% rename from apple/tools/bundling/BUCK.v2 rename to prelude/apple/tools/bundling/BUCK.v2 diff --git a/apple/tools/bundling/action_metadata.py b/prelude/apple/tools/bundling/action_metadata.py similarity index 100% rename from apple/tools/bundling/action_metadata.py rename to prelude/apple/tools/bundling/action_metadata.py diff --git a/apple/tools/bundling/action_metadata_test.py b/prelude/apple/tools/bundling/action_metadata_test.py similarity index 100% rename from apple/tools/bundling/action_metadata_test.py rename to prelude/apple/tools/bundling/action_metadata_test.py diff --git a/apple/tools/bundling/assemble_bundle.py b/prelude/apple/tools/bundling/assemble_bundle.py similarity index 100% rename from apple/tools/bundling/assemble_bundle.py rename to prelude/apple/tools/bundling/assemble_bundle.py diff --git a/apple/tools/bundling/assemble_bundle_types.py b/prelude/apple/tools/bundling/assemble_bundle_types.py similarity index 100% rename from apple/tools/bundling/assemble_bundle_types.py rename to prelude/apple/tools/bundling/assemble_bundle_types.py diff --git a/apple/tools/bundling/incremental_state.py b/prelude/apple/tools/bundling/incremental_state.py similarity index 100% rename from apple/tools/bundling/incremental_state.py rename to prelude/apple/tools/bundling/incremental_state.py diff --git a/apple/tools/bundling/incremental_state_test.py b/prelude/apple/tools/bundling/incremental_state_test.py similarity index 100% rename from 
apple/tools/bundling/incremental_state_test.py rename to prelude/apple/tools/bundling/incremental_state_test.py diff --git a/apple/tools/bundling/incremental_utils.py b/prelude/apple/tools/bundling/incremental_utils.py similarity index 100% rename from apple/tools/bundling/incremental_utils.py rename to prelude/apple/tools/bundling/incremental_utils.py diff --git a/apple/tools/bundling/incremental_utils_test.py b/prelude/apple/tools/bundling/incremental_utils_test.py similarity index 100% rename from apple/tools/bundling/incremental_utils_test.py rename to prelude/apple/tools/bundling/incremental_utils_test.py diff --git a/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py similarity index 100% rename from apple/tools/bundling/main.py rename to prelude/apple/tools/bundling/main.py diff --git a/apple/tools/bundling/swift_support.py b/prelude/apple/tools/bundling/swift_support.py similarity index 100% rename from apple/tools/bundling/swift_support.py rename to prelude/apple/tools/bundling/swift_support.py diff --git a/apple/tools/bundling/test_resources/newer_version_action_metadata.json b/prelude/apple/tools/bundling/test_resources/newer_version_action_metadata.json similarity index 100% rename from apple/tools/bundling/test_resources/newer_version_action_metadata.json rename to prelude/apple/tools/bundling/test_resources/newer_version_action_metadata.json diff --git a/apple/tools/bundling/test_resources/newer_version_incremental_state.json b/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json similarity index 100% rename from apple/tools/bundling/test_resources/newer_version_incremental_state.json rename to prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json diff --git a/apple/tools/bundling/test_resources/the.broken_json b/prelude/apple/tools/bundling/test_resources/the.broken_json similarity index 100% rename from apple/tools/bundling/test_resources/the.broken_json rename to 
prelude/apple/tools/bundling/test_resources/the.broken_json diff --git a/apple/tools/bundling/test_resources/valid_action_metadata.json b/prelude/apple/tools/bundling/test_resources/valid_action_metadata.json similarity index 100% rename from apple/tools/bundling/test_resources/valid_action_metadata.json rename to prelude/apple/tools/bundling/test_resources/valid_action_metadata.json diff --git a/apple/tools/bundling/test_resources/valid_incremental_state.json b/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json similarity index 100% rename from apple/tools/bundling/test_resources/valid_incremental_state.json rename to prelude/apple/tools/bundling/test_resources/valid_incremental_state.json diff --git a/apple/tools/code_signing/BUCK.v2 b/prelude/apple/tools/code_signing/BUCK.v2 similarity index 100% rename from apple/tools/code_signing/BUCK.v2 rename to prelude/apple/tools/code_signing/BUCK.v2 diff --git a/apple/tools/code_signing/app_id.py b/prelude/apple/tools/code_signing/app_id.py similarity index 100% rename from apple/tools/code_signing/app_id.py rename to prelude/apple/tools/code_signing/app_id.py diff --git a/apple/tools/code_signing/app_id_test.py b/prelude/apple/tools/code_signing/app_id_test.py similarity index 100% rename from apple/tools/code_signing/app_id_test.py rename to prelude/apple/tools/code_signing/app_id_test.py diff --git a/apple/tools/code_signing/apple_platform.py b/prelude/apple/tools/code_signing/apple_platform.py similarity index 100% rename from apple/tools/code_signing/apple_platform.py rename to prelude/apple/tools/code_signing/apple_platform.py diff --git a/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py similarity index 100% rename from apple/tools/code_signing/codesign_bundle.py rename to prelude/apple/tools/code_signing/codesign_bundle.py diff --git a/apple/tools/code_signing/codesign_command_factory.py 
b/prelude/apple/tools/code_signing/codesign_command_factory.py similarity index 100% rename from apple/tools/code_signing/codesign_command_factory.py rename to prelude/apple/tools/code_signing/codesign_command_factory.py diff --git a/apple/tools/code_signing/fast_adhoc.py b/prelude/apple/tools/code_signing/fast_adhoc.py similarity index 100% rename from apple/tools/code_signing/fast_adhoc.py rename to prelude/apple/tools/code_signing/fast_adhoc.py diff --git a/apple/tools/code_signing/identity.py b/prelude/apple/tools/code_signing/identity.py similarity index 100% rename from apple/tools/code_signing/identity.py rename to prelude/apple/tools/code_signing/identity.py diff --git a/apple/tools/code_signing/identity_test.py b/prelude/apple/tools/code_signing/identity_test.py similarity index 100% rename from apple/tools/code_signing/identity_test.py rename to prelude/apple/tools/code_signing/identity_test.py diff --git a/apple/tools/code_signing/info_plist_metadata.py b/prelude/apple/tools/code_signing/info_plist_metadata.py similarity index 100% rename from apple/tools/code_signing/info_plist_metadata.py rename to prelude/apple/tools/code_signing/info_plist_metadata.py diff --git a/apple/tools/code_signing/info_plist_metadata_test.py b/prelude/apple/tools/code_signing/info_plist_metadata_test.py similarity index 100% rename from apple/tools/code_signing/info_plist_metadata_test.py rename to prelude/apple/tools/code_signing/info_plist_metadata_test.py diff --git a/apple/tools/code_signing/list_codesign_identities_command_factory.py b/prelude/apple/tools/code_signing/list_codesign_identities_command_factory.py similarity index 100% rename from apple/tools/code_signing/list_codesign_identities_command_factory.py rename to prelude/apple/tools/code_signing/list_codesign_identities_command_factory.py diff --git a/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py similarity index 100% rename from apple/tools/code_signing/main.py rename to 
prelude/apple/tools/code_signing/main.py diff --git a/apple/tools/code_signing/prepare_code_signing_entitlements.py b/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py similarity index 100% rename from apple/tools/code_signing/prepare_code_signing_entitlements.py rename to prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py diff --git a/apple/tools/code_signing/prepare_code_signing_entitlements_test.py b/prelude/apple/tools/code_signing/prepare_code_signing_entitlements_test.py similarity index 100% rename from apple/tools/code_signing/prepare_code_signing_entitlements_test.py rename to prelude/apple/tools/code_signing/prepare_code_signing_entitlements_test.py diff --git a/apple/tools/code_signing/prepare_info_plist.py b/prelude/apple/tools/code_signing/prepare_info_plist.py similarity index 100% rename from apple/tools/code_signing/prepare_info_plist.py rename to prelude/apple/tools/code_signing/prepare_info_plist.py diff --git a/apple/tools/code_signing/prepare_info_plist_test.py b/prelude/apple/tools/code_signing/prepare_info_plist_test.py similarity index 100% rename from apple/tools/code_signing/prepare_info_plist_test.py rename to prelude/apple/tools/code_signing/prepare_info_plist_test.py diff --git a/apple/tools/code_signing/provisioning_profile_diagnostics.py b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py similarity index 100% rename from apple/tools/code_signing/provisioning_profile_diagnostics.py rename to prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py diff --git a/apple/tools/code_signing/provisioning_profile_metadata.py b/prelude/apple/tools/code_signing/provisioning_profile_metadata.py similarity index 100% rename from apple/tools/code_signing/provisioning_profile_metadata.py rename to prelude/apple/tools/code_signing/provisioning_profile_metadata.py diff --git a/apple/tools/code_signing/provisioning_profile_metadata_test.py 
b/prelude/apple/tools/code_signing/provisioning_profile_metadata_test.py similarity index 100% rename from apple/tools/code_signing/provisioning_profile_metadata_test.py rename to prelude/apple/tools/code_signing/provisioning_profile_metadata_test.py diff --git a/apple/tools/code_signing/provisioning_profile_selection.py b/prelude/apple/tools/code_signing/provisioning_profile_selection.py similarity index 100% rename from apple/tools/code_signing/provisioning_profile_selection.py rename to prelude/apple/tools/code_signing/provisioning_profile_selection.py diff --git a/apple/tools/code_signing/provisioning_profile_selection_test.py b/prelude/apple/tools/code_signing/provisioning_profile_selection_test.py similarity index 100% rename from apple/tools/code_signing/provisioning_profile_selection_test.py rename to prelude/apple/tools/code_signing/provisioning_profile_selection_test.py diff --git a/apple/tools/code_signing/read_provisioning_profile_command_factory.py b/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py similarity index 100% rename from apple/tools/code_signing/read_provisioning_profile_command_factory.py rename to prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py diff --git a/apple/tools/code_signing/test_resources/Entitlements.plist b/prelude/apple/tools/code_signing/test_resources/Entitlements.plist similarity index 100% rename from apple/tools/code_signing/test_resources/Entitlements.plist rename to prelude/apple/tools/code_signing/test_resources/Entitlements.plist diff --git a/apple/tools/code_signing/test_resources/qualified_sample.mobileprovision b/prelude/apple/tools/code_signing/test_resources/qualified_sample.mobileprovision similarity index 100% rename from apple/tools/code_signing/test_resources/qualified_sample.mobileprovision rename to prelude/apple/tools/code_signing/test_resources/qualified_sample.mobileprovision diff --git 
a/apple/tools/code_signing/test_resources/sample.mobileprovision b/prelude/apple/tools/code_signing/test_resources/sample.mobileprovision similarity index 100% rename from apple/tools/code_signing/test_resources/sample.mobileprovision rename to prelude/apple/tools/code_signing/test_resources/sample.mobileprovision diff --git a/apple/tools/defs.bzl b/prelude/apple/tools/defs.bzl similarity index 100% rename from apple/tools/defs.bzl rename to prelude/apple/tools/defs.bzl diff --git a/apple/tools/dry_codesign_tool.py b/prelude/apple/tools/dry_codesign_tool.py similarity index 100% rename from apple/tools/dry_codesign_tool.py rename to prelude/apple/tools/dry_codesign_tool.py diff --git a/apple/tools/info_plist_processor/BUCK.v2 b/prelude/apple/tools/info_plist_processor/BUCK.v2 similarity index 100% rename from apple/tools/info_plist_processor/BUCK.v2 rename to prelude/apple/tools/info_plist_processor/BUCK.v2 diff --git a/apple/tools/info_plist_processor/main.py b/prelude/apple/tools/info_plist_processor/main.py similarity index 100% rename from apple/tools/info_plist_processor/main.py rename to prelude/apple/tools/info_plist_processor/main.py diff --git a/apple/tools/info_plist_processor/preprocess.py b/prelude/apple/tools/info_plist_processor/preprocess.py similarity index 100% rename from apple/tools/info_plist_processor/preprocess.py rename to prelude/apple/tools/info_plist_processor/preprocess.py diff --git a/apple/tools/info_plist_processor/preprocess_test.py b/prelude/apple/tools/info_plist_processor/preprocess_test.py similarity index 100% rename from apple/tools/info_plist_processor/preprocess_test.py rename to prelude/apple/tools/info_plist_processor/preprocess_test.py diff --git a/apple/tools/info_plist_processor/process.py b/prelude/apple/tools/info_plist_processor/process.py similarity index 100% rename from apple/tools/info_plist_processor/process.py rename to prelude/apple/tools/info_plist_processor/process.py diff --git 
a/apple/tools/info_plist_processor/process_test.py b/prelude/apple/tools/info_plist_processor/process_test.py similarity index 100% rename from apple/tools/info_plist_processor/process_test.py rename to prelude/apple/tools/info_plist_processor/process_test.py diff --git a/apple/tools/ipa_package_maker.py b/prelude/apple/tools/ipa_package_maker.py similarity index 100% rename from apple/tools/ipa_package_maker.py rename to prelude/apple/tools/ipa_package_maker.py diff --git a/apple/tools/make_modulemap.py b/prelude/apple/tools/make_modulemap.py similarity index 100% rename from apple/tools/make_modulemap.py rename to prelude/apple/tools/make_modulemap.py diff --git a/apple/tools/make_swift_comp_db.py b/prelude/apple/tools/make_swift_comp_db.py similarity index 100% rename from apple/tools/make_swift_comp_db.py rename to prelude/apple/tools/make_swift_comp_db.py diff --git a/apple/tools/make_vfsoverlay.py b/prelude/apple/tools/make_vfsoverlay.py similarity index 100% rename from apple/tools/make_vfsoverlay.py rename to prelude/apple/tools/make_vfsoverlay.py diff --git a/apple/tools/plistlib_utils.py b/prelude/apple/tools/plistlib_utils.py similarity index 100% rename from apple/tools/plistlib_utils.py rename to prelude/apple/tools/plistlib_utils.py diff --git a/apple/tools/re_compatibility_utils/BUCK b/prelude/apple/tools/re_compatibility_utils/BUCK similarity index 100% rename from apple/tools/re_compatibility_utils/BUCK rename to prelude/apple/tools/re_compatibility_utils/BUCK diff --git a/apple/tools/re_compatibility_utils/writable.py b/prelude/apple/tools/re_compatibility_utils/writable.py similarity index 100% rename from apple/tools/re_compatibility_utils/writable.py rename to prelude/apple/tools/re_compatibility_utils/writable.py diff --git a/apple/tools/selective_debugging/BUCK.v2 b/prelude/apple/tools/selective_debugging/BUCK.v2 similarity index 100% rename from apple/tools/selective_debugging/BUCK.v2 rename to prelude/apple/tools/selective_debugging/BUCK.v2 
diff --git a/apple/tools/selective_debugging/macho.py b/prelude/apple/tools/selective_debugging/macho.py similarity index 100% rename from apple/tools/selective_debugging/macho.py rename to prelude/apple/tools/selective_debugging/macho.py diff --git a/apple/tools/selective_debugging/macho_parser.py b/prelude/apple/tools/selective_debugging/macho_parser.py similarity index 100% rename from apple/tools/selective_debugging/macho_parser.py rename to prelude/apple/tools/selective_debugging/macho_parser.py diff --git a/apple/tools/selective_debugging/main.py b/prelude/apple/tools/selective_debugging/main.py similarity index 100% rename from apple/tools/selective_debugging/main.py rename to prelude/apple/tools/selective_debugging/main.py diff --git a/apple/tools/selective_debugging/scrubber.py b/prelude/apple/tools/selective_debugging/scrubber.py similarity index 100% rename from apple/tools/selective_debugging/scrubber.py rename to prelude/apple/tools/selective_debugging/scrubber.py diff --git a/apple/tools/selective_debugging/scrubber_test.py b/prelude/apple/tools/selective_debugging/scrubber_test.py similarity index 100% rename from apple/tools/selective_debugging/scrubber_test.py rename to prelude/apple/tools/selective_debugging/scrubber_test.py diff --git a/apple/tools/selective_debugging/spec.py b/prelude/apple/tools/selective_debugging/spec.py similarity index 100% rename from apple/tools/selective_debugging/spec.py rename to prelude/apple/tools/selective_debugging/spec.py diff --git a/apple/tools/selective_debugging/spec_test.py b/prelude/apple/tools/selective_debugging/spec_test.py similarity index 100% rename from apple/tools/selective_debugging/spec_test.py rename to prelude/apple/tools/selective_debugging/spec_test.py diff --git a/apple/tools/selective_debugging/test_resources/HelloWorld b/prelude/apple/tools/selective_debugging/test_resources/HelloWorld similarity index 100% rename from apple/tools/selective_debugging/test_resources/HelloWorld rename to 
prelude/apple/tools/selective_debugging/test_resources/HelloWorld diff --git a/apple/tools/selective_debugging/test_resources/focused_spec.json b/prelude/apple/tools/selective_debugging/test_resources/focused_spec.json similarity index 100% rename from apple/tools/selective_debugging/test_resources/focused_spec.json rename to prelude/apple/tools/selective_debugging/test_resources/focused_spec.json diff --git a/apple/tools/selective_debugging/test_resources/focused_targets.json b/prelude/apple/tools/selective_debugging/test_resources/focused_targets.json similarity index 100% rename from apple/tools/selective_debugging/test_resources/focused_targets.json rename to prelude/apple/tools/selective_debugging/test_resources/focused_targets.json diff --git a/apple/tools/selective_debugging/test_resources/focused_targets_empty.json b/prelude/apple/tools/selective_debugging/test_resources/focused_targets_empty.json similarity index 100% rename from apple/tools/selective_debugging/test_resources/focused_targets_empty.json rename to prelude/apple/tools/selective_debugging/test_resources/focused_targets_empty.json diff --git a/apple/tools/selective_debugging/utils.py b/prelude/apple/tools/selective_debugging/utils.py similarity index 100% rename from apple/tools/selective_debugging/utils.py rename to prelude/apple/tools/selective_debugging/utils.py diff --git a/apple/tools/split_arch_combine_dsym_bundles_tool.py b/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py similarity index 100% rename from apple/tools/split_arch_combine_dsym_bundles_tool.py rename to prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py diff --git a/apple/tools/swift_exec.sh b/prelude/apple/tools/swift_exec.sh similarity index 100% rename from apple/tools/swift_exec.sh rename to prelude/apple/tools/swift_exec.sh diff --git a/apple/tools/swift_objc_header_postprocess.py b/prelude/apple/tools/swift_objc_header_postprocess.py similarity index 100% rename from 
apple/tools/swift_objc_header_postprocess.py rename to prelude/apple/tools/swift_objc_header_postprocess.py diff --git a/apple/user/apple_resource_bundle.bzl b/prelude/apple/user/apple_resource_bundle.bzl similarity index 100% rename from apple/user/apple_resource_bundle.bzl rename to prelude/apple/user/apple_resource_bundle.bzl diff --git a/apple/user/apple_resource_transition.bzl b/prelude/apple/user/apple_resource_transition.bzl similarity index 100% rename from apple/user/apple_resource_transition.bzl rename to prelude/apple/user/apple_resource_transition.bzl diff --git a/apple/user/apple_selected_debug_path_file.bzl b/prelude/apple/user/apple_selected_debug_path_file.bzl similarity index 100% rename from apple/user/apple_selected_debug_path_file.bzl rename to prelude/apple/user/apple_selected_debug_path_file.bzl diff --git a/apple/user/apple_selective_debugging.bzl b/prelude/apple/user/apple_selective_debugging.bzl similarity index 100% rename from apple/user/apple_selective_debugging.bzl rename to prelude/apple/user/apple_selective_debugging.bzl diff --git a/apple/user/apple_simulators.bzl b/prelude/apple/user/apple_simulators.bzl similarity index 100% rename from apple/user/apple_simulators.bzl rename to prelude/apple/user/apple_simulators.bzl diff --git a/apple/user/apple_toolchain_override.bzl b/prelude/apple/user/apple_toolchain_override.bzl similarity index 100% rename from apple/user/apple_toolchain_override.bzl rename to prelude/apple/user/apple_toolchain_override.bzl diff --git a/apple/user/apple_tools.bzl b/prelude/apple/user/apple_tools.bzl similarity index 100% rename from apple/user/apple_tools.bzl rename to prelude/apple/user/apple_tools.bzl diff --git a/apple/user/apple_watchos_bundle.bzl b/prelude/apple/user/apple_watchos_bundle.bzl similarity index 100% rename from apple/user/apple_watchos_bundle.bzl rename to prelude/apple/user/apple_watchos_bundle.bzl diff --git a/apple/user/cpu_split_transition.bzl 
b/prelude/apple/user/cpu_split_transition.bzl similarity index 100% rename from apple/user/cpu_split_transition.bzl rename to prelude/apple/user/cpu_split_transition.bzl diff --git a/apple/user/resource_group_map.bzl b/prelude/apple/user/resource_group_map.bzl similarity index 100% rename from apple/user/resource_group_map.bzl rename to prelude/apple/user/resource_group_map.bzl diff --git a/apple/user/watch_transition.bzl b/prelude/apple/user/watch_transition.bzl similarity index 100% rename from apple/user/watch_transition.bzl rename to prelude/apple/user/watch_transition.bzl diff --git a/apple/xcode.bzl b/prelude/apple/xcode.bzl similarity index 100% rename from apple/xcode.bzl rename to prelude/apple/xcode.bzl diff --git a/apple/xcode_postbuild_script.bzl b/prelude/apple/xcode_postbuild_script.bzl similarity index 100% rename from apple/xcode_postbuild_script.bzl rename to prelude/apple/xcode_postbuild_script.bzl diff --git a/apple/xcode_prebuild_script.bzl b/prelude/apple/xcode_prebuild_script.bzl similarity index 100% rename from apple/xcode_prebuild_script.bzl rename to prelude/apple/xcode_prebuild_script.bzl diff --git a/apple/xctest_swift_support.bzl b/prelude/apple/xctest_swift_support.bzl similarity index 100% rename from apple/xctest_swift_support.bzl rename to prelude/apple/xctest_swift_support.bzl diff --git a/artifact_tset.bzl b/prelude/artifact_tset.bzl similarity index 100% rename from artifact_tset.bzl rename to prelude/artifact_tset.bzl diff --git a/artifacts.bzl b/prelude/artifacts.bzl similarity index 100% rename from artifacts.bzl rename to prelude/artifacts.bzl diff --git a/asserts.bzl b/prelude/asserts.bzl similarity index 100% rename from asserts.bzl rename to prelude/asserts.bzl diff --git a/attributes.bzl b/prelude/attributes.bzl similarity index 100% rename from attributes.bzl rename to prelude/attributes.bzl diff --git a/build_mode.bzl b/prelude/build_mode.bzl similarity index 100% rename from build_mode.bzl rename to 
prelude/build_mode.bzl diff --git a/builtin.bzl b/prelude/builtin.bzl similarity index 100% rename from builtin.bzl rename to prelude/builtin.bzl diff --git a/cache_mode.bzl b/prelude/cache_mode.bzl similarity index 100% rename from cache_mode.bzl rename to prelude/cache_mode.bzl diff --git a/command_alias.bzl b/prelude/command_alias.bzl similarity index 100% rename from command_alias.bzl rename to prelude/command_alias.bzl diff --git a/configurations/rules.bzl b/prelude/configurations/rules.bzl similarity index 100% rename from configurations/rules.bzl rename to prelude/configurations/rules.bzl diff --git a/configurations/util.bzl b/prelude/configurations/util.bzl similarity index 100% rename from configurations/util.bzl rename to prelude/configurations/util.bzl diff --git a/cpu/BUCK.v2 b/prelude/cpu/BUCK.v2 similarity index 100% rename from cpu/BUCK.v2 rename to prelude/cpu/BUCK.v2 diff --git a/cpu/constraints/BUCK.v2 b/prelude/cpu/constraints/BUCK.v2 similarity index 100% rename from cpu/constraints/BUCK.v2 rename to prelude/cpu/constraints/BUCK.v2 diff --git a/csharp/csharp.bzl b/prelude/csharp/csharp.bzl similarity index 100% rename from csharp/csharp.bzl rename to prelude/csharp/csharp.bzl diff --git a/csharp/csharp_providers.bzl b/prelude/csharp/csharp_providers.bzl similarity index 100% rename from csharp/csharp_providers.bzl rename to prelude/csharp/csharp_providers.bzl diff --git a/csharp/toolchain.bzl b/prelude/csharp/toolchain.bzl similarity index 100% rename from csharp/toolchain.bzl rename to prelude/csharp/toolchain.bzl diff --git a/cxx/anon_link.bzl b/prelude/cxx/anon_link.bzl similarity index 100% rename from cxx/anon_link.bzl rename to prelude/cxx/anon_link.bzl diff --git a/cxx/archive.bzl b/prelude/cxx/archive.bzl similarity index 100% rename from cxx/archive.bzl rename to prelude/cxx/archive.bzl diff --git a/cxx/argsfiles.bzl b/prelude/cxx/argsfiles.bzl similarity index 100% rename from cxx/argsfiles.bzl rename to prelude/cxx/argsfiles.bzl diff 
--git a/cxx/attr_selection.bzl b/prelude/cxx/attr_selection.bzl similarity index 100% rename from cxx/attr_selection.bzl rename to prelude/cxx/attr_selection.bzl diff --git a/cxx/bitcode.bzl b/prelude/cxx/bitcode.bzl similarity index 100% rename from cxx/bitcode.bzl rename to prelude/cxx/bitcode.bzl diff --git a/cxx/comp_db.bzl b/prelude/cxx/comp_db.bzl similarity index 100% rename from cxx/comp_db.bzl rename to prelude/cxx/comp_db.bzl diff --git a/cxx/compile.bzl b/prelude/cxx/compile.bzl similarity index 100% rename from cxx/compile.bzl rename to prelude/cxx/compile.bzl diff --git a/cxx/compiler.bzl b/prelude/cxx/compiler.bzl similarity index 100% rename from cxx/compiler.bzl rename to prelude/cxx/compiler.bzl diff --git a/cxx/cxx.bzl b/prelude/cxx/cxx.bzl similarity index 100% rename from cxx/cxx.bzl rename to prelude/cxx/cxx.bzl diff --git a/cxx/cxx_bolt.bzl b/prelude/cxx/cxx_bolt.bzl similarity index 100% rename from cxx/cxx_bolt.bzl rename to prelude/cxx/cxx_bolt.bzl diff --git a/cxx/cxx_context.bzl b/prelude/cxx/cxx_context.bzl similarity index 100% rename from cxx/cxx_context.bzl rename to prelude/cxx/cxx_context.bzl diff --git a/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl similarity index 100% rename from cxx/cxx_executable.bzl rename to prelude/cxx/cxx_executable.bzl diff --git a/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl similarity index 100% rename from cxx/cxx_library.bzl rename to prelude/cxx/cxx_library.bzl diff --git a/cxx/cxx_library_utility.bzl b/prelude/cxx/cxx_library_utility.bzl similarity index 100% rename from cxx/cxx_library_utility.bzl rename to prelude/cxx/cxx_library_utility.bzl diff --git a/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl similarity index 100% rename from cxx/cxx_link_utility.bzl rename to prelude/cxx/cxx_link_utility.bzl diff --git a/cxx/cxx_sources.bzl b/prelude/cxx/cxx_sources.bzl similarity index 100% rename from cxx/cxx_sources.bzl rename to prelude/cxx/cxx_sources.bzl diff --git 
a/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl similarity index 100% rename from cxx/cxx_toolchain.bzl rename to prelude/cxx/cxx_toolchain.bzl diff --git a/cxx/cxx_toolchain_macro_layer.bzl b/prelude/cxx/cxx_toolchain_macro_layer.bzl similarity index 100% rename from cxx/cxx_toolchain_macro_layer.bzl rename to prelude/cxx/cxx_toolchain_macro_layer.bzl diff --git a/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl similarity index 100% rename from cxx/cxx_toolchain_types.bzl rename to prelude/cxx/cxx_toolchain_types.bzl diff --git a/cxx/cxx_types.bzl b/prelude/cxx/cxx_types.bzl similarity index 100% rename from cxx/cxx_types.bzl rename to prelude/cxx/cxx_types.bzl diff --git a/cxx/debug.bzl b/prelude/cxx/debug.bzl similarity index 100% rename from cxx/debug.bzl rename to prelude/cxx/debug.bzl diff --git a/cxx/dist_lto/README.md b/prelude/cxx/dist_lto/README.md similarity index 100% rename from cxx/dist_lto/README.md rename to prelude/cxx/dist_lto/README.md diff --git a/cxx/dist_lto/dist_lto.bzl b/prelude/cxx/dist_lto/dist_lto.bzl similarity index 100% rename from cxx/dist_lto/dist_lto.bzl rename to prelude/cxx/dist_lto/dist_lto.bzl diff --git a/cxx/dist_lto/tools.bzl b/prelude/cxx/dist_lto/tools.bzl similarity index 100% rename from cxx/dist_lto/tools.bzl rename to prelude/cxx/dist_lto/tools.bzl diff --git a/cxx/dist_lto/tools/BUCK.v2 b/prelude/cxx/dist_lto/tools/BUCK.v2 similarity index 100% rename from cxx/dist_lto/tools/BUCK.v2 rename to prelude/cxx/dist_lto/tools/BUCK.v2 diff --git a/cxx/dist_lto/tools/__init__.py b/prelude/cxx/dist_lto/tools/__init__.py similarity index 100% rename from cxx/dist_lto/tools/__init__.py rename to prelude/cxx/dist_lto/tools/__init__.py diff --git a/cxx/dist_lto/tools/dist_lto_copy.py b/prelude/cxx/dist_lto/tools/dist_lto_copy.py similarity index 100% rename from cxx/dist_lto/tools/dist_lto_copy.py rename to prelude/cxx/dist_lto/tools/dist_lto_copy.py diff --git a/cxx/dist_lto/tools/dist_lto_opt.py 
b/prelude/cxx/dist_lto/tools/dist_lto_opt.py similarity index 100% rename from cxx/dist_lto/tools/dist_lto_opt.py rename to prelude/cxx/dist_lto/tools/dist_lto_opt.py diff --git a/cxx/dist_lto/tools/dist_lto_planner.py b/prelude/cxx/dist_lto/tools/dist_lto_planner.py similarity index 100% rename from cxx/dist_lto/tools/dist_lto_planner.py rename to prelude/cxx/dist_lto/tools/dist_lto_planner.py diff --git a/cxx/dist_lto/tools/dist_lto_prepare.py b/prelude/cxx/dist_lto/tools/dist_lto_prepare.py similarity index 100% rename from cxx/dist_lto/tools/dist_lto_prepare.py rename to prelude/cxx/dist_lto/tools/dist_lto_prepare.py diff --git a/cxx/dist_lto/tools/tests/test_dist_lto_opt.py b/prelude/cxx/dist_lto/tools/tests/test_dist_lto_opt.py similarity index 100% rename from cxx/dist_lto/tools/tests/test_dist_lto_opt.py rename to prelude/cxx/dist_lto/tools/tests/test_dist_lto_opt.py diff --git a/cxx/dwp.bzl b/prelude/cxx/dwp.bzl similarity index 100% rename from cxx/dwp.bzl rename to prelude/cxx/dwp.bzl diff --git a/cxx/groups.bzl b/prelude/cxx/groups.bzl similarity index 100% rename from cxx/groups.bzl rename to prelude/cxx/groups.bzl diff --git a/cxx/headers.bzl b/prelude/cxx/headers.bzl similarity index 100% rename from cxx/headers.bzl rename to prelude/cxx/headers.bzl diff --git a/cxx/link.bzl b/prelude/cxx/link.bzl similarity index 100% rename from cxx/link.bzl rename to prelude/cxx/link.bzl diff --git a/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl similarity index 100% rename from cxx/link_groups.bzl rename to prelude/cxx/link_groups.bzl diff --git a/cxx/link_types.bzl b/prelude/cxx/link_types.bzl similarity index 100% rename from cxx/link_types.bzl rename to prelude/cxx/link_types.bzl diff --git a/cxx/linker.bzl b/prelude/cxx/linker.bzl similarity index 100% rename from cxx/linker.bzl rename to prelude/cxx/linker.bzl diff --git a/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl similarity index 100% rename from cxx/omnibus.bzl rename to prelude/cxx/omnibus.bzl diff 
--git a/cxx/platform.bzl b/prelude/cxx/platform.bzl similarity index 100% rename from cxx/platform.bzl rename to prelude/cxx/platform.bzl diff --git a/cxx/prebuilt_cxx_library_group.bzl b/prelude/cxx/prebuilt_cxx_library_group.bzl similarity index 100% rename from cxx/prebuilt_cxx_library_group.bzl rename to prelude/cxx/prebuilt_cxx_library_group.bzl diff --git a/cxx/preprocessor.bzl b/prelude/cxx/preprocessor.bzl similarity index 100% rename from cxx/preprocessor.bzl rename to prelude/cxx/preprocessor.bzl diff --git a/cxx/shared_library_interface.bzl b/prelude/cxx/shared_library_interface.bzl similarity index 100% rename from cxx/shared_library_interface.bzl rename to prelude/cxx/shared_library_interface.bzl diff --git a/cxx/symbols.bzl b/prelude/cxx/symbols.bzl similarity index 100% rename from cxx/symbols.bzl rename to prelude/cxx/symbols.bzl diff --git a/cxx/tools/BUCK.v2 b/prelude/cxx/tools/BUCK.v2 similarity index 100% rename from cxx/tools/BUCK.v2 rename to prelude/cxx/tools/BUCK.v2 diff --git a/cxx/tools/defs.bzl b/prelude/cxx/tools/defs.bzl similarity index 100% rename from cxx/tools/defs.bzl rename to prelude/cxx/tools/defs.bzl diff --git a/cxx/tools/dep_file_processor.py b/prelude/cxx/tools/dep_file_processor.py similarity index 100% rename from cxx/tools/dep_file_processor.py rename to prelude/cxx/tools/dep_file_processor.py diff --git a/cxx/tools/dep_file_utils.py b/prelude/cxx/tools/dep_file_utils.py similarity index 100% rename from cxx/tools/dep_file_utils.py rename to prelude/cxx/tools/dep_file_utils.py diff --git a/cxx/tools/hmap_wrapper.py b/prelude/cxx/tools/hmap_wrapper.py similarity index 100% rename from cxx/tools/hmap_wrapper.py rename to prelude/cxx/tools/hmap_wrapper.py diff --git a/cxx/tools/linker_wrapper.py b/prelude/cxx/tools/linker_wrapper.py similarity index 100% rename from cxx/tools/linker_wrapper.py rename to prelude/cxx/tools/linker_wrapper.py diff --git a/cxx/tools/make_comp_db.py b/prelude/cxx/tools/make_comp_db.py similarity 
index 100% rename from cxx/tools/make_comp_db.py rename to prelude/cxx/tools/make_comp_db.py diff --git a/cxx/tools/makefile_to_dep_file.py b/prelude/cxx/tools/makefile_to_dep_file.py similarity index 100% rename from cxx/tools/makefile_to_dep_file.py rename to prelude/cxx/tools/makefile_to_dep_file.py diff --git a/cxx/tools/show_headers_to_dep_file.py b/prelude/cxx/tools/show_headers_to_dep_file.py similarity index 100% rename from cxx/tools/show_headers_to_dep_file.py rename to prelude/cxx/tools/show_headers_to_dep_file.py diff --git a/cxx/tools/show_includes_to_dep_file.py b/prelude/cxx/tools/show_includes_to_dep_file.py similarity index 100% rename from cxx/tools/show_includes_to_dep_file.py rename to prelude/cxx/tools/show_includes_to_dep_file.py diff --git a/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl similarity index 100% rename from cxx/user/cxx_toolchain_override.bzl rename to prelude/cxx/user/cxx_toolchain_override.bzl diff --git a/cxx/user/link_group_map.bzl b/prelude/cxx/user/link_group_map.bzl similarity index 100% rename from cxx/user/link_group_map.bzl rename to prelude/cxx/user/link_group_map.bzl diff --git a/cxx/xcode.bzl b/prelude/cxx/xcode.bzl similarity index 100% rename from cxx/xcode.bzl rename to prelude/cxx/xcode.bzl diff --git a/debugging/common.bzl b/prelude/debugging/common.bzl similarity index 100% rename from debugging/common.bzl rename to prelude/debugging/common.bzl diff --git a/debugging/ensure_dwp.bzl b/prelude/debugging/ensure_dwp.bzl similarity index 100% rename from debugging/ensure_dwp.bzl rename to prelude/debugging/ensure_dwp.bzl diff --git a/debugging/fdb.bxl b/prelude/debugging/fdb.bxl similarity index 100% rename from debugging/fdb.bxl rename to prelude/debugging/fdb.bxl diff --git a/debugging/inspect_dbg_exec.bzl b/prelude/debugging/inspect_dbg_exec.bzl similarity index 100% rename from debugging/inspect_dbg_exec.bzl rename to prelude/debugging/inspect_dbg_exec.bzl diff --git 
a/debugging/inspect_default.bzl b/prelude/debugging/inspect_default.bzl similarity index 100% rename from debugging/inspect_default.bzl rename to prelude/debugging/inspect_default.bzl diff --git a/debugging/inspect_java.bzl b/prelude/debugging/inspect_java.bzl similarity index 100% rename from debugging/inspect_java.bzl rename to prelude/debugging/inspect_java.bzl diff --git a/debugging/labels.bzl b/prelude/debugging/labels.bzl similarity index 100% rename from debugging/labels.bzl rename to prelude/debugging/labels.bzl diff --git a/debugging/types.bzl b/prelude/debugging/types.bzl similarity index 100% rename from debugging/types.bzl rename to prelude/debugging/types.bzl diff --git a/decls/android_common.bzl b/prelude/decls/android_common.bzl similarity index 100% rename from decls/android_common.bzl rename to prelude/decls/android_common.bzl diff --git a/decls/android_rules.bzl b/prelude/decls/android_rules.bzl similarity index 100% rename from decls/android_rules.bzl rename to prelude/decls/android_rules.bzl diff --git a/decls/apple_common.bzl b/prelude/decls/apple_common.bzl similarity index 100% rename from decls/apple_common.bzl rename to prelude/decls/apple_common.bzl diff --git a/decls/common.bzl b/prelude/decls/common.bzl similarity index 100% rename from decls/common.bzl rename to prelude/decls/common.bzl diff --git a/decls/core_rules.bzl b/prelude/decls/core_rules.bzl similarity index 100% rename from decls/core_rules.bzl rename to prelude/decls/core_rules.bzl diff --git a/decls/cxx_common.bzl b/prelude/decls/cxx_common.bzl similarity index 100% rename from decls/cxx_common.bzl rename to prelude/decls/cxx_common.bzl diff --git a/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl similarity index 100% rename from decls/cxx_rules.bzl rename to prelude/decls/cxx_rules.bzl diff --git a/decls/d_common.bzl b/prelude/decls/d_common.bzl similarity index 100% rename from decls/d_common.bzl rename to prelude/decls/d_common.bzl diff --git a/decls/d_rules.bzl 
b/prelude/decls/d_rules.bzl similarity index 100% rename from decls/d_rules.bzl rename to prelude/decls/d_rules.bzl diff --git a/decls/dotnet_rules.bzl b/prelude/decls/dotnet_rules.bzl similarity index 100% rename from decls/dotnet_rules.bzl rename to prelude/decls/dotnet_rules.bzl diff --git a/decls/erlang_rules.bzl b/prelude/decls/erlang_rules.bzl similarity index 100% rename from decls/erlang_rules.bzl rename to prelude/decls/erlang_rules.bzl diff --git a/decls/genrule_common.bzl b/prelude/decls/genrule_common.bzl similarity index 100% rename from decls/genrule_common.bzl rename to prelude/decls/genrule_common.bzl diff --git a/decls/git_rules.bzl b/prelude/decls/git_rules.bzl similarity index 100% rename from decls/git_rules.bzl rename to prelude/decls/git_rules.bzl diff --git a/decls/go_common.bzl b/prelude/decls/go_common.bzl similarity index 100% rename from decls/go_common.bzl rename to prelude/decls/go_common.bzl diff --git a/decls/go_rules.bzl b/prelude/decls/go_rules.bzl similarity index 100% rename from decls/go_rules.bzl rename to prelude/decls/go_rules.bzl diff --git a/decls/groovy_rules.bzl b/prelude/decls/groovy_rules.bzl similarity index 100% rename from decls/groovy_rules.bzl rename to prelude/decls/groovy_rules.bzl diff --git a/decls/halide_rules.bzl b/prelude/decls/halide_rules.bzl similarity index 100% rename from decls/halide_rules.bzl rename to prelude/decls/halide_rules.bzl diff --git a/decls/haskell_common.bzl b/prelude/decls/haskell_common.bzl similarity index 100% rename from decls/haskell_common.bzl rename to prelude/decls/haskell_common.bzl diff --git a/decls/haskell_rules.bzl b/prelude/decls/haskell_rules.bzl similarity index 100% rename from decls/haskell_rules.bzl rename to prelude/decls/haskell_rules.bzl diff --git a/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl similarity index 100% rename from decls/ios_rules.bzl rename to prelude/decls/ios_rules.bzl diff --git a/decls/java_rules.bzl b/prelude/decls/java_rules.bzl similarity 
index 100% rename from decls/java_rules.bzl rename to prelude/decls/java_rules.bzl diff --git a/decls/js_rules.bzl b/prelude/decls/js_rules.bzl similarity index 100% rename from decls/js_rules.bzl rename to prelude/decls/js_rules.bzl diff --git a/decls/jvm_common.bzl b/prelude/decls/jvm_common.bzl similarity index 100% rename from decls/jvm_common.bzl rename to prelude/decls/jvm_common.bzl diff --git a/decls/kotlin_rules.bzl b/prelude/decls/kotlin_rules.bzl similarity index 100% rename from decls/kotlin_rules.bzl rename to prelude/decls/kotlin_rules.bzl diff --git a/decls/lua_common.bzl b/prelude/decls/lua_common.bzl similarity index 100% rename from decls/lua_common.bzl rename to prelude/decls/lua_common.bzl diff --git a/decls/lua_rules.bzl b/prelude/decls/lua_rules.bzl similarity index 100% rename from decls/lua_rules.bzl rename to prelude/decls/lua_rules.bzl diff --git a/decls/native_common.bzl b/prelude/decls/native_common.bzl similarity index 100% rename from decls/native_common.bzl rename to prelude/decls/native_common.bzl diff --git a/decls/ocaml_common.bzl b/prelude/decls/ocaml_common.bzl similarity index 100% rename from decls/ocaml_common.bzl rename to prelude/decls/ocaml_common.bzl diff --git a/decls/ocaml_rules.bzl b/prelude/decls/ocaml_rules.bzl similarity index 100% rename from decls/ocaml_rules.bzl rename to prelude/decls/ocaml_rules.bzl diff --git a/decls/python_common.bzl b/prelude/decls/python_common.bzl similarity index 100% rename from decls/python_common.bzl rename to prelude/decls/python_common.bzl diff --git a/decls/python_rules.bzl b/prelude/decls/python_rules.bzl similarity index 100% rename from decls/python_rules.bzl rename to prelude/decls/python_rules.bzl diff --git a/decls/re_test_common.bzl b/prelude/decls/re_test_common.bzl similarity index 100% rename from decls/re_test_common.bzl rename to prelude/decls/re_test_common.bzl diff --git a/decls/remote_common.bzl b/prelude/decls/remote_common.bzl similarity index 100% rename from 
decls/remote_common.bzl rename to prelude/decls/remote_common.bzl diff --git a/decls/rust_common.bzl b/prelude/decls/rust_common.bzl similarity index 100% rename from decls/rust_common.bzl rename to prelude/decls/rust_common.bzl diff --git a/decls/rust_rules.bzl b/prelude/decls/rust_rules.bzl similarity index 100% rename from decls/rust_rules.bzl rename to prelude/decls/rust_rules.bzl diff --git a/decls/scala_rules.bzl b/prelude/decls/scala_rules.bzl similarity index 100% rename from decls/scala_rules.bzl rename to prelude/decls/scala_rules.bzl diff --git a/decls/shell_rules.bzl b/prelude/decls/shell_rules.bzl similarity index 100% rename from decls/shell_rules.bzl rename to prelude/decls/shell_rules.bzl diff --git a/decls/toolchains_common.bzl b/prelude/decls/toolchains_common.bzl similarity index 100% rename from decls/toolchains_common.bzl rename to prelude/decls/toolchains_common.bzl diff --git a/decls/uncategorized_rules.bzl b/prelude/decls/uncategorized_rules.bzl similarity index 100% rename from decls/uncategorized_rules.bzl rename to prelude/decls/uncategorized_rules.bzl diff --git a/dist/dist_info.bzl b/prelude/dist/dist_info.bzl similarity index 100% rename from dist/dist_info.bzl rename to prelude/dist/dist_info.bzl diff --git a/docs/rules.bzl b/prelude/docs/rules.bzl similarity index 100% rename from docs/rules.bzl rename to prelude/docs/rules.bzl diff --git a/erlang/applications/BUCK.v2 b/prelude/erlang/applications/BUCK.v2 similarity index 100% rename from erlang/applications/BUCK.v2 rename to prelude/erlang/applications/BUCK.v2 diff --git a/erlang/common_test/.elp.toml b/prelude/erlang/common_test/.elp.toml similarity index 100% rename from erlang/common_test/.elp.toml rename to prelude/erlang/common_test/.elp.toml diff --git a/erlang/common_test/common/BUCK.v2 b/prelude/erlang/common_test/common/BUCK.v2 similarity index 100% rename from erlang/common_test/common/BUCK.v2 rename to prelude/erlang/common_test/common/BUCK.v2 diff --git 
a/erlang/common_test/common/include/artifact_annotations.hrl b/prelude/erlang/common_test/common/include/artifact_annotations.hrl similarity index 100% rename from erlang/common_test/common/include/artifact_annotations.hrl rename to prelude/erlang/common_test/common/include/artifact_annotations.hrl diff --git a/erlang/common_test/common/include/buck_ct_records.hrl b/prelude/erlang/common_test/common/include/buck_ct_records.hrl similarity index 100% rename from erlang/common_test/common/include/buck_ct_records.hrl rename to prelude/erlang/common_test/common/include/buck_ct_records.hrl diff --git a/erlang/common_test/common/include/tpx_records.hrl b/prelude/erlang/common_test/common/include/tpx_records.hrl similarity index 100% rename from erlang/common_test/common/include/tpx_records.hrl rename to prelude/erlang/common_test/common/include/tpx_records.hrl diff --git a/erlang/common_test/common/src/artifact_annotations.erl b/prelude/erlang/common_test/common/src/artifact_annotations.erl similarity index 100% rename from erlang/common_test/common/src/artifact_annotations.erl rename to prelude/erlang/common_test/common/src/artifact_annotations.erl diff --git a/erlang/common_test/common/src/bounded_buffer.erl b/prelude/erlang/common_test/common/src/bounded_buffer.erl similarity index 100% rename from erlang/common_test/common/src/bounded_buffer.erl rename to prelude/erlang/common_test/common/src/bounded_buffer.erl diff --git a/erlang/common_test/common/src/buck_ct_parser.erl b/prelude/erlang/common_test/common/src/buck_ct_parser.erl similarity index 100% rename from erlang/common_test/common/src/buck_ct_parser.erl rename to prelude/erlang/common_test/common/src/buck_ct_parser.erl diff --git a/erlang/common_test/common/src/buck_ct_provider.erl b/prelude/erlang/common_test/common/src/buck_ct_provider.erl similarity index 100% rename from erlang/common_test/common/src/buck_ct_provider.erl rename to prelude/erlang/common_test/common/src/buck_ct_provider.erl diff --git 
a/erlang/common_test/common/src/ct_error_printer.erl b/prelude/erlang/common_test/common/src/ct_error_printer.erl similarity index 100% rename from erlang/common_test/common/src/ct_error_printer.erl rename to prelude/erlang/common_test/common/src/ct_error_printer.erl diff --git a/erlang/common_test/common/src/execution_logs.erl b/prelude/erlang/common_test/common/src/execution_logs.erl similarity index 100% rename from erlang/common_test/common/src/execution_logs.erl rename to prelude/erlang/common_test/common/src/execution_logs.erl diff --git a/erlang/common_test/common/src/io_buffer.erl b/prelude/erlang/common_test/common/src/io_buffer.erl similarity index 100% rename from erlang/common_test/common/src/io_buffer.erl rename to prelude/erlang/common_test/common/src/io_buffer.erl diff --git a/erlang/common_test/common/src/test_artifact_directory.erl b/prelude/erlang/common_test/common/src/test_artifact_directory.erl similarity index 100% rename from erlang/common_test/common/src/test_artifact_directory.erl rename to prelude/erlang/common_test/common/src/test_artifact_directory.erl diff --git a/erlang/common_test/common/src/test_logger.erl b/prelude/erlang/common_test/common/src/test_logger.erl similarity index 100% rename from erlang/common_test/common/src/test_logger.erl rename to prelude/erlang/common_test/common/src/test_logger.erl diff --git a/erlang/common_test/cth_hooks/BUCK.v2 b/prelude/erlang/common_test/cth_hooks/BUCK.v2 similarity index 100% rename from erlang/common_test/cth_hooks/BUCK.v2 rename to prelude/erlang/common_test/cth_hooks/BUCK.v2 diff --git a/erlang/common_test/cth_hooks/src/cth_tpx.erl b/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl similarity index 100% rename from erlang/common_test/cth_hooks/src/cth_tpx.erl rename to prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl diff --git a/erlang/common_test/cth_hooks/src/cth_tpx_role.erl b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_role.erl similarity index 100% rename from 
erlang/common_test/cth_hooks/src/cth_tpx_role.erl rename to prelude/erlang/common_test/cth_hooks/src/cth_tpx_role.erl diff --git a/erlang/common_test/cth_hooks/src/cth_tpx_server.erl b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl similarity index 100% rename from erlang/common_test/cth_hooks/src/cth_tpx_server.erl rename to prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl diff --git a/erlang/common_test/cth_hooks/src/cth_tpx_test_tree.erl b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_test_tree.erl similarity index 100% rename from erlang/common_test/cth_hooks/src/cth_tpx_test_tree.erl rename to prelude/erlang/common_test/cth_hooks/src/cth_tpx_test_tree.erl diff --git a/erlang/common_test/cth_hooks/src/method_ids.hrl b/prelude/erlang/common_test/cth_hooks/src/method_ids.hrl similarity index 100% rename from erlang/common_test/cth_hooks/src/method_ids.hrl rename to prelude/erlang/common_test/cth_hooks/src/method_ids.hrl diff --git a/erlang/common_test/test_binary/BUCK.v2 b/prelude/erlang/common_test/test_binary/BUCK.v2 similarity index 100% rename from erlang/common_test/test_binary/BUCK.v2 rename to prelude/erlang/common_test/test_binary/BUCK.v2 diff --git a/erlang/common_test/test_binary/src/json_interfacer.erl b/prelude/erlang/common_test/test_binary/src/json_interfacer.erl similarity index 100% rename from erlang/common_test/test_binary/src/json_interfacer.erl rename to prelude/erlang/common_test/test_binary/src/json_interfacer.erl diff --git a/erlang/common_test/test_binary/src/junit_interfacer.erl b/prelude/erlang/common_test/test_binary/src/junit_interfacer.erl similarity index 100% rename from erlang/common_test/test_binary/src/junit_interfacer.erl rename to prelude/erlang/common_test/test_binary/src/junit_interfacer.erl diff --git a/erlang/common_test/test_binary/src/list_test.erl b/prelude/erlang/common_test/test_binary/src/list_test.erl similarity index 100% rename from erlang/common_test/test_binary/src/list_test.erl rename 
to prelude/erlang/common_test/test_binary/src/list_test.erl diff --git a/erlang/common_test/test_binary/src/listing_interfacer.erl b/prelude/erlang/common_test/test_binary/src/listing_interfacer.erl similarity index 100% rename from erlang/common_test/test_binary/src/listing_interfacer.erl rename to prelude/erlang/common_test/test_binary/src/listing_interfacer.erl diff --git a/erlang/common_test/test_binary/src/test_binary.erl b/prelude/erlang/common_test/test_binary/src/test_binary.erl similarity index 100% rename from erlang/common_test/test_binary/src/test_binary.erl rename to prelude/erlang/common_test/test_binary/src/test_binary.erl diff --git a/erlang/common_test/test_binary/src/test_runner.erl b/prelude/erlang/common_test/test_binary/src/test_runner.erl similarity index 100% rename from erlang/common_test/test_binary/src/test_runner.erl rename to prelude/erlang/common_test/test_binary/src/test_runner.erl diff --git a/erlang/common_test/test_cli_lib/BUCK.v2 b/prelude/erlang/common_test/test_cli_lib/BUCK.v2 similarity index 100% rename from erlang/common_test/test_cli_lib/BUCK.v2 rename to prelude/erlang/common_test/test_cli_lib/BUCK.v2 diff --git a/erlang/common_test/test_cli_lib/src/test.erl b/prelude/erlang/common_test/test_cli_lib/src/test.erl similarity index 100% rename from erlang/common_test/test_cli_lib/src/test.erl rename to prelude/erlang/common_test/test_cli_lib/src/test.erl diff --git a/erlang/common_test/test_exec/BUCK.v2 b/prelude/erlang/common_test/test_exec/BUCK.v2 similarity index 100% rename from erlang/common_test/test_exec/BUCK.v2 rename to prelude/erlang/common_test/test_exec/BUCK.v2 diff --git a/erlang/common_test/test_exec/src/ct_daemon.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon.erl similarity index 100% rename from erlang/common_test/test_exec/src/ct_daemon.erl rename to prelude/erlang/common_test/test_exec/src/ct_daemon.erl diff --git a/erlang/common_test/test_exec/src/ct_daemon_core.erl 
b/prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl similarity index 100% rename from erlang/common_test/test_exec/src/ct_daemon_core.erl rename to prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl diff --git a/erlang/common_test/test_exec/src/ct_daemon_hooks.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl similarity index 100% rename from erlang/common_test/test_exec/src/ct_daemon_hooks.erl rename to prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl diff --git a/erlang/common_test/test_exec/src/ct_daemon_logger.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_logger.erl similarity index 100% rename from erlang/common_test/test_exec/src/ct_daemon_logger.erl rename to prelude/erlang/common_test/test_exec/src/ct_daemon_logger.erl diff --git a/erlang/common_test/test_exec/src/ct_daemon_node.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl similarity index 100% rename from erlang/common_test/test_exec/src/ct_daemon_node.erl rename to prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl diff --git a/erlang/common_test/test_exec/src/ct_daemon_printer.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl similarity index 100% rename from erlang/common_test/test_exec/src/ct_daemon_printer.erl rename to prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl diff --git a/erlang/common_test/test_exec/src/ct_daemon_runner.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_runner.erl similarity index 100% rename from erlang/common_test/test_exec/src/ct_daemon_runner.erl rename to prelude/erlang/common_test/test_exec/src/ct_daemon_runner.erl diff --git a/erlang/common_test/test_exec/src/ct_executor.erl b/prelude/erlang/common_test/test_exec/src/ct_executor.erl similarity index 100% rename from erlang/common_test/test_exec/src/ct_executor.erl rename to prelude/erlang/common_test/test_exec/src/ct_executor.erl diff --git a/erlang/common_test/test_exec/src/ct_runner.erl 
b/prelude/erlang/common_test/test_exec/src/ct_runner.erl similarity index 100% rename from erlang/common_test/test_exec/src/ct_runner.erl rename to prelude/erlang/common_test/test_exec/src/ct_runner.erl diff --git a/erlang/common_test/test_exec/src/epmd_manager.erl b/prelude/erlang/common_test/test_exec/src/epmd_manager.erl similarity index 100% rename from erlang/common_test/test_exec/src/epmd_manager.erl rename to prelude/erlang/common_test/test_exec/src/epmd_manager.erl diff --git a/erlang/common_test/test_exec/src/test_exec.app.src b/prelude/erlang/common_test/test_exec/src/test_exec.app.src similarity index 100% rename from erlang/common_test/test_exec/src/test_exec.app.src rename to prelude/erlang/common_test/test_exec/src/test_exec.app.src diff --git a/erlang/common_test/test_exec/src/test_exec.erl b/prelude/erlang/common_test/test_exec/src/test_exec.erl similarity index 100% rename from erlang/common_test/test_exec/src/test_exec.erl rename to prelude/erlang/common_test/test_exec/src/test_exec.erl diff --git a/erlang/common_test/test_exec/src/test_exec_sup.erl b/prelude/erlang/common_test/test_exec/src/test_exec_sup.erl similarity index 100% rename from erlang/common_test/test_exec/src/test_exec_sup.erl rename to prelude/erlang/common_test/test_exec/src/test_exec_sup.erl diff --git a/erlang/erlang.bzl b/prelude/erlang/erlang.bzl similarity index 100% rename from erlang/erlang.bzl rename to prelude/erlang/erlang.bzl diff --git a/erlang/erlang_application.bzl b/prelude/erlang/erlang_application.bzl similarity index 100% rename from erlang/erlang_application.bzl rename to prelude/erlang/erlang_application.bzl diff --git a/erlang/erlang_application_includes.bzl b/prelude/erlang/erlang_application_includes.bzl similarity index 100% rename from erlang/erlang_application_includes.bzl rename to prelude/erlang/erlang_application_includes.bzl diff --git a/erlang/erlang_build.bzl b/prelude/erlang/erlang_build.bzl similarity index 100% rename from 
erlang/erlang_build.bzl rename to prelude/erlang/erlang_build.bzl diff --git a/erlang/erlang_dependencies.bzl b/prelude/erlang/erlang_dependencies.bzl similarity index 100% rename from erlang/erlang_dependencies.bzl rename to prelude/erlang/erlang_dependencies.bzl diff --git a/erlang/erlang_escript.bzl b/prelude/erlang/erlang_escript.bzl similarity index 100% rename from erlang/erlang_escript.bzl rename to prelude/erlang/erlang_escript.bzl diff --git a/erlang/erlang_info.bzl b/prelude/erlang/erlang_info.bzl similarity index 100% rename from erlang/erlang_info.bzl rename to prelude/erlang/erlang_info.bzl diff --git a/erlang/erlang_ls.config b/prelude/erlang/erlang_ls.config similarity index 100% rename from erlang/erlang_ls.config rename to prelude/erlang/erlang_ls.config diff --git a/erlang/erlang_otp_application.bzl b/prelude/erlang/erlang_otp_application.bzl similarity index 100% rename from erlang/erlang_otp_application.bzl rename to prelude/erlang/erlang_otp_application.bzl diff --git a/erlang/erlang_release.bzl b/prelude/erlang/erlang_release.bzl similarity index 100% rename from erlang/erlang_release.bzl rename to prelude/erlang/erlang_release.bzl diff --git a/erlang/erlang_shell.bzl b/prelude/erlang/erlang_shell.bzl similarity index 100% rename from erlang/erlang_shell.bzl rename to prelude/erlang/erlang_shell.bzl diff --git a/erlang/erlang_tests.bzl b/prelude/erlang/erlang_tests.bzl similarity index 100% rename from erlang/erlang_tests.bzl rename to prelude/erlang/erlang_tests.bzl diff --git a/erlang/erlang_toolchain.bzl b/prelude/erlang/erlang_toolchain.bzl similarity index 100% rename from erlang/erlang_toolchain.bzl rename to prelude/erlang/erlang_toolchain.bzl diff --git a/erlang/erlang_utils.bzl b/prelude/erlang/erlang_utils.bzl similarity index 100% rename from erlang/erlang_utils.bzl rename to prelude/erlang/erlang_utils.bzl diff --git a/erlang/shell/BUCK.v2 b/prelude/erlang/shell/BUCK.v2 similarity index 100% rename from erlang/shell/BUCK.v2 rename 
to prelude/erlang/shell/BUCK.v2 diff --git a/erlang/shell/shell.bxl b/prelude/erlang/shell/shell.bxl similarity index 100% rename from erlang/shell/shell.bxl rename to prelude/erlang/shell/shell.bxl diff --git a/erlang/shell/src/shell_buck2_utils.erl b/prelude/erlang/shell/src/shell_buck2_utils.erl similarity index 100% rename from erlang/shell/src/shell_buck2_utils.erl rename to prelude/erlang/shell/src/shell_buck2_utils.erl diff --git a/erlang/shell/src/user_default.erl b/prelude/erlang/shell/src/user_default.erl similarity index 100% rename from erlang/shell/src/user_default.erl rename to prelude/erlang/shell/src/user_default.erl diff --git a/erlang/toolchain/BUCK.v2 b/prelude/erlang/toolchain/BUCK.v2 similarity index 100% rename from erlang/toolchain/BUCK.v2 rename to prelude/erlang/toolchain/BUCK.v2 diff --git a/erlang/toolchain/app_src_builder.escript b/prelude/erlang/toolchain/app_src_builder.escript similarity index 100% rename from erlang/toolchain/app_src_builder.escript rename to prelude/erlang/toolchain/app_src_builder.escript diff --git a/erlang/toolchain/boot_script_builder.escript b/prelude/erlang/toolchain/boot_script_builder.escript similarity index 100% rename from erlang/toolchain/boot_script_builder.escript rename to prelude/erlang/toolchain/boot_script_builder.escript diff --git a/erlang/toolchain/dependency_analyzer.escript b/prelude/erlang/toolchain/dependency_analyzer.escript similarity index 100% rename from erlang/toolchain/dependency_analyzer.escript rename to prelude/erlang/toolchain/dependency_analyzer.escript diff --git a/erlang/toolchain/dependency_finalizer.escript b/prelude/erlang/toolchain/dependency_finalizer.escript similarity index 100% rename from erlang/toolchain/dependency_finalizer.escript rename to prelude/erlang/toolchain/dependency_finalizer.escript diff --git a/erlang/toolchain/edoc_cli.escript b/prelude/erlang/toolchain/edoc_cli.escript similarity index 100% rename from erlang/toolchain/edoc_cli.escript rename to 
prelude/erlang/toolchain/edoc_cli.escript diff --git a/erlang/toolchain/edoc_doclet_chunks.erl b/prelude/erlang/toolchain/edoc_doclet_chunks.erl similarity index 100% rename from erlang/toolchain/edoc_doclet_chunks.erl rename to prelude/erlang/toolchain/edoc_doclet_chunks.erl diff --git a/erlang/toolchain/edoc_report.erl b/prelude/erlang/toolchain/edoc_report.erl similarity index 100% rename from erlang/toolchain/edoc_report.erl rename to prelude/erlang/toolchain/edoc_report.erl diff --git a/erlang/toolchain/erlang_ls.config b/prelude/erlang/toolchain/erlang_ls.config similarity index 100% rename from erlang/toolchain/erlang_ls.config rename to prelude/erlang/toolchain/erlang_ls.config diff --git a/erlang/toolchain/erlc_trampoline.sh b/prelude/erlang/toolchain/erlc_trampoline.sh similarity index 100% rename from erlang/toolchain/erlc_trampoline.sh rename to prelude/erlang/toolchain/erlc_trampoline.sh diff --git a/erlang/toolchain/escript_builder.escript b/prelude/erlang/toolchain/escript_builder.escript similarity index 100% rename from erlang/toolchain/escript_builder.escript rename to prelude/erlang/toolchain/escript_builder.escript diff --git a/erlang/toolchain/include_erts.escript b/prelude/erlang/toolchain/include_erts.escript similarity index 100% rename from erlang/toolchain/include_erts.escript rename to prelude/erlang/toolchain/include_erts.escript diff --git a/erlang/toolchain/release_variables_builder.escript b/prelude/erlang/toolchain/release_variables_builder.escript similarity index 100% rename from erlang/toolchain/release_variables_builder.escript rename to prelude/erlang/toolchain/release_variables_builder.escript diff --git a/erlang/toolchain/transform_project_root.erl b/prelude/erlang/toolchain/transform_project_root.erl similarity index 100% rename from erlang/toolchain/transform_project_root.erl rename to prelude/erlang/toolchain/transform_project_root.erl diff --git a/export_exe.bzl b/prelude/export_exe.bzl similarity index 100% rename from 
export_exe.bzl rename to prelude/export_exe.bzl diff --git a/export_file.bzl b/prelude/export_file.bzl similarity index 100% rename from export_file.bzl rename to prelude/export_file.bzl diff --git a/filegroup.bzl b/prelude/filegroup.bzl similarity index 100% rename from filegroup.bzl rename to prelude/filegroup.bzl diff --git a/genrule.bzl b/prelude/genrule.bzl similarity index 100% rename from genrule.bzl rename to prelude/genrule.bzl diff --git a/genrule_local_labels.bzl b/prelude/genrule_local_labels.bzl similarity index 100% rename from genrule_local_labels.bzl rename to prelude/genrule_local_labels.bzl diff --git a/genrule_toolchain.bzl b/prelude/genrule_toolchain.bzl similarity index 100% rename from genrule_toolchain.bzl rename to prelude/genrule_toolchain.bzl diff --git a/genrule_types.bzl b/prelude/genrule_types.bzl similarity index 100% rename from genrule_types.bzl rename to prelude/genrule_types.bzl diff --git a/git/git_fetch.bzl b/prelude/git/git_fetch.bzl similarity index 100% rename from git/git_fetch.bzl rename to prelude/git/git_fetch.bzl diff --git a/git/tools/BUCK.v2 b/prelude/git/tools/BUCK.v2 similarity index 100% rename from git/tools/BUCK.v2 rename to prelude/git/tools/BUCK.v2 diff --git a/git/tools/git_fetch.py b/prelude/git/tools/git_fetch.py similarity index 100% rename from git/tools/git_fetch.py rename to prelude/git/tools/git_fetch.py diff --git a/go/cgo_library.bzl b/prelude/go/cgo_library.bzl similarity index 100% rename from go/cgo_library.bzl rename to prelude/go/cgo_library.bzl diff --git a/go/compile.bzl b/prelude/go/compile.bzl similarity index 100% rename from go/compile.bzl rename to prelude/go/compile.bzl diff --git a/go/coverage.bzl b/prelude/go/coverage.bzl similarity index 100% rename from go/coverage.bzl rename to prelude/go/coverage.bzl diff --git a/go/go_binary.bzl b/prelude/go/go_binary.bzl similarity index 100% rename from go/go_binary.bzl rename to prelude/go/go_binary.bzl diff --git a/go/go_exported_library.bzl 
b/prelude/go/go_exported_library.bzl similarity index 100% rename from go/go_exported_library.bzl rename to prelude/go/go_exported_library.bzl diff --git a/go/go_library.bzl b/prelude/go/go_library.bzl similarity index 100% rename from go/go_library.bzl rename to prelude/go/go_library.bzl diff --git a/go/go_test.bzl b/prelude/go/go_test.bzl similarity index 100% rename from go/go_test.bzl rename to prelude/go/go_test.bzl diff --git a/go/link.bzl b/prelude/go/link.bzl similarity index 100% rename from go/link.bzl rename to prelude/go/link.bzl diff --git a/go/packages.bzl b/prelude/go/packages.bzl similarity index 100% rename from go/packages.bzl rename to prelude/go/packages.bzl diff --git a/go/toolchain.bzl b/prelude/go/toolchain.bzl similarity index 100% rename from go/toolchain.bzl rename to prelude/go/toolchain.bzl diff --git a/go/tools/BUCK.v2 b/prelude/go/tools/BUCK.v2 similarity index 100% rename from go/tools/BUCK.v2 rename to prelude/go/tools/BUCK.v2 diff --git a/go/tools/cgo_wrapper.py b/prelude/go/tools/cgo_wrapper.py similarity index 100% rename from go/tools/cgo_wrapper.py rename to prelude/go/tools/cgo_wrapper.py diff --git a/go/tools/compile_wrapper.py b/prelude/go/tools/compile_wrapper.py similarity index 100% rename from go/tools/compile_wrapper.py rename to prelude/go/tools/compile_wrapper.py diff --git a/go/tools/cover_srcs.py b/prelude/go/tools/cover_srcs.py similarity index 100% rename from go/tools/cover_srcs.py rename to prelude/go/tools/cover_srcs.py diff --git a/go/tools/filter_srcs.py b/prelude/go/tools/filter_srcs.py similarity index 100% rename from go/tools/filter_srcs.py rename to prelude/go/tools/filter_srcs.py diff --git a/go/tools/testmaingen.go b/prelude/go/tools/testmaingen.go similarity index 100% rename from go/tools/testmaingen.go rename to prelude/go/tools/testmaingen.go diff --git a/haskell/compile.bzl b/prelude/haskell/compile.bzl similarity index 100% rename from haskell/compile.bzl rename to prelude/haskell/compile.bzl diff 
--git a/haskell/haskell.bzl b/prelude/haskell/haskell.bzl similarity index 100% rename from haskell/haskell.bzl rename to prelude/haskell/haskell.bzl diff --git a/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl similarity index 100% rename from haskell/haskell_ghci.bzl rename to prelude/haskell/haskell_ghci.bzl diff --git a/haskell/haskell_haddock.bzl b/prelude/haskell/haskell_haddock.bzl similarity index 100% rename from haskell/haskell_haddock.bzl rename to prelude/haskell/haskell_haddock.bzl diff --git a/haskell/haskell_ide.bzl b/prelude/haskell/haskell_ide.bzl similarity index 100% rename from haskell/haskell_ide.bzl rename to prelude/haskell/haskell_ide.bzl diff --git a/haskell/ide/README.md b/prelude/haskell/ide/README.md similarity index 100% rename from haskell/ide/README.md rename to prelude/haskell/ide/README.md diff --git a/haskell/ide/hie.yaml b/prelude/haskell/ide/hie.yaml similarity index 100% rename from haskell/ide/hie.yaml rename to prelude/haskell/ide/hie.yaml diff --git a/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl similarity index 100% rename from haskell/ide/ide.bxl rename to prelude/haskell/ide/ide.bxl diff --git a/haskell/link_info.bzl b/prelude/haskell/link_info.bzl similarity index 100% rename from haskell/link_info.bzl rename to prelude/haskell/link_info.bzl diff --git a/haskell/toolchain.bzl b/prelude/haskell/toolchain.bzl similarity index 100% rename from haskell/toolchain.bzl rename to prelude/haskell/toolchain.bzl diff --git a/haskell/tools/BUCK.v2 b/prelude/haskell/tools/BUCK.v2 similarity index 100% rename from haskell/tools/BUCK.v2 rename to prelude/haskell/tools/BUCK.v2 diff --git a/haskell/tools/script_template_processor.py b/prelude/haskell/tools/script_template_processor.py similarity index 100% rename from haskell/tools/script_template_processor.py rename to prelude/haskell/tools/script_template_processor.py diff --git a/haskell/util.bzl b/prelude/haskell/util.bzl similarity index 100% rename from 
haskell/util.bzl rename to prelude/haskell/util.bzl diff --git a/http_archive/exec_deps.bzl b/prelude/http_archive/exec_deps.bzl similarity index 100% rename from http_archive/exec_deps.bzl rename to prelude/http_archive/exec_deps.bzl diff --git a/http_archive/http_archive.bzl b/prelude/http_archive/http_archive.bzl similarity index 100% rename from http_archive/http_archive.bzl rename to prelude/http_archive/http_archive.bzl diff --git a/http_archive/tools/BUCK.v2 b/prelude/http_archive/tools/BUCK.v2 similarity index 100% rename from http_archive/tools/BUCK.v2 rename to prelude/http_archive/tools/BUCK.v2 diff --git a/http_archive/tools/create_exclusion_list.py b/prelude/http_archive/tools/create_exclusion_list.py similarity index 100% rename from http_archive/tools/create_exclusion_list.py rename to prelude/http_archive/tools/create_exclusion_list.py diff --git a/http_file.bzl b/prelude/http_file.bzl similarity index 100% rename from http_file.bzl rename to prelude/http_file.bzl diff --git a/ide_integrations/xcode.bzl b/prelude/ide_integrations/xcode.bzl similarity index 100% rename from ide_integrations/xcode.bzl rename to prelude/ide_integrations/xcode.bzl diff --git a/is_buck2.bzl b/prelude/is_buck2.bzl similarity index 100% rename from is_buck2.bzl rename to prelude/is_buck2.bzl diff --git a/is_buck2_internal.bzl b/prelude/is_buck2_internal.bzl similarity index 100% rename from is_buck2_internal.bzl rename to prelude/is_buck2_internal.bzl diff --git a/is_full_meta_repo.bzl b/prelude/is_full_meta_repo.bzl similarity index 100% rename from is_full_meta_repo.bzl rename to prelude/is_full_meta_repo.bzl diff --git a/java/class_to_srcs.bzl b/prelude/java/class_to_srcs.bzl similarity index 100% rename from java/class_to_srcs.bzl rename to prelude/java/class_to_srcs.bzl diff --git a/java/dex.bzl b/prelude/java/dex.bzl similarity index 100% rename from java/dex.bzl rename to prelude/java/dex.bzl diff --git a/java/dex_toolchain.bzl b/prelude/java/dex_toolchain.bzl 
similarity index 100% rename from java/dex_toolchain.bzl rename to prelude/java/dex_toolchain.bzl diff --git a/java/gwt_binary.bzl b/prelude/java/gwt_binary.bzl similarity index 100% rename from java/gwt_binary.bzl rename to prelude/java/gwt_binary.bzl diff --git a/java/jar_genrule.bzl b/prelude/java/jar_genrule.bzl similarity index 100% rename from java/jar_genrule.bzl rename to prelude/java/jar_genrule.bzl diff --git a/java/java.bzl b/prelude/java/java.bzl similarity index 100% rename from java/java.bzl rename to prelude/java/java.bzl diff --git a/java/java_binary.bzl b/prelude/java/java_binary.bzl similarity index 100% rename from java/java_binary.bzl rename to prelude/java/java_binary.bzl diff --git a/java/java_library.bzl b/prelude/java/java_library.bzl similarity index 100% rename from java/java_library.bzl rename to prelude/java/java_library.bzl diff --git a/java/java_providers.bzl b/prelude/java/java_providers.bzl similarity index 100% rename from java/java_providers.bzl rename to prelude/java/java_providers.bzl diff --git a/java/java_resources.bzl b/prelude/java/java_resources.bzl similarity index 100% rename from java/java_resources.bzl rename to prelude/java/java_resources.bzl diff --git a/java/java_test.bzl b/prelude/java/java_test.bzl similarity index 100% rename from java/java_test.bzl rename to prelude/java/java_test.bzl diff --git a/java/java_toolchain.bzl b/prelude/java/java_toolchain.bzl similarity index 100% rename from java/java_toolchain.bzl rename to prelude/java/java_toolchain.bzl diff --git a/java/javacd_jar_creator.bzl b/prelude/java/javacd_jar_creator.bzl similarity index 100% rename from java/javacd_jar_creator.bzl rename to prelude/java/javacd_jar_creator.bzl diff --git a/java/keystore.bzl b/prelude/java/keystore.bzl similarity index 100% rename from java/keystore.bzl rename to prelude/java/keystore.bzl diff --git a/java/plugins/java_annotation_processor.bzl b/prelude/java/plugins/java_annotation_processor.bzl similarity index 100% 
rename from java/plugins/java_annotation_processor.bzl rename to prelude/java/plugins/java_annotation_processor.bzl diff --git a/java/plugins/java_plugin.bzl b/prelude/java/plugins/java_plugin.bzl similarity index 100% rename from java/plugins/java_plugin.bzl rename to prelude/java/plugins/java_plugin.bzl diff --git a/java/prebuilt_jar.bzl b/prelude/java/prebuilt_jar.bzl similarity index 100% rename from java/prebuilt_jar.bzl rename to prelude/java/prebuilt_jar.bzl diff --git a/java/tools/BUCK.v2 b/prelude/java/tools/BUCK.v2 similarity index 100% rename from java/tools/BUCK.v2 rename to prelude/java/tools/BUCK.v2 diff --git a/java/tools/compile_and_package.py b/prelude/java/tools/compile_and_package.py similarity index 100% rename from java/tools/compile_and_package.py rename to prelude/java/tools/compile_and_package.py diff --git a/java/tools/fat_jar.py b/prelude/java/tools/fat_jar.py similarity index 100% rename from java/tools/fat_jar.py rename to prelude/java/tools/fat_jar.py diff --git a/java/tools/gen_class_to_source_map.py b/prelude/java/tools/gen_class_to_source_map.py similarity index 100% rename from java/tools/gen_class_to_source_map.py rename to prelude/java/tools/gen_class_to_source_map.py diff --git a/java/tools/list_class_names.py b/prelude/java/tools/list_class_names.py similarity index 100% rename from java/tools/list_class_names.py rename to prelude/java/tools/list_class_names.py diff --git a/java/tools/merge_class_to_source_maps.py b/prelude/java/tools/merge_class_to_source_maps.py similarity index 100% rename from java/tools/merge_class_to_source_maps.py rename to prelude/java/tools/merge_class_to_source_maps.py diff --git a/java/tools/utils.py b/prelude/java/tools/utils.py similarity index 100% rename from java/tools/utils.py rename to prelude/java/tools/utils.py diff --git a/java/utils/java_more_utils.bzl b/prelude/java/utils/java_more_utils.bzl similarity index 100% rename from java/utils/java_more_utils.bzl rename to 
prelude/java/utils/java_more_utils.bzl diff --git a/java/utils/java_utils.bzl b/prelude/java/utils/java_utils.bzl similarity index 100% rename from java/utils/java_utils.bzl rename to prelude/java/utils/java_utils.bzl diff --git a/js/js.bzl b/prelude/js/js.bzl similarity index 100% rename from js/js.bzl rename to prelude/js/js.bzl diff --git a/js/js_bundle.bzl b/prelude/js/js_bundle.bzl similarity index 100% rename from js/js_bundle.bzl rename to prelude/js/js_bundle.bzl diff --git a/js/js_bundle_genrule.bzl b/prelude/js/js_bundle_genrule.bzl similarity index 100% rename from js/js_bundle_genrule.bzl rename to prelude/js/js_bundle_genrule.bzl diff --git a/js/js_library.bzl b/prelude/js/js_library.bzl similarity index 100% rename from js/js_library.bzl rename to prelude/js/js_library.bzl diff --git a/js/js_providers.bzl b/prelude/js/js_providers.bzl similarity index 100% rename from js/js_providers.bzl rename to prelude/js/js_providers.bzl diff --git a/js/js_utils.bzl b/prelude/js/js_utils.bzl similarity index 100% rename from js/js_utils.bzl rename to prelude/js/js_utils.bzl diff --git a/julia/julia.bzl b/prelude/julia/julia.bzl similarity index 100% rename from julia/julia.bzl rename to prelude/julia/julia.bzl diff --git a/julia/julia_binary.bzl b/prelude/julia/julia_binary.bzl similarity index 100% rename from julia/julia_binary.bzl rename to prelude/julia/julia_binary.bzl diff --git a/julia/julia_info.bzl b/prelude/julia/julia_info.bzl similarity index 100% rename from julia/julia_info.bzl rename to prelude/julia/julia_info.bzl diff --git a/julia/julia_library.bzl b/prelude/julia/julia_library.bzl similarity index 100% rename from julia/julia_library.bzl rename to prelude/julia/julia_library.bzl diff --git a/julia/julia_test.bzl b/prelude/julia/julia_test.bzl similarity index 100% rename from julia/julia_test.bzl rename to prelude/julia/julia_test.bzl diff --git a/julia/julia_toolchain.bzl b/prelude/julia/julia_toolchain.bzl similarity index 100% rename from 
julia/julia_toolchain.bzl rename to prelude/julia/julia_toolchain.bzl diff --git a/julia/tools/BUCK.v2 b/prelude/julia/tools/BUCK.v2 similarity index 100% rename from julia/tools/BUCK.v2 rename to prelude/julia/tools/BUCK.v2 diff --git a/julia/tools/parse_julia_cmd.py b/prelude/julia/tools/parse_julia_cmd.py similarity index 100% rename from julia/tools/parse_julia_cmd.py rename to prelude/julia/tools/parse_julia_cmd.py diff --git a/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl similarity index 100% rename from jvm/cd_jar_creator_util.bzl rename to prelude/jvm/cd_jar_creator_util.bzl diff --git a/jvm/nullsafe.bzl b/prelude/jvm/nullsafe.bzl similarity index 100% rename from jvm/nullsafe.bzl rename to prelude/jvm/nullsafe.bzl diff --git a/kotlin/kotlin.bzl b/prelude/kotlin/kotlin.bzl similarity index 100% rename from kotlin/kotlin.bzl rename to prelude/kotlin/kotlin.bzl diff --git a/kotlin/kotlin_library.bzl b/prelude/kotlin/kotlin_library.bzl similarity index 100% rename from kotlin/kotlin_library.bzl rename to prelude/kotlin/kotlin_library.bzl diff --git a/kotlin/kotlin_test.bzl b/prelude/kotlin/kotlin_test.bzl similarity index 100% rename from kotlin/kotlin_test.bzl rename to prelude/kotlin/kotlin_test.bzl diff --git a/kotlin/kotlin_toolchain.bzl b/prelude/kotlin/kotlin_toolchain.bzl similarity index 100% rename from kotlin/kotlin_toolchain.bzl rename to prelude/kotlin/kotlin_toolchain.bzl diff --git a/kotlin/kotlin_utils.bzl b/prelude/kotlin/kotlin_utils.bzl similarity index 100% rename from kotlin/kotlin_utils.bzl rename to prelude/kotlin/kotlin_utils.bzl diff --git a/kotlin/kotlincd_jar_creator.bzl b/prelude/kotlin/kotlincd_jar_creator.bzl similarity index 100% rename from kotlin/kotlincd_jar_creator.bzl rename to prelude/kotlin/kotlincd_jar_creator.bzl diff --git a/kotlin/tools/compile_kotlin/BUCK.v2 b/prelude/kotlin/tools/compile_kotlin/BUCK.v2 similarity index 100% rename from kotlin/tools/compile_kotlin/BUCK.v2 rename to 
prelude/kotlin/tools/compile_kotlin/BUCK.v2 diff --git a/kotlin/tools/compile_kotlin/compile_kotlin.py b/prelude/kotlin/tools/compile_kotlin/compile_kotlin.py similarity index 100% rename from kotlin/tools/compile_kotlin/compile_kotlin.py rename to prelude/kotlin/tools/compile_kotlin/compile_kotlin.py diff --git a/kotlin/tools/defs.bzl b/prelude/kotlin/tools/defs.bzl similarity index 100% rename from kotlin/tools/defs.bzl rename to prelude/kotlin/tools/defs.bzl diff --git a/kotlin/tools/kapt_base64_encoder/BUCK.v2 b/prelude/kotlin/tools/kapt_base64_encoder/BUCK.v2 similarity index 100% rename from kotlin/tools/kapt_base64_encoder/BUCK.v2 rename to prelude/kotlin/tools/kapt_base64_encoder/BUCK.v2 diff --git a/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java b/prelude/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java similarity index 100% rename from kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java rename to prelude/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java diff --git a/linking/execution_preference.bzl b/prelude/linking/execution_preference.bzl similarity index 100% rename from linking/execution_preference.bzl rename to prelude/linking/execution_preference.bzl diff --git a/linking/link_groups.bzl b/prelude/linking/link_groups.bzl similarity index 100% rename from linking/link_groups.bzl rename to prelude/linking/link_groups.bzl diff --git a/linking/link_info.bzl b/prelude/linking/link_info.bzl similarity index 100% rename from linking/link_info.bzl rename to prelude/linking/link_info.bzl diff --git a/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl similarity index 100% rename from linking/linkable_graph.bzl rename to prelude/linking/linkable_graph.bzl diff --git a/linking/linkables.bzl b/prelude/linking/linkables.bzl similarity index 100% rename from linking/linkables.bzl rename to prelude/linking/linkables.bzl diff --git a/linking/lto.bzl 
b/prelude/linking/lto.bzl similarity index 100% rename from linking/lto.bzl rename to prelude/linking/lto.bzl diff --git a/linking/shared_libraries.bzl b/prelude/linking/shared_libraries.bzl similarity index 100% rename from linking/shared_libraries.bzl rename to prelude/linking/shared_libraries.bzl diff --git a/linking/strip.bzl b/prelude/linking/strip.bzl similarity index 100% rename from linking/strip.bzl rename to prelude/linking/strip.bzl diff --git a/local_only.bzl b/prelude/local_only.bzl similarity index 100% rename from local_only.bzl rename to prelude/local_only.bzl diff --git a/lua/cxx_lua_extension.bzl b/prelude/lua/cxx_lua_extension.bzl similarity index 100% rename from lua/cxx_lua_extension.bzl rename to prelude/lua/cxx_lua_extension.bzl diff --git a/lua/lua_binary.bzl b/prelude/lua/lua_binary.bzl similarity index 100% rename from lua/lua_binary.bzl rename to prelude/lua/lua_binary.bzl diff --git a/lua/lua_library.bzl b/prelude/lua/lua_library.bzl similarity index 100% rename from lua/lua_library.bzl rename to prelude/lua/lua_library.bzl diff --git a/native.bzl b/prelude/native.bzl similarity index 100% rename from native.bzl rename to prelude/native.bzl diff --git a/ocaml/attrs.bzl b/prelude/ocaml/attrs.bzl similarity index 100% rename from ocaml/attrs.bzl rename to prelude/ocaml/attrs.bzl diff --git a/ocaml/makefile.bzl b/prelude/ocaml/makefile.bzl similarity index 100% rename from ocaml/makefile.bzl rename to prelude/ocaml/makefile.bzl diff --git a/ocaml/ocaml.bzl b/prelude/ocaml/ocaml.bzl similarity index 100% rename from ocaml/ocaml.bzl rename to prelude/ocaml/ocaml.bzl diff --git a/ocaml/ocaml_toolchain_types.bzl b/prelude/ocaml/ocaml_toolchain_types.bzl similarity index 100% rename from ocaml/ocaml_toolchain_types.bzl rename to prelude/ocaml/ocaml_toolchain_types.bzl diff --git a/os/BUCK.v2 b/prelude/os/BUCK.v2 similarity index 100% rename from os/BUCK.v2 rename to prelude/os/BUCK.v2 diff --git a/os/constraints/BUCK.v2 
b/prelude/os/constraints/BUCK.v2 similarity index 100% rename from os/constraints/BUCK.v2 rename to prelude/os/constraints/BUCK.v2 diff --git a/os_lookup/defs.bzl b/prelude/os_lookup/defs.bzl similarity index 100% rename from os_lookup/defs.bzl rename to prelude/os_lookup/defs.bzl diff --git a/os_lookup/targets/BUCK.v2 b/prelude/os_lookup/targets/BUCK.v2 similarity index 100% rename from os_lookup/targets/BUCK.v2 rename to prelude/os_lookup/targets/BUCK.v2 diff --git a/paths.bzl b/prelude/paths.bzl similarity index 100% rename from paths.bzl rename to prelude/paths.bzl diff --git a/platforms/BUCK b/prelude/platforms/BUCK similarity index 100% rename from platforms/BUCK rename to prelude/platforms/BUCK diff --git a/platforms/BUCK.v2 b/prelude/platforms/BUCK.v2 similarity index 100% rename from platforms/BUCK.v2 rename to prelude/platforms/BUCK.v2 diff --git a/platforms/apple/arch.bzl b/prelude/platforms/apple/arch.bzl similarity index 100% rename from platforms/apple/arch.bzl rename to prelude/platforms/apple/arch.bzl diff --git a/platforms/apple/sdk.bzl b/prelude/platforms/apple/sdk.bzl similarity index 100% rename from platforms/apple/sdk.bzl rename to prelude/platforms/apple/sdk.bzl diff --git a/platforms/defs.bzl b/prelude/platforms/defs.bzl similarity index 100% rename from platforms/defs.bzl rename to prelude/platforms/defs.bzl diff --git a/playground/test.bxl b/prelude/playground/test.bxl similarity index 100% rename from playground/test.bxl rename to prelude/playground/test.bxl diff --git a/prelude.bzl b/prelude/prelude.bzl similarity index 100% rename from prelude.bzl rename to prelude/prelude.bzl diff --git a/pull_request_template.md b/prelude/pull_request_template.md similarity index 100% rename from pull_request_template.md rename to prelude/pull_request_template.md diff --git a/python/compile.bzl b/prelude/python/compile.bzl similarity index 100% rename from python/compile.bzl rename to prelude/python/compile.bzl diff --git 
a/python/cxx_python_extension.bzl b/prelude/python/cxx_python_extension.bzl similarity index 100% rename from python/cxx_python_extension.bzl rename to prelude/python/cxx_python_extension.bzl diff --git a/python/interface.bzl b/prelude/python/interface.bzl similarity index 100% rename from python/interface.bzl rename to prelude/python/interface.bzl diff --git a/python/make_py_package.bzl b/prelude/python/make_py_package.bzl similarity index 100% rename from python/make_py_package.bzl rename to prelude/python/make_py_package.bzl diff --git a/python/manifest.bzl b/prelude/python/manifest.bzl similarity index 100% rename from python/manifest.bzl rename to prelude/python/manifest.bzl diff --git a/python/native_python_util.bzl b/prelude/python/native_python_util.bzl similarity index 100% rename from python/native_python_util.bzl rename to prelude/python/native_python_util.bzl diff --git a/python/needed_coverage.bzl b/prelude/python/needed_coverage.bzl similarity index 100% rename from python/needed_coverage.bzl rename to prelude/python/needed_coverage.bzl diff --git a/python/prebuilt_python_library.bzl b/prelude/python/prebuilt_python_library.bzl similarity index 100% rename from python/prebuilt_python_library.bzl rename to prelude/python/prebuilt_python_library.bzl diff --git a/python/python.bzl b/prelude/python/python.bzl similarity index 100% rename from python/python.bzl rename to prelude/python/python.bzl diff --git a/python/python_binary.bzl b/prelude/python/python_binary.bzl similarity index 100% rename from python/python_binary.bzl rename to prelude/python/python_binary.bzl diff --git a/python/python_library.bzl b/prelude/python/python_library.bzl similarity index 100% rename from python/python_library.bzl rename to prelude/python/python_library.bzl diff --git a/python/python_needed_coverage_test.bzl b/prelude/python/python_needed_coverage_test.bzl similarity index 100% rename from python/python_needed_coverage_test.bzl rename to 
prelude/python/python_needed_coverage_test.bzl diff --git a/python/python_test.bzl b/prelude/python/python_test.bzl similarity index 100% rename from python/python_test.bzl rename to prelude/python/python_test.bzl diff --git a/python/runtime/BUCK.v2 b/prelude/python/runtime/BUCK.v2 similarity index 100% rename from python/runtime/BUCK.v2 rename to prelude/python/runtime/BUCK.v2 diff --git a/python/runtime/__par__/bootstrap.py b/prelude/python/runtime/__par__/bootstrap.py similarity index 100% rename from python/runtime/__par__/bootstrap.py rename to prelude/python/runtime/__par__/bootstrap.py diff --git a/python/source_db.bzl b/prelude/python/source_db.bzl similarity index 100% rename from python/source_db.bzl rename to prelude/python/source_db.bzl diff --git a/python/sourcedb/build.bxl b/prelude/python/sourcedb/build.bxl similarity index 100% rename from python/sourcedb/build.bxl rename to prelude/python/sourcedb/build.bxl diff --git a/python/sourcedb/classic.bxl b/prelude/python/sourcedb/classic.bxl similarity index 100% rename from python/sourcedb/classic.bxl rename to prelude/python/sourcedb/classic.bxl diff --git a/python/sourcedb/code_navigation.bxl b/prelude/python/sourcedb/code_navigation.bxl similarity index 100% rename from python/sourcedb/code_navigation.bxl rename to prelude/python/sourcedb/code_navigation.bxl diff --git a/python/sourcedb/merge.bxl b/prelude/python/sourcedb/merge.bxl similarity index 100% rename from python/sourcedb/merge.bxl rename to prelude/python/sourcedb/merge.bxl diff --git a/python/sourcedb/query.bxl b/prelude/python/sourcedb/query.bxl similarity index 100% rename from python/sourcedb/query.bxl rename to prelude/python/sourcedb/query.bxl diff --git a/python/toolchain.bzl b/prelude/python/toolchain.bzl similarity index 100% rename from python/toolchain.bzl rename to prelude/python/toolchain.bzl diff --git a/python/tools/BUCK.v2 b/prelude/python/tools/BUCK.v2 similarity index 100% rename from python/tools/BUCK.v2 rename to 
prelude/python/tools/BUCK.v2 diff --git a/python/tools/__test_main__.py b/prelude/python/tools/__test_main__.py similarity index 100% rename from python/tools/__test_main__.py rename to prelude/python/tools/__test_main__.py diff --git a/python/tools/compile.py b/prelude/python/tools/compile.py similarity index 100% rename from python/tools/compile.py rename to prelude/python/tools/compile.py diff --git a/python/tools/create_manifest_for_source_dir.py b/prelude/python/tools/create_manifest_for_source_dir.py similarity index 100% rename from python/tools/create_manifest_for_source_dir.py rename to prelude/python/tools/create_manifest_for_source_dir.py diff --git a/python/tools/embedded_main.cpp b/prelude/python/tools/embedded_main.cpp similarity index 100% rename from python/tools/embedded_main.cpp rename to prelude/python/tools/embedded_main.cpp diff --git a/python/tools/extract.py b/prelude/python/tools/extract.py similarity index 100% rename from python/tools/extract.py rename to prelude/python/tools/extract.py diff --git a/python/tools/fail_with_message.py b/prelude/python/tools/fail_with_message.py similarity index 100% rename from python/tools/fail_with_message.py rename to prelude/python/tools/fail_with_message.py diff --git a/python/tools/generate_static_extension_info.py b/prelude/python/tools/generate_static_extension_info.py similarity index 100% rename from python/tools/generate_static_extension_info.py rename to prelude/python/tools/generate_static_extension_info.py diff --git a/python/tools/make_par/BUCK b/prelude/python/tools/make_par/BUCK similarity index 100% rename from python/tools/make_par/BUCK rename to prelude/python/tools/make_par/BUCK diff --git a/python/tools/make_par/__run_lpar_main__.py b/prelude/python/tools/make_par/__run_lpar_main__.py similarity index 100% rename from python/tools/make_par/__run_lpar_main__.py rename to prelude/python/tools/make_par/__run_lpar_main__.py diff --git a/python/tools/make_par/_lpar_bootstrap.sh.template 
b/prelude/python/tools/make_par/_lpar_bootstrap.sh.template similarity index 100% rename from python/tools/make_par/_lpar_bootstrap.sh.template rename to prelude/python/tools/make_par/_lpar_bootstrap.sh.template diff --git a/python/tools/make_par/sitecustomize.py b/prelude/python/tools/make_par/sitecustomize.py similarity index 100% rename from python/tools/make_par/sitecustomize.py rename to prelude/python/tools/make_par/sitecustomize.py diff --git a/python/tools/make_py_package_inplace.py b/prelude/python/tools/make_py_package_inplace.py similarity index 100% rename from python/tools/make_py_package_inplace.py rename to prelude/python/tools/make_py_package_inplace.py diff --git a/python/tools/make_py_package_manifest_module.py b/prelude/python/tools/make_py_package_manifest_module.py similarity index 100% rename from python/tools/make_py_package_manifest_module.py rename to prelude/python/tools/make_py_package_manifest_module.py diff --git a/python/tools/make_py_package_modules.py b/prelude/python/tools/make_py_package_modules.py similarity index 100% rename from python/tools/make_py_package_modules.py rename to prelude/python/tools/make_py_package_modules.py diff --git a/python/tools/make_source_db.py b/prelude/python/tools/make_source_db.py similarity index 100% rename from python/tools/make_source_db.py rename to prelude/python/tools/make_source_db.py diff --git a/python/tools/make_source_db_no_deps.py b/prelude/python/tools/make_source_db_no_deps.py similarity index 100% rename from python/tools/make_source_db_no_deps.py rename to prelude/python/tools/make_source_db_no_deps.py diff --git a/python/tools/parse_imports.py b/prelude/python/tools/parse_imports.py similarity index 100% rename from python/tools/parse_imports.py rename to prelude/python/tools/parse_imports.py diff --git a/python/tools/py38stdlib.py b/prelude/python/tools/py38stdlib.py similarity index 100% rename from python/tools/py38stdlib.py rename to prelude/python/tools/py38stdlib.py diff --git 
a/python/tools/run_inplace.py.in b/prelude/python/tools/run_inplace.py.in similarity index 100% rename from python/tools/run_inplace.py.in rename to prelude/python/tools/run_inplace.py.in diff --git a/python/tools/run_inplace_lite.py.in b/prelude/python/tools/run_inplace_lite.py.in similarity index 100% rename from python/tools/run_inplace_lite.py.in rename to prelude/python/tools/run_inplace_lite.py.in diff --git a/python/tools/sourcedb_merger/BUCK.v2 b/prelude/python/tools/sourcedb_merger/BUCK.v2 similarity index 100% rename from python/tools/sourcedb_merger/BUCK.v2 rename to prelude/python/tools/sourcedb_merger/BUCK.v2 diff --git a/python/tools/sourcedb_merger/inputs.py b/prelude/python/tools/sourcedb_merger/inputs.py similarity index 100% rename from python/tools/sourcedb_merger/inputs.py rename to prelude/python/tools/sourcedb_merger/inputs.py diff --git a/python/tools/sourcedb_merger/legacy_merge.py b/prelude/python/tools/sourcedb_merger/legacy_merge.py similarity index 100% rename from python/tools/sourcedb_merger/legacy_merge.py rename to prelude/python/tools/sourcedb_merger/legacy_merge.py diff --git a/python/tools/sourcedb_merger/legacy_outputs.py b/prelude/python/tools/sourcedb_merger/legacy_outputs.py similarity index 100% rename from python/tools/sourcedb_merger/legacy_outputs.py rename to prelude/python/tools/sourcedb_merger/legacy_outputs.py diff --git a/python/tools/sourcedb_merger/merge.py b/prelude/python/tools/sourcedb_merger/merge.py similarity index 100% rename from python/tools/sourcedb_merger/merge.py rename to prelude/python/tools/sourcedb_merger/merge.py diff --git a/python/tools/sourcedb_merger/outputs.py b/prelude/python/tools/sourcedb_merger/outputs.py similarity index 100% rename from python/tools/sourcedb_merger/outputs.py rename to prelude/python/tools/sourcedb_merger/outputs.py diff --git a/python/tools/sourcedb_merger/tests/__init__.py b/prelude/python/tools/sourcedb_merger/tests/__init__.py similarity index 100% rename from 
python/tools/sourcedb_merger/tests/__init__.py rename to prelude/python/tools/sourcedb_merger/tests/__init__.py diff --git a/python/tools/sourcedb_merger/tests/inputs_test.py b/prelude/python/tools/sourcedb_merger/tests/inputs_test.py similarity index 100% rename from python/tools/sourcedb_merger/tests/inputs_test.py rename to prelude/python/tools/sourcedb_merger/tests/inputs_test.py diff --git a/python/tools/sourcedb_merger/tests/legacy_output_test.py b/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py similarity index 100% rename from python/tools/sourcedb_merger/tests/legacy_output_test.py rename to prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py diff --git a/python/tools/sourcedb_merger/tests/main.sh b/prelude/python/tools/sourcedb_merger/tests/main.sh similarity index 100% rename from python/tools/sourcedb_merger/tests/main.sh rename to prelude/python/tools/sourcedb_merger/tests/main.sh diff --git a/python/tools/sourcedb_merger/tests/outputs_test.py b/prelude/python/tools/sourcedb_merger/tests/outputs_test.py similarity index 100% rename from python/tools/sourcedb_merger/tests/outputs_test.py rename to prelude/python/tools/sourcedb_merger/tests/outputs_test.py diff --git a/python/tools/static_extension_finder.py b/prelude/python/tools/static_extension_finder.py similarity index 100% rename from python/tools/static_extension_finder.py rename to prelude/python/tools/static_extension_finder.py diff --git a/python/tools/static_extension_utils.cpp b/prelude/python/tools/static_extension_utils.cpp similarity index 100% rename from python/tools/static_extension_utils.cpp rename to prelude/python/tools/static_extension_utils.cpp diff --git a/python/tools/traverse_dep_manifest.py b/prelude/python/tools/traverse_dep_manifest.py similarity index 100% rename from python/tools/traverse_dep_manifest.py rename to prelude/python/tools/traverse_dep_manifest.py diff --git a/python_bootstrap/python_bootstrap.bzl 
b/prelude/python_bootstrap/python_bootstrap.bzl similarity index 100% rename from python_bootstrap/python_bootstrap.bzl rename to prelude/python_bootstrap/python_bootstrap.bzl diff --git a/python_bootstrap/tools/BUCK.v2 b/prelude/python_bootstrap/tools/BUCK.v2 similarity index 100% rename from python_bootstrap/tools/BUCK.v2 rename to prelude/python_bootstrap/tools/BUCK.v2 diff --git a/python_bootstrap/tools/win_python_wrapper.bat b/prelude/python_bootstrap/tools/win_python_wrapper.bat similarity index 100% rename from python_bootstrap/tools/win_python_wrapper.bat rename to prelude/python_bootstrap/tools/win_python_wrapper.bat diff --git a/remote_file.bzl b/prelude/remote_file.bzl similarity index 100% rename from remote_file.bzl rename to prelude/remote_file.bzl diff --git a/resources.bzl b/prelude/resources.bzl similarity index 100% rename from resources.bzl rename to prelude/resources.bzl diff --git a/rules.bzl b/prelude/rules.bzl similarity index 100% rename from rules.bzl rename to prelude/rules.bzl diff --git a/rules_impl.bzl b/prelude/rules_impl.bzl similarity index 100% rename from rules_impl.bzl rename to prelude/rules_impl.bzl diff --git a/rust/build.bzl b/prelude/rust/build.bzl similarity index 100% rename from rust/build.bzl rename to prelude/rust/build.bzl diff --git a/rust/build_params.bzl b/prelude/rust/build_params.bzl similarity index 100% rename from rust/build_params.bzl rename to prelude/rust/build_params.bzl diff --git a/rust/cargo_buildscript.bzl b/prelude/rust/cargo_buildscript.bzl similarity index 100% rename from rust/cargo_buildscript.bzl rename to prelude/rust/cargo_buildscript.bzl diff --git a/rust/cargo_package.bzl b/prelude/rust/cargo_package.bzl similarity index 100% rename from rust/cargo_package.bzl rename to prelude/rust/cargo_package.bzl diff --git a/rust/context.bzl b/prelude/rust/context.bzl similarity index 100% rename from rust/context.bzl rename to prelude/rust/context.bzl diff --git a/rust/extern.bzl b/prelude/rust/extern.bzl 
similarity index 100% rename from rust/extern.bzl rename to prelude/rust/extern.bzl diff --git a/rust/failure_filter.bzl b/prelude/rust/failure_filter.bzl similarity index 100% rename from rust/failure_filter.bzl rename to prelude/rust/failure_filter.bzl diff --git a/rust/link_info.bzl b/prelude/rust/link_info.bzl similarity index 100% rename from rust/link_info.bzl rename to prelude/rust/link_info.bzl diff --git a/rust/proc_macro_alias.bzl b/prelude/rust/proc_macro_alias.bzl similarity index 100% rename from rust/proc_macro_alias.bzl rename to prelude/rust/proc_macro_alias.bzl diff --git a/rust/resources.bzl b/prelude/rust/resources.bzl similarity index 100% rename from rust/resources.bzl rename to prelude/rust/resources.bzl diff --git a/rust/rust-analyzer/check.bxl b/prelude/rust/rust-analyzer/check.bxl similarity index 100% rename from rust/rust-analyzer/check.bxl rename to prelude/rust/rust-analyzer/check.bxl diff --git a/rust/rust-analyzer/resolve_deps.bxl b/prelude/rust/rust-analyzer/resolve_deps.bxl similarity index 100% rename from rust/rust-analyzer/resolve_deps.bxl rename to prelude/rust/rust-analyzer/resolve_deps.bxl diff --git a/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl similarity index 100% rename from rust/rust_binary.bzl rename to prelude/rust/rust_binary.bzl diff --git a/rust/rust_common.bzl b/prelude/rust/rust_common.bzl similarity index 100% rename from rust/rust_common.bzl rename to prelude/rust/rust_common.bzl diff --git a/rust/rust_library.bzl b/prelude/rust/rust_library.bzl similarity index 100% rename from rust/rust_library.bzl rename to prelude/rust/rust_library.bzl diff --git a/rust/rust_toolchain.bzl b/prelude/rust/rust_toolchain.bzl similarity index 100% rename from rust/rust_toolchain.bzl rename to prelude/rust/rust_toolchain.bzl diff --git a/rust/targets.bzl b/prelude/rust/targets.bzl similarity index 100% rename from rust/targets.bzl rename to prelude/rust/targets.bzl diff --git a/rust/tools/BUCK.v2 
b/prelude/rust/tools/BUCK.v2 similarity index 100% rename from rust/tools/BUCK.v2 rename to prelude/rust/tools/BUCK.v2 diff --git a/rust/tools/attrs.bzl b/prelude/rust/tools/attrs.bzl similarity index 100% rename from rust/tools/attrs.bzl rename to prelude/rust/tools/attrs.bzl diff --git a/rust/tools/buildscript_run.py b/prelude/rust/tools/buildscript_run.py similarity index 100% rename from rust/tools/buildscript_run.py rename to prelude/rust/tools/buildscript_run.py diff --git a/rust/tools/concat.py b/prelude/rust/tools/concat.py similarity index 100% rename from rust/tools/concat.py rename to prelude/rust/tools/concat.py diff --git a/rust/tools/failure_filter_action.py b/prelude/rust/tools/failure_filter_action.py similarity index 100% rename from rust/tools/failure_filter_action.py rename to prelude/rust/tools/failure_filter_action.py diff --git a/rust/tools/rustc_action.py b/prelude/rust/tools/rustc_action.py similarity index 100% rename from rust/tools/rustc_action.py rename to prelude/rust/tools/rustc_action.py diff --git a/rust/tools/rustdoc_test_with_resources.py b/prelude/rust/tools/rustdoc_test_with_resources.py similarity index 100% rename from rust/tools/rustdoc_test_with_resources.py rename to prelude/rust/tools/rustdoc_test_with_resources.py diff --git a/rust/tools/tool_rules.bzl b/prelude/rust/tools/tool_rules.bzl similarity index 100% rename from rust/tools/tool_rules.bzl rename to prelude/rust/tools/tool_rules.bzl diff --git a/rust/tools/transitive_dependency_symlinks.py b/prelude/rust/tools/transitive_dependency_symlinks.py similarity index 100% rename from rust/tools/transitive_dependency_symlinks.py rename to prelude/rust/tools/transitive_dependency_symlinks.py diff --git a/rust/with_workspace.bzl b/prelude/rust/with_workspace.bzl similarity index 100% rename from rust/with_workspace.bzl rename to prelude/rust/with_workspace.bzl diff --git a/sh_binary.bzl b/prelude/sh_binary.bzl similarity index 100% rename from sh_binary.bzl rename to 
prelude/sh_binary.bzl diff --git a/sh_test.bzl b/prelude/sh_test.bzl similarity index 100% rename from sh_test.bzl rename to prelude/sh_test.bzl diff --git a/test/inject_test_run_info.bzl b/prelude/test/inject_test_run_info.bzl similarity index 100% rename from test/inject_test_run_info.bzl rename to prelude/test/inject_test_run_info.bzl diff --git a/test/tools/BUCK.v2 b/prelude/test/tools/BUCK.v2 similarity index 100% rename from test/tools/BUCK.v2 rename to prelude/test/tools/BUCK.v2 diff --git a/test/tools/inject_test_env.py b/prelude/test/tools/inject_test_env.py similarity index 100% rename from test/tools/inject_test_env.py rename to prelude/test/tools/inject_test_env.py diff --git a/test_suite.bzl b/prelude/test_suite.bzl similarity index 100% rename from test_suite.bzl rename to prelude/test_suite.bzl diff --git a/tests/re_utils.bzl b/prelude/tests/re_utils.bzl similarity index 100% rename from tests/re_utils.bzl rename to prelude/tests/re_utils.bzl diff --git a/tests/remote_test_execution_toolchain.bzl b/prelude/tests/remote_test_execution_toolchain.bzl similarity index 100% rename from tests/remote_test_execution_toolchain.bzl rename to prelude/tests/remote_test_execution_toolchain.bzl diff --git a/tests/tpx_re_legacy.bzl b/prelude/tests/tpx_re_legacy.bzl similarity index 100% rename from tests/tpx_re_legacy.bzl rename to prelude/tests/tpx_re_legacy.bzl diff --git a/third-party/hmaptool/BUCK.v2 b/prelude/third-party/hmaptool/BUCK.v2 similarity index 100% rename from third-party/hmaptool/BUCK.v2 rename to prelude/third-party/hmaptool/BUCK.v2 diff --git a/third-party/hmaptool/METADATA.bzl b/prelude/third-party/hmaptool/METADATA.bzl similarity index 100% rename from third-party/hmaptool/METADATA.bzl rename to prelude/third-party/hmaptool/METADATA.bzl diff --git a/third-party/hmaptool/README.md b/prelude/third-party/hmaptool/README.md similarity index 100% rename from third-party/hmaptool/README.md rename to prelude/third-party/hmaptool/README.md diff --git 
a/third-party/hmaptool/hmaptool b/prelude/third-party/hmaptool/hmaptool similarity index 100% rename from third-party/hmaptool/hmaptool rename to prelude/third-party/hmaptool/hmaptool diff --git a/toolchains/apple/xcode_version_checker/.gitignore b/prelude/toolchains/apple/xcode_version_checker/.gitignore similarity index 100% rename from toolchains/apple/xcode_version_checker/.gitignore rename to prelude/toolchains/apple/xcode_version_checker/.gitignore diff --git a/toolchains/apple/xcode_version_checker/BUCK.v2 b/prelude/toolchains/apple/xcode_version_checker/BUCK.v2 similarity index 100% rename from toolchains/apple/xcode_version_checker/BUCK.v2 rename to prelude/toolchains/apple/xcode_version_checker/BUCK.v2 diff --git a/toolchains/apple/xcode_version_checker/Makefile b/prelude/toolchains/apple/xcode_version_checker/Makefile similarity index 100% rename from toolchains/apple/xcode_version_checker/Makefile rename to prelude/toolchains/apple/xcode_version_checker/Makefile diff --git a/toolchains/apple/xcode_version_checker/README b/prelude/toolchains/apple/xcode_version_checker/README similarity index 100% rename from toolchains/apple/xcode_version_checker/README rename to prelude/toolchains/apple/xcode_version_checker/README diff --git a/toolchains/apple/xcode_version_checker/defs.bzl b/prelude/toolchains/apple/xcode_version_checker/defs.bzl similarity index 100% rename from toolchains/apple/xcode_version_checker/defs.bzl rename to prelude/toolchains/apple/xcode_version_checker/defs.bzl diff --git a/toolchains/apple/xcode_version_checker/src/xcode_exec_tester.m b/prelude/toolchains/apple/xcode_version_checker/src/xcode_exec_tester.m similarity index 100% rename from toolchains/apple/xcode_version_checker/src/xcode_exec_tester.m rename to prelude/toolchains/apple/xcode_version_checker/src/xcode_exec_tester.m diff --git a/toolchains/apple/xcode_version_checker/src/xcode_version_checker.m b/prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checker.m 
similarity index 100% rename from toolchains/apple/xcode_version_checker/src/xcode_version_checker.m rename to prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checker.m diff --git a/toolchains/apple/xcode_version_checker/src/xcode_version_checks.h b/prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checks.h similarity index 100% rename from toolchains/apple/xcode_version_checker/src/xcode_version_checks.h rename to prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checks.h diff --git a/toolchains/apple/xcode_version_checker/src/xcode_version_checks.m b/prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checks.m similarity index 100% rename from toolchains/apple/xcode_version_checker/src/xcode_version_checks.m rename to prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checks.m diff --git a/toolchains/apple/xcode_version_checker/src/xcode_version_tester.m b/prelude/toolchains/apple/xcode_version_checker/src/xcode_version_tester.m similarity index 100% rename from toolchains/apple/xcode_version_checker/src/xcode_version_tester.m rename to prelude/toolchains/apple/xcode_version_checker/src/xcode_version_tester.m diff --git a/toolchains/apple/xcode_version_checker/test/Xcode_14.2.0_14C18_fb_version.plist b/prelude/toolchains/apple/xcode_version_checker/test/Xcode_14.2.0_14C18_fb_version.plist similarity index 100% rename from toolchains/apple/xcode_version_checker/test/Xcode_14.2.0_14C18_fb_version.plist rename to prelude/toolchains/apple/xcode_version_checker/test/Xcode_14.2.0_14C18_fb_version.plist diff --git a/toolchains/apple/xcode_version_checker/xcode_version_checker b/prelude/toolchains/apple/xcode_version_checker/xcode_version_checker similarity index 100% rename from toolchains/apple/xcode_version_checker/xcode_version_checker rename to prelude/toolchains/apple/xcode_version_checker/xcode_version_checker diff --git a/toolchains/conan/BUCK.v2 b/prelude/toolchains/conan/BUCK.v2 
similarity index 100% rename from toolchains/conan/BUCK.v2 rename to prelude/toolchains/conan/BUCK.v2 diff --git a/toolchains/conan/buckler/conanfile.py b/prelude/toolchains/conan/buckler/conanfile.py similarity index 100% rename from toolchains/conan/buckler/conanfile.py rename to prelude/toolchains/conan/buckler/conanfile.py diff --git a/toolchains/conan/conan_common.py b/prelude/toolchains/conan/conan_common.py similarity index 100% rename from toolchains/conan/conan_common.py rename to prelude/toolchains/conan/conan_common.py diff --git a/toolchains/conan/conan_generate.py b/prelude/toolchains/conan/conan_generate.py similarity index 100% rename from toolchains/conan/conan_generate.py rename to prelude/toolchains/conan/conan_generate.py diff --git a/toolchains/conan/conan_init.py b/prelude/toolchains/conan/conan_init.py similarity index 100% rename from toolchains/conan/conan_init.py rename to prelude/toolchains/conan/conan_init.py diff --git a/toolchains/conan/conan_lock.py b/prelude/toolchains/conan/conan_lock.py similarity index 100% rename from toolchains/conan/conan_lock.py rename to prelude/toolchains/conan/conan_lock.py diff --git a/toolchains/conan/conan_package.py b/prelude/toolchains/conan/conan_package.py similarity index 100% rename from toolchains/conan/conan_package.py rename to prelude/toolchains/conan/conan_package.py diff --git a/toolchains/conan/conan_package_extract.py b/prelude/toolchains/conan/conan_package_extract.py similarity index 100% rename from toolchains/conan/conan_package_extract.py rename to prelude/toolchains/conan/conan_package_extract.py diff --git a/toolchains/conan/conan_update.py b/prelude/toolchains/conan/conan_update.py similarity index 100% rename from toolchains/conan/conan_update.py rename to prelude/toolchains/conan/conan_update.py diff --git a/toolchains/conan/defs.bzl b/prelude/toolchains/conan/defs.bzl similarity index 100% rename from toolchains/conan/defs.bzl rename to prelude/toolchains/conan/defs.bzl diff --git 
a/toolchains/conan/lock_generate.py b/prelude/toolchains/conan/lock_generate.py similarity index 100% rename from toolchains/conan/lock_generate.py rename to prelude/toolchains/conan/lock_generate.py diff --git a/toolchains/csharp.bzl b/prelude/toolchains/csharp.bzl similarity index 100% rename from toolchains/csharp.bzl rename to prelude/toolchains/csharp.bzl diff --git a/toolchains/cxx.bzl b/prelude/toolchains/cxx.bzl similarity index 100% rename from toolchains/cxx.bzl rename to prelude/toolchains/cxx.bzl diff --git a/toolchains/cxx/zig/BUCK.v2 b/prelude/toolchains/cxx/zig/BUCK.v2 similarity index 100% rename from toolchains/cxx/zig/BUCK.v2 rename to prelude/toolchains/cxx/zig/BUCK.v2 diff --git a/toolchains/cxx/zig/defs.bzl b/prelude/toolchains/cxx/zig/defs.bzl similarity index 100% rename from toolchains/cxx/zig/defs.bzl rename to prelude/toolchains/cxx/zig/defs.bzl diff --git a/toolchains/cxx/zig/releases.bzl b/prelude/toolchains/cxx/zig/releases.bzl similarity index 100% rename from toolchains/cxx/zig/releases.bzl rename to prelude/toolchains/cxx/zig/releases.bzl diff --git a/toolchains/demo.bzl b/prelude/toolchains/demo.bzl similarity index 100% rename from toolchains/demo.bzl rename to prelude/toolchains/demo.bzl diff --git a/toolchains/execution_host.bzl b/prelude/toolchains/execution_host.bzl similarity index 100% rename from toolchains/execution_host.bzl rename to prelude/toolchains/execution_host.bzl diff --git a/toolchains/genrule.bzl b/prelude/toolchains/genrule.bzl similarity index 100% rename from toolchains/genrule.bzl rename to prelude/toolchains/genrule.bzl diff --git a/toolchains/go.bzl b/prelude/toolchains/go.bzl similarity index 100% rename from toolchains/go.bzl rename to prelude/toolchains/go.bzl diff --git a/toolchains/haskell.bzl b/prelude/toolchains/haskell.bzl similarity index 100% rename from toolchains/haskell.bzl rename to prelude/toolchains/haskell.bzl diff --git a/toolchains/msvc/BUCK.v2 b/prelude/toolchains/msvc/BUCK.v2 similarity 
index 100% rename from toolchains/msvc/BUCK.v2 rename to prelude/toolchains/msvc/BUCK.v2 diff --git a/toolchains/msvc/run_msvc_tool.py b/prelude/toolchains/msvc/run_msvc_tool.py similarity index 100% rename from toolchains/msvc/run_msvc_tool.py rename to prelude/toolchains/msvc/run_msvc_tool.py diff --git a/toolchains/msvc/tools.bzl b/prelude/toolchains/msvc/tools.bzl similarity index 100% rename from toolchains/msvc/tools.bzl rename to prelude/toolchains/msvc/tools.bzl diff --git a/toolchains/msvc/vswhere.py b/prelude/toolchains/msvc/vswhere.py similarity index 100% rename from toolchains/msvc/vswhere.py rename to prelude/toolchains/msvc/vswhere.py diff --git a/toolchains/ocaml.bzl b/prelude/toolchains/ocaml.bzl similarity index 100% rename from toolchains/ocaml.bzl rename to prelude/toolchains/ocaml.bzl diff --git a/toolchains/python.bzl b/prelude/toolchains/python.bzl similarity index 100% rename from toolchains/python.bzl rename to prelude/toolchains/python.bzl diff --git a/toolchains/remote_test_execution.bzl b/prelude/toolchains/remote_test_execution.bzl similarity index 100% rename from toolchains/remote_test_execution.bzl rename to prelude/toolchains/remote_test_execution.bzl diff --git a/toolchains/rust.bzl b/prelude/toolchains/rust.bzl similarity index 100% rename from toolchains/rust.bzl rename to prelude/toolchains/rust.bzl diff --git a/transitions/constraint_overrides.bzl b/prelude/transitions/constraint_overrides.bzl similarity index 100% rename from transitions/constraint_overrides.bzl rename to prelude/transitions/constraint_overrides.bzl diff --git a/user/all.bzl b/prelude/user/all.bzl similarity index 100% rename from user/all.bzl rename to prelude/user/all.bzl diff --git a/user/cxx_headers_bundle.bzl b/prelude/user/cxx_headers_bundle.bzl similarity index 100% rename from user/cxx_headers_bundle.bzl rename to prelude/user/cxx_headers_bundle.bzl diff --git a/user/extract_archive.bzl b/prelude/user/extract_archive.bzl similarity index 100% rename 
from user/extract_archive.bzl rename to prelude/user/extract_archive.bzl diff --git a/user/rule_spec.bzl b/prelude/user/rule_spec.bzl similarity index 100% rename from user/rule_spec.bzl rename to prelude/user/rule_spec.bzl diff --git a/user/write_file.bzl b/prelude/user/write_file.bzl similarity index 100% rename from user/write_file.bzl rename to prelude/user/write_file.bzl diff --git a/utils/arglike.bzl b/prelude/utils/arglike.bzl similarity index 100% rename from utils/arglike.bzl rename to prelude/utils/arglike.bzl diff --git a/utils/buckconfig.bzl b/prelude/utils/buckconfig.bzl similarity index 100% rename from utils/buckconfig.bzl rename to prelude/utils/buckconfig.bzl diff --git a/utils/build_target_pattern.bzl b/prelude/utils/build_target_pattern.bzl similarity index 100% rename from utils/build_target_pattern.bzl rename to prelude/utils/build_target_pattern.bzl diff --git a/utils/cmd_script.bzl b/prelude/utils/cmd_script.bzl similarity index 100% rename from utils/cmd_script.bzl rename to prelude/utils/cmd_script.bzl diff --git a/utils/dicts.bzl b/prelude/utils/dicts.bzl similarity index 100% rename from utils/dicts.bzl rename to prelude/utils/dicts.bzl diff --git a/utils/expect.bzl b/prelude/utils/expect.bzl similarity index 100% rename from utils/expect.bzl rename to prelude/utils/expect.bzl diff --git a/utils/graph_utils.bzl b/prelude/utils/graph_utils.bzl similarity index 100% rename from utils/graph_utils.bzl rename to prelude/utils/graph_utils.bzl diff --git a/utils/host.bzl b/prelude/utils/host.bzl similarity index 100% rename from utils/host.bzl rename to prelude/utils/host.bzl diff --git a/utils/lazy.bzl b/prelude/utils/lazy.bzl similarity index 100% rename from utils/lazy.bzl rename to prelude/utils/lazy.bzl diff --git a/utils/pick.bzl b/prelude/utils/pick.bzl similarity index 100% rename from utils/pick.bzl rename to prelude/utils/pick.bzl diff --git a/utils/platform_flavors_util.bzl b/prelude/utils/platform_flavors_util.bzl similarity index 
100% rename from utils/platform_flavors_util.bzl rename to prelude/utils/platform_flavors_util.bzl diff --git a/utils/selects.bzl b/prelude/utils/selects.bzl similarity index 100% rename from utils/selects.bzl rename to prelude/utils/selects.bzl diff --git a/utils/set.bzl b/prelude/utils/set.bzl similarity index 100% rename from utils/set.bzl rename to prelude/utils/set.bzl diff --git a/utils/strings.bzl b/prelude/utils/strings.bzl similarity index 100% rename from utils/strings.bzl rename to prelude/utils/strings.bzl diff --git a/utils/type_defs.bzl b/prelude/utils/type_defs.bzl similarity index 100% rename from utils/type_defs.bzl rename to prelude/utils/type_defs.bzl diff --git a/utils/utils.bzl b/prelude/utils/utils.bzl similarity index 100% rename from utils/utils.bzl rename to prelude/utils/utils.bzl diff --git a/windows/tools/BUCK.v2 b/prelude/windows/tools/BUCK.v2 similarity index 100% rename from windows/tools/BUCK.v2 rename to prelude/windows/tools/BUCK.v2 diff --git a/windows/tools/msvc_hermetic_exec.bat b/prelude/windows/tools/msvc_hermetic_exec.bat similarity index 100% rename from windows/tools/msvc_hermetic_exec.bat rename to prelude/windows/tools/msvc_hermetic_exec.bat diff --git a/worker_tool.bzl b/prelude/worker_tool.bzl similarity index 100% rename from worker_tool.bzl rename to prelude/worker_tool.bzl diff --git a/zip_file/tools/BUCK.v2 b/prelude/zip_file/tools/BUCK.v2 similarity index 100% rename from zip_file/tools/BUCK.v2 rename to prelude/zip_file/tools/BUCK.v2 diff --git a/zip_file/tools/unzip.py b/prelude/zip_file/tools/unzip.py similarity index 100% rename from zip_file/tools/unzip.py rename to prelude/zip_file/tools/unzip.py diff --git a/zip_file/zip_file.bzl b/prelude/zip_file/zip_file.bzl similarity index 100% rename from zip_file/zip_file.bzl rename to prelude/zip_file/zip_file.bzl diff --git a/zip_file/zip_file_toolchain.bzl b/prelude/zip_file/zip_file_toolchain.bzl similarity index 100% rename from zip_file/zip_file_toolchain.bzl 
rename to prelude/zip_file/zip_file_toolchain.bzl From eff962cdf568167f8afc6da3369b18199f1a58a9 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 15 Jan 2024 17:07:55 +0100 Subject: [PATCH 0002/1133] Haskell toolchain: automatic host arch name --- prelude/toolchains/haskell.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/toolchains/haskell.bzl b/prelude/toolchains/haskell.bzl index fd1384616..c3e99c382 100644 --- a/prelude/toolchains/haskell.bzl +++ b/prelude/toolchains/haskell.bzl @@ -19,7 +19,7 @@ def _system_haskell_toolchain(_ctx: AnalysisContext) -> list[Provider]: linker_flags = [], ), HaskellPlatformInfo( - name = "x86_64", + name = host_info().arch, ), ] From c5ec1c66bf017b9d43794f4c7bf4800d8f6d9d0d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 15 Jan 2024 17:32:15 +0100 Subject: [PATCH 0003/1133] Configure platform specific linker flags --- prelude/haskell/haskell.bzl | 30 ++++++++++++++++++++++++------ 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index b35edd60f..6ef490ba2 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -18,6 +18,12 @@ load( "CxxToolchainInfo", "PicBehavior", ) +load( + "@prelude//cxx:linker.bzl", + "LINKERS", + "get_shared_library_name_linker_flags", + "get_shared_library_flags", +) load( "@prelude//cxx:link_groups.bzl", "LinkGroupContext", @@ -479,6 +485,17 @@ HaskellLibBuildOutput = record( libs = list[Artifact], ) +def _get_haskell_shared_library_name_linker_flags(linker_type: str, soname: str) -> list[str]: + if linker_type == "gnu": + return ["-Wl,-soname,{}".format(soname)] + elif linker_type == "darwin": + # Passing `-install_name @rpath/...` or + # `-Xlinker -install_name -Xlinker @rpath/...` instead causes + # ghc-9.6.3: panic! 
(the 'impossible' happened) + return ["-Wl,-install_name,@rpath/{}".format(soname)] + else: + fail("Unknown linker type '{}'.".format(linker_type)) + def _build_haskell_lib( ctx, libname: str, @@ -511,8 +528,9 @@ def _build_haskell_lib( if link_style == LinkStyle("static_pic"): libstem += "_pic" + dynamic_lib_suffix = "." + LINKERS[linker_info.type].default_shared_library_extension static_lib_suffix = "_p.a" if enable_profiling else ".a" - libfile = "lib" + libstem + (".so" if link_style == LinkStyle("shared") else static_lib_suffix) + libfile = "lib" + libstem + (dynamic_lib_suffix if link_style == LinkStyle("shared") else static_lib_suffix) lib_short_path = paths.join("lib-{}".format(artifact_suffix), libfile) @@ -528,12 +546,12 @@ def _build_haskell_lib( link.add(ctx.attrs.linker_flags) link.add("-o", lib.as_output()) link.add( - "-shared", + get_shared_library_flags(linker_info.type), "-dynamic", - "-optl", - "-Wl,-soname", - "-optl", - "-Wl," + libfile, + cmd_args( + _get_haskell_shared_library_name_linker_flags(linker_info.type, libfile), + prepend = "-optl", + ), ) link.add(objfiles) From ff73bc4662a2d4955a54a07456da3b87330e0538 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Jan 2024 11:25:00 +0100 Subject: [PATCH 0004/1133] fix unused load --- prelude/haskell/haskell.bzl | 1 - 1 file changed, 1 deletion(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 6ef490ba2..975471079 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -21,7 +21,6 @@ load( load( "@prelude//cxx:linker.bzl", "LINKERS", - "get_shared_library_name_linker_flags", "get_shared_library_flags", ) load( From cdec37303b3c1e0c637b9d724d2258c2b05f2ab8 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 16 Jan 2024 16:48:50 +0100 Subject: [PATCH 0005/1133] [buck2] Add `haskell_toolchain_library` rule to prelude This rule is used to declare available toolchain libraries of the current haskell toolchain. 
Instances of these should be used in the `deps` attribute of haskell_library|haskell_binary rules in order pass this information to ghc when compiling and linking. --- prelude/decls/haskell_rules.bzl | 11 +++++++++++ prelude/haskell/compile.bzl | 4 ++++ prelude/haskell/haskell.bzl | 9 +++++++++ prelude/haskell/toolchain.bzl | 6 ++++++ prelude/rules_impl.bzl | 3 ++- 5 files changed, 32 insertions(+), 1 deletion(-) diff --git a/prelude/decls/haskell_rules.bzl b/prelude/decls/haskell_rules.bzl index 64de29507..a9c327004 100644 --- a/prelude/decls/haskell_rules.bzl +++ b/prelude/decls/haskell_rules.bzl @@ -183,6 +183,16 @@ haskell_library = prelude_rule( ), ) +haskell_toolchain_library = prelude_rule( + name = "haskell_toolchain_library", + docs = """ + Declare a library available as part of the GHC toolchain. + """, + attrs = { + }, +) + + haskell_prebuilt_library = prelude_rule( name = "haskell_prebuilt_library", docs = """ @@ -255,4 +265,5 @@ haskell_rules = struct( haskell_ide = haskell_ide, haskell_library = haskell_library, haskell_prebuilt_library = haskell_prebuilt_library, + haskell_toolchain_library = haskell_toolchain_library, ) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 8fc9f6aeb..27244c340 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -18,6 +18,7 @@ load( load( "@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo", + "HaskellToolchainLibrary", ) load( "@prelude//haskell:util.bzl", @@ -189,6 +190,8 @@ def compile_args( suffix: str = "") -> CompileArgsInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] + compile_cmd = cmd_args() compile_cmd.add(haskell_toolchain.compiler_flags) @@ -198,6 +201,7 @@ def compile_args( compile_args = cmd_args() compile_args.add("-no-link", "-i") + compile_args.add(cmd_args(toolchain_libs, prepend="-package")) if 
enable_profiling: compile_args.add("-prof") diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 975471079..bb303100a 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -63,6 +63,7 @@ load( load( "@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo", + "HaskellToolchainLibrary", ) load( "@prelude//haskell:util.bzl", @@ -165,6 +166,11 @@ def _attr_preferred_linkage(ctx: AnalysisContext) -> Linkage: # -- +def haskell_toolchain_library_impl(ctx: AnalysisContext): + return [DefaultInfo(), HaskellToolchainLibrary(name = ctx.attrs.name)] + +# -- + def _get_haskell_prebuilt_libs( ctx, link_style: LinkStyle, @@ -538,11 +544,14 @@ def _build_haskell_lib( objfiles = _srcs_to_objfiles(ctx, compiled.objects, osuf) + toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] + if link_style == LinkStyle("shared"): lib = ctx.actions.declare_output(lib_short_path) link = cmd_args(haskell_toolchain.linker) link.add(haskell_toolchain.linker_flags) link.add(ctx.attrs.linker_flags) + link.add(cmd_args(toolchain_libs, prepend="-package")) link.add("-o", lib.as_output()) link.add( get_shared_library_flags(linker_info.type), diff --git a/prelude/haskell/toolchain.bzl b/prelude/haskell/toolchain.bzl index f6c072fbf..fbd140ff7 100644 --- a/prelude/haskell/toolchain.bzl +++ b/prelude/haskell/toolchain.bzl @@ -39,3 +39,9 @@ HaskellToolchainInfo = provider( "script_template_processor": provider_field(typing.Any, default = None), }, ) + +HaskellToolchainLibrary = provider( + fields = { + "name": provider_field(str), + }, +) diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 74fa84c4a..9a9eb1239 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -26,7 +26,7 @@ load("@prelude//go:go_exported_library.bzl", "go_exported_library_impl") load("@prelude//go:go_library.bzl", "go_library_impl") load("@prelude//go:go_test.bzl", "go_test_impl") 
load("@prelude//haskell:compile.bzl", "HaskellLibraryProvider") -load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl") +load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl", "haskell_toolchain_library_impl") load("@prelude//haskell:haskell_ghci.bzl", "haskell_ghci_impl") load("@prelude//haskell:haskell_haddock.bzl", "haskell_haddock_impl") load("@prelude//haskell:haskell_ide.bzl", "haskell_ide_impl") @@ -177,6 +177,7 @@ extra_implemented_rules = struct( haskell_haddock = haskell_haddock_impl, haskell_ide = haskell_ide_impl, haskell_prebuilt_library = haskell_prebuilt_library_impl, + haskell_toolchain_library = haskell_toolchain_library_impl, #lua cxx_lua_extension = cxx_lua_extension_impl, From 36bb06cffcf74667446a26c44b790a4c728a0ac0 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 16 Jan 2024 17:51:17 +0100 Subject: [PATCH 0006/1133] [buck2] Require explicit haskell dependencies If dependencies are not properly declared than this might cause problems in a later stage, e.g. when generated shared libraries do not properly declare their needed shared libraries in the .so file. 
(readelf -d libHSabc.so) --- prelude/haskell/compile.bzl | 1 + prelude/haskell/haskell.bzl | 1 + 2 files changed, 2 insertions(+) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 27244c340..a0495a851 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -201,6 +201,7 @@ def compile_args( compile_args = cmd_args() compile_args.add("-no-link", "-i") + compile_args.add("-hide-all-packages") compile_args.add(cmd_args(toolchain_libs, prepend="-package")) if enable_profiling: diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index bb303100a..40bfddfff 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -551,6 +551,7 @@ def _build_haskell_lib( link = cmd_args(haskell_toolchain.linker) link.add(haskell_toolchain.linker_flags) link.add(ctx.attrs.linker_flags) + link.add("-hide-all-packages") link.add(cmd_args(toolchain_libs, prepend="-package")) link.add("-o", lib.as_output()) link.add( From 4d97f9b9513f7eb51645f844934dc5ba16ac9121 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 18 Jan 2024 17:30:17 +0100 Subject: [PATCH 0007/1133] [buck2] Use dynamic outputs to track dependencies between haskell modules --- prelude/haskell/compile.bzl | 287 ++++++++++++++++++++++++++++++++++-- prelude/haskell/util.bzl | 3 +- 2 files changed, 278 insertions(+), 12 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index a0495a851..5c4f8168b 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -26,12 +26,15 @@ load( "get_artifact_suffix", "is_haskell_src", "output_extensions", + "src_to_module_name", "srcs_to_pairs", ) load( "@prelude//linking:link_info.bzl", "LinkStyle", ) +load("@prelude//:paths.bzl", "paths") +load("@prelude//utils:graph_utils.bzl", "post_order_traversal", "breadth_first_traversal") # The type of the return value of the `_compile()` function. 
CompileResultInfo = record( @@ -91,6 +94,92 @@ PackagesInfo = record( transitive_deps = field(list[HaskellLibraryInfo]), ) +_Module = record( + source = field(Artifact), + interface = field(Artifact), + object = field(Artifact), + stub_dir = field(Artifact), +) + + +def _strip_prefix(p, path_prefix): + if p.startswith(path_prefix): + return p[len(path_prefix):] + else: + return p + +def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_style: LinkStyle, enable_profiling: bool, suffix: str) -> dict[str, _Module]: + modules = {} + + osuf, hisuf = output_extensions(link_style, enable_profiling) + + for src in sources: + if not is_haskell_src(src.short_path): + continue + + module_name = src_to_module_name(src.short_path) + interface_path = paths.replace_extension(src.short_path, "." + hisuf) + interface = ctx.actions.declare_output(interface_path) + object_path = paths.replace_extension(src.short_path, "." + osuf) + object = ctx.actions.declare_output(object_path) + stub_dir = ctx.actions.declare_output("stub-" + suffix + "-" + module_name, dir=True) + modules[module_name] = _Module(source = src, interface = interface, object = object, stub_dir = stub_dir) + + return modules + +def _ghc_depends(ctx: AnalysisContext, *, filename: str, sources: list[Artifact], link_style: LinkStyle, enable_profiling: bool) -> Artifact: + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + + toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] + + # Add -package-db and -package/-expose-package flags for each Haskell + # library dependency. 
+ packages_info = get_packages_info( + ctx, + link_style, + specify_pkg_version = False, + enable_profiling = enable_profiling, + ) + + dep_file = ctx.actions.declare_output(filename) + osuf, hisuf = output_extensions(link_style, enable_profiling) + # note, hisuf = "_hi", so -dep-suffix should be set to "_" + # note, `-outputdir ''` + dep_args = cmd_args(haskell_toolchain.compiler, "-M", "-outputdir", "", "-dep-suffix", hisuf[:-2], "-dep-makefile", dep_file.as_output()) + #dep_args.add("-osuf", osuf, "-hisuf", hisuf) + dep_args.add("-hide-all-packages") + dep_args.add("-package", "base") + dep_args.add(cmd_args(toolchain_libs, prepend="-package")) + dep_args.add(packages_info.exposed_package_args) + dep_args.add(packages_info.packagedb_args) + + dep_args.add(ctx.attrs.compiler_flags) + dep_args.add(sources) + ctx.actions.run(dep_args, category = "ghc_depends", identifier = filename) + + return dep_file + +def _parse_depends(depends: str, path_prefix: str) -> dict[str, list[str]]: + graph = {} + + for line in depends.splitlines(): + if line.startswith("#"): + continue + + k, v = line.strip().split(" : ", 1) + vs = v.split(" ") + + module_name = src_to_module_name(k) + deps = [ + src_to_module_name(_strip_prefix(v, path_prefix).lstrip("/")) + for v in vs + if not is_haskell_src(v) + ] + + graph.setdefault(module_name, []).extend(deps) + + return graph + def _attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo]: return filter( None, @@ -282,20 +371,116 @@ def compile_args( args_for_file = compile_args, ) -# Compile all the context's sources. 
-def compile( +def __compile_args( ctx: AnalysisContext, + module: _Module, link_style: LinkStyle, enable_profiling: bool, - pkgname: str | None = None) -> CompileResultInfo: + outputs: dict[Artifact, Artifact], + pkgname = None, + suffix: str = "") -> CompileArgsInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - compile_cmd = cmd_args(haskell_toolchain.compiler) - args = compile_args(ctx, link_style, enable_profiling, pkgname) + toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] - compile_cmd.add(args.args_for_cmd) + compile_cmd = cmd_args() + compile_cmd.add(haskell_toolchain.compiler_flags) - artifact_suffix = get_artifact_suffix(link_style, enable_profiling) + # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't + # be parsed when inside an argsfile. + compile_cmd.add(ctx.attrs.compiler_flags) + + compile_args = cmd_args() + compile_args.add("-no-link", "-i", "-c") + compile_args.add("-hide-all-packages") + compile_args.add(cmd_args(toolchain_libs, prepend="-package")) + + if enable_profiling: + compile_args.add("-prof") + + if link_style == LinkStyle("shared"): + compile_args.add("-dynamic", "-fPIC") + elif link_style == LinkStyle("static_pic"): + compile_args.add("-fPIC", "-fexternal-dynamic-refs") + + osuf, hisuf = output_extensions(link_style, enable_profiling) + compile_args.add("-osuf", osuf, "-hisuf", hisuf) + + if getattr(ctx.attrs, "main", None) != None: + compile_args.add(["-main-is", ctx.attrs.main]) + + #artifact_suffix = get_artifact_suffix(link_style, enable_profiling, suffix) + + object = outputs[module.object] + hi = outputs[module.interface] + stubs = outputs[module.stub_dir] + + compile_args.add("-ohi", cmd_args(hi.as_output())) + compile_args.add("-o", cmd_args(object.as_output())) + compile_args.add("-stubdir", stubs.as_output()) + + # Add -package-db and -package/-expose-package flags for each Haskell + # library dependency. 
+ packages_info = get_packages_info( + ctx, + link_style, + specify_pkg_version = False, + enable_profiling = enable_profiling, + ) + + compile_args.add(packages_info.exposed_package_args) + compile_args.add(packages_info.packagedb_args) + + # Add args from preprocess-able inputs. + inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) + pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) + pre_args = pre.set.project_as_args("args") + compile_args.add(cmd_args(pre_args, format = "-optP={}")) + + if pkgname: + compile_args.add(["-this-unit-id", pkgname]) + + srcs = cmd_args(module.source) + for (path, src) in srcs_to_pairs(ctx.attrs.srcs): + # hs-boot files aren't expected to be an argument to compiler but does need + # to be included in the directory of the associated src file + if not is_haskell_src(path): + srcs.hidden(src) + + producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags + + return CompileArgsInfo( + result = CompileResultInfo( + objects = object, + hi = hi, + stubs = stubs, + producing_indices = producing_indices, + ), + srcs = srcs, + args_for_cmd = compile_cmd, + args_for_file = compile_args, + ) + + +def _compile_module( + ctx: AnalysisContext, + *, + link_style: LinkStyle, + enable_profiling: bool, + module_name: str, + modules: dict[str, _Module], + dep_file: Artifact, + graph: dict[str, list[str]], + outputs: dict[Artifact, Artifact], + artifact_suffix: str, + pkgname: str | None = None, +) -> None: + module = modules[module_name] + + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + compile_cmd = cmd_args(haskell_toolchain.compiler) + + args = __compile_args(ctx, module, link_style, enable_profiling, outputs, pkgname) if args.args_for_file: if haskell_toolchain.use_argsfile: @@ -310,11 +495,91 @@ def compile( compile_cmd.add(args.args_for_file) compile_cmd.add(args.srcs) + compile_cmd.add(args.args_for_cmd) + + compile_cmd.add(cmd_args(dep_file, format = "-i{}").parent()) + + for dep_name in 
breadth_first_traversal(graph, [module_name])[1:]: + dep = modules[dep_name] + compile_cmd.hidden(dep.interface, dep.object) + + ctx.actions.run(compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, no_outputs_cleanup = True) + + + +# Compile all the context's sources. +def compile( + ctx: AnalysisContext, + link_style: LinkStyle, + enable_profiling: bool, + pkgname: str | None = None) -> CompileResultInfo: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) + + dep_name = ctx.attrs.name + artifact_suffix + ".depends" + dep_file = _ghc_depends(ctx, filename = dep_name, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling) + + modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) + + def do_compile(ctx, artifacts, outputs, dep_file=dep_file, modules=modules): + graph = _parse_depends(artifacts[dep_file].read_string(), _strip_prefix(str(ctx.label.path), str(ctx.label.cell_root))) + + for module_name in post_order_traversal(graph): + _compile_module( + ctx, + link_style = link_style, + enable_profiling = enable_profiling, + module_name = module_name, + modules = modules, + graph = graph, + outputs = outputs, + dep_file=dep_file, + artifact_suffix = artifact_suffix, + pkgname = pkgname, + ) + + interfaces = [module.interface for module in modules.values()] + objects = [module.object for module in modules.values()] + stub_dirs = [module.stub_dir for module in modules.values()] + + ctx.actions.dynamic_output( + dynamic = [dep_file], + inputs = ctx.attrs.srcs, + outputs = interfaces + objects + stub_dirs, + f = do_compile) + + object_dir = ctx.actions.declare_output("objects-" + artifact_suffix, dir=True) + + ctx.actions.copied_dir(object_dir.as_output(), { + a.short_path : a for a in objects + }) + + hi_dir = ctx.actions.declare_output("hi-" + artifact_suffix, dir=True) + + 
ctx.actions.copied_dir(hi_dir.as_output(), { + a.short_path : a for a in interfaces + }) + + stubs_dir = ctx.actions.declare_output("stubs-" + artifact_suffix, dir=True) + + # collect the stubs from all modules into the stubs_dir ctx.actions.run( - compile_cmd, - category = "haskell_compile_" + artifact_suffix.replace("-", "_"), - no_outputs_cleanup = True, + cmd_args([ + "bash", "-c", + """set -ex + mkdir -p \"$0\" + for stub; do + find \"$stub\" -mindepth 1 -maxdepth 1 -exec cp -r -t \"$0\" '{}' ';' + done""", + stubs_dir.as_output(), + stub_dirs + ]), + category = "haskell_stubs", + identifier = artifact_suffix ) - return args.result + return CompileResultInfo( + objects = object_dir, + hi = hi_dir, + stubs = stubs_dir, + producing_indices = False, + ) diff --git a/prelude/haskell/util.bzl b/prelude/haskell/util.bzl index 21cbd7b05..956fefd80 100644 --- a/prelude/haskell/util.bzl +++ b/prelude/haskell/util.bzl @@ -53,7 +53,8 @@ def _link_style_extensions(link_style: LinkStyle) -> (str, str): if link_style == LinkStyle("shared"): return ("dyn_o", "dyn_hi") elif link_style == LinkStyle("static_pic"): - return ("o", "hi") # is this right? + # FIXME conflicts with "static" LinkStyle + return ("pico", "pichi") # is this right? 
elif link_style == LinkStyle("static"): return ("o", "hi") fail("unknown LinkStyle") From cb65f8f4b3c798563b562f810479879a643a35bc Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 23 Jan 2024 10:09:36 +0100 Subject: [PATCH 0008/1133] [buck2] Use a sub-directory per link type / profiling config --- prelude/haskell/compile.bzl | 16 ++++++++++++---- prelude/haskell/util.bzl | 3 +-- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 5c4f8168b..c571daa71 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -99,6 +99,7 @@ _Module = record( interface = field(Artifact), object = field(Artifact), stub_dir = field(Artifact), + prefix_dir = field(str), ) @@ -119,11 +120,11 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl module_name = src_to_module_name(src.short_path) interface_path = paths.replace_extension(src.short_path, "." + hisuf) - interface = ctx.actions.declare_output(interface_path) + interface = ctx.actions.declare_output("mod-" + suffix, interface_path) object_path = paths.replace_extension(src.short_path, "." 
+ osuf) - object = ctx.actions.declare_output(object_path) + object = ctx.actions.declare_output("mod-" + suffix, object_path) stub_dir = ctx.actions.declare_output("stub-" + suffix + "-" + module_name, dir=True) - modules[module_name] = _Module(source = src, interface = interface, object = object, stub_dir = stub_dir) + modules[module_name] = _Module(source = src, interface = interface, object = object, stub_dir = stub_dir, prefix_dir = "mod-" + suffix) return modules @@ -497,7 +498,14 @@ def _compile_module( compile_cmd.add(args.args_for_cmd) - compile_cmd.add(cmd_args(dep_file, format = "-i{}").parent()) + compile_cmd.add( + cmd_args( + cmd_args(dep_file, format = "-i{}").parent(), + "/", + module.prefix_dir, + delimiter="" + ) + ) for dep_name in breadth_first_traversal(graph, [module_name])[1:]: dep = modules[dep_name] diff --git a/prelude/haskell/util.bzl b/prelude/haskell/util.bzl index 956fefd80..21cbd7b05 100644 --- a/prelude/haskell/util.bzl +++ b/prelude/haskell/util.bzl @@ -53,8 +53,7 @@ def _link_style_extensions(link_style: LinkStyle) -> (str, str): if link_style == LinkStyle("shared"): return ("dyn_o", "dyn_hi") elif link_style == LinkStyle("static_pic"): - # FIXME conflicts with "static" LinkStyle - return ("pico", "pichi") # is this right? + return ("o", "hi") # is this right? 
elif link_style == LinkStyle("static"): return ("o", "hi") fail("unknown LinkStyle") From 961e6ffccc511fe66bdf63d313031cc4708f4f9f Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 23 Jan 2024 12:27:58 +0100 Subject: [PATCH 0009/1133] [buck2] Run haskell_stubs action only locally --- prelude/haskell/compile.bzl | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index c571daa71..906b603a3 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -572,17 +572,19 @@ def compile( # collect the stubs from all modules into the stubs_dir ctx.actions.run( cmd_args([ - "bash", "-c", - """set -ex + "bash", "-exuc", + """\ mkdir -p \"$0\" for stub; do find \"$stub\" -mindepth 1 -maxdepth 1 -exec cp -r -t \"$0\" '{}' ';' - done""", + done + """, stubs_dir.as_output(), stub_dirs ]), category = "haskell_stubs", - identifier = artifact_suffix + identifier = artifact_suffix, + local_only = True, ) return CompileResultInfo( From 56f3b72ea5df1960a33e4723dd7612c6cadf9295 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 23 Jan 2024 14:05:33 +0100 Subject: [PATCH 0010/1133] [buck2] Generate a single dependency file for all link / profiling modes --- prelude/haskell/compile.bzl | 32 +++++++++++++++++++------------- prelude/haskell/haskell.bzl | 9 +++++++++ 2 files changed, 28 insertions(+), 13 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 906b603a3..6c25f213e 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -128,7 +128,7 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl return modules -def _ghc_depends(ctx: AnalysisContext, *, filename: str, sources: list[Artifact], link_style: LinkStyle, enable_profiling: bool) -> Artifact: +def ghc_depends(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: haskell_toolchain = 
ctx.attrs._haskell_toolchain[HaskellToolchainInfo] toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] @@ -137,17 +137,25 @@ def _ghc_depends(ctx: AnalysisContext, *, filename: str, sources: list[Artifact] # library dependency. packages_info = get_packages_info( ctx, - link_style, + LinkStyle("shared"), specify_pkg_version = False, - enable_profiling = enable_profiling, + enable_profiling = False, ) - dep_file = ctx.actions.declare_output(filename) - osuf, hisuf = output_extensions(link_style, enable_profiling) - # note, hisuf = "_hi", so -dep-suffix should be set to "_" - # note, `-outputdir ''` - dep_args = cmd_args(haskell_toolchain.compiler, "-M", "-outputdir", "", "-dep-suffix", hisuf[:-2], "-dep-makefile", dep_file.as_output()) - #dep_args.add("-osuf", osuf, "-hisuf", hisuf) + dep_file = ctx.actions.declare_output(ctx.attrs.name + ".depends") + + # The object and interface file paths are depending on the real module name + # as inferred by GHC, not the source file path; currently this requires the + # module name to correspond to the source file path as otherwise GHC will + # not be able to find the created object or interface files in the search + # path. + # + # (module X.Y.Z must be defined in a file at X/Y/Z.hs) + + # Note: `-outputdir ''` removes the prefix directory of all targets: + # backend/src/Foo/Util. => Foo/Util. 
+ dep_args = cmd_args(haskell_toolchain.compiler, "-M", "-outputdir", "", "-dep-makefile", dep_file.as_output()) + dep_args.add("-hide-all-packages") dep_args.add("-package", "base") dep_args.add(cmd_args(toolchain_libs, prepend="-package")) @@ -156,7 +164,7 @@ def _ghc_depends(ctx: AnalysisContext, *, filename: str, sources: list[Artifact] dep_args.add(ctx.attrs.compiler_flags) dep_args.add(sources) - ctx.actions.run(dep_args, category = "ghc_depends", identifier = filename) + ctx.actions.run(dep_args, category = "ghc_depends") return dep_file @@ -520,12 +528,10 @@ def compile( ctx: AnalysisContext, link_style: LinkStyle, enable_profiling: bool, + dep_file: Artifact, pkgname: str | None = None) -> CompileResultInfo: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) - dep_name = ctx.attrs.name + artifact_suffix + ".depends" - dep_file = _ghc_depends(ctx, filename = dep_name, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling) - modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) def do_compile(ctx, artifacts, outputs, dep_file=dep_file, modules=modules): diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 40bfddfff..db6561527 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -48,6 +48,7 @@ load( "HaskellLibraryInfo", "HaskellLibraryProvider", "compile", + "ghc_depends", ) load( "@prelude//haskell:haskell_haddock.bzl", @@ -509,6 +510,7 @@ def _build_haskell_lib( nlis: list[MergedLinkInfo], # native link infos from all deps link_style: LinkStyle, enable_profiling: bool, + dep_file: Artifact, # The non-profiling artifacts are also needed to build the package for # profiling, so it should be passed when `enable_profiling` is True. 
non_profiling_hlib: [HaskellLibBuildOutput, None] = None) -> HaskellLibBuildOutput: @@ -524,6 +526,7 @@ def _build_haskell_lib( ctx, link_style, enable_profiling = enable_profiling, + dep_file = dep_file, pkgname = pkgname, ) solibs = {} @@ -697,6 +700,8 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: libname = repr(ctx.label.path).replace("//", "_").replace("/", "_") + "_" + ctx.label.name pkgname = libname.replace("_", "-") + dep_file = ghc_depends(ctx, sources = ctx.attrs.srcs) + # The non-profiling library is also needed to build the package with # profiling enabled, so we need to keep track of it for each link style. non_profiling_hlib = {} @@ -715,6 +720,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: nlis = nlis, link_style = link_style, enable_profiling = enable_profiling, + dep_file = dep_file, non_profiling_hlib = non_profiling_hlib.get(link_style), ) if not enable_profiling: @@ -919,10 +925,13 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: if enable_profiling and link_style == LinkStyle("shared"): link_style = LinkStyle("static") + dep_file = ghc_depends(ctx, sources = ctx.attrs.srcs) + compiled = compile( ctx, link_style, enable_profiling = enable_profiling, + dep_file = dep_file, ) haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] From 9fd39467eeb99457191cac60038f2700c34b09be Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 23 Jan 2024 14:38:30 +0100 Subject: [PATCH 0011/1133] [buck2] Introduce helper function to provide common compile args --- prelude/haskell/compile.bzl | 128 ++++++++++++++---------------------- 1 file changed, 48 insertions(+), 80 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 6c25f213e..da62fb2b9 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -280,23 +280,14 @@ def get_packages_info( transitive_deps = libs.values(), ) -def compile_args( + +def _common_compile_args( 
ctx: AnalysisContext, link_style: LinkStyle, enable_profiling: bool, - pkgname = None, - suffix: str = "") -> CompileArgsInfo: - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - + pkgname: str | None) -> cmd_args: toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] - compile_cmd = cmd_args() - compile_cmd.add(haskell_toolchain.compiler_flags) - - # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't - # be parsed when inside an argsfile. - compile_cmd.add(ctx.attrs.compiler_flags) - compile_args = cmd_args() compile_args.add("-no-link", "-i") compile_args.add("-hide-all-packages") @@ -313,6 +304,46 @@ def compile_args( osuf, hisuf = output_extensions(link_style, enable_profiling) compile_args.add("-osuf", osuf, "-hisuf", hisuf) + # Add -package-db and -package/-expose-package flags for each Haskell + # library dependency. + packages_info = get_packages_info( + ctx, + link_style, + specify_pkg_version = False, + enable_profiling = enable_profiling, + ) + + compile_args.add(packages_info.exposed_package_args) + compile_args.add(packages_info.packagedb_args) + + # Add args from preprocess-able inputs. + inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) + pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) + pre_args = pre.set.project_as_args("args") + compile_args.add(cmd_args(pre_args, format = "-optP={}")) + + if pkgname: + compile_args.add(["-this-unit-id", pkgname]) + + return compile_args + +def compile_args( + ctx: AnalysisContext, + link_style: LinkStyle, + enable_profiling: bool, + pkgname = None, + suffix: str = "") -> CompileArgsInfo: + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + + compile_cmd = cmd_args() + compile_cmd.add(haskell_toolchain.compiler_flags) + + # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't + # be parsed when inside an argsfile. 
+ compile_cmd.add(ctx.attrs.compiler_flags) + + compile_args = _common_compile_args(ctx, link_style, enable_profiling, pkgname) + if getattr(ctx.attrs, "main", None) != None: compile_args.add(["-main-is", ctx.attrs.main]) @@ -336,27 +367,6 @@ def compile_args( stubs.as_output(), ) - # Add -package-db and -package/-expose-package flags for each Haskell - # library dependency. - packages_info = get_packages_info( - ctx, - link_style, - specify_pkg_version = False, - enable_profiling = enable_profiling, - ) - - compile_args.add(packages_info.exposed_package_args) - compile_args.add(packages_info.packagedb_args) - - # Add args from preprocess-able inputs. - inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) - pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) - pre_args = pre.set.project_as_args("args") - compile_args.add(cmd_args(pre_args, format = "-optP={}")) - - if pkgname: - compile_args.add(["-this-unit-id", pkgname]) - srcs = cmd_args() for (path, src) in srcs_to_pairs(ctx.attrs.srcs): # hs-boot files aren't expected to be an argument to compiler but does need @@ -380,45 +390,24 @@ def compile_args( args_for_file = compile_args, ) -def __compile_args( +def _compile_module_args( ctx: AnalysisContext, module: _Module, link_style: LinkStyle, enable_profiling: bool, outputs: dict[Artifact, Artifact], - pkgname = None, - suffix: str = "") -> CompileArgsInfo: + pkgname = None) -> CompileArgsInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] - compile_cmd = cmd_args() compile_cmd.add(haskell_toolchain.compiler_flags) # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't # be parsed when inside an argsfile. 
compile_cmd.add(ctx.attrs.compiler_flags) + compile_cmd.add("-c") - compile_args = cmd_args() - compile_args.add("-no-link", "-i", "-c") - compile_args.add("-hide-all-packages") - compile_args.add(cmd_args(toolchain_libs, prepend="-package")) - - if enable_profiling: - compile_args.add("-prof") - - if link_style == LinkStyle("shared"): - compile_args.add("-dynamic", "-fPIC") - elif link_style == LinkStyle("static_pic"): - compile_args.add("-fPIC", "-fexternal-dynamic-refs") - - osuf, hisuf = output_extensions(link_style, enable_profiling) - compile_args.add("-osuf", osuf, "-hisuf", hisuf) - - if getattr(ctx.attrs, "main", None) != None: - compile_args.add(["-main-is", ctx.attrs.main]) - - #artifact_suffix = get_artifact_suffix(link_style, enable_profiling, suffix) + compile_args = _common_compile_args(ctx, link_style, enable_profiling, pkgname) object = outputs[module.object] hi = outputs[module.interface] @@ -428,27 +417,6 @@ def __compile_args( compile_args.add("-o", cmd_args(object.as_output())) compile_args.add("-stubdir", stubs.as_output()) - # Add -package-db and -package/-expose-package flags for each Haskell - # library dependency. - packages_info = get_packages_info( - ctx, - link_style, - specify_pkg_version = False, - enable_profiling = enable_profiling, - ) - - compile_args.add(packages_info.exposed_package_args) - compile_args.add(packages_info.packagedb_args) - - # Add args from preprocess-able inputs. 
- inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) - pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) - pre_args = pre.set.project_as_args("args") - compile_args.add(cmd_args(pre_args, format = "-optP={}")) - - if pkgname: - compile_args.add(["-this-unit-id", pkgname]) - srcs = cmd_args(module.source) for (path, src) in srcs_to_pairs(ctx.attrs.srcs): # hs-boot files aren't expected to be an argument to compiler but does need @@ -489,7 +457,7 @@ def _compile_module( haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] compile_cmd = cmd_args(haskell_toolchain.compiler) - args = __compile_args(ctx, module, link_style, enable_profiling, outputs, pkgname) + args = _compile_module_args(ctx, module, link_style, enable_profiling, outputs, pkgname) if args.args_for_file: if haskell_toolchain.use_argsfile: From 525561c441ed595607be61746a7dad6bb5504bf9 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 23 Jan 2024 14:46:05 +0100 Subject: [PATCH 0012/1133] [buck2] Use `_package_flag` helper function --- prelude/haskell/compile.bzl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index da62fb2b9..a5ae17a13 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -156,9 +156,11 @@ def ghc_depends(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: # backend/src/Foo/Util. => Foo/Util. 
dep_args = cmd_args(haskell_toolchain.compiler, "-M", "-outputdir", "", "-dep-makefile", dep_file.as_output()) + package_flag = _package_flag(haskell_toolchain) + dep_args.add("-hide-all-packages") - dep_args.add("-package", "base") - dep_args.add(cmd_args(toolchain_libs, prepend="-package")) + dep_args.add(package_flag, "base") + dep_args.add(cmd_args(toolchain_libs, prepend=package_flag)) dep_args.add(packages_info.exposed_package_args) dep_args.add(packages_info.packagedb_args) From e2eeb0affd4958fd38edb966feb8fe9bf220179d Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 25 Jan 2024 10:57:53 +0100 Subject: [PATCH 0013/1133] [buck2] Use strip_prefix from prelude/utils --- prelude/haskell/compile.bzl | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index a5ae17a13..381d57e8f 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -35,6 +35,7 @@ load( ) load("@prelude//:paths.bzl", "paths") load("@prelude//utils:graph_utils.bzl", "post_order_traversal", "breadth_first_traversal") +load("@prelude//utils:strings.bzl", "strip_prefix") # The type of the return value of the `_compile()` function. 
CompileResultInfo = record( @@ -103,11 +104,11 @@ _Module = record( ) -def _strip_prefix(p, path_prefix): - if p.startswith(path_prefix): - return p[len(path_prefix):] - else: - return p +def _strip_prefix(prefix, s): + stripped = strip_prefix(prefix, s) + + return stripped if stripped != None else s + def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_style: LinkStyle, enable_profiling: bool, suffix: str) -> dict[str, _Module]: modules = {} @@ -182,7 +183,7 @@ def _parse_depends(depends: str, path_prefix: str) -> dict[str, list[str]]: module_name = src_to_module_name(k) deps = [ - src_to_module_name(_strip_prefix(v, path_prefix).lstrip("/")) + src_to_module_name(_strip_prefix(path_prefix, v).lstrip("/")) for v in vs if not is_haskell_src(v) ] @@ -505,7 +506,7 @@ def compile( modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) def do_compile(ctx, artifacts, outputs, dep_file=dep_file, modules=modules): - graph = _parse_depends(artifacts[dep_file].read_string(), _strip_prefix(str(ctx.label.path), str(ctx.label.cell_root))) + graph = _parse_depends(artifacts[dep_file].read_string(), _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path))) for module_name in post_order_traversal(graph): _compile_module( From f66781499d8100e250b337a19d8ee839132c469b Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 25 Jan 2024 16:43:07 +0100 Subject: [PATCH 0014/1133] [buck2] Remove `no_outputs_cleanup = True` We do not yet fully support incremental actions, this needs more thought. 
--- prelude/haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 381d57e8f..2d1edf260 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -490,7 +490,7 @@ def _compile_module( dep = modules[dep_name] compile_cmd.hidden(dep.interface, dep.object) - ctx.actions.run(compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, no_outputs_cleanup = True) + ctx.actions.run(compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name) From bf1adade4e3c7650f9201f0ee08df810c0b26446 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 24 Jan 2024 14:31:15 +0100 Subject: [PATCH 0015/1133] [buck2] Add toolchain libraries to linking step for haskell_binary --- prelude/haskell/haskell.bzl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index db6561527..fe537859b 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -936,8 +936,12 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] + output = ctx.actions.declare_output(ctx.attrs.name) link = cmd_args(haskell_toolchain.compiler) + link.add("-hide-all-packages") + link.add(cmd_args(toolchain_libs, prepend="-package")) link.add("-o", output.as_output()) link.add(haskell_toolchain.linker_flags) link.add(ctx.attrs.linker_flags) From 8c568704cea6d9ba753d8f926bd2c55f92567587 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 24 Jan 2024 14:45:49 +0100 Subject: [PATCH 0016/1133] [buck2] Support haskell sources with irregular module name GHC is expecting an interface file in the search path at a location determined from the module name as 
given by the module-id from the `module` statement. In the build, a module's name is only inferred from the path of the source file, which might not correctly reflect the actual module name as seen by GHC. Reading the dependency information, we can determine the correct module name for a given source file and use that information to fix the mapping of module name to `_Module` information. --- prelude/haskell/compile.bzl | 38 ++++++++++++++++++++++++++++++++----- 1 file changed, 33 insertions(+), 5 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 2d1edf260..0ed044384 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -171,8 +171,16 @@ def ghc_depends(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: return dep_file -def _parse_depends(depends: str, path_prefix: str) -> dict[str, list[str]]: +def _parse_depends(depends: str, path_prefix: str) -> tuple: + """ + Returns a tuple of two items: + + 1. the module dependency graph as a dict[str, list[str]] + 2. 
a mapping from module name inferred from the source path to the real module name as a dict[str, str] + (only mismatching module names are added to the mapping) + """ graph = {} + mapping = {} for line in depends.splitlines(): if line.startswith("#"): @@ -182,15 +190,33 @@ def _parse_depends(depends: str, path_prefix: str) -> dict[str, list[str]]: vs = v.split(" ") module_name = src_to_module_name(k) + deps = [ - src_to_module_name(_strip_prefix(path_prefix, v).lstrip("/")) + src_to_module_name(v) for v in vs if not is_haskell_src(v) ] graph.setdefault(module_name, []).extend(deps) - return graph + ext = paths.split_extension(k)[1] + + if ext != ".o": continue + + sources = filter(is_haskell_src, vs) + + if not sources: continue + + if len(sources) != 1: fail("one object file must correspond to exactly one haskell source") + + hs_file = sources[0] + + hs_module_name = src_to_module_name(_strip_prefix(path_prefix, hs_file).lstrip("/")) + + if hs_module_name != module_name: + mapping[hs_module_name] = module_name + + return (graph, mapping) def _attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo]: return filter( @@ -506,7 +532,9 @@ def compile( modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) def do_compile(ctx, artifacts, outputs, dep_file=dep_file, modules=modules): - graph = _parse_depends(artifacts[dep_file].read_string(), _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path))) + graph, module_map = _parse_depends(artifacts[dep_file].read_string(), _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path))) + + mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } for module_name in post_order_traversal(graph): _compile_module( @@ -514,7 +542,7 @@ def compile( link_style = link_style, enable_profiling = enable_profiling, module_name = module_name, - modules = modules, + modules = mapped_modules, graph = graph, outputs 
= outputs, dep_file=dep_file, From 57f3b68e7e8eb2452f55a8cb166dee8d5c917b20 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 31 Jan 2024 09:28:48 +0100 Subject: [PATCH 0017/1133] Fix C pre-processor error with empty output dir Specifying `-outputdir ''` resulted in ghc generating empty `-I` flags to the C pre-processor which led it to ignore the next option since it expected an argument for the include flag. On Darwin (when using clang), this led to this error: ``` clang-11: error: cannot specify -o when generating multiple output files ``` --- prelude/haskell/compile.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 0ed044384..1a78bc90c 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -153,9 +153,9 @@ def ghc_depends(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: # # (module X.Y.Z must be defined in a file at X/Y/Z.hs) - # Note: `-outputdir ''` removes the prefix directory of all targets: + # Note: `-outputdir '.'` removes the prefix directory of all targets: # backend/src/Foo/Util. => Foo/Util. - dep_args = cmd_args(haskell_toolchain.compiler, "-M", "-outputdir", "", "-dep-makefile", dep_file.as_output()) + dep_args = cmd_args(haskell_toolchain.compiler, "-M", "-outputdir", ".", "-dep-makefile", dep_file.as_output()) package_flag = _package_flag(haskell_toolchain) From 24a34bc205d8e9a10049ad53a39f6d1f29f374af Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 1 Feb 2024 16:54:03 +0100 Subject: [PATCH 0018/1133] Do not require .hi, .so, ... in ghc-pkg register This way package actions can be scheduled before actual build actions, don't lie on the critical path for a build, and don't form a bottleneck. 
For reference, a similar approach is taken by rules_haskell: https://github.com/tweag/rules_haskell/blob/7059b07bca46e6c1712375feee05a3a8605d6de5/haskell/private/packages.bzl#L162-L188 --- prelude/haskell/haskell.bzl | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index fe537859b..657f2df2e 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -376,13 +376,18 @@ def _srcs_to_objfiles( objfiles.add(cmd_args([odir, "/", paths.replace_extension(src, "." + osuf)], delimiter = "")) return objfiles +# Script to generate a GHC package-db entry for a new package. +# +# Sets --force so that ghc-pkg does not check for .hi, .so, ... files. +# This way package actions can be scheduled before actual build actions, +# don't lie on the critical path for a build, and don't form a bottleneck. _REGISTER_PACKAGE = """\ set -eu GHC_PKG=$1 DB=$2 PKGCONF=$3 "$GHC_PKG" init "$DB" -"$GHC_PKG" register --package-conf "$DB" --no-expand-pkgroot "$PKGCONF" +"$GHC_PKG" register --package-conf "$DB" --no-expand-pkgroot "$PKGCONF" --force """ # Create a package @@ -476,7 +481,7 @@ def _make_package( haskell_toolchain.packager, db.as_output(), pkg_conf, - ]).hidden(hi.values()).hidden(lib.values()), # needs hi, because ghc-pkg checks that the .hi files exist + ]), category = "haskell_package_" + artifact_suffix.replace("-", "_"), env = {"GHC_PACKAGE_PATH": ghc_package_path}, ) From 3e2810a69a1dcc3b5cf767b8a989b585acb23823 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 2 Feb 2024 20:49:32 +0100 Subject: [PATCH 0019/1133] ghc_depends - avoid .hi, .o dependencies (package_args) --- prelude/haskell/compile.bzl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 1a78bc90c..17a51ad9d 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -91,6 +91,7 @@ HaskellLibraryInfo = 
record( PackagesInfo = record( exposed_package_args = cmd_args, + exposed_package_args_thin = cmd_args, packagedb_args = cmd_args, transitive_deps = field(list[HaskellLibraryInfo]), ) @@ -162,7 +163,7 @@ def ghc_depends(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: dep_args.add("-hide-all-packages") dep_args.add(package_flag, "base") dep_args.add(cmd_args(toolchain_libs, prepend=package_flag)) - dep_args.add(packages_info.exposed_package_args) + dep_args.add(cmd_args(packages_info.exposed_package_args_thin)) dep_args.add(packages_info.packagedb_args) dep_args.add(ctx.attrs.compiler_flags) @@ -269,6 +270,7 @@ def get_packages_info( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) exposed_package_args = cmd_args([package_flag, "base"]) + exposed_package_args_thin = cmd_args([package_flag, "base"]) packagedb_args = cmd_args() @@ -302,9 +304,11 @@ def get_packages_info( pkg_name += "-{}".format(lib.version) exposed_package_args.add(package_flag, pkg_name) + exposed_package_args_thin.add(package_flag, pkg_name) return PackagesInfo( exposed_package_args = exposed_package_args, + exposed_package_args_thin = exposed_package_args_thin, packagedb_args = packagedb_args, transitive_deps = libs.values(), ) From cf3ec41bd475d7c25290689943cc7c5383c92a9a Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 2 Feb 2024 20:57:14 +0100 Subject: [PATCH 0020/1133] ghc_depends - avoid .hi, .o depends (package_db) --- prelude/haskell/compile.bzl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 17a51ad9d..38ce02169 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -93,6 +93,7 @@ PackagesInfo = record( exposed_package_args = cmd_args, exposed_package_args_thin = cmd_args, packagedb_args = cmd_args, + packagedb_args_thin = cmd_args, transitive_deps = field(list[HaskellLibraryInfo]), ) @@ -164,7 +165,7 @@ def 
ghc_depends(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: dep_args.add(package_flag, "base") dep_args.add(cmd_args(toolchain_libs, prepend=package_flag)) dep_args.add(cmd_args(packages_info.exposed_package_args_thin)) - dep_args.add(packages_info.packagedb_args) + dep_args.add(packages_info.packagedb_args_thin) dep_args.add(ctx.attrs.compiler_flags) dep_args.add(sources) @@ -273,6 +274,7 @@ def get_packages_info( exposed_package_args_thin = cmd_args([package_flag, "base"]) packagedb_args = cmd_args() + packagedb_args_thin = cmd_args() for lib in libs.values(): exposed_package_args.hidden(lib.import_dirs.values()) @@ -290,6 +292,7 @@ def get_packages_info( # These we need to add for all the packages/dependencies, i.e. # direct and transitive (e.g. `fbcode-common-hs-util-hs-array`) packagedb_args.add("-package-db", lib.db) + packagedb_args_thin.add("-package-db", lib.db) haskell_direct_deps_lib_infos = _attr_deps_haskell_lib_infos( ctx, @@ -310,6 +313,7 @@ def get_packages_info( exposed_package_args = exposed_package_args, exposed_package_args_thin = exposed_package_args_thin, packagedb_args = packagedb_args, + packagedb_args_thin = packagedb_args_thin, transitive_deps = libs.values(), ) From 2036e1634737f8fb0a485a0f469abc795683e59c Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 5 Feb 2024 13:49:19 +0100 Subject: [PATCH 0021/1133] exposed_package_args -> exposed_package_artifacts The intention is that exposed_package_args should only carry the stringly flags, while exposed_package_artifacts carries all the artifacts needed to use the package, such as interfaces, objects, and libraries. 
--- prelude/haskell/compile.bzl | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 38ce02169..db5b3f39b 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -90,7 +90,7 @@ HaskellLibraryInfo = record( ) PackagesInfo = record( - exposed_package_args = cmd_args, + exposed_package_artifacts = cmd_args, exposed_package_args_thin = cmd_args, packagedb_args = cmd_args, packagedb_args_thin = cmd_args, @@ -270,19 +270,19 @@ def get_packages_info( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) - exposed_package_args = cmd_args([package_flag, "base"]) + exposed_package_artifacts = cmd_args([package_flag, "base"]) exposed_package_args_thin = cmd_args([package_flag, "base"]) packagedb_args = cmd_args() packagedb_args_thin = cmd_args() for lib in libs.values(): - exposed_package_args.hidden(lib.import_dirs.values()) - exposed_package_args.hidden(lib.stub_dirs) + exposed_package_artifacts.hidden(lib.import_dirs.values()) + exposed_package_artifacts.hidden(lib.stub_dirs) # libs of dependencies might be needed at compile time if # we're using Template Haskell: - exposed_package_args.hidden(lib.libs) + exposed_package_artifacts.hidden(lib.libs) packagedb_args.hidden(lib.import_dirs.values()) packagedb_args.hidden(lib.stub_dirs) @@ -306,11 +306,11 @@ def get_packages_info( if (specify_pkg_version): pkg_name += "-{}".format(lib.version) - exposed_package_args.add(package_flag, pkg_name) + exposed_package_artifacts.add(package_flag, pkg_name) exposed_package_args_thin.add(package_flag, pkg_name) return PackagesInfo( - exposed_package_args = exposed_package_args, + exposed_package_artifacts = exposed_package_artifacts, exposed_package_args_thin = exposed_package_args_thin, packagedb_args = packagedb_args, packagedb_args_thin = packagedb_args_thin, @@ -350,7 +350,7 @@ def _common_compile_args( enable_profiling = 
enable_profiling, ) - compile_args.add(packages_info.exposed_package_args) + compile_args.add(packages_info.exposed_package_artifacts) compile_args.add(packages_info.packagedb_args) # Add args from preprocess-able inputs. From 3426cd4d27f656778b74fc223a2d77717060b548 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 5 Feb 2024 13:50:55 +0100 Subject: [PATCH 0022/1133] exposed_package_args_thin -> exposed_package_args exposed_package_args_thin takes the role of just carrying the arguments. --- prelude/haskell/compile.bzl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index db5b3f39b..f374f0d7d 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -91,7 +91,7 @@ HaskellLibraryInfo = record( PackagesInfo = record( exposed_package_artifacts = cmd_args, - exposed_package_args_thin = cmd_args, + exposed_package_args = cmd_args, packagedb_args = cmd_args, packagedb_args_thin = cmd_args, transitive_deps = field(list[HaskellLibraryInfo]), @@ -164,7 +164,7 @@ def ghc_depends(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: dep_args.add("-hide-all-packages") dep_args.add(package_flag, "base") dep_args.add(cmd_args(toolchain_libs, prepend=package_flag)) - dep_args.add(cmd_args(packages_info.exposed_package_args_thin)) + dep_args.add(cmd_args(packages_info.exposed_package_args)) dep_args.add(packages_info.packagedb_args_thin) dep_args.add(ctx.attrs.compiler_flags) @@ -271,7 +271,7 @@ def get_packages_info( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) exposed_package_artifacts = cmd_args([package_flag, "base"]) - exposed_package_args_thin = cmd_args([package_flag, "base"]) + exposed_package_args = cmd_args([package_flag, "base"]) packagedb_args = cmd_args() packagedb_args_thin = cmd_args() @@ -307,11 +307,11 @@ def get_packages_info( pkg_name += "-{}".format(lib.version) 
exposed_package_artifacts.add(package_flag, pkg_name) - exposed_package_args_thin.add(package_flag, pkg_name) + exposed_package_args.add(package_flag, pkg_name) return PackagesInfo( exposed_package_artifacts = exposed_package_artifacts, - exposed_package_args_thin = exposed_package_args_thin, + exposed_package_args = exposed_package_args, packagedb_args = packagedb_args, packagedb_args_thin = packagedb_args_thin, transitive_deps = libs.values(), From d562f09864453af15e7183b134fa3201f4e4ca12 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 5 Feb 2024 13:52:13 +0100 Subject: [PATCH 0023/1133] exposed_package_artifacts no longer carries arguments Instead exposed_package_args should be used. --- prelude/haskell/compile.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index f374f0d7d..a9b938574 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -270,7 +270,7 @@ def get_packages_info( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) - exposed_package_artifacts = cmd_args([package_flag, "base"]) + exposed_package_artifacts = cmd_args() exposed_package_args = cmd_args([package_flag, "base"]) packagedb_args = cmd_args() @@ -306,7 +306,6 @@ def get_packages_info( if (specify_pkg_version): pkg_name += "-{}".format(lib.version) - exposed_package_artifacts.add(package_flag, pkg_name) exposed_package_args.add(package_flag, pkg_name) return PackagesInfo( @@ -350,6 +349,7 @@ def _common_compile_args( enable_profiling = enable_profiling, ) + compile_args.add(packages_info.exposed_package_args) compile_args.add(packages_info.exposed_package_artifacts) compile_args.add(packages_info.packagedb_args) From 06f0fb244c17d9d8dce31f798391f9de64b595ca Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 5 Feb 2024 13:55:48 +0100 Subject: [PATCH 0024/1133] packagedb_args_thin -> packagedb_args The separation is redundant. 
The field exposed_packages_artifacts already carries all the interfaces, objects, and libraries; and these are already inputs where packagedb_args was an input. So, there is no need for a separte package_args_thin. Instead, package_args only carries the package-db flag and inputs, and exposed_packages_artifacts carries the interfaces, objects, and libraries. --- prelude/haskell/compile.bzl | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index a9b938574..76b9c5809 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -93,7 +93,6 @@ PackagesInfo = record( exposed_package_artifacts = cmd_args, exposed_package_args = cmd_args, packagedb_args = cmd_args, - packagedb_args_thin = cmd_args, transitive_deps = field(list[HaskellLibraryInfo]), ) @@ -165,7 +164,7 @@ def ghc_depends(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: dep_args.add(package_flag, "base") dep_args.add(cmd_args(toolchain_libs, prepend=package_flag)) dep_args.add(cmd_args(packages_info.exposed_package_args)) - dep_args.add(packages_info.packagedb_args_thin) + dep_args.add(packages_info.packagedb_args) dep_args.add(ctx.attrs.compiler_flags) dep_args.add(sources) @@ -274,7 +273,6 @@ def get_packages_info( exposed_package_args = cmd_args([package_flag, "base"]) packagedb_args = cmd_args() - packagedb_args_thin = cmd_args() for lib in libs.values(): exposed_package_artifacts.hidden(lib.import_dirs.values()) @@ -284,15 +282,10 @@ def get_packages_info( # we're using Template Haskell: exposed_package_artifacts.hidden(lib.libs) - packagedb_args.hidden(lib.import_dirs.values()) - packagedb_args.hidden(lib.stub_dirs) - packagedb_args.hidden(lib.libs) - for lib in libs.values(): # These we need to add for all the packages/dependencies, i.e. # direct and transitive (e.g. 
`fbcode-common-hs-util-hs-array`) packagedb_args.add("-package-db", lib.db) - packagedb_args_thin.add("-package-db", lib.db) haskell_direct_deps_lib_infos = _attr_deps_haskell_lib_infos( ctx, @@ -312,7 +305,6 @@ def get_packages_info( exposed_package_artifacts = exposed_package_artifacts, exposed_package_args = exposed_package_args, packagedb_args = packagedb_args, - packagedb_args_thin = packagedb_args_thin, transitive_deps = libs.values(), ) From c31c10d307b3121d44e5b6a3ab54f0fbaa526796 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 5 Feb 2024 17:04:38 +0100 Subject: [PATCH 0025/1133] Remove stubs from link inputs The stub directories contain C header files that provide stubs for FFI exports from Haskell code to be consumed by C code. These should not be required link-time inputs. --- prelude/haskell/haskell.bzl | 3 --- 1 file changed, 3 deletions(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 657f2df2e..46cb6d79d 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -572,7 +572,6 @@ def _build_haskell_lib( ) link.add(objfiles) - link.hidden(compiled.stubs) infos = get_link_args_for_strategy( ctx, @@ -951,8 +950,6 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link.add(haskell_toolchain.linker_flags) link.add(ctx.attrs.linker_flags) - link.hidden(compiled.stubs) - osuf, _hisuf = output_extensions(link_style, enable_profiling) objfiles = _srcs_to_objfiles(ctx, compiled.objects, osuf) From 455809deceef83972993ef169a86dd44be915fa4 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 5 Feb 2024 17:34:33 +0100 Subject: [PATCH 0026/1133] Remove stubs from compile inputs The stub directories contain C header files that provide stubs for FFI exports from Haskell code to be consumed by C code. These should not be required Haskell compile-time inputs. 
--- prelude/haskell/compile.bzl | 1 - 1 file changed, 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 76b9c5809..46788b64d 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -276,7 +276,6 @@ def get_packages_info( for lib in libs.values(): exposed_package_artifacts.hidden(lib.import_dirs.values()) - exposed_package_artifacts.hidden(lib.stub_dirs) # libs of dependencies might be needed at compile time if # we're using Template Haskell: From d1c637ee10319828c433ef65f518b04cb43523e8 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 5 Feb 2024 17:52:57 +0100 Subject: [PATCH 0027/1133] Split expose_package _imports and _libs The libraries will only be required for Template Haskell compilation. --- prelude/haskell/compile.bzl | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 46788b64d..245175e07 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -90,7 +90,8 @@ HaskellLibraryInfo = record( ) PackagesInfo = record( - exposed_package_artifacts = cmd_args, + exposed_package_imports = cmd_args, + exposed_package_libs = cmd_args, exposed_package_args = cmd_args, packagedb_args = cmd_args, transitive_deps = field(list[HaskellLibraryInfo]), @@ -269,17 +270,18 @@ def get_packages_info( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) - exposed_package_artifacts = cmd_args() + exposed_package_imports = cmd_args() + exposed_package_libs = cmd_args() exposed_package_args = cmd_args([package_flag, "base"]) packagedb_args = cmd_args() for lib in libs.values(): - exposed_package_artifacts.hidden(lib.import_dirs.values()) + exposed_package_imports.hidden(lib.import_dirs.values()) # libs of dependencies might be needed at compile time if # we're using Template Haskell: - exposed_package_artifacts.hidden(lib.libs) + 
exposed_package_libs.hidden(lib.libs) for lib in libs.values(): # These we need to add for all the packages/dependencies, i.e. @@ -301,7 +303,8 @@ def get_packages_info( exposed_package_args.add(package_flag, pkg_name) return PackagesInfo( - exposed_package_artifacts = exposed_package_artifacts, + exposed_package_imports = exposed_package_imports, + exposed_package_libs = exposed_package_libs, exposed_package_args = exposed_package_args, packagedb_args = packagedb_args, transitive_deps = libs.values(), @@ -341,7 +344,8 @@ def _common_compile_args( ) compile_args.add(packages_info.exposed_package_args) - compile_args.add(packages_info.exposed_package_artifacts) + compile_args.add(packages_info.exposed_package_imports) + compile_args.add(packages_info.exposed_package_libs) compile_args.add(packages_info.packagedb_args) # Add args from preprocess-able inputs. From c5ff95017576edbf787a83dbadff87eeaff45835 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 5 Feb 2024 19:26:39 +0100 Subject: [PATCH 0028/1133] implement uses_th helper function --- prelude/haskell/compile.bzl | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 245175e07..96bbac2b4 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -173,6 +173,34 @@ def ghc_depends(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: return dep_file +def uses_th(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: + """Determine which of the given modules use Template Haskell. + + Template Haskell compilation requires additional inputs. We can avoid these + inputs if Template Haskell is not used. + + Ideally, GHC would expose this information in the generated depends file. + Until it does so, we use this workaround. 
+ """ + th_file = ctx.actions.declare_output(ctx.attrs.name + ".th") + + script = """\ +touch "$1" +for file in "${@:2}"; do + grep -q "$file" \\ + -e '{-# LANGUAGE TemplateHaskell #-}' \\ + -e '{-# LANGUAGE TemplateHaskellQuotes #-}' \\ + -e '{-# LANGUAGE QuasiQuotes #-}' \\ + && echo "$file" >> "$1" || true +done +""" + ctx.actions.run( + cmd_args("sh", "-c", script, "", th_file.as_output(), sources), + category = "haskell_th", + ) + + return th_file + def _parse_depends(depends: str, path_prefix: str) -> tuple: """ Returns a tuple of two items: From 03e433839c4b3ae053939aef34c87fa91ef8504d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 6 Feb 2024 13:35:48 +0100 Subject: [PATCH 0029/1133] Implement _parse_th helper function --- prelude/haskell/compile.bzl | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 96bbac2b4..03485d02a 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -248,6 +248,16 @@ def _parse_depends(depends: str, path_prefix: str) -> tuple: return (graph, mapping) +def _parse_th(th_file: str, path_prefix: str) -> list[str]: + """Returns the list of modules that use Template Haskell.""" + result = [] + + for line in th_file.splitlines(): + module_name = src_to_module_name(_strip_prefix(path_prefix, line.strip()).lstrip("/")) + result.append(module_name) + + return result + def _attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo]: return filter( None, From 35309977bfc8e2c2e7af8ae7f0200091f35ef240 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 6 Feb 2024 13:49:04 +0100 Subject: [PATCH 0030/1133] use uses_th and parse_th in compile fn --- prelude/haskell/compile.bzl | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 03485d02a..f307d20f1 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ 
-567,13 +567,17 @@ def compile( link_style: LinkStyle, enable_profiling: bool, dep_file: Artifact, + th_file: Artifact, pkgname: str | None = None) -> CompileResultInfo: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) - def do_compile(ctx, artifacts, outputs, dep_file=dep_file, modules=modules): - graph, module_map = _parse_depends(artifacts[dep_file].read_string(), _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path))) + def do_compile(ctx, artifacts, outputs, dep_file=dep_file, th_file=th_file, modules=modules): + path_prefix = _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)) + graph, module_map = _parse_depends(artifacts[dep_file].read_string(), path_prefix) + th_modules = _parse_th(artifacts[th_file].read_string(), path_prefix) + print("TH MODULES", th_modules, "OF", graph.keys()) mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } @@ -596,7 +600,7 @@ def compile( stub_dirs = [module.stub_dir for module in modules.values()] ctx.actions.dynamic_output( - dynamic = [dep_file], + dynamic = [dep_file, th_file], inputs = ctx.attrs.srcs, outputs = interfaces + objects + stub_dirs, f = do_compile) From 9beedeeedc5c1bc07d552ab63f18cc6addda348d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 6 Feb 2024 13:49:50 +0100 Subject: [PATCH 0031/1133] provide uses th output to compile --- prelude/haskell/haskell.bzl | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 46cb6d79d..514dcf388 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -49,6 +49,7 @@ load( "HaskellLibraryProvider", "compile", "ghc_depends", + "uses_th", ) load( "@prelude//haskell:haskell_haddock.bzl", @@ -516,6 +517,7 @@ def _build_haskell_lib( link_style: LinkStyle, enable_profiling: bool, dep_file: 
Artifact, + th_file: Artifact, # The non-profiling artifacts are also needed to build the package for # profiling, so it should be passed when `enable_profiling` is True. non_profiling_hlib: [HaskellLibBuildOutput, None] = None) -> HaskellLibBuildOutput: @@ -532,6 +534,7 @@ def _build_haskell_lib( link_style, enable_profiling = enable_profiling, dep_file = dep_file, + th_file = th_file, pkgname = pkgname, ) solibs = {} @@ -705,6 +708,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: pkgname = libname.replace("_", "-") dep_file = ghc_depends(ctx, sources = ctx.attrs.srcs) + th_file = uses_th(ctx, sources = ctx.attrs.srcs) # The non-profiling library is also needed to build the package with # profiling enabled, so we need to keep track of it for each link style. @@ -725,6 +729,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: link_style = link_style, enable_profiling = enable_profiling, dep_file = dep_file, + th_file = th_file, non_profiling_hlib = non_profiling_hlib.get(link_style), ) if not enable_profiling: @@ -930,12 +935,14 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link_style = LinkStyle("static") dep_file = ghc_depends(ctx, sources = ctx.attrs.srcs) + th_file = uses_th(ctx, sources = ctx.attrs.srcs) compiled = compile( ctx, link_style, enable_profiling = enable_profiling, dep_file = dep_file, + th_file = th_file, ) haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] From 93c0b7b8b1977111ed0779fd9e57b432d31dbc08 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 6 Feb 2024 14:14:23 +0100 Subject: [PATCH 0032/1133] Conditionally pass libraries for TH compile Omit library inputs when compiling modules that do not use Template Haskell. 
--- prelude/haskell/compile.bzl | 15 +++++++++++---- prelude/haskell/haskell_haddock.bzl | 1 + 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index f307d20f1..350e05fbd 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -353,6 +353,7 @@ def _common_compile_args( ctx: AnalysisContext, link_style: LinkStyle, enable_profiling: bool, + enable_th: bool, pkgname: str | None) -> cmd_args: toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] @@ -383,8 +384,9 @@ def _common_compile_args( compile_args.add(packages_info.exposed_package_args) compile_args.add(packages_info.exposed_package_imports) - compile_args.add(packages_info.exposed_package_libs) compile_args.add(packages_info.packagedb_args) + if enable_th: + compile_args.add(packages_info.exposed_package_libs) # Add args from preprocess-able inputs. inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) @@ -401,6 +403,7 @@ def compile_args( ctx: AnalysisContext, link_style: LinkStyle, enable_profiling: bool, + enable_th: bool, pkgname = None, suffix: str = "") -> CompileArgsInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -412,7 +415,7 @@ def compile_args( # be parsed when inside an argsfile. 
compile_cmd.add(ctx.attrs.compiler_flags) - compile_args = _common_compile_args(ctx, link_style, enable_profiling, pkgname) + compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname) if getattr(ctx.attrs, "main", None) != None: compile_args.add(["-main-is", ctx.attrs.main]) @@ -465,6 +468,7 @@ def _compile_module_args( module: _Module, link_style: LinkStyle, enable_profiling: bool, + enable_th: bool, outputs: dict[Artifact, Artifact], pkgname = None) -> CompileArgsInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -477,7 +481,7 @@ def _compile_module_args( compile_cmd.add(ctx.attrs.compiler_flags) compile_cmd.add("-c") - compile_args = _common_compile_args(ctx, link_style, enable_profiling, pkgname) + compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname) object = outputs[module.object] hi = outputs[module.interface] @@ -514,6 +518,7 @@ def _compile_module( *, link_style: LinkStyle, enable_profiling: bool, + enable_th: bool, module_name: str, modules: dict[str, _Module], dep_file: Artifact, @@ -527,7 +532,7 @@ def _compile_module( haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] compile_cmd = cmd_args(haskell_toolchain.compiler) - args = _compile_module_args(ctx, module, link_style, enable_profiling, outputs, pkgname) + args = _compile_module_args(ctx, module, link_style, enable_profiling, enable_th, outputs, pkgname) if args.args_for_file: if haskell_toolchain.use_argsfile: @@ -582,10 +587,12 @@ def compile( mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } for module_name in post_order_traversal(graph): + print("TH ENABLE", module_name, module_name in th_modules) _compile_module( ctx, link_style = link_style, enable_profiling = enable_profiling, + enable_th = module_name in th_modules, module_name = module_name, modules = mapped_modules, graph = graph, diff --git a/prelude/haskell/haskell_haddock.bzl 
b/prelude/haskell/haskell_haddock.bzl index 4d498df2a..c485df3e5 100644 --- a/prelude/haskell/haskell_haddock.bzl +++ b/prelude/haskell/haskell_haddock.bzl @@ -34,6 +34,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: ctx, link_style, enable_profiling = False, + enable_th = True, suffix = "-haddock", pkgname = pkgname, ) From 2a8297a5e3e1756c7c38f3f405836655484ada29 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 6 Feb 2024 14:15:20 +0100 Subject: [PATCH 0033/1133] remove debug print --- prelude/haskell/compile.bzl | 2 -- 1 file changed, 2 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 350e05fbd..efe334c68 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -582,12 +582,10 @@ def compile( path_prefix = _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)) graph, module_map = _parse_depends(artifacts[dep_file].read_string(), path_prefix) th_modules = _parse_th(artifacts[th_file].read_string(), path_prefix) - print("TH MODULES", th_modules, "OF", graph.keys()) mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } for module_name in post_order_traversal(graph): - print("TH ENABLE", module_name, module_name in th_modules) _compile_module( ctx, link_style = link_style, From 050780266f5131f90f8156ca8ad8506d7130cb8e Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 6 Feb 2024 14:26:08 +0100 Subject: [PATCH 0034/1133] Explicitly use bash for uses_th The script uses bash substitutions --- prelude/haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index efe334c68..a92d69c6f 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -195,7 +195,7 @@ for file in "${@:2}"; do done """ ctx.actions.run( - cmd_args("sh", "-c", script, "", th_file.as_output(), sources), + cmd_args("bash", "-c", script, "", th_file.as_output(), 
sources), category = "haskell_th", ) From 6efeeacea028561651b78b2e7c1be8ff2bd4ffcf Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 7 Feb 2024 11:25:04 +0100 Subject: [PATCH 0035/1133] Utility to detect TH usage --- prelude/decls/haskell_common.bzl | 9 ++++ prelude/decls/haskell_rules.bzl | 2 + prelude/haskell/compile.bzl | 12 +----- prelude/haskell/tools/BUCK.v2 | 6 +++ prelude/haskell/tools/detect_th_extension.py | 44 ++++++++++++++++++++ 5 files changed, 62 insertions(+), 11 deletions(-) create mode 100644 prelude/haskell/tools/detect_th_extension.py diff --git a/prelude/decls/haskell_common.bzl b/prelude/decls/haskell_common.bzl index 1cbb8c457..03522e3b4 100644 --- a/prelude/decls/haskell_common.bzl +++ b/prelude/decls/haskell_common.bzl @@ -40,9 +40,18 @@ def _exported_linker_flags_arg(): """), } +def _scripts_arg(): + return { + "_detect_th_extension": attrs.dep( + providers = [RunInfo], + default = "prelude//haskell/tools:detect_th_extension", + ), + } + haskell_common = struct( srcs_arg = _srcs_arg, deps_arg = _deps_arg, compiler_flags_arg = _compiler_flags_arg, exported_linker_flags_arg = _exported_linker_flags_arg, + scripts_arg = _scripts_arg, ) diff --git a/prelude/decls/haskell_rules.bzl b/prelude/decls/haskell_rules.bzl index a9c327004..eee463374 100644 --- a/prelude/decls/haskell_rules.bzl +++ b/prelude/decls/haskell_rules.bzl @@ -47,6 +47,7 @@ haskell_binary = prelude_rule( haskell_common.srcs_arg() | haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | + haskell_common.scripts_arg() | buck.platform_deps_arg() | { "contacts": attrs.list(attrs.string(), default = []), @@ -164,6 +165,7 @@ haskell_library = prelude_rule( haskell_common.srcs_arg() | haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | + haskell_common.scripts_arg() | buck.platform_deps_arg() | native_common.link_whole(link_whole_type = attrs.bool(default = False)) | native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage)) | diff 
--git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index a92d69c6f..8558eb415 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -184,18 +184,8 @@ def uses_th(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: """ th_file = ctx.actions.declare_output(ctx.attrs.name + ".th") - script = """\ -touch "$1" -for file in "${@:2}"; do - grep -q "$file" \\ - -e '{-# LANGUAGE TemplateHaskell #-}' \\ - -e '{-# LANGUAGE TemplateHaskellQuotes #-}' \\ - -e '{-# LANGUAGE QuasiQuotes #-}' \\ - && echo "$file" >> "$1" || true -done -""" ctx.actions.run( - cmd_args("bash", "-c", script, "", th_file.as_output(), sources), + cmd_args(ctx.attrs._detect_th_extension[RunInfo], "--output", th_file.as_output(), sources), category = "haskell_th", ) diff --git a/prelude/haskell/tools/BUCK.v2 b/prelude/haskell/tools/BUCK.v2 index 48758abb9..8a81cded4 100644 --- a/prelude/haskell/tools/BUCK.v2 +++ b/prelude/haskell/tools/BUCK.v2 @@ -5,3 +5,9 @@ prelude.python_bootstrap_binary( main = "script_template_processor.py", visibility = ["PUBLIC"], ) + +prelude.python_bootstrap_binary( + name = "detect_th_extension", + main = "detect_th_extension.py", + visibility = ["PUBLIC"], +) diff --git a/prelude/haskell/tools/detect_th_extension.py b/prelude/haskell/tools/detect_th_extension.py new file mode 100644 index 000000000..b66cbaaeb --- /dev/null +++ b/prelude/haskell/tools/detect_th_extension.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 + +"""Helper script to detect when a Haskell module uses Template Haskell. + +Looks for the relevant language pragmas in source files. 
+""" + +import argparse +import re + +th_regex = re.compile(r"^\s*{-# LANGUAGE (TemplateHaskell|TemplateHaskellQuotes|QuasiQuotes) #-}") + + +def uses_th(filename): + """Determine if the given module uses Template Haskell.""" + with open(filename, "r") as file: + for line in file: + if th_regex.match(line): + return True + + return False + + +def main(): + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + "--output", + required=True, + type=argparse.FileType("w"), + help="Write the list of modules using Template Haskell to this file, separated by newline characters.") + parser.add_argument( + "modules", + nargs="+", + help="The Haskell module source files to parse.") + args = parser.parse_args() + + output = args.output + for module in args.modules: + if uses_th(module): + output.write(module + "\n") + + +if __name__ == "__main__": + main() From 5521bf8e014dd0f32d72c04a7d7a9c4dfeee378e Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 7 Feb 2024 16:14:02 +0100 Subject: [PATCH 0036/1133] [buck2] Introduce `nix_bash_genrule_toolchain` rule This rule creates a Bash script capturing the value of the `PATH` environment variable locally and then is used to run genrule scripts with `PATH` set to this value. This is useful for remote execution when running buck2 from inside the nix shell. 
--- prelude/genrule.bzl | 3 ++- prelude/genrule_toolchain.bzl | 1 + toolchains/nix_bash_env.sh | 11 +++++++++++ 3 files changed, 14 insertions(+), 1 deletion(-) create mode 100755 toolchains/nix_bash_env.sh diff --git a/prelude/genrule.bzl b/prelude/genrule.bzl index a4e1e6336..638add6b1 100644 --- a/prelude/genrule.bzl +++ b/prelude/genrule.bzl @@ -322,7 +322,8 @@ def process_genrule( if is_windows: script_args = ["cmd.exe", "/v:off", "/c", sh_script] else: - script_args = ["/usr/bin/env", "bash", "-e", sh_script] + script_args = [genrule_toolchain.bash] if genrule_toolchain.bash else ["/usr/bin/env", "bash"] + script_args.extend(["-e", sh_script]) # Only set metadata arguments when they are non-null metadata_args = {} diff --git a/prelude/genrule_toolchain.bzl b/prelude/genrule_toolchain.bzl index 38eafc371..fc5b69e18 100644 --- a/prelude/genrule_toolchain.bzl +++ b/prelude/genrule_toolchain.bzl @@ -9,5 +9,6 @@ GenruleToolchainInfo = provider( doc = "Genrule toolchain info", fields = { "zip_scrubber": provider_field(typing.Any, default = None), + "bash": provider_field(typing.Any, default = None), }, ) diff --git a/toolchains/nix_bash_env.sh b/toolchains/nix_bash_env.sh new file mode 100755 index 000000000..700401b6e --- /dev/null +++ b/toolchains/nix_bash_env.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash +set -euo pipefail + +cat > "$1" < Date: Fri, 9 Feb 2024 11:01:11 +0100 Subject: [PATCH 0037/1133] [buck2] Pass correct rpath origin setting to linker on Darwin --- prelude/haskell/haskell.bzl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 514dcf388..6eef8bfec 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -18,9 +18,11 @@ load( "CxxToolchainInfo", "PicBehavior", ) +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") load( "@prelude//cxx:linker.bzl", "LINKERS", + "get_rpath_origin", "get_shared_library_flags", ) load( @@ -1110,7 +1112,9 @@ 
def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: if link_style == LinkStyle("shared") or link_group_info != None: sos_dir = "__{}__shared_libs_symlink_tree".format(ctx.attrs.name) - link.add("-optl", "-Wl,-rpath", "-optl", "-Wl,$ORIGIN/{}".format(sos_dir)) + rpath_ref = get_rpath_origin(get_cxx_toolchain_info(ctx).linker_info.type) + rpath_ldflag = "-Wl,{}/{}".format(rpath_ref, sos_dir) + link.add("-optl", "-Wl,-rpath", "-optl", rpath_ldflag) symlink_dir = ctx.actions.symlinked_dir(sos_dir, {n: o.output for n, o in sos.items()}) run.hidden(symlink_dir) From 1c88a8e0ecd27aab4b691047080d9ee49add0dd7 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 13 Feb 2024 14:51:12 +0100 Subject: [PATCH 0038/1133] Genrule tag "re_message_too_large" ``` Internal error (stage: materialize_outputs): action_digest=... Failed to declare in materializer: Failed to make BatchReadBlobs request: status: OutOfRange, message: "Error, message length too large: found 4194593 bytes, the limit is: 4194304 bytes", details: [], metadata: MetadataMap { headers: {"content-type": "application/grpc", "grpc-encoding": "identity", "grpc-accept-encoding": "gzip"} } ``` This maximum batch request size is [hard-coded into the remote worker](https://github.com/bazelbuild/bazel/blob/f0af0145c324e009c652c53e457581c286010e34/src/tools/remote/src/main/java/com/google/devtools/build/remote/worker/CasServer.java#L46). The corresponding handling in Buck2 [seems to take place here](https://github.com/facebook/buck2/blob/6ac2dd511d1f66f2a0c8c21b62bbabff74712258/remote_execution/oss/re_grpc/src/client.rs#L904-L908). Buck2 should [fetch these capabilities](https://github.com/facebook/buck2/blob/6ac2dd511d1f66f2a0c8c21b62bbabff74712258/remote_execution/oss/re_grpc/src/client.rs#L326-L335), and even if not, Buck2's [default is smaller](https://github.com/facebook/buck2/blob/6ac2dd511d1f66f2a0c8c21b62bbabff74712258/remote_execution/oss/re_grpc/src/client.rs#L82). 
--- prelude/genrule_local_labels.bzl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/prelude/genrule_local_labels.bzl b/prelude/genrule_local_labels.bzl index 13cb97fd2..db499bce4 100644 --- a/prelude/genrule_local_labels.bzl +++ b/prelude/genrule_local_labels.bzl @@ -135,6 +135,9 @@ _GENRULE_LOCAL_LABELS = {label: True for label in [ # When run on RE produces "Cache is out of space" (excessive disk/memory) "re_cache_out_of_space", + # When run on RE produces "Error, message length too large" on `BatchReadBlobs` + "re_message_too_large", + # HHVM Post-link rules need to be local since the binary is huge. "hhvm_postlink", From 51ed9c4f9e8fdcd7854579c8f21f481b0e783764 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 30 Jan 2024 14:51:07 +0100 Subject: [PATCH 0039/1133] [buck2] Make use of `-dep-json` GHC flag --- prelude/haskell/compile.bzl | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 8558eb415..d6e429811 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -157,7 +157,7 @@ def ghc_depends(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: # Note: `-outputdir '.'` removes the prefix directory of all targets: # backend/src/Foo/Util. => Foo/Util. 
- dep_args = cmd_args(haskell_toolchain.compiler, "-M", "-outputdir", ".", "-dep-makefile", dep_file.as_output()) + dep_args = cmd_args(haskell_toolchain.compiler, "-M", "-outputdir", ".", "-dep-json", dep_file.as_output()) package_flag = _package_flag(haskell_toolchain) @@ -191,7 +191,7 @@ def uses_th(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: return th_file -def _parse_depends(depends: str, path_prefix: str) -> tuple: +def _parse_depends(depends: dict[str, list[str]], path_prefix: str) -> tuple: """ Returns a tuple of two items: @@ -202,13 +202,7 @@ def _parse_depends(depends: str, path_prefix: str) -> tuple: graph = {} mapping = {} - for line in depends.splitlines(): - if line.startswith("#"): - continue - - k, v = line.strip().split(" : ", 1) - vs = v.split(" ") - + for k, vs in depends.items(): module_name = src_to_module_name(k) deps = [ @@ -570,7 +564,7 @@ def compile( def do_compile(ctx, artifacts, outputs, dep_file=dep_file, th_file=th_file, modules=modules): path_prefix = _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)) - graph, module_map = _parse_depends(artifacts[dep_file].read_string(), path_prefix) + graph, module_map = _parse_depends(artifacts[dep_file].read_json(), path_prefix) th_modules = _parse_th(artifacts[th_file].read_string(), path_prefix) mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } From de37fd428aa7961b642f87ebe6aa647707dab180 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 22 Feb 2024 14:52:54 +0100 Subject: [PATCH 0040/1133] [buck2] Remove leading `./` in dependency json file --- prelude/haskell/compile.bzl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index d6e429811..2d1f842f7 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -203,6 +203,10 @@ def _parse_depends(depends: dict[str, list[str]], path_prefix: str) -> tuple: mapping = {} for k, vs in depends.items(): + # remove 
leading `./` caused by using `-outputdir '.'` + k = _strip_prefix("./", k) + vs = [_strip_prefix("./", v) for v in vs] + module_name = src_to_module_name(k) deps = [ From 3f9b673b582dc895576854c7b00087d4776776ca Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 22 Feb 2024 13:34:46 +0100 Subject: [PATCH 0041/1133] [buck2] generate_target_metadata.py A script to extract Haskell module metadata for a Haskell target. Currently, we depend on two separate input actions to a dynamic action for Haskell compilation: Detect Template Haskell usage, and determine the module dependency graph. This script will enable us to merge both into a single action. --- prelude/haskell/tools/BUCK.v2 | 6 + .../haskell/tools/generate_target_metadata.py | 208 ++++++++++++++++++ 2 files changed, 214 insertions(+) create mode 100755 prelude/haskell/tools/generate_target_metadata.py diff --git a/prelude/haskell/tools/BUCK.v2 b/prelude/haskell/tools/BUCK.v2 index 8a81cded4..d4558ed03 100644 --- a/prelude/haskell/tools/BUCK.v2 +++ b/prelude/haskell/tools/BUCK.v2 @@ -11,3 +11,9 @@ prelude.python_bootstrap_binary( main = "detect_th_extension.py", visibility = ["PUBLIC"], ) + +prelude.python_bootstrap_binary( + name = "generate_target_metadata", + main = "generate_target_metadata.py", + visibility = ["PUBLIC"], +) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py new file mode 100755 index 000000000..faeed5949 --- /dev/null +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -0,0 +1,208 @@ +#!/usr/bin/env python3 + +"""Helper script to generate relevant metadata about Haskell targets. + +* The mapping from module source file to actual module name. +* The inter-package module dependency graph. +* Which modules require Template Haskell. + +The result is a JSON object with the following fields: +* `th_modules`: List of modules that require Template Haskell. 
+* `module_mapping`: Mapping from source inferred module name to actual module name, if different. +* `module_graph`: Intra-package module dependencies, `dict[modname, list[modname]]`. +""" + +import argparse +import json +import os +import re +import subprocess +import tempfile + + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, + fromfile_prefix_chars="@") + parser.add_argument( + "--output", + required=True, + type=argparse.FileType("w"), + help="Write package metadata to this file in JSON format.") + parser.add_argument( + "--ghc", + required=True, + type=str, + help="Path to the Haskell compiler GHC.") + parser.add_argument( + "--ghc-arg", + required=False, + type=str, + action="append", + help="GHC compiler argument to forward to `ghc -M`, including package flags.") + parser.add_argument( + "--source-prefix", + required=True, + type=str, + help="The path prefix to strip of module sources to extract module names.") + parser.add_argument( + "--source", + required=True, + type=str, + action="append", + help="Haskell module source files of the current package.") + args = parser.parse_args() + + result = obtain_target_metadata(args) + + json.dump(result, args.output, indent=4) + + +def obtain_target_metadata(args): + th_modules = determine_th_modules(args.source, args.source_prefix) + ghc_depends = run_ghc_depends(args.ghc, args.ghc_arg, args.source) + module_mapping, module_graph = interpret_ghc_depends( + ghc_depends, args.source_prefix) + return { + "th_modules": th_modules, + "module_mapping": module_mapping, + "module_graph": module_graph, + } + + +def determine_th_modules(sources, source_prefix): + result = [] + + for fname in sources: + if uses_th(fname): + module_name = src_to_module_name( + strip_prefix_(source_prefix, fname).lstrip("/")) + result.append(module_name) + + return result + + +th_regex = re.compile(r"^\s*{-# LANGUAGE (TemplateHaskell|TemplateHaskellQuotes|QuasiQuotes) #-}") + + +def uses_th(filename): + """Determine if 
the given module uses Template Haskell.""" + with open(filename, "r") as file: + for line in file: + if th_regex.match(line): + return True + + +def run_ghc_depends(ghc, ghc_args, sources): + result = {} + + with tempfile.TemporaryDirectory() as dname: + fname = os.path.join(dname, "depends") + args = [ + ghc, "-M", + # Note: `-outputdir '.'` removes the prefix of all targets: + # backend/src/Foo/Util. => Foo/Util. + "-outputdir", ".", + "-dep-makefile", fname, + ] + ghc_args + sources + subprocess.run(args, check=True) + + with open(fname) as f: + for line in f: + if line.startswith("#"): + continue + + k, v = line.strip().split(" : ", 1) + vs = v.split(" ") + + result.setdefault(k, []).extend(vs) + + return result + + +def interpret_ghc_depends(ghc_depends, source_prefix): + graph = {} + mapping = {} + + for k, vs in ghc_depends.items(): + module_name = src_to_module_name(k) + intdeps = parse_module_deps(vs) + + graph.setdefault(module_name, []).extend(intdeps) + + ext = os.path.splitext(k)[1] + + if ext != ".o": + continue + + sources = list(filter(is_haskell_src, vs)) + + if not sources: + continue + + assert len(sources) == 1, "one object file must correspond to exactly one haskell source " + + hs_file = sources[0] + + hs_module_name = src_to_module_name( + strip_prefix_(source_prefix, hs_file).lstrip("/")) + + if hs_module_name != module_name: + mapping[hs_module_name] = module_name + + return mapping, graph + + +def parse_module_deps(module_deps): + internal_deps = [] + + for module_dep in module_deps: + if is_haskell_src(module_dep): + continue + + if os.path.isabs(module_dep): + continue + + internal_deps.append(src_to_module_name(module_dep)) + + return internal_deps + + +def src_to_module_name(x): + base, _ = os.path.splitext(x) + return base.replace("/", ".") + + +def is_haskell_src(x): + _, ext = os.path.splitext(x) + return ext in HASKELL_EXTENSIONS + + +HASKELL_EXTENSIONS = [ + ".hs", + ".lhs", + ".hsc", + ".chs", + ".x", + ".y", +] + + +def 
strip_prefix_(prefix, s): + stripped = strip_prefix(prefix, s) + + if stripped == None: + return s + + return stripped + + +def strip_prefix(prefix, s): + if s.startswith(prefix): + return s[len(prefix):] + + return None + + +if __name__ == "__main__": + main() From 48098ac4e2bedb5d70dd4a1c133e799b2c471098 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 22 Feb 2024 13:47:51 +0100 Subject: [PATCH 0042/1133] [buck2] Generate target metadata files --- prelude/decls/haskell_common.bzl | 4 +++ prelude/haskell/compile.bzl | 51 ++++++++++++++++++++++++++++++++ prelude/haskell/haskell.bzl | 6 +++- 3 files changed, 60 insertions(+), 1 deletion(-) diff --git a/prelude/decls/haskell_common.bzl b/prelude/decls/haskell_common.bzl index 03522e3b4..b78e7745d 100644 --- a/prelude/decls/haskell_common.bzl +++ b/prelude/decls/haskell_common.bzl @@ -46,6 +46,10 @@ def _scripts_arg(): providers = [RunInfo], default = "prelude//haskell/tools:detect_th_extension", ), + "_generate_target_metadata": attrs.dep( + providers = [RunInfo], + default = "prelude//haskell/tools:generate_target_metadata", + ), } haskell_common = struct( diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 2d1f842f7..09b79bad5 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -131,6 +131,57 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl return modules +def target_metadata(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: + md_file = ctx.actions.declare_output(ctx.attrs.name + ".md.json") + md_gen = ctx.attrs._generate_target_metadata[RunInfo] + + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + toolchain_libs = [ + dep[HaskellToolchainLibrary].name + for dep in ctx.attrs.deps + if HaskellToolchainLibrary in dep + ] + + # Add -package-db and -package/-expose-package flags for each Haskell + # library dependency. 
+ packages_info = get_packages_info( + ctx, + LinkStyle("shared"), + specify_pkg_version = False, + enable_profiling = False, + ) + + # The object and interface file paths are depending on the real module name + # as inferred by GHC, not the source file path; currently this requires the + # module name to correspond to the source file path as otherwise GHC will + # not be able to find the created object or interface files in the search + # path. + # + # (module X.Y.Z must be defined in a file at X/Y/Z.hs) + + package_flag = _package_flag(haskell_toolchain) + ghc_args = cmd_args() + ghc_args.add("-hide-all-packages") + ghc_args.add(package_flag, "base") + ghc_args.add(cmd_args(toolchain_libs, prepend=package_flag)) + ghc_args.add(cmd_args(packages_info.exposed_package_args)) + ghc_args.add(packages_info.packagedb_args) + ghc_args.add(ctx.attrs.compiler_flags) + + md_args = cmd_args(md_gen) + md_args.add("--output", md_file.as_output()) + md_args.add("--ghc", haskell_toolchain.compiler) + md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) + md_args.add( + "--source-prefix", + _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)), + ) + md_args.add(cmd_args(sources, format="--source={}")) + + ctx.actions.run(md_args, category = "haskell_metadata") + + return md_file + def ghc_depends(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 6eef8bfec..daeaf8e04 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -50,6 +50,7 @@ load( "HaskellLibraryInfo", "HaskellLibraryProvider", "compile", + "target_metadata", "ghc_depends", "uses_th", ) @@ -709,6 +710,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: libname = repr(ctx.label.path).replace("//", "_").replace("/", "_") + "_" + ctx.label.name pkgname = libname.replace("_", "-") + md_file = target_metadata(ctx, 
sources = ctx.attrs.srcs) dep_file = ghc_depends(ctx, sources = ctx.attrs.srcs) th_file = uses_th(ctx, sources = ctx.attrs.srcs) @@ -844,6 +846,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: providers = [ DefaultInfo( default_outputs = default_output, + other_outputs = [md_file], sub_targets = sub_targets, ), HaskellLibraryProvider( @@ -936,6 +939,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: if enable_profiling and link_style == LinkStyle("shared"): link_style = LinkStyle("static") + md_file = target_metadata(ctx, sources = ctx.attrs.srcs) dep_file = ghc_depends(ctx, sources = ctx.attrs.srcs) th_file = uses_th(ctx, sources = ctx.attrs.srcs) @@ -1119,7 +1123,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: run.hidden(symlink_dir) providers = [ - DefaultInfo(default_output = output), + DefaultInfo(default_output = output, other_outputs = [md_file]), RunInfo(args = run), ] From bae247cd935a1c48b6330525cdb4c1646b1e1900 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 22 Feb 2024 13:54:57 +0100 Subject: [PATCH 0043/1133] [buck2] Use JSON metadata for Haskell compilation --- prelude/haskell/compile.bzl | 12 +++++++----- prelude/haskell/haskell.bzl | 7 +++++-- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 09b79bad5..d7f9d5d25 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -610,6 +610,7 @@ def compile( ctx: AnalysisContext, link_style: LinkStyle, enable_profiling: bool, + md_file: Artifact, dep_file: Artifact, th_file: Artifact, pkgname: str | None = None) -> CompileResultInfo: @@ -617,10 +618,11 @@ def compile( modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) - def do_compile(ctx, artifacts, outputs, dep_file=dep_file, th_file=th_file, modules=modules): - path_prefix = 
_strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)) - graph, module_map = _parse_depends(artifacts[dep_file].read_json(), path_prefix) - th_modules = _parse_th(artifacts[th_file].read_string(), path_prefix) + def do_compile(ctx, artifacts, outputs, md_file=md_file, dep_file=dep_file, th_file=th_file, modules=modules): + md = artifacts[md_file].read_json() + th_modules = md["th_modules"] + graph = md["module_graph"] + module_map = md["module_mapping"] mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } @@ -644,7 +646,7 @@ def compile( stub_dirs = [module.stub_dir for module in modules.values()] ctx.actions.dynamic_output( - dynamic = [dep_file, th_file], + dynamic = [md_file, dep_file, th_file], inputs = ctx.attrs.srcs, outputs = interfaces + objects + stub_dirs, f = do_compile) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index daeaf8e04..1f0084abb 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -519,6 +519,7 @@ def _build_haskell_lib( nlis: list[MergedLinkInfo], # native link infos from all deps link_style: LinkStyle, enable_profiling: bool, + md_file: Artifact, dep_file: Artifact, th_file: Artifact, # The non-profiling artifacts are also needed to build the package for @@ -536,6 +537,7 @@ def _build_haskell_lib( ctx, link_style, enable_profiling = enable_profiling, + md_file = md_file, dep_file = dep_file, th_file = th_file, pkgname = pkgname, @@ -732,6 +734,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: nlis = nlis, link_style = link_style, enable_profiling = enable_profiling, + md_file = md_file, dep_file = dep_file, th_file = th_file, non_profiling_hlib = non_profiling_hlib.get(link_style), @@ -846,7 +849,6 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: providers = [ DefaultInfo( default_outputs = default_output, - other_outputs = [md_file], sub_targets = sub_targets, ), HaskellLibraryProvider( @@ -947,6 +949,7 @@ def 
haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: ctx, link_style, enable_profiling = enable_profiling, + md_file = md_file, dep_file = dep_file, th_file = th_file, ) @@ -1123,7 +1126,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: run.hidden(symlink_dir) providers = [ - DefaultInfo(default_output = output, other_outputs = [md_file]), + DefaultInfo(default_output = output), RunInfo(args = run), ] From 5a019637c43a32b80eea8a702221fdfdcdf624fc Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 22 Feb 2024 13:57:36 +0100 Subject: [PATCH 0044/1133] [buck2] Use md_file for -i flag --- prelude/haskell/compile.bzl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index d7f9d5d25..51654bda6 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -560,7 +560,7 @@ def _compile_module( enable_th: bool, module_name: str, modules: dict[str, _Module], - dep_file: Artifact, + md_file: Artifact, graph: dict[str, list[str]], outputs: dict[Artifact, Artifact], artifact_suffix: str, @@ -590,7 +590,7 @@ def _compile_module( compile_cmd.add( cmd_args( - cmd_args(dep_file, format = "-i{}").parent(), + cmd_args(md_file, format = "-i{}").parent(), "/", module.prefix_dir, delimiter="" @@ -636,7 +636,7 @@ def compile( modules = mapped_modules, graph = graph, outputs = outputs, - dep_file=dep_file, + md_file=md_file, artifact_suffix = artifact_suffix, pkgname = pkgname, ) From 02714cbd33cc3b8e5facf7b66f3c29e97530623d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 22 Feb 2024 13:59:31 +0100 Subject: [PATCH 0045/1133] [buck2] remove unused dep_file and th_file --- prelude/haskell/compile.bzl | 121 +----------------------------------- prelude/haskell/haskell.bzl | 14 ----- 2 files changed, 2 insertions(+), 133 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 51654bda6..e6088ce41 100644 --- 
a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -182,121 +182,6 @@ def target_metadata(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifac return md_file -def ghc_depends(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - - toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] - - # Add -package-db and -package/-expose-package flags for each Haskell - # library dependency. - packages_info = get_packages_info( - ctx, - LinkStyle("shared"), - specify_pkg_version = False, - enable_profiling = False, - ) - - dep_file = ctx.actions.declare_output(ctx.attrs.name + ".depends") - - # The object and interface file paths are depending on the real module name - # as inferred by GHC, not the source file path; currently this requires the - # module name to correspond to the source file path as otherwise GHC will - # not be able to find the created object or interface files in the search - # path. - # - # (module X.Y.Z must be defined in a file at X/Y/Z.hs) - - # Note: `-outputdir '.'` removes the prefix directory of all targets: - # backend/src/Foo/Util. => Foo/Util. - dep_args = cmd_args(haskell_toolchain.compiler, "-M", "-outputdir", ".", "-dep-json", dep_file.as_output()) - - package_flag = _package_flag(haskell_toolchain) - - dep_args.add("-hide-all-packages") - dep_args.add(package_flag, "base") - dep_args.add(cmd_args(toolchain_libs, prepend=package_flag)) - dep_args.add(cmd_args(packages_info.exposed_package_args)) - dep_args.add(packages_info.packagedb_args) - - dep_args.add(ctx.attrs.compiler_flags) - dep_args.add(sources) - ctx.actions.run(dep_args, category = "ghc_depends") - - return dep_file - -def uses_th(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: - """Determine which of the given modules use Template Haskell. - - Template Haskell compilation requires additional inputs. 
We can avoid these - inputs if Template Haskell is not used. - - Ideally, GHC would expose this information in the generated depends file. - Until it does so, we use this workaround. - """ - th_file = ctx.actions.declare_output(ctx.attrs.name + ".th") - - ctx.actions.run( - cmd_args(ctx.attrs._detect_th_extension[RunInfo], "--output", th_file.as_output(), sources), - category = "haskell_th", - ) - - return th_file - -def _parse_depends(depends: dict[str, list[str]], path_prefix: str) -> tuple: - """ - Returns a tuple of two items: - - 1. the module dependency graph as a dict[str, list[str]] - 2. a mapping from module name inferred from the source path to the real module name as a dict[str, str] - (only mismatching module names are added to the mapping) - """ - graph = {} - mapping = {} - - for k, vs in depends.items(): - # remove leading `./` caused by using `-outputdir '.'` - k = _strip_prefix("./", k) - vs = [_strip_prefix("./", v) for v in vs] - - module_name = src_to_module_name(k) - - deps = [ - src_to_module_name(v) - for v in vs - if not is_haskell_src(v) - ] - - graph.setdefault(module_name, []).extend(deps) - - ext = paths.split_extension(k)[1] - - if ext != ".o": continue - - sources = filter(is_haskell_src, vs) - - if not sources: continue - - if len(sources) != 1: fail("one object file must correspond to exactly one haskell source") - - hs_file = sources[0] - - hs_module_name = src_to_module_name(_strip_prefix(path_prefix, hs_file).lstrip("/")) - - if hs_module_name != module_name: - mapping[hs_module_name] = module_name - - return (graph, mapping) - -def _parse_th(th_file: str, path_prefix: str) -> list[str]: - """Returns the list of modules that use Template Haskell.""" - result = [] - - for line in th_file.splitlines(): - module_name = src_to_module_name(_strip_prefix(path_prefix, line.strip()).lstrip("/")) - result.append(module_name) - - return result - def _attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo]: return filter( 
None, @@ -611,14 +496,12 @@ def compile( link_style: LinkStyle, enable_profiling: bool, md_file: Artifact, - dep_file: Artifact, - th_file: Artifact, pkgname: str | None = None) -> CompileResultInfo: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) - def do_compile(ctx, artifacts, outputs, md_file=md_file, dep_file=dep_file, th_file=th_file, modules=modules): + def do_compile(ctx, artifacts, outputs, md_file=md_file, modules=modules): md = artifacts[md_file].read_json() th_modules = md["th_modules"] graph = md["module_graph"] @@ -646,7 +529,7 @@ def compile( stub_dirs = [module.stub_dir for module in modules.values()] ctx.actions.dynamic_output( - dynamic = [md_file, dep_file, th_file], + dynamic = [md_file], inputs = ctx.attrs.srcs, outputs = interfaces + objects + stub_dirs, f = do_compile) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 1f0084abb..b752a1f88 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -51,8 +51,6 @@ load( "HaskellLibraryProvider", "compile", "target_metadata", - "ghc_depends", - "uses_th", ) load( "@prelude//haskell:haskell_haddock.bzl", @@ -520,8 +518,6 @@ def _build_haskell_lib( link_style: LinkStyle, enable_profiling: bool, md_file: Artifact, - dep_file: Artifact, - th_file: Artifact, # The non-profiling artifacts are also needed to build the package for # profiling, so it should be passed when `enable_profiling` is True. 
non_profiling_hlib: [HaskellLibBuildOutput, None] = None) -> HaskellLibBuildOutput: @@ -538,8 +534,6 @@ def _build_haskell_lib( link_style, enable_profiling = enable_profiling, md_file = md_file, - dep_file = dep_file, - th_file = th_file, pkgname = pkgname, ) solibs = {} @@ -713,8 +707,6 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: pkgname = libname.replace("_", "-") md_file = target_metadata(ctx, sources = ctx.attrs.srcs) - dep_file = ghc_depends(ctx, sources = ctx.attrs.srcs) - th_file = uses_th(ctx, sources = ctx.attrs.srcs) # The non-profiling library is also needed to build the package with # profiling enabled, so we need to keep track of it for each link style. @@ -735,8 +727,6 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: link_style = link_style, enable_profiling = enable_profiling, md_file = md_file, - dep_file = dep_file, - th_file = th_file, non_profiling_hlib = non_profiling_hlib.get(link_style), ) if not enable_profiling: @@ -942,16 +932,12 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link_style = LinkStyle("static") md_file = target_metadata(ctx, sources = ctx.attrs.srcs) - dep_file = ghc_depends(ctx, sources = ctx.attrs.srcs) - th_file = uses_th(ctx, sources = ctx.attrs.srcs) compiled = compile( ctx, link_style, enable_profiling = enable_profiling, md_file = md_file, - dep_file = dep_file, - th_file = th_file, ) haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] From 434e6bb6c733f01872e327a81e7716f91032d16d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 22 Feb 2024 14:00:39 +0100 Subject: [PATCH 0046/1133] [buck2] Remove unused detect_th_extension.py --- prelude/decls/haskell_common.bzl | 4 -- prelude/haskell/tools/BUCK.v2 | 6 --- prelude/haskell/tools/detect_th_extension.py | 44 -------------------- 3 files changed, 54 deletions(-) delete mode 100644 prelude/haskell/tools/detect_th_extension.py diff --git a/prelude/decls/haskell_common.bzl 
b/prelude/decls/haskell_common.bzl index b78e7745d..20c58910c 100644 --- a/prelude/decls/haskell_common.bzl +++ b/prelude/decls/haskell_common.bzl @@ -42,10 +42,6 @@ def _exported_linker_flags_arg(): def _scripts_arg(): return { - "_detect_th_extension": attrs.dep( - providers = [RunInfo], - default = "prelude//haskell/tools:detect_th_extension", - ), "_generate_target_metadata": attrs.dep( providers = [RunInfo], default = "prelude//haskell/tools:generate_target_metadata", diff --git a/prelude/haskell/tools/BUCK.v2 b/prelude/haskell/tools/BUCK.v2 index d4558ed03..c1a362652 100644 --- a/prelude/haskell/tools/BUCK.v2 +++ b/prelude/haskell/tools/BUCK.v2 @@ -6,12 +6,6 @@ prelude.python_bootstrap_binary( visibility = ["PUBLIC"], ) -prelude.python_bootstrap_binary( - name = "detect_th_extension", - main = "detect_th_extension.py", - visibility = ["PUBLIC"], -) - prelude.python_bootstrap_binary( name = "generate_target_metadata", main = "generate_target_metadata.py", diff --git a/prelude/haskell/tools/detect_th_extension.py b/prelude/haskell/tools/detect_th_extension.py deleted file mode 100644 index b66cbaaeb..000000000 --- a/prelude/haskell/tools/detect_th_extension.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python3 - -"""Helper script to detect when a Haskell module uses Template Haskell. - -Looks for the relevant language pragmas in source files. 
-""" - -import argparse -import re - -th_regex = re.compile(r"^\s*{-# LANGUAGE (TemplateHaskell|TemplateHaskellQuotes|QuasiQuotes) #-}") - - -def uses_th(filename): - """Determine if the given module uses Template Haskell.""" - with open(filename, "r") as file: - for line in file: - if th_regex.match(line): - return True - - return False - - -def main(): - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--output", - required=True, - type=argparse.FileType("w"), - help="Write the list of modules using Template Haskell to this file, separated by newline characters.") - parser.add_argument( - "modules", - nargs="+", - help="The Haskell module source files to parse.") - args = parser.parse_args() - - output = args.output - for module in args.modules: - if uses_th(module): - output.write(module + "\n") - - -if __name__ == "__main__": - main() From efee6a46c7b5ec151f788f02ae54e6edf654a324 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 22 Feb 2024 18:31:48 +0100 Subject: [PATCH 0047/1133] [buck2] Use GHC `-dep-json` --- .../haskell/tools/generate_target_metadata.py | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index faeed5949..810adc4a6 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -94,8 +94,6 @@ def uses_th(filename): def run_ghc_depends(ghc, ghc_args, sources): - result = {} - with tempfile.TemporaryDirectory() as dname: fname = os.path.join(dname, "depends") args = [ @@ -103,21 +101,12 @@ def run_ghc_depends(ghc, ghc_args, sources): # Note: `-outputdir '.'` removes the prefix of all targets: # backend/src/Foo/Util. => Foo/Util. 
"-outputdir", ".", - "-dep-makefile", fname, + "-dep-json", fname, ] + ghc_args + sources subprocess.run(args, check=True) with open(fname) as f: - for line in f: - if line.startswith("#"): - continue - - k, v = line.strip().split(" : ", 1) - vs = v.split(" ") - - result.setdefault(k, []).extend(vs) - - return result + return json.load(f) def interpret_ghc_depends(ghc_depends, source_prefix): @@ -125,6 +114,10 @@ def interpret_ghc_depends(ghc_depends, source_prefix): mapping = {} for k, vs in ghc_depends.items(): + # remove lead `./` caused by using `-outputdir '.'`. + k = strip_prefix_("./", k) + vs = [strip_prefix_("./", v) for v in vs] + module_name = src_to_module_name(k) intdeps = parse_module_deps(vs) From 1a5122615aa9d95a60e762b4f42a7a2f6f8b369e Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 22 Feb 2024 15:11:58 +0100 Subject: [PATCH 0048/1133] [buck2] CompileResultInfo track hi list This will enable us to track the individual interface files rather than a single import directory. --- prelude/haskell/compile.bzl | 12 +++++++----- prelude/haskell/haskell.bzl | 4 ++-- prelude/haskell/haskell_haddock.bzl | 2 +- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index e6088ce41..541b1190e 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -40,7 +40,7 @@ load("@prelude//utils:strings.bzl", "strip_prefix") # The type of the return value of the `_compile()` function. CompileResultInfo = record( objects = field(Artifact), - hi = field(Artifact), + hi = field(list[Artifact]), stubs = field(Artifact), producing_indices = field(bool), ) @@ -73,7 +73,7 @@ HaskellLibraryInfo = record( # e.g. 
"base-4.13.0.0" id = str, # Import dirs indexed by profiling enabled/disabled - import_dirs = dict[bool, Artifact], + import_dirs = dict[bool, list[Artifact]], stub_dirs = list[Artifact], # This field is only used as hidden inputs to compilation, to @@ -346,6 +346,8 @@ def compile_args( artifact_suffix = get_artifact_suffix(link_style, enable_profiling, suffix) + # TODO[AH] These are only used for haddock and conflict with tracking + # per-module outputs individually. Rework the Haddock part to support this. objects = ctx.actions.declare_output( "objects-" + artifact_suffix, dir = True, @@ -378,7 +380,7 @@ def compile_args( return CompileArgsInfo( result = CompileResultInfo( objects = objects, - hi = hi, + hi = [hi], stubs = stubs, producing_indices = producing_indices, ), @@ -427,7 +429,7 @@ def _compile_module_args( return CompileArgsInfo( result = CompileResultInfo( objects = object, - hi = hi, + hi = [hi], stubs = stubs, producing_indices = producing_indices, ), @@ -568,7 +570,7 @@ def compile( return CompileResultInfo( objects = object_dir, - hi = hi_dir, + hi = [hi_dir], stubs = stubs_dir, producing_indices = False, ) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index b752a1f88..e062abee4 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -416,7 +416,7 @@ def _make_package( pkgname: str, libname: str, hlis: list[HaskellLibraryInfo], - hi: dict[bool, Artifact], + hi: dict[bool, list[Artifact]], lib: dict[bool, Artifact], enable_profiling: bool) -> Artifact: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) @@ -902,7 +902,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: def derive_indexing_tset( actions: AnalysisActions, link_style: LinkStyle, - value: [Artifact, None], + value: [list[Artifact], None], children: list[Dependency]) -> HaskellIndexingTSet: index_children = [] for dep in children: diff --git a/prelude/haskell/haskell_haddock.bzl 
b/prelude/haskell/haskell_haddock.bzl index c485df3e5..d80d48475 100644 --- a/prelude/haskell/haskell_haddock.bzl +++ b/prelude/haskell/haskell_haddock.bzl @@ -89,7 +89,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: "mkdir", "-p", args.result.objects.as_output(), - args.result.hi.as_output(), + args.result.hi[0].as_output(), args.result.stubs.as_output(), "&&", cmd_args(cmd, quote = "shell"), From 7fbc7c09061673ae30052585d1fec28de858c50d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 22 Feb 2024 15:23:50 +0100 Subject: [PATCH 0049/1133] [buck2] Track individual hi outputs --- prelude/haskell/compile.bzl | 8 +------- prelude/haskell/haskell.bzl | 2 +- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 541b1190e..760e0e87e 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -542,12 +542,6 @@ def compile( a.short_path : a for a in objects }) - hi_dir = ctx.actions.declare_output("hi-" + artifact_suffix, dir=True) - - ctx.actions.copied_dir(hi_dir.as_output(), { - a.short_path : a for a in interfaces - }) - stubs_dir = ctx.actions.declare_output("stubs-" + artifact_suffix, dir=True) # collect the stubs from all modules into the stubs_dir @@ -570,7 +564,7 @@ def compile( return CompileResultInfo( objects = object_dir, - hi = [hi_dir], + hi = interfaces, stubs = stubs_dir, producing_indices = False, ) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index e062abee4..654838229 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -438,7 +438,7 @@ def _make_package( return "\"${pkgroot}/" + dir_prefix + "-" + art_suff + "\"" import_dirs = [ - mk_artifact_dir("hi", profiled) + mk_artifact_dir("mod", profiled) for profiled in hi.keys() ] library_dirs = [ From 0406593d43de1cdafa1b0a554efb4170805404f9 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 22 Feb 2024 16:44:34 +0100 
Subject: [PATCH 0050/1133] [buck2] Track individual object files --- prelude/haskell/compile.bzl | 14 ++++---------- prelude/haskell/haskell.bzl | 20 +++----------------- prelude/haskell/haskell_haddock.bzl | 2 +- 3 files changed, 8 insertions(+), 28 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 760e0e87e..9b173d934 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -39,7 +39,7 @@ load("@prelude//utils:strings.bzl", "strip_prefix") # The type of the return value of the `_compile()` function. CompileResultInfo = record( - objects = field(Artifact), + objects = field(list[Artifact]), hi = field(list[Artifact]), stubs = field(Artifact), producing_indices = field(bool), @@ -379,7 +379,7 @@ def compile_args( return CompileArgsInfo( result = CompileResultInfo( - objects = objects, + objects = [objects], hi = [hi], stubs = stubs, producing_indices = producing_indices, @@ -428,7 +428,7 @@ def _compile_module_args( return CompileArgsInfo( result = CompileResultInfo( - objects = object, + objects = [object], hi = [hi], stubs = stubs, producing_indices = producing_indices, @@ -536,12 +536,6 @@ def compile( outputs = interfaces + objects + stub_dirs, f = do_compile) - object_dir = ctx.actions.declare_output("objects-" + artifact_suffix, dir=True) - - ctx.actions.copied_dir(object_dir.as_output(), { - a.short_path : a for a in objects - }) - stubs_dir = ctx.actions.declare_output("stubs-" + artifact_suffix, dir=True) # collect the stubs from all modules into the stubs_dir @@ -563,7 +557,7 @@ def compile( ) return CompileResultInfo( - objects = object_dir, + objects = objects, hi = interfaces, stubs = stubs_dir, producing_indices = False, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 654838229..7e5dd3e49 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -367,17 +367,6 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: 
linkable_graph, ] -def _srcs_to_objfiles( - ctx: AnalysisContext, - odir: Artifact, - osuf: str) -> cmd_args: - objfiles = cmd_args() - for src, _ in srcs_to_pairs(ctx.attrs.srcs): - # Don't link boot sources, as they're only meant to be used for compiling. - if is_haskell_src(src): - objfiles.add(cmd_args([odir, "/", paths.replace_extension(src, "." + osuf)], delimiter = "")) - return objfiles - # Script to generate a GHC package-db entry for a new package. # # Sets --force so that ghc-pkg does not check for .hi, .so, ... files. @@ -552,8 +541,6 @@ def _build_haskell_lib( linfos = [x.prof_info if enable_profiling else x.info for x in hlis] uniq_infos = dedupe(flatten([x[link_style] for x in linfos])) - objfiles = _srcs_to_objfiles(ctx, compiled.objects, osuf) - toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] if link_style == LinkStyle("shared"): @@ -573,7 +560,7 @@ def _build_haskell_lib( ), ) - link.add(objfiles) + link.add(compiled.objects) infos = get_link_args_for_strategy( ctx, @@ -595,7 +582,7 @@ def _build_haskell_lib( else: # static flavours # TODO: avoid making an archive for a single object, like cxx does # (but would that work with Template Haskell?) 
- archive = make_archive(ctx, lib_short_path, [compiled.objects], objfiles) + archive = make_archive(ctx, lib_short_path, compiled.objects) lib = archive.artifact libs = [lib] + archive.external_objects link_infos = LinkInfos( @@ -954,8 +941,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: osuf, _hisuf = output_extensions(link_style, enable_profiling) - objfiles = _srcs_to_objfiles(ctx, compiled.objects, osuf) - link.add(objfiles) + link.add(compiled.objects) indexing_tsets = {} if compiled.producing_indices: diff --git a/prelude/haskell/haskell_haddock.bzl b/prelude/haskell/haskell_haddock.bzl index d80d48475..4e8489790 100644 --- a/prelude/haskell/haskell_haddock.bzl +++ b/prelude/haskell/haskell_haddock.bzl @@ -88,7 +88,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: script_args = cmd_args([ "mkdir", "-p", - args.result.objects.as_output(), + args.result.objects[0].as_output(), args.result.hi[0].as_output(), args.result.stubs.as_output(), "&&", From d9c407443d37445e7c09026744a8ea85c7cf3fc0 Mon Sep 17 00:00:00 2001 From: Claudio Date: Tue, 27 Feb 2024 10:38:24 +0100 Subject: [PATCH 0051/1133] Reduce max batch size for bazel-remote-worker - remove `re_message_too_large` label Fixes #48 --- .../0004-Reduce-max-batch-size.patch | 28 +++++++++++++++++++ prelude/genrule_local_labels.bzl | 3 -- 2 files changed, 28 insertions(+), 3 deletions(-) create mode 100644 nix/overlays/bazel-remote-worker/0004-Reduce-max-batch-size.patch diff --git a/nix/overlays/bazel-remote-worker/0004-Reduce-max-batch-size.patch b/nix/overlays/bazel-remote-worker/0004-Reduce-max-batch-size.patch new file mode 100644 index 000000000..245a2f6fb --- /dev/null +++ b/nix/overlays/bazel-remote-worker/0004-Reduce-max-batch-size.patch @@ -0,0 +1,28 @@ +commit e47452fb67065fa05410c831bc9a5de9065541f6 +Author: Claudio Bley +Date: Tue Feb 27 10:19:20 2024 +0100 + + Configure the default max batch size to leave room for headers + + The default gRPC max 
transport message (including headers) is 4MiB and may be + enforced by the client when receiving a response. + + This can lead to problems when the response of a batch read request is larger + than this limit. + + Leave some room for the headers by setting the max batch size to 4MB. + +diff --git a/src/tools/remote/src/main/java/com/google/devtools/build/remote/worker/CasServer.java b/src/tools/remote/src/main/java/com/google/devtools/build/remote/worker/CasServer.java +index d874b879af..6347bb669f 100644 +--- a/src/tools/remote/src/main/java/com/google/devtools/build/remote/worker/CasServer.java ++++ b/src/tools/remote/src/main/java/com/google/devtools/build/remote/worker/CasServer.java +@@ -42,7 +42,7 @@ import java.util.Set; + /** A basic implementation of a {@link ContentAddressableStorageImplBase} service. */ + final class CasServer extends ContentAddressableStorageImplBase { + private static final GoogleLogger logger = GoogleLogger.forEnclosingClass(); +- static final long MAX_BATCH_SIZE_BYTES = 1024 * 1024 * 4; ++ static final long MAX_BATCH_SIZE_BYTES = 1000 * 1000 * 4; + private final OnDiskBlobStoreCache cache; + + public CasServer(OnDiskBlobStoreCache cache) { + diff --git a/prelude/genrule_local_labels.bzl b/prelude/genrule_local_labels.bzl index db499bce4..13cb97fd2 100644 --- a/prelude/genrule_local_labels.bzl +++ b/prelude/genrule_local_labels.bzl @@ -135,9 +135,6 @@ _GENRULE_LOCAL_LABELS = {label: True for label in [ # When run on RE produces "Cache is out of space" (excessive disk/memory) "re_cache_out_of_space", - # When run on RE produces "Error, message length too large" on `BatchReadBlobs` - "re_message_too_large", - # HHVM Post-link rules need to be local since the binary is huge. 
"hhvm_postlink", From e475ced93d2fe8f8447fe0f0745c9583072538a7 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 23 Feb 2024 11:15:06 +0100 Subject: [PATCH 0052/1133] [buck2] inter --> intra --- prelude/haskell/tools/generate_target_metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 810adc4a6..6366241a2 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -3,7 +3,7 @@ """Helper script to generate relevant metadata about Haskell targets. * The mapping from module source file to actual module name. -* The inter-package module dependency graph. +* The intra-package module dependency graph. * Which modules require Template Haskell. The result is a JSON object with the following fields: From 386fb456665752e8a84d50bd76fc16a12ced7c75 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 23 Feb 2024 11:25:05 +0100 Subject: [PATCH 0053/1133] [buck2] Obtain cross-package deps from GHC --- prelude/haskell/tools/generate_target_metadata.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 6366241a2..af4a7aab2 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -97,7 +97,7 @@ def run_ghc_depends(ghc, ghc_args, sources): with tempfile.TemporaryDirectory() as dname: fname = os.path.join(dname, "depends") args = [ - ghc, "-M", + ghc, "-M", "-include-pkg-deps", # Note: `-outputdir '.'` removes the prefix of all targets: # backend/src/Foo/Util. => Foo/Util. 
"-outputdir", ".", @@ -118,6 +118,9 @@ def interpret_ghc_depends(ghc_depends, source_prefix): k = strip_prefix_("./", k) vs = [strip_prefix_("./", v) for v in vs] + # TODO: Handle pkg-deps + vs = filter(lambda x: not x.startswith("buck-out"), vs) + module_name = src_to_module_name(k) intdeps = parse_module_deps(vs) From 8c0f09cb277ae9093bb48812e625431051bac8f0 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 23 Feb 2024 11:27:52 +0100 Subject: [PATCH 0054/1133] [buck2] Prevent Makefile clobbering `ghc -M` will write to a file called `Makefile` in the working directory when `-dep-makefile` is not set, even if `-dep-json` is set. Since local Buck2 builds are not isolated, this means that the user's repository directory will be clobbered. This change passes `-dep-makefile` to direct GHC's output towards a temporary file to avoid this. --- prelude/haskell/tools/generate_target_metadata.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index af4a7aab2..4ac6ba3e0 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -95,17 +95,19 @@ def uses_th(filename): def run_ghc_depends(ghc, ghc_args, sources): with tempfile.TemporaryDirectory() as dname: - fname = os.path.join(dname, "depends") + json_fname = os.path.join(dname, "depends.json") + make_fname = os.path.join(dname, "depends.json") args = [ ghc, "-M", "-include-pkg-deps", # Note: `-outputdir '.'` removes the prefix of all targets: # backend/src/Foo/Util. => Foo/Util. 
"-outputdir", ".", - "-dep-json", fname, + "-dep-json", json_fname, + "-dep-makefile", make_fname, ] + ghc_args + sources subprocess.run(args, check=True) - with open(fname) as f: + with open(json_fname) as f: return json.load(f) From 5385a2c1f1a04ead0af16fd1c1fe5cbabf6a8b55 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 23 Feb 2024 11:47:59 +0100 Subject: [PATCH 0055/1133] [buck2] Track package imports as artifact list --- prelude/haskell/compile.bzl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 9b173d934..99ee94a1c 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -90,7 +90,7 @@ HaskellLibraryInfo = record( ) PackagesInfo = record( - exposed_package_imports = cmd_args, + exposed_package_imports = field(list[Artifact]), exposed_package_libs = cmd_args, exposed_package_args = cmd_args, packagedb_args = cmd_args, @@ -232,14 +232,14 @@ def get_packages_info( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) - exposed_package_imports = cmd_args() + exposed_package_imports = [] exposed_package_libs = cmd_args() exposed_package_args = cmd_args([package_flag, "base"]) packagedb_args = cmd_args() for lib in libs.values(): - exposed_package_imports.hidden(lib.import_dirs.values()) + exposed_package_imports.extend(lib.import_dirs[enable_profiling]) # libs of dependencies might be needed at compile time if # we're using Template Haskell: @@ -307,7 +307,7 @@ def _common_compile_args( ) compile_args.add(packages_info.exposed_package_args) - compile_args.add(packages_info.exposed_package_imports) + compile_args.hidden(packages_info.exposed_package_imports) compile_args.add(packages_info.packagedb_args) if enable_th: compile_args.add(packages_info.exposed_package_libs) From 5a1f4de49ae3930988676e050ab20410acb4daf2 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 27 Feb 2024 11:10:14 +0100 
Subject: [PATCH 0056/1133] Store package name and output prefix --- prelude/haskell/compile.bzl | 8 +++++++- prelude/haskell/haskell.bzl | 8 ++++++-- prelude/haskell/tools/generate_target_metadata.py | 10 ++++++++++ 3 files changed, 23 insertions(+), 3 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 99ee94a1c..87efffd52 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -131,7 +131,12 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl return modules -def target_metadata(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifact: +def target_metadata( + ctx: AnalysisContext, + *, + pkgname: str, + sources: list[Artifact], + ) -> Artifact: md_file = ctx.actions.declare_output(ctx.attrs.name + ".md.json") md_gen = ctx.attrs._generate_target_metadata[RunInfo] @@ -169,6 +174,7 @@ def target_metadata(ctx: AnalysisContext, *, sources: list[Artifact]) -> Artifac ghc_args.add(ctx.attrs.compiler_flags) md_args = cmd_args(md_gen) + md_args.add("--pkgname", pkgname) md_args.add("--output", md_file.as_output()) md_args.add("--ghc", haskell_toolchain.compiler) md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 7e5dd3e49..014fe0105 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -693,7 +693,11 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: libname = repr(ctx.label.path).replace("//", "_").replace("/", "_") + "_" + ctx.label.name pkgname = libname.replace("_", "-") - md_file = target_metadata(ctx, sources = ctx.attrs.srcs) + md_file = target_metadata( + ctx, + pkgname = pkgname, + sources = ctx.attrs.srcs, + ) # The non-profiling library is also needed to build the package with # profiling enabled, so we need to keep track of it for each link style. 
@@ -918,7 +922,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: if enable_profiling and link_style == LinkStyle("shared"): link_style = LinkStyle("static") - md_file = target_metadata(ctx, sources = ctx.attrs.srcs) + md_file = target_metadata(ctx, pkgname = "", sources = ctx.attrs.srcs) compiled = compile( ctx, diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 4ac6ba3e0..5a21eec9e 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -4,12 +4,14 @@ * The mapping from module source file to actual module name. * The intra-package module dependency graph. +* The transitive cross-package module dependency graph. * Which modules require Template Haskell. The result is a JSON object with the following fields: * `th_modules`: List of modules that require Template Haskell. * `module_mapping`: Mapping from source inferred module name to actual module name, if different. * `module_graph`: Intra-package module dependencies, `dict[modname, list[modname]]`. +* `transitive_deps`: Cross-package module dependencies, `dict[modname, dict[pkgname, list[modname]]]`. 
""" import argparse @@ -24,6 +26,11 @@ def main(): parser = argparse.ArgumentParser( description=__doc__, fromfile_prefix_chars="@") + parser.add_argument( + "--pkgname", + required=True, + type=str, + help="The name of the current package.") parser.add_argument( "--output", required=True, @@ -59,11 +66,14 @@ def main(): def obtain_target_metadata(args): + output_prefix = os.path.dirname(args.output.name) th_modules = determine_th_modules(args.source, args.source_prefix) ghc_depends = run_ghc_depends(args.ghc, args.ghc_arg, args.source) module_mapping, module_graph = interpret_ghc_depends( ghc_depends, args.source_prefix) return { + "pkgname": args.pkgname, + "output_prefix": output_prefix, "th_modules": th_modules, "module_mapping": module_mapping, "module_graph": module_graph, From 142f488ac294eb291719e0e9b0b240fdb1cb8ca1 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 27 Feb 2024 11:13:59 +0100 Subject: [PATCH 0057/1133] Track metadata in providers --- prelude/haskell/compile.bzl | 1 + prelude/haskell/haskell.bzl | 2 ++ 2 files changed, 3 insertions(+) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 87efffd52..213af5f50 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -59,6 +59,7 @@ CompileArgsInfo = record( # indirect dependencies for the purposes of module visibility. 
HaskellLibraryProvider = provider( fields = { + "metadata": provider_field(typing.Any, default = None), # Artifact "lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] "prof_lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] }, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 014fe0105..013ba67be 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -298,6 +298,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: prof_info = prof_hlinkinfos, ) haskell_lib_provider = HaskellLibraryProvider( + metadata = None, lib = hlibinfos, prof_lib = prof_hlibinfos, ) @@ -833,6 +834,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: sub_targets = sub_targets, ), HaskellLibraryProvider( + metadata = md_file, lib = hlib_infos, prof_lib = prof_hlib_infos, ), From 339f9e15c81c9c24dc0bb84cd6dc93b04d649c67 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 27 Feb 2024 11:17:57 +0100 Subject: [PATCH 0058/1133] Forward package dependency metadata --- prelude/haskell/compile.bzl | 21 +++++++++++++++++++ .../haskell/tools/generate_target_metadata.py | 7 +++++++ 2 files changed, 28 insertions(+) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 213af5f50..fe5351f99 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -185,6 +185,11 @@ def target_metadata( ) md_args.add(cmd_args(sources, format="--source={}")) + md_args.add(cmd_args( + _attr_deps_haskell_lib_metadata_files(ctx), + format="--dependency-metadata={}", + )) + ctx.actions.run(md_args, category = "haskell_metadata") return md_file @@ -198,6 +203,22 @@ def _attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo] ], ) +def _attr_deps_haskell_lib_metadata_files(ctx: AnalysisContext) -> list[Artifact]: + result = [] + + for dep in attr_deps(ctx) + ctx.attrs.template_deps: + lib = 
dep.get(HaskellLibraryProvider) + if lib == None: + continue + + md = lib.metadata + if md == None: + continue + + result.append(md) + + return result + def _attr_deps_haskell_lib_infos( ctx: AnalysisContext, link_style: LinkStyle, diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 5a21eec9e..df96bfcd1 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -58,6 +58,13 @@ def main(): type=str, action="append", help="Haskell module source files of the current package.") + parser.add_argument( + "--dependency-metadata", + required=False, + default=[], + type=str, + action="append", + help="Path to the JSON metadata file of a package dependency.") args = parser.parse_args() result = obtain_target_metadata(args) From f2ed68bd42391ca800102f32e71927a8e10a0135 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 27 Feb 2024 11:44:35 +0100 Subject: [PATCH 0059/1133] calculate external dependencies --- .../haskell/tools/generate_target_metadata.py | 59 +++++++++++++++---- 1 file changed, 48 insertions(+), 11 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index df96bfcd1..7f910606d 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -17,6 +17,7 @@ import argparse import json import os +from pathlib import Path import re import subprocess import tempfile @@ -76,14 +77,17 @@ def obtain_target_metadata(args): output_prefix = os.path.dirname(args.output.name) th_modules = determine_th_modules(args.source, args.source_prefix) ghc_depends = run_ghc_depends(args.ghc, args.ghc_arg, args.source) - module_mapping, module_graph = interpret_ghc_depends( - ghc_depends, args.source_prefix) + deps_md = load_dependencies_metadata(args.dependency_metadata) + package_prefixes = 
calc_package_prefixes(deps_md) + module_mapping, module_graph, extgraph = interpret_ghc_depends( + ghc_depends, args.source_prefix, package_prefixes) return { "pkgname": args.pkgname, "output_prefix": output_prefix, "th_modules": th_modules, "module_mapping": module_mapping, "module_graph": module_graph, + "external": extgraph, } @@ -128,22 +132,40 @@ def run_ghc_depends(ghc, ghc_args, sources): return json.load(f) -def interpret_ghc_depends(ghc_depends, source_prefix): - graph = {} +def load_dependencies_metadata(fnames): + result = {} + + for fname in fnames: + with open(fname) as f: + md = json.load(f) + result[md["pkgname"]] = md + + return result + + +def calc_package_prefixes(dependencies_metadata): + return { + md["output_prefix"]: pkgname + for pkgname, md in dependencies_metadata.items() + } + + +def interpret_ghc_depends(ghc_depends, source_prefix, package_prefixes): mapping = {} + graph = {} + extgraph = {} for k, vs in ghc_depends.items(): # remove lead `./` caused by using `-outputdir '.'`. 
k = strip_prefix_("./", k) vs = [strip_prefix_("./", v) for v in vs] - # TODO: Handle pkg-deps - vs = filter(lambda x: not x.startswith("buck-out"), vs) - module_name = src_to_module_name(k) - intdeps = parse_module_deps(vs) + intdeps, extdeps = parse_module_deps(vs, package_prefixes) graph.setdefault(module_name, []).extend(intdeps) + for pkg, mods in extdeps.items(): + extgraph.setdefault(module_name, {}).setdefault(pkg, []).extend(mods) ext = os.path.splitext(k)[1] @@ -165,11 +187,12 @@ def interpret_ghc_depends(ghc_depends, source_prefix): if hs_module_name != module_name: mapping[hs_module_name] = module_name - return mapping, graph + return mapping, graph, extgraph -def parse_module_deps(module_deps): +def parse_module_deps(module_deps, package_prefixes): internal_deps = [] + external_deps = {} for module_dep in module_deps: if is_haskell_src(module_dep): @@ -178,9 +201,23 @@ def parse_module_deps(module_deps): if os.path.isabs(module_dep): continue + if (pkgdep := lookup_package_dep(module_dep, package_prefixes)) is not None: + pkgname, modname = pkgdep + external_deps.setdefault(pkgname, []).append(modname) + continue + internal_deps.append(src_to_module_name(module_dep)) - return internal_deps + return internal_deps, external_deps + + +def lookup_package_dep(module_dep, package_prefixes): + module_path = Path(module_dep) + for pkg_prefix, pkgname in package_prefixes.items(): + if module_path.is_relative_to(pkg_prefix): + sub_path = module_path.relative_to(pkg_prefix) + pkgdep = src_to_module_name("/".join(sub_path.parts[1:])) + return pkgname, pkgdep def src_to_module_name(x): From f5b7fb68ad578fcba463fa11c30656833e8d1072 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 27 Feb 2024 11:55:06 +0100 Subject: [PATCH 0060/1133] Calculate cross package transitive deps --- .../haskell/tools/generate_target_metadata.py | 24 +++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py 
b/prelude/haskell/tools/generate_target_metadata.py index 7f910606d..38e07ee55 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -79,15 +79,17 @@ def obtain_target_metadata(args): ghc_depends = run_ghc_depends(args.ghc, args.ghc_arg, args.source) deps_md = load_dependencies_metadata(args.dependency_metadata) package_prefixes = calc_package_prefixes(deps_md) - module_mapping, module_graph, extgraph = interpret_ghc_depends( + module_mapping, module_graph, package_deps = interpret_ghc_depends( ghc_depends, args.source_prefix, package_prefixes) + transitive_deps = calc_transitive_deps( + args.pkgname, module_graph, package_deps, deps_md) return { "pkgname": args.pkgname, "output_prefix": output_prefix, "th_modules": th_modules, "module_mapping": module_mapping, "module_graph": module_graph, - "external": extgraph, + "transitive_deps": transitive_deps, } @@ -220,6 +222,24 @@ def lookup_package_dep(module_dep, package_prefixes): return pkgname, pkgdep +def calc_transitive_deps(pkgname, module_graph, package_deps, deps_md): + result = {} + + for modname, dep_mods in module_graph.items(): + result[modname] = { pkgname: dep_mods } if dep_mods else {} + + for modname, dep_pkgs in package_deps.items(): + for dep_pkg, dep_mods in dep_pkgs.items(): + result[modname][dep_pkg] = dep_mods + + for dep_mod in dep_mods: + transitive_deps = deps_md[dep_pkg]["transitive_deps"][dep_mod] + for transitive_pkg, transitive_mods in transitive_deps.items(): + result[modname].setdefault(transitive_pkg, []).extend(transitive_mods) + + return result + + def src_to_module_name(x): base, _ = os.path.splitext(x) return base.replace("/", ".") From d064d777f78498900fb3f76d88c2caf1513e6546 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 27 Feb 2024 12:04:00 +0100 Subject: [PATCH 0061/1133] Avoid duplicate transitive module deps --- prelude/haskell/tools/generate_target_metadata.py | 14 ++++++++++---- 1 file changed, 10 
insertions(+), 4 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 38e07ee55..6df438433 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -70,7 +70,13 @@ def main(): result = obtain_target_metadata(args) - json.dump(result, args.output, indent=4) + json.dump(result, args.output, indent=4, default=json_default_handler) + + +def json_default_handler(o): + if isinstance(o, set): + return list(o) + raise TypeError(f'Object of type {o.__class__.__name__} is not JSON serializable') def obtain_target_metadata(args): @@ -226,16 +232,16 @@ def calc_transitive_deps(pkgname, module_graph, package_deps, deps_md): result = {} for modname, dep_mods in module_graph.items(): - result[modname] = { pkgname: dep_mods } if dep_mods else {} + result[modname] = { pkgname: set(dep_mods) } if dep_mods else {} for modname, dep_pkgs in package_deps.items(): for dep_pkg, dep_mods in dep_pkgs.items(): - result[modname][dep_pkg] = dep_mods + result[modname][dep_pkg] = set(dep_mods) for dep_mod in dep_mods: transitive_deps = deps_md[dep_pkg]["transitive_deps"][dep_mod] for transitive_pkg, transitive_mods in transitive_deps.items(): - result[modname].setdefault(transitive_pkg, []).extend(transitive_mods) + result[modname].setdefault(transitive_pkg, set()).update(set(transitive_mods)) return result From cbbbfa33dca9990eb6643ee9750acdf272d46754 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 27 Feb 2024 15:55:10 +0100 Subject: [PATCH 0062/1133] resolve intra-package transitive dependencies --- prelude/haskell/tools/generate_target_metadata.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 6df438433..9cc036c41 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ 
b/prelude/haskell/tools/generate_target_metadata.py @@ -15,6 +15,7 @@ """ import argparse +import graphlib import json import os from pathlib import Path @@ -231,8 +232,11 @@ def lookup_package_dep(module_dep, package_prefixes): def calc_transitive_deps(pkgname, module_graph, package_deps, deps_md): result = {} - for modname, dep_mods in module_graph.items(): + for modname in graphlib.TopologicalSorter(module_graph).static_order(): + dep_mods = module_graph[modname] result[modname] = { pkgname: set(dep_mods) } if dep_mods else {} + for dep_mod in dep_mods: + result[modname][pkgname].update(result[dep_mod].get(pkgname, set())) for modname, dep_pkgs in package_deps.items(): for dep_pkg, dep_mods in dep_pkgs.items(): From 51abeae6c5557fcaef537f064b3bb9564dd8f50c Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 27 Feb 2024 16:55:51 +0100 Subject: [PATCH 0063/1133] avoid quadratic package module lookup use a trie instead --- .../haskell/tools/generate_target_metadata.py | 44 +++++++++++++------ 1 file changed, 31 insertions(+), 13 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 9cc036c41..6bdd8accc 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -153,10 +153,37 @@ def load_dependencies_metadata(fnames): def calc_package_prefixes(dependencies_metadata): - return { - md["output_prefix"]: pkgname - for pkgname, md in dependencies_metadata.items() - } + """Creates a trie to look up modules in dependency packages. + + Package names are stored under the marker key `//pkgname`. This is + unambiguous since path components may not contain `/` characters. 
+ """ + result = {} + for pkgname, md in dependencies_metadata.items(): + path = Path(md["output_prefix"]) + layer = result + for part in path.parts: + layer = layer.setdefault(part, {}) + layer["//pkgname"] = pkgname + return result + + +def lookup_package_dep(module_dep, package_prefixes): + """Look up a cross-packge module dependency. + + Assumes that `module_dep` is a relative path to an interface file of the form + `buck-out/.../__my_package__/mod-shared/Some/Package.hi`. + """ + module_path = Path(module_dep) + layer = package_prefixes + for offset, part in enumerate(module_path.parts): + layer = layer.get(part, None) + if layer is None: + return None + elif "//pkgname" in layer: + pkgname = layer["//pkgname"] + modname = src_to_module_name("/".join(module_path.parts[offset+2:])) + return pkgname, modname def interpret_ghc_depends(ghc_depends, source_prefix, package_prefixes): @@ -220,15 +247,6 @@ def parse_module_deps(module_deps, package_prefixes): return internal_deps, external_deps -def lookup_package_dep(module_dep, package_prefixes): - module_path = Path(module_dep) - for pkg_prefix, pkgname in package_prefixes.items(): - if module_path.is_relative_to(pkg_prefix): - sub_path = module_path.relative_to(pkg_prefix) - pkgdep = src_to_module_name("/".join(sub_path.parts[1:])) - return pkgname, pkgdep - - def calc_transitive_deps(pkgname, module_graph, package_deps, deps_md): result = {} From 7e79c1c5f9258978931164500f3b160aa438bafd Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 27 Feb 2024 17:20:22 +0100 Subject: [PATCH 0064/1133] Only expose interfaces of required modules --- prelude/haskell/compile.bzl | 29 ++++++++++++++++++++++------- 1 file changed, 22 insertions(+), 7 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index fe5351f99..8dd56f72c 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -243,7 +243,8 @@ def get_packages_info( ctx: AnalysisContext, link_style: LinkStyle, 
specify_pkg_version: bool, - enable_profiling: bool) -> PackagesInfo: + enable_profiling: bool, + transitive_deps: [None, dict[str, list[str]]] = None) -> PackagesInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] # Collect library dependencies. Note that these don't need to be in a @@ -267,7 +268,15 @@ def get_packages_info( packagedb_args = cmd_args() for lib in libs.values(): - exposed_package_imports.extend(lib.import_dirs[enable_profiling]) + if transitive_deps == None: + exposed_package_imports.extend(lib.import_dirs[enable_profiling]) + elif lib.name in transitive_deps: + lib_module_deps = transitive_deps[lib.name] + exposed_package_imports.extend([ + hi + for hi in lib.import_dirs[enable_profiling] + if src_to_module_name(hi.short_path) in lib_module_deps + ]) # libs of dependencies might be needed at compile time if # we're using Template Haskell: @@ -306,7 +315,8 @@ def _common_compile_args( link_style: LinkStyle, enable_profiling: bool, enable_th: bool, - pkgname: str | None) -> cmd_args: + pkgname: str | None, + transitive_deps: [None, dict[str, list[str]]] = None) -> cmd_args: toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] compile_args = cmd_args() @@ -332,6 +342,7 @@ def _common_compile_args( link_style, specify_pkg_version = False, enable_profiling = enable_profiling, + transitive_deps = transitive_deps, ) compile_args.add(packages_info.exposed_package_args) @@ -424,7 +435,8 @@ def _compile_module_args( enable_profiling: bool, enable_th: bool, outputs: dict[Artifact, Artifact], - pkgname = None) -> CompileArgsInfo: + pkgname = None, + transitive_deps: [None, dict[str, list[str]]] = None) -> CompileArgsInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] compile_cmd = cmd_args() @@ -435,7 +447,7 @@ def _compile_module_args( compile_cmd.add(ctx.attrs.compiler_flags) compile_cmd.add("-c") - compile_args = _common_compile_args(ctx, 
link_style, enable_profiling, enable_th, pkgname) + compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, transitive_deps = transitive_deps) object = outputs[module.object] hi = outputs[module.interface] @@ -477,6 +489,7 @@ def _compile_module( modules: dict[str, _Module], md_file: Artifact, graph: dict[str, list[str]], + transitive_deps: dict[str, list[str]], outputs: dict[Artifact, Artifact], artifact_suffix: str, pkgname: str | None = None, @@ -486,7 +499,7 @@ def _compile_module( haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] compile_cmd = cmd_args(haskell_toolchain.compiler) - args = _compile_module_args(ctx, module, link_style, enable_profiling, enable_th, outputs, pkgname) + args = _compile_module_args(ctx, module, link_style, enable_profiling, enable_th, outputs, pkgname, transitive_deps = transitive_deps) if args.args_for_file: if haskell_toolchain.use_argsfile: @@ -534,8 +547,9 @@ def compile( def do_compile(ctx, artifacts, outputs, md_file=md_file, modules=modules): md = artifacts[md_file].read_json() th_modules = md["th_modules"] - graph = md["module_graph"] module_map = md["module_mapping"] + graph = md["module_graph"] + transitive_deps = md["transitive_deps"] mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } @@ -548,6 +562,7 @@ def compile( module_name = module_name, modules = mapped_modules, graph = graph, + transitive_deps = transitive_deps[module_name], outputs = outputs, md_file=md_file, artifact_suffix = artifact_suffix, From 7b13ad630c113c11b8e01f801fd35a34f815cf51 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 27 Feb 2024 17:48:48 +0100 Subject: [PATCH 0065/1133] capture intra-package transitive cross-package deps --- .../haskell/tools/generate_target_metadata.py | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py 
index 6bdd8accc..94bd459a2 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -248,23 +248,25 @@ def parse_module_deps(module_deps, package_prefixes): def calc_transitive_deps(pkgname, module_graph, package_deps, deps_md): - result = {} - - for modname in graphlib.TopologicalSorter(module_graph).static_order(): - dep_mods = module_graph[modname] - result[modname] = { pkgname: set(dep_mods) } if dep_mods else {} - for dep_mod in dep_mods: - result[modname][pkgname].update(result[dep_mod].get(pkgname, set())) + result = { modname: {} for modname in module_graph.keys() } for modname, dep_pkgs in package_deps.items(): - for dep_pkg, dep_mods in dep_pkgs.items(): - result[modname][dep_pkg] = set(dep_mods) + for dep_pkg, dep_pkg_mods in dep_pkgs.items(): + result[modname][dep_pkg] = set(dep_pkg_mods) - for dep_mod in dep_mods: - transitive_deps = deps_md[dep_pkg]["transitive_deps"][dep_mod] + for dep_pkg_mod in dep_pkg_mods: + transitive_deps = deps_md[dep_pkg]["transitive_deps"][dep_pkg_mod] for transitive_pkg, transitive_mods in transitive_deps.items(): result[modname].setdefault(transitive_pkg, set()).update(set(transitive_mods)) + for modname in graphlib.TopologicalSorter(module_graph).static_order(): + dep_mods = module_graph[modname] + if dep_mods: + result[modname][pkgname] = set(dep_mods) + for dep_mod in dep_mods: + for dep_pkg, dep_pkg_mods in result[dep_mod].items(): + result[modname].setdefault(dep_pkg, set()).update(dep_pkg_mods) + return result From 72e3dba0c2e7dd504736890021bb57ae292c9dfe Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 27 Feb 2024 18:01:55 +0100 Subject: [PATCH 0066/1133] reproducible JSON metadata Sort the keys and sets to ensure that the results are reproducible. 
--- prelude/haskell/tools/generate_target_metadata.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 94bd459a2..80bb9b359 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -71,12 +71,12 @@ def main(): result = obtain_target_metadata(args) - json.dump(result, args.output, indent=4, default=json_default_handler) + json.dump(result, args.output, indent=4, sort_keys=True, default=json_default_handler) def json_default_handler(o): if isinstance(o, set): - return list(o) + return sorted(o) raise TypeError(f'Object of type {o.__class__.__name__} is not JSON serializable') From 2366dfffe5bc08efa79f717ab5c2a2a46fcca427 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 27 Feb 2024 18:12:46 +0100 Subject: [PATCH 0067/1133] Fix inadvertant pkg deps overwrite --- prelude/haskell/tools/generate_target_metadata.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 80bb9b359..5ec1ba6d6 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -252,7 +252,7 @@ def calc_transitive_deps(pkgname, module_graph, package_deps, deps_md): for modname, dep_pkgs in package_deps.items(): for dep_pkg, dep_pkg_mods in dep_pkgs.items(): - result[modname][dep_pkg] = set(dep_pkg_mods) + result[modname].setdefault(dep_pkg, set()).update(dep_pkg_mods) for dep_pkg_mod in dep_pkg_mods: transitive_deps = deps_md[dep_pkg]["transitive_deps"][dep_pkg_mod] @@ -262,7 +262,7 @@ def calc_transitive_deps(pkgname, module_graph, package_deps, deps_md): for modname in graphlib.TopologicalSorter(module_graph).static_order(): dep_mods = module_graph[modname] if dep_mods: - result[modname][pkgname] = set(dep_mods) + 
result[modname].setdefault(pkgname, set()).update(dep_mods) for dep_mod in dep_mods: for dep_pkg, dep_pkg_mods in result[dep_mod].items(): result[modname].setdefault(dep_pkg, set()).update(dep_pkg_mods) From 9b815875ba9cd128861c5581186c50c2b24a6c69 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 28 Feb 2024 10:04:16 +0100 Subject: [PATCH 0068/1133] simplify trie lookup --- prelude/haskell/tools/generate_target_metadata.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 5ec1ba6d6..ed48831b6 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -177,11 +177,10 @@ def lookup_package_dep(module_dep, package_prefixes): module_path = Path(module_dep) layer = package_prefixes for offset, part in enumerate(module_path.parts): - layer = layer.get(part, None) - if layer is None: + if (layer := layer.get(part)) is None: return None - elif "//pkgname" in layer: - pkgname = layer["//pkgname"] + + if (pkgname := layer.get("//pkgname")) is not None: modname = src_to_module_name("/".join(module_path.parts[offset+2:])) return pkgname, modname From 303e1ebfd84b0fb239e4ee8d598abfb71681dd84 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 29 Feb 2024 17:24:28 +0100 Subject: [PATCH 0069/1133] Update prelude/haskell/tools/generate_target_metadata.py --- prelude/haskell/tools/generate_target_metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index ed48831b6..8b06a54f7 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -126,7 +126,7 @@ def uses_th(filename): def run_ghc_depends(ghc, ghc_args, sources): with tempfile.TemporaryDirectory() as dname: json_fname = 
os.path.join(dname, "depends.json") - make_fname = os.path.join(dname, "depends.json") + make_fname = os.path.join(dname, "depends.make") args = [ ghc, "-M", "-include-pkg-deps", # Note: `-outputdir '.'` removes the prefix of all targets: From 66bb74ed97fd88dafb616257f0d53ce660704a4e Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 6 Mar 2024 15:06:58 +0100 Subject: [PATCH 0070/1133] [buck2] Use `-opts-json` information to determine TH modules --- .../haskell/tools/generate_target_metadata.py | 38 ++++++++----------- 1 file changed, 16 insertions(+), 22 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 8b06a54f7..1966f7e90 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -19,7 +19,6 @@ import json import os from pathlib import Path -import re import subprocess import tempfile @@ -82,8 +81,8 @@ def json_default_handler(o): def obtain_target_metadata(args): output_prefix = os.path.dirname(args.output.name) - th_modules = determine_th_modules(args.source, args.source_prefix) - ghc_depends = run_ghc_depends(args.ghc, args.ghc_arg, args.source) + ghc_depends, ghc_options = run_ghc_depends(args.ghc, args.ghc_arg, args.source) + th_modules = determine_th_modules(ghc_options, args.source_prefix) deps_md = load_dependencies_metadata(args.dependency_metadata) package_prefixes = calc_package_prefixes(deps_md) module_mapping, module_graph, package_deps = interpret_ghc_depends( @@ -100,32 +99,26 @@ def obtain_target_metadata(args): } -def determine_th_modules(sources, source_prefix): - result = [] +def determine_th_modules(ghc_options, source_prefix): + return [ + src_to_module_name(strip_prefix_(source_prefix, fname).lstrip("/")) + for fname, opts in ghc_options.items() + if uses_th(opts) + ] - for fname in sources: - if uses_th(fname): - module_name = src_to_module_name( - strip_prefix_(source_prefix, 
fname).lstrip("/")) - result.append(module_name) - return result - - -th_regex = re.compile(r"^\s*{-# LANGUAGE (TemplateHaskell|TemplateHaskellQuotes|QuasiQuotes) #-}") +__TH_EXTENSIONS = ["TemplateHaskell", "TemplateHaskellQuotes", "QuasiQuotes"] -def uses_th(filename): - """Determine if the given module uses Template Haskell.""" - with open(filename, "r") as file: - for line in file: - if th_regex.match(line): - return True +def uses_th(opts): + """Determine if a Template Haskell extension is enabled.""" + return any([f"-X{ext}" in opts for ext in __TH_EXTENSIONS]) def run_ghc_depends(ghc, ghc_args, sources): with tempfile.TemporaryDirectory() as dname: json_fname = os.path.join(dname, "depends.json") + opt_json_fname = os.path.join(dname, "options.json") make_fname = os.path.join(dname, "depends.make") args = [ ghc, "-M", "-include-pkg-deps", @@ -133,12 +126,13 @@ def run_ghc_depends(ghc, ghc_args, sources): # backend/src/Foo/Util. => Foo/Util. "-outputdir", ".", "-dep-json", json_fname, + "-opt-json", opt_json_fname, "-dep-makefile", make_fname, ] + ghc_args + sources subprocess.run(args, check=True) - with open(json_fname) as f: - return json.load(f) + with open(json_fname) as f, open(opt_json_fname) as o: + return json.load(f), json.load(o) def load_dependencies_metadata(fnames): From 1ce1a6349cb8e9a2eb007a060f55cdda5db1b04d Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 7 Mar 2024 14:35:21 +0100 Subject: [PATCH 0071/1133] [buck2] Remove unneeded filename normalization --- prelude/haskell/tools/generate_target_metadata.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 1966f7e90..5c2acd98d 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -185,10 +185,6 @@ def interpret_ghc_depends(ghc_depends, source_prefix, package_prefixes): extgraph = {} for k, vs in 
ghc_depends.items(): - # remove lead `./` caused by using `-outputdir '.'`. - k = strip_prefix_("./", k) - vs = [strip_prefix_("./", v) for v in vs] - module_name = src_to_module_name(k) intdeps, extdeps = parse_module_deps(vs, package_prefixes) From 7961d65bfe68ada8da1435625efb7b08d2e90de6 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 28 Feb 2024 13:40:38 +0100 Subject: [PATCH 0072/1133] [buck2] hide objects for non-TH compile --- prelude/haskell/compile.bzl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 8dd56f72c..190397f13 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -527,7 +527,9 @@ def _compile_module( for dep_name in breadth_first_traversal(graph, [module_name])[1:]: dep = modules[dep_name] - compile_cmd.hidden(dep.interface, dep.object) + compile_cmd.hidden(dep.interface) + if enable_th: + compile_cmd.hidden(dep.object) ctx.actions.run(compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name) From 3e259bf5687e656c9319f5f69644b78887546d5d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 28 Feb 2024 13:47:35 +0100 Subject: [PATCH 0073/1133] [buck2] Remove .so from TH compile inputs This will need to be replaced by module object file inputs and potentially an empty placeholder .so. See https://github.com/tweag/rules_haskell/issues/1631. 
--- prelude/haskell/compile.bzl | 2 -- 1 file changed, 2 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 190397f13..cd7541730 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -348,8 +348,6 @@ def _common_compile_args( compile_args.add(packages_info.exposed_package_args) compile_args.hidden(packages_info.exposed_package_imports) compile_args.add(packages_info.packagedb_args) - if enable_th: - compile_args.add(packages_info.exposed_package_libs) # Add args from preprocess-able inputs. inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) From 338c49a409ba55458c182056fbb73d0d81325a30 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 28 Feb 2024 13:57:37 +0100 Subject: [PATCH 0074/1133] Track module object files in library info --- prelude/haskell/compile.bzl | 2 ++ prelude/haskell/haskell.bzl | 8 ++++++++ 2 files changed, 10 insertions(+) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index cd7541730..4b660b373 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -75,6 +75,8 @@ HaskellLibraryInfo = record( id = str, # Import dirs indexed by profiling enabled/disabled import_dirs = dict[bool, list[Artifact]], + # Object files indexed by profiling enabled/disabled + objects = dict[bool, list[Artifact]], stub_dirs = list[Artifact], # This field is only used as hidden inputs to compilation, to diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 013ba67be..b39bf48d6 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -606,6 +606,10 @@ def _build_haskell_lib( True: compiled.hi, False: non_profiling_hlib.compiled.hi, } + object_artifacts = { + True: compiled.objects, + False: non_profiling_hlib.compiled.objects, + } library_artifacts = { True: lib, False: non_profiling_hlib.libs[0], @@ -616,6 +620,9 @@ def _build_haskell_lib( import_artifacts = { False: compiled.hi, } + object_artifacts 
= { + False: compiled.objects, + } library_artifacts = { False: lib, } @@ -638,6 +645,7 @@ def _build_haskell_lib( db = db, id = pkgname, import_dirs = import_artifacts, + objects = object_artifacts, stub_dirs = stub_dirs, libs = all_libs, version = "1.0.0", From ef1fe1a498d48249608bbdc2df0950ac68b75feb Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 28 Feb 2024 14:02:56 +0100 Subject: [PATCH 0075/1133] [buck2] Pass objects for TH compilation See https://github.com/tweag/rules_haskell/issues/1631 --- prelude/haskell/compile.bzl | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 4b660b373..9c4842314 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -94,6 +94,7 @@ HaskellLibraryInfo = record( PackagesInfo = record( exposed_package_imports = field(list[Artifact]), + exposed_package_objects = field(list[Artifact]), exposed_package_libs = cmd_args, exposed_package_args = cmd_args, packagedb_args = cmd_args, @@ -264,6 +265,7 @@ def get_packages_info( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) exposed_package_imports = [] + exposed_package_objects = [] exposed_package_libs = cmd_args() exposed_package_args = cmd_args([package_flag, "base"]) @@ -272,6 +274,7 @@ def get_packages_info( for lib in libs.values(): if transitive_deps == None: exposed_package_imports.extend(lib.import_dirs[enable_profiling]) + exposed_package_objects.extend(lib.objects[enable_profiling]) elif lib.name in transitive_deps: lib_module_deps = transitive_deps[lib.name] exposed_package_imports.extend([ @@ -279,6 +282,11 @@ def get_packages_info( for hi in lib.import_dirs[enable_profiling] if src_to_module_name(hi.short_path) in lib_module_deps ]) + exposed_package_objects.extend([ + o + for o in lib.objects[enable_profiling] + if src_to_module_name(o.short_path) in lib_module_deps + ]) # libs of dependencies might be needed at compile time 
if # we're using Template Haskell: @@ -305,6 +313,7 @@ def get_packages_info( return PackagesInfo( exposed_package_imports = exposed_package_imports, + exposed_package_objects = exposed_package_objects, exposed_package_libs = exposed_package_libs, exposed_package_args = exposed_package_args, packagedb_args = packagedb_args, @@ -350,6 +359,8 @@ def _common_compile_args( compile_args.add(packages_info.exposed_package_args) compile_args.hidden(packages_info.exposed_package_imports) compile_args.add(packages_info.packagedb_args) + if enable_th: + compile_args.add(packages_info.exposed_package_objects) # Add args from preprocess-able inputs. inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) From 10e25ab428d3b3a2557bccff82047535e255b473 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 28 Feb 2024 14:30:09 +0100 Subject: [PATCH 0076/1133] [buck2] Generate empty library versions --- prelude/haskell/compile.bzl | 3 +++ prelude/haskell/haskell.bzl | 29 ++++++++++++++++++++++++++++- 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 9c4842314..6b32c54c4 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -84,6 +84,9 @@ HaskellLibraryInfo = record( # at compile time. The real library flags are propagated up the # dependency graph via MergedLinkInfo. libs = field(list[Artifact], []), + # GHC insists on loading a library, but does not actually need it when we + # pass module granular object files into compilation actions. + empty_libs = field(list[Artifact], []), # Package version, used to specify the full package when exposing it, # e.g. filepath-1.4.2.1, deepseq-1.4.4.0. # Internal packages default to 1.0.0, e.g. `fbcode-dsi-logger-hs-types-1.0.0`. 
diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index b39bf48d6..dcd0d40f3 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -434,7 +434,7 @@ def _make_package( library_dirs = [ mk_artifact_dir("lib", profiled) for profiled in hi.keys() - ] + ] + ["${pkgroot}/empty/lib-shared"] conf = [ "name: " + pkgname, @@ -580,6 +580,30 @@ def _build_haskell_lib( default = LinkInfo(linkables = [SharedLibLinkable(lib = lib)]), ) + empty_lib = ctx.actions.declare_output("empty", lib_short_path) + empty_link = cmd_args(haskell_toolchain.linker) + empty_link.add("-o", empty_lib.as_output()) + empty_link.add( + get_shared_library_flags(linker_info.type), + "-dynamic", + cmd_args( + _get_haskell_shared_library_name_linker_flags(linker_info.type, libfile), + prepend = "-optl", + ), + ) + empty_link.add(ctx.actions.write("empty.c", "")) + empty_infos = get_link_args_for_strategy( + ctx, + nlis, + to_link_strategy(link_style), + ) + empty_link.add(cmd_args(unpack_link_args(empty_infos), prepend = "-optl")) + ctx.actions.run( + empty_link, + category = "haskell_link_empty" + artifact_suffix.replace("-", "_"), + ) + empty_libs = [empty_lib] + else: # static flavours # TODO: avoid making an archive for a single object, like cxx does # (but would that work with Template Haskell?) 
@@ -598,6 +622,8 @@ def _build_haskell_lib( ), ) + empty_libs = [] + if enable_profiling and link_style != LinkStyle("shared"): if not non_profiling_hlib: fail("Non-profiling HaskellLibBuildOutput wasn't provided when building profiling lib") @@ -648,6 +674,7 @@ def _build_haskell_lib( objects = object_artifacts, stub_dirs = stub_dirs, libs = all_libs, + empty_libs = empty_libs, version = "1.0.0", is_prebuilt = False, profiling_enabled = enable_profiling, From aa15749a980da89ae89e30fd1e9a5dff493d475f Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 28 Feb 2024 14:32:21 +0100 Subject: [PATCH 0077/1133] [buck2] Pass empty libs for TH compilation --- prelude/haskell/compile.bzl | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 6b32c54c4..ad226a7f7 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -278,6 +278,9 @@ def get_packages_info( if transitive_deps == None: exposed_package_imports.extend(lib.import_dirs[enable_profiling]) exposed_package_objects.extend(lib.objects[enable_profiling]) + # libs of dependencies might be needed at compile time if + # we're using Template Haskell: + exposed_package_libs.hidden(lib.libs) elif lib.name in transitive_deps: lib_module_deps = transitive_deps[lib.name] exposed_package_imports.extend([ @@ -290,10 +293,9 @@ def get_packages_info( for o in lib.objects[enable_profiling] if src_to_module_name(o.short_path) in lib_module_deps ]) - - # libs of dependencies might be needed at compile time if - # we're using Template Haskell: - exposed_package_libs.hidden(lib.libs) + # libs of dependencies might be needed at compile time if + # we're using Template Haskell: + exposed_package_libs.hidden(lib.empty_libs) for lib in libs.values(): # These we need to add for all the packages/dependencies, i.e. 
@@ -364,6 +366,7 @@ def _common_compile_args( compile_args.add(packages_info.packagedb_args) if enable_th: compile_args.add(packages_info.exposed_package_objects) + compile_args.add(packages_info.exposed_package_libs) # Add args from preprocess-able inputs. inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) From f5c9bbfef64e658cdbceb5a875e2a6efdaaffba0 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 28 Feb 2024 15:49:49 +0100 Subject: [PATCH 0078/1133] Remap object files to .o suffix GHC will only load objects passed on the command line if they use the `.o` file extension. --- prelude/haskell/compile.bzl | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index ad226a7f7..924aacec5 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -332,6 +332,7 @@ def _common_compile_args( enable_profiling: bool, enable_th: bool, pkgname: str | None, + modname: str | None = None, transitive_deps: [None, dict[str, list[str]]] = None) -> cmd_args: toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] @@ -365,8 +366,16 @@ def _common_compile_args( compile_args.hidden(packages_info.exposed_package_imports) compile_args.add(packages_info.packagedb_args) if enable_th: - compile_args.add(packages_info.exposed_package_objects) compile_args.add(packages_info.exposed_package_libs) + if modname: + for o in packages_info.exposed_package_objects: + if o.extension != ".o": + o_copy = ctx.actions.declare_output(modname, paths.replace_extension(o.short_path, ".o")) + compile_args.add(ctx.actions.symlink_file(o_copy, o)) + else: + compile_args.add(o) + else: + compile_args.add(packages_info.exposed_package_objects) # Add args from preprocess-able inputs. 
inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) @@ -464,7 +473,7 @@ def _compile_module_args( compile_cmd.add(ctx.attrs.compiler_flags) compile_cmd.add("-c") - compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, transitive_deps = transitive_deps) + compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), transitive_deps = transitive_deps) object = outputs[module.object] hi = outputs[module.interface] From 083ad8c558766e28a18e1fe07b7fc78cb6f7a374 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 29 Feb 2024 10:40:31 +0100 Subject: [PATCH 0079/1133] Ensure topological order in transitive deps --- .../haskell/tools/generate_target_metadata.py | 52 +++++++++++-------- 1 file changed, 29 insertions(+), 23 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 5c2acd98d..e60a20c47 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -11,7 +11,7 @@ * `th_modules`: List of modules that require Template Haskell. * `module_mapping`: Mapping from source inferred module name to actual module name, if different. * `module_graph`: Intra-package module dependencies, `dict[modname, list[modname]]`. -* `transitive_deps`: Cross-package module dependencies, `dict[modname, dict[pkgname, list[modname]]]`. +* `transitive_deps`: Cross-package module dependencies in topological order starting at the leafs, `dict[modname, dict[pkgname, list[modname]]]`. 
""" import argparse @@ -70,13 +70,7 @@ def main(): result = obtain_target_metadata(args) - json.dump(result, args.output, indent=4, sort_keys=True, default=json_default_handler) - - -def json_default_handler(o): - if isinstance(o, set): - return sorted(o) - raise TypeError(f'Object of type {o.__class__.__name__} is not JSON serializable') + json.dump(result, args.output, indent=4) def obtain_target_metadata(args): @@ -237,24 +231,36 @@ def parse_module_deps(module_deps, package_prefixes): def calc_transitive_deps(pkgname, module_graph, package_deps, deps_md): - result = { modname: {} for modname in module_graph.keys() } + result = {} + + topo_modules = graphlib.TopologicalSorter(module_graph).static_order() - for modname, dep_pkgs in package_deps.items(): - for dep_pkg, dep_pkg_mods in dep_pkgs.items(): - result[modname].setdefault(dep_pkg, set()).update(dep_pkg_mods) + for modname in topo_modules: + result[modname] = {} + for dep_pkg, dep_pkg_mods in package_deps.get(modname, {}).items(): + dep_pkg_trans_deps = deps_md[dep_pkg]["transitive_deps"] for dep_pkg_mod in dep_pkg_mods: - transitive_deps = deps_md[dep_pkg]["transitive_deps"][dep_pkg_mod] - for transitive_pkg, transitive_mods in transitive_deps.items(): - result[modname].setdefault(transitive_pkg, set()).update(set(transitive_mods)) - - for modname in graphlib.TopologicalSorter(module_graph).static_order(): - dep_mods = module_graph[modname] - if dep_mods: - result[modname].setdefault(pkgname, set()).update(dep_mods) - for dep_mod in dep_mods: - for dep_pkg, dep_pkg_mods in result[dep_mod].items(): - result[modname].setdefault(dep_pkg, set()).update(dep_pkg_mods) + for trans_pkg, trans_mods in dep_pkg_trans_deps[dep_pkg_mod].items(): + if trans_mods: + result[modname].setdefault(trans_pkg, {}).update((m, None) for m in trans_mods) + + for dep_pkg, dep_pkg_mods in package_deps.get(modname, {}).items(): + if dep_pkg_mods: + result[modname].setdefault(dep_pkg, {}).update((m, None) for m in dep_pkg_mods) + + 
for dep_mod in module_graph[modname]: + for trans_pkg, trans_mods in result[dep_mod].items(): + if trans_mods: + result[modname].setdefault(trans_pkg, {}).update((m, None) for m in trans_mods) + + if module_graph[modname]: + result[modname].setdefault(pkgname, {}).update((m, None) for m in module_graph[modname]) + + for modname in result: + for dep_pkg in result[modname]: + dep_mods = list(result[modname][dep_pkg].keys()) + result[modname][dep_pkg] = dep_mods return result From a77f8a7a15c1e5f993ec943727e68d62456a3d81 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 29 Feb 2024 10:59:11 +0100 Subject: [PATCH 0080/1133] Import objects in topological order --- prelude/haskell/compile.bzl | 43 ++++++++++++++++++++++--------------- 1 file changed, 26 insertions(+), 17 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 924aacec5..97074cd72 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -274,28 +274,37 @@ def get_packages_info( packagedb_args = cmd_args() - for lib in libs.values(): - if transitive_deps == None: + if transitive_deps != None: + lib_objects = {} + lib_interfaces = {} + for lib in libs.values(): + lib_objects[lib.name] = {} + lib_interfaces[lib.name] = {} + + for o in lib.objects[enable_profiling]: + lib_objects[lib.name][src_to_module_name(o.short_path)] = o + + for hi in lib.import_dirs[enable_profiling]: + lib_interfaces[lib.name][src_to_module_name(hi.short_path)] = hi + + # libs of dependencies might be needed at compile time if + # we're using Template Haskell: + exposed_package_libs.hidden(lib.empty_libs) + + for pkg, mods in transitive_deps.items(): + if not pkg in lib_objects: + # TODO More robust handling. We want to skip self-package references here. 
+ continue + for mod in mods: + exposed_package_objects.append(lib_objects[pkg][mod]) + exposed_package_imports.append(lib_interfaces[pkg][mod]) + else: + for lib in libs.values(): exposed_package_imports.extend(lib.import_dirs[enable_profiling]) exposed_package_objects.extend(lib.objects[enable_profiling]) # libs of dependencies might be needed at compile time if # we're using Template Haskell: exposed_package_libs.hidden(lib.libs) - elif lib.name in transitive_deps: - lib_module_deps = transitive_deps[lib.name] - exposed_package_imports.extend([ - hi - for hi in lib.import_dirs[enable_profiling] - if src_to_module_name(hi.short_path) in lib_module_deps - ]) - exposed_package_objects.extend([ - o - for o in lib.objects[enable_profiling] - if src_to_module_name(o.short_path) in lib_module_deps - ]) - # libs of dependencies might be needed at compile time if - # we're using Template Haskell: - exposed_package_libs.hidden(lib.empty_libs) for lib in libs.values(): # These we need to add for all the packages/dependencies, i.e. 
From ed5778383e3e51c6eacf75eff3a0eb370b29c046 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 29 Feb 2024 15:03:34 +0100 Subject: [PATCH 0081/1133] Avoid object copy name clash --- prelude/haskell/compile.bzl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 97074cd72..138cd3270 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -379,7 +379,8 @@ def _common_compile_args( if modname: for o in packages_info.exposed_package_objects: if o.extension != ".o": - o_copy = ctx.actions.declare_output(modname, paths.replace_extension(o.short_path, ".o")) + prefix = o.owner.name + "-" + modname + o_copy = ctx.actions.declare_output(prefix, paths.replace_extension(o.short_path, ".o")) compile_args.add(ctx.actions.symlink_file(o_copy, o)) else: compile_args.add(o) From 4185b2bb4f1e724f0b3c76e4da67f8bbe6857618 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 29 Feb 2024 15:17:42 +0100 Subject: [PATCH 0082/1133] non granular objects should be hidden --- prelude/haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 138cd3270..488e421b8 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -385,7 +385,7 @@ def _common_compile_args( else: compile_args.add(o) else: - compile_args.add(packages_info.exposed_package_objects) + compile_args.hidden(packages_info.exposed_package_objects) # Add args from preprocess-able inputs. 
inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) From 5d1753cdd1c12b3439f51344d5832878e64419a6 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 29 Feb 2024 15:22:37 +0100 Subject: [PATCH 0083/1133] Remove inputs from empty lib --- prelude/haskell/haskell.bzl | 6 ------ 1 file changed, 6 deletions(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index dcd0d40f3..5ce616ba2 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -592,12 +592,6 @@ def _build_haskell_lib( ), ) empty_link.add(ctx.actions.write("empty.c", "")) - empty_infos = get_link_args_for_strategy( - ctx, - nlis, - to_link_strategy(link_style), - ) - empty_link.add(cmd_args(unpack_link_args(empty_infos), prepend = "-optl")) ctx.actions.run( empty_link, category = "haskell_link_empty" + artifact_suffix.replace("-", "_"), From 87c2ea42b9c0dbfc4a370d756c84add9b974d1d0 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 11 Mar 2024 08:31:05 +0100 Subject: [PATCH 0084/1133] [buck2] Use `shared` linking for haskell_ghci targets --- prelude/haskell/haskell_ghci.bzl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index a8b8c56ba..111e58292 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -615,7 +615,8 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: enable_profiling, ) - link_style = LinkStyle("static_pic") + link_style = LinkStyle("shared") + #link_style = LinkStyle("static_pic") packages_info = get_packages_info( ctx, From e58d80cfecf4798e05f0960d87e3b00e6e5fa7f0 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 11 Mar 2024 10:36:13 +0100 Subject: [PATCH 0085/1133] [buck2] Make `ghci_lib_path` optional The ghci script would always pass a `-B flag to ghc. This would just use `None` and fail with locating the `settings` file. 
--- prelude/haskell/haskell_ghci.bzl | 3 ++- .../tools/script_template_processor.py | 26 ++++++++++++------- 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index 111e58292..cf769408f 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -360,7 +360,8 @@ def _replace_macros_in_script_template( replace_cmd = cmd_args(script_template_processor) replace_cmd.add(cmd_args(script_template, format = "--script_template={}")) for name, path in toolchain_paths.items(): - replace_cmd.add(cmd_args("--{}={}".format(name, path))) + if path: + replace_cmd.add(cmd_args("--{}={}".format(name, path))) replace_cmd.add(cmd_args( final_script.as_output(), diff --git a/prelude/haskell/tools/script_template_processor.py b/prelude/haskell/tools/script_template_processor.py index a105c4dbf..b67e0e2ee 100644 --- a/prelude/haskell/tools/script_template_processor.py +++ b/prelude/haskell/tools/script_template_processor.py @@ -66,18 +66,26 @@ def _replace_template_values( string=script_template, ) - # user_ghci_path has to be handled separately because it needs to be passed - # with the ghci_lib_path as the `-B` argument. - ghci_lib_canonical_path = os.path.realpath( - rel_toolchain_paths["ghci_lib_path"], - ) if user_ghci_path is not None: - script_template = re.sub( - pattern="", - repl="${{DIR}}/{user_ghci_path} -B{ghci_lib_path}".format( + # user_ghci_path has to be handled separately because it needs to be passed + # with the ghci_lib_path as the `-B` argument. 
+ ghci_lib_path = rel_toolchain_paths["ghci_lib_path"] + + if ghci_lib_path: + ghci_lib_canonical_path = os.path.realpath(ghci_lib_path) + + replacement="${{DIR}}/{user_ghci_path} -B{ghci_lib_path}".format( user_ghci_path=user_ghci_path, ghci_lib_path=ghci_lib_canonical_path, - ), + ) + else: + replacement="${{DIR}}/{user_ghci_path}".format( + user_ghci_path=user_ghci_path, + ) + + script_template = re.sub( + pattern="", + repl=replacement, string=script_template, ) From cb7f8581561145bcc9c9bae90a3ffa8997c29563 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 11 Mar 2024 10:39:07 +0100 Subject: [PATCH 0086/1133] [buck2] Correct import dir links for import_dir --- prelude/haskell/haskell_ghci.bzl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index cf769408f..f63b532b5 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -648,7 +648,8 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: for prof, import_dir in lib.import_dirs.items(): artifact_suffix = get_artifact_suffix(link_style, prof) - lib_symlinks["hi-" + artifact_suffix] = import_dir + for imp in import_dir: + lib_symlinks["mod-" + artifact_suffix + "/" + imp.short_path] = imp for o in lib.libs: lib_symlinks[o.short_path] = o From ca8ece06a7f49939a0eaf446187b259dc0b15169 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 11 Mar 2024 10:58:22 +0100 Subject: [PATCH 0087/1133] [buck2] Use correct `-rpath` origin setting for `haskell_ghci` --- prelude/haskell/haskell_ghci.bzl | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index f63b532b5..b19d1b027 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -51,6 +51,11 @@ load( "SharedLibraryInfo", "traverse_shared_library_info", ) +load( + "@prelude//cxx:linker.bzl", + "get_rpath_origin", + 
"get_shared_library_flags", +) load( "@prelude//utils:graph_utils.bzl", "breadth_first_traversal", @@ -294,7 +299,7 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: soname = "libghci_dependencies.so" extra_ldflags = [ "-rpath", - "$ORIGIN/{}".format(so_symlinks_root_path), + "{}/{}".format(get_rpath_origin(linker_info.type), so_symlinks_root_path) ] link_result = cxx_link_shared_library( ctx, From 0dc100843bd912422fc1838ca1d8ddc06239a18e Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 11 Mar 2024 11:00:54 +0100 Subject: [PATCH 0088/1133] [buck2] Pass linker flags using `-Wl,...` --- prelude/haskell/haskell_ghci.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index b19d1b027..d41d30c1e 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -306,7 +306,7 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: soname, opts = link_options( links = [ - LinkArgs(flags = extra_ldflags), + LinkArgs(flags = cmd_args(cmd_args(extra_ldflags, delimiter=","), format="-Wl,{}")), LinkArgs(infos = body_link_infos.values()), LinkArgs(infos = tp_deps_shared_link_infos.values()), ], From e21e0114b0f69dd126df4746298fb108896dfae4 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 15 Mar 2024 16:02:41 +0100 Subject: [PATCH 0089/1133] Skip module deps from the same package explicitly --- prelude/haskell/compile.bzl | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 488e421b8..2562dea12 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -250,7 +250,8 @@ def get_packages_info( link_style: LinkStyle, specify_pkg_version: bool, enable_profiling: bool, - transitive_deps: [None, dict[str, list[str]]] = None) -> PackagesInfo: + transitive_deps: [None, dict[str, list[str]]] = None, + pkgname: str | 
None = None) -> PackagesInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] # Collect library dependencies. Note that these don't need to be in a @@ -292,8 +293,8 @@ def get_packages_info( exposed_package_libs.hidden(lib.empty_libs) for pkg, mods in transitive_deps.items(): - if not pkg in lib_objects: - # TODO More robust handling. We want to skip self-package references here. + if pkg == pkgname: + # Skip dependencies from the same package. continue for mod in mods: exposed_package_objects.append(lib_objects[pkg][mod]) @@ -369,6 +370,7 @@ def _common_compile_args( specify_pkg_version = False, enable_profiling = enable_profiling, transitive_deps = transitive_deps, + pkgname = pkgname, ) compile_args.add(packages_info.exposed_package_args) From 6d40bf0326bb6d788a1ce1c08674a89474cfa43c Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 27 Mar 2024 14:38:13 +0100 Subject: [PATCH 0090/1133] [buck2] Create a separate package db for the empty lib After #69, the real dynamic library is no longer needed by template haskell, but is used by GHCi when loading a package into the repl. During compilation we want to avoid race conditions that might occur running without isolation when ghc tries to use an old library file that is removed during the action or a file that is not yet fully written. Fixes #84 --- prelude/haskell/compile.bzl | 7 ++++++- prelude/haskell/haskell.bzl | 34 +++++++++++++++++++++++--------- prelude/haskell/haskell_ghci.bzl | 1 + 3 files changed, 32 insertions(+), 10 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 2562dea12..db999c3b7 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -71,6 +71,8 @@ HaskellLibraryInfo = record( name = str, # package config database: e.g. platform009/build/ghc/lib/package.conf.d db = Artifact, + # package config database, referring to the empty lib which is only used for compilation + empty_db = Artifact, # e.g. 
"base-4.13.0.0" id = str, # Import dirs indexed by profiling enabled/disabled @@ -161,6 +163,7 @@ def target_metadata( LinkStyle("shared"), specify_pkg_version = False, enable_profiling = False, + use_empty_lib = True, ) # The object and interface file paths are depending on the real module name @@ -250,6 +253,7 @@ def get_packages_info( link_style: LinkStyle, specify_pkg_version: bool, enable_profiling: bool, + use_empty_lib: bool, transitive_deps: [None, dict[str, list[str]]] = None, pkgname: str | None = None) -> PackagesInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -310,7 +314,7 @@ def get_packages_info( for lib in libs.values(): # These we need to add for all the packages/dependencies, i.e. # direct and transitive (e.g. `fbcode-common-hs-util-hs-array`) - packagedb_args.add("-package-db", lib.db) + packagedb_args.add("-package-db", lib.empty_db if use_empty_lib else lib.db) haskell_direct_deps_lib_infos = _attr_deps_haskell_lib_infos( ctx, @@ -369,6 +373,7 @@ def _common_compile_args( link_style, specify_pkg_version = False, enable_profiling = enable_profiling, + use_empty_lib = True, transitive_deps = transitive_deps, pkgname = pkgname, ) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 5ce616ba2..7f4e50abc 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -397,9 +397,7 @@ PKGCONF=$3 # - controlling module visibility: only dependencies that are # directly declared as dependencies may be used # -# - Template Haskell: the compiler needs to load libraries itself -# at compile time, so it uses the package specs to find out -# which libraries and where. 
+# - by GHCi when loading packages into the repl def _make_package( ctx: AnalysisContext, link_style: LinkStyle, @@ -408,7 +406,8 @@ def _make_package( hlis: list[HaskellLibraryInfo], hi: dict[bool, list[Artifact]], lib: dict[bool, Artifact], - enable_profiling: bool) -> Artifact: + enable_profiling: bool, + use_empty_lib: bool) -> Artifact: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) # Don't expose boot sources, as they're only meant to be used for compiling. @@ -431,10 +430,10 @@ def _make_package( mk_artifact_dir("mod", profiled) for profiled in hi.keys() ] - library_dirs = [ + library_dirs = ["${pkgroot}/empty/lib-shared"] if use_empty_lib else [ mk_artifact_dir("lib", profiled) for profiled in hi.keys() - ] + ["${pkgroot}/empty/lib-shared"] + ] conf = [ "name: " + pkgname, @@ -448,9 +447,12 @@ def _make_package( "extra-libraries: " + libname, "depends: " + ", ".join(uniq_hlis), ] - pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + ".conf", conf) - - db = ctx.actions.declare_output("db-" + artifact_suffix) + if use_empty_lib: + pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + "_empty.conf", conf) + db = ctx.actions.declare_output("db-" + artifact_suffix + "_empty", dir = True) + else: + pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + ".conf", conf) + db = ctx.actions.declare_output("db-" + artifact_suffix, dir = True) db_deps = {} for x in uniq_hlis.values(): @@ -475,6 +477,7 @@ def _make_package( pkg_conf, ]), category = "haskell_package_" + artifact_suffix.replace("-", "_"), + identifier = "empty" if use_empty_lib else "final", env = {"GHC_PACKAGE_PATH": ghc_package_path}, ) @@ -658,11 +661,24 @@ def _build_haskell_lib( import_artifacts, library_artifacts, enable_profiling = enable_profiling, + use_empty_lib = False, + ) + empty_db = _make_package( + ctx, + link_style, + pkgname, + libstem, + uniq_infos, + import_artifacts, + library_artifacts, + enable_profiling = enable_profiling, + use_empty_lib = True, ) hlib = 
HaskellLibraryInfo( name = pkgname, db = db, + empty_db = empty_db, id = pkgname, import_dirs = import_artifacts, objects = object_artifacts, diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index d41d30c1e..c76af6854 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -629,6 +629,7 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: link_style, specify_pkg_version = True, enable_profiling = enable_profiling, + use_empty_lib = False, ) # Create package db symlinks From 3dafbf3454bb195556ec601aab5038659af43798 Mon Sep 17 00:00:00 2001 From: Pepe Iborra Date: Wed, 14 Feb 2024 08:37:27 -0800 Subject: [PATCH 0091/1133] improve handling of files without an owner Summary: Return an empty json value rather than crashing and killing the ide Reviewed By: malanka Differential Revision: D53757189 fbshipit-source-id: e0c9d995dd702b6997649d2303e9ba2956b15264 --- prelude/haskell/ide/ide.bxl | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index 9189073e8..b2ec93fd1 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -62,7 +62,18 @@ def _solution_for_file(ctx, file, project_universe): target_universe = ctx.target_universe(unconfigured_owners).target_set() owners = ctx.cquery().owner(file, target_universe) if not owners or len(owners) == 0: - fail("No owner found for " + file) + return { + "external_dependencies": [], + "flags": [], + "generated_dependencies": [], + "haskell_deps": {}, + "import_dirs": [], + "owner": "No owner found for " + file, + "project": "", + "project_type": "", + "sources": [], + "targets": [], + } owner = owners[0] From e78fa78a879ab84321c074d8eeb33233aec811c5 Mon Sep 17 00:00:00 2001 From: Pepe Iborra Date: Mon, 15 Jan 2024 06:32:35 -0800 Subject: [PATCH 0092/1133] deduplicate prebuilt package dbs Summary: Without deduplication we generate hundreds of redundant 
`--package-db` args which noticeably slow down `ghci` load times Reviewed By: ndmitchell, simonmar Differential Revision: D51025265 fbshipit-source-id: 1d1655ae223990fe85bfe63dd43b7b49fd4f6670 --- prelude/haskell/haskell_ghci.bzl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index c76af6854..0842bdc2f 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -638,11 +638,11 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: package_symlinks_root = ctx.label.name + ".packages" packagedb_args = cmd_args(delimiter = " ") - prebuilt_packagedb_args = cmd_args(delimiter = " ") + prebuilt_packagedb_args_set = {} for lib in packages_info.transitive_deps: if lib.is_prebuilt: - prebuilt_packagedb_args.add(lib.db) + prebuilt_packagedb_args_set[lib.db] = lib.db else: lib_symlinks_root = paths.join( package_symlinks_root, @@ -673,6 +673,7 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: "packagedb", ), ) + prebuilt_packagedb_args = cmd_args(prebuilt_packagedb_args_set.values(), delimiter = " ") script_templates = [] for script_template in ctx.attrs.extra_script_templates: From aa8723fdbc0c981f53277d8b7adcd91561e047b3 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 20 Mar 2024 12:18:30 +0100 Subject: [PATCH 0093/1133] [buck2] Handle sources being a dict or a list of artifacts --- prelude/haskell/ide/ide.bxl | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index b2ec93fd1..8eddb53f0 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -8,6 +8,7 @@ load("@prelude//haskell:compile.bzl", "HaskellLibraryProvider") load("@prelude//haskell:link_info.bzl", "HaskellLinkInfo") load("@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo") +load("@prelude//haskell:util.bzl", "srcs_to_pairs") 
load("@prelude//linking:link_info.bzl", "LinkStyle") load("@prelude//paths.bzl", "paths") @@ -149,13 +150,15 @@ def _solution_for_haskell_lib(ctx, target, exclude): if lb != None: haskellLibs[dep.label] = lb - sources = [] - for item in ctx.output.ensure_multiple(resolved_attrs.srcs.values()): - sources.append(item.abs_path()) + target_srcs = dict(srcs_to_pairs(resolved_attrs.srcs)) + sources = [ + item.abs_path() + for item in ctx.output.ensure_multiple(target_srcs.values()) + ] import_dirs = {} root = ctx.root() - for key, item in resolved_attrs.srcs.items(): + for key, item in target_srcs.items(): # because BXL wont give you the path of an ensured artifact sp = get_path_without_materialization(item, ctx) (_, ext) = paths.split_extension(sp) From fa006d88d27644b78798321419c1f81dac6a4dd3 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 20 Mar 2024 12:19:04 +0100 Subject: [PATCH 0094/1133] [buck2] Only add -pgm* flags when corresponding toolchain attribute is set --- prelude/haskell/ide/ide.bxl | 34 +++++++++++++++++++++++----------- 1 file changed, 23 insertions(+), 11 deletions(-) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index 8eddb53f0..3ed163e4c 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -168,11 +168,6 @@ def _solution_for_haskell_lib(ctx, target, exclude): haskell_toolchain = ctx.analysis(resolved_attrs._haskell_toolchain.label) toolchain = haskell_toolchain.providers().get(HaskellToolchainInfo) - binutils_path = paths.join(root, toolchain.ghci_binutils_path) - cc_path = paths.join(root, toolchain.ghci_cc_path) - cxx_path = paths.join(root, toolchain.ghci_cxx_path) - cpp_path = paths.join(root, toolchain.ghci_cpp_path) - flags = [ "-this-unit-id", "fbcode_fake_unit_id", @@ -182,13 +177,30 @@ def _solution_for_haskell_lib(ctx, target, exclude): "-no-global-package-db", "-no-user-package-db", "-hide-all-packages", - "-pgma%s" % cc_path, - "-pgml%s" % cxx_path, - "-pgmc%s" % cc_path, - 
"-pgmP%s" % cpp_path, - "-opta-B%s" % binutils_path, - "-optc-B%s" % binutils_path, ] + + if toolchain.ghci_binutils_path: + binutils_path = paths.join(root, toolchain.ghci_binutils_path) + flags.extend([ + "-opta-B%s" % binutils_path, + "-optc-B%s" % binutils_path, + ]) + + if toolchain.ghci_cc_path: + cc_path = paths.join(root, toolchain.ghci_cc_path) + flags.extend([ + "-pgma%s" % cc_path, + "-pgmc%s" % cc_path, + ]) + + if toolchain.ghci_cxx_path: + cxx_path = paths.join(root, toolchain.ghci_cxx_path) + flags.append("-pgml%s" % cxx_path) + + if toolchain.ghci_cpp_path: + cpp_path = paths.join(root, toolchain.ghci_cpp_path) + flags.append("-pgmP%s" % cpp_path) + flags.extend(resolved_attrs.compiler_flags) return { From f392175e656e4751414ecd1d065a41fd63fa7b06 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 20 Mar 2024 12:24:02 +0100 Subject: [PATCH 0095/1133] [buck2] Only pass haskell sources on to the IDE --- prelude/haskell/ide/ide.bxl | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index 3ed163e4c..92e4377be 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -8,7 +8,7 @@ load("@prelude//haskell:compile.bzl", "HaskellLibraryProvider") load("@prelude//haskell:link_info.bzl", "HaskellLinkInfo") load("@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo") -load("@prelude//haskell:util.bzl", "srcs_to_pairs") +load("@prelude//haskell:util.bzl", "is_haskell_src", "srcs_to_pairs") load("@prelude//linking:link_info.bzl", "LinkStyle") load("@prelude//paths.bzl", "paths") @@ -150,7 +150,11 @@ def _solution_for_haskell_lib(ctx, target, exclude): if lb != None: haskellLibs[dep.label] = lb - target_srcs = dict(srcs_to_pairs(resolved_attrs.srcs)) + target_srcs = { + k: v + for k, v in srcs_to_pairs(resolved_attrs.srcs) + if is_haskell_src(k) + } sources = [ item.abs_path() for item in ctx.output.ensure_multiple(target_srcs.values()) From 
7a868a436623db957f4bb4eafbc4cac3ad672e1d Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 20 Mar 2024 12:48:01 +0100 Subject: [PATCH 0096/1133] [buck2] Add -package flags for toolchain libraries --- prelude/haskell/ide/ide.bxl | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index 92e4377be..32bd23b7d 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -7,7 +7,7 @@ load("@prelude//haskell:compile.bzl", "HaskellLibraryProvider") load("@prelude//haskell:link_info.bzl", "HaskellLinkInfo") -load("@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo") +load("@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo", "HaskellToolchainLibrary") load("@prelude//haskell:util.bzl", "is_haskell_src", "srcs_to_pairs") load("@prelude//linking:link_info.bzl", "LinkStyle") load("@prelude//paths.bzl", "paths") @@ -143,12 +143,19 @@ def _solution_for_haskell_lib(ctx, target, exclude): hli = ctx.analysis(target).providers().get(HaskellLibraryProvider) haskellLibs = {} + toolchain_libs = [] + for dep in resolved_attrs.deps + resolved_attrs.template_deps: if exclude.get(dep.label) == None: providers = ctx.analysis(dep.label).providers() lb = providers.get(HaskellLinkInfo) if lb != None: haskellLibs[dep.label] = lb + continue + + lb = providers.get(HaskellToolchainLibrary) + if lb != None: + toolchain_libs.append(lb.name) target_srcs = { k: v @@ -212,6 +219,7 @@ def _solution_for_haskell_lib(ctx, target, exclude): "flags": flags, "generated_dependencies": externalSourcesForTarget(ctx, target), "haskell_deps": haskellLibs, + "toolchain_libs": toolchain_libs, "import_dirs": import_dirs.keys(), "sources": sources, "targets": targetsForTarget(ctx, target), @@ -288,6 +296,9 @@ def _assembleSolution(ctx, linkStyle, result): ctx.output.ensure_multiple(hli.libs) ctx.output.ensure_multiple(hli.import_dirs.values()) package_dbs[hli.db] = () + for lib in 
result["toolchain_libs"]: + flags.append("-package") + flags.append(lib) for pkgdb in ctx.output.ensure_multiple(package_dbs.keys()): flags.append("-package-db") flags.append(pkgdb.abs_path()) From 8058d033d932761bbff551f5e9f6f3b15cc71ab9 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 20 Mar 2024 13:26:36 +0100 Subject: [PATCH 0097/1133] [buck2] Remove `-no-global-package-db` flag This is the db which contains the haskell toolchain libraries. --- prelude/haskell/ide/ide.bxl | 1 - 1 file changed, 1 deletion(-) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index 32bd23b7d..56b5a5f6e 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -185,7 +185,6 @@ def _solution_for_haskell_lib(ctx, target, exclude): "-optP-undef", "-optP-traditional-cpp", "-I.", - "-no-global-package-db", "-no-user-package-db", "-hide-all-packages", ] From ae48d8e964bd257f2f86245081536fb014cc7277 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 21 Mar 2024 10:19:01 +0100 Subject: [PATCH 0098/1133] [buck2] Flatten import dirs before passing it to ensure_multiple --- prelude/haskell/ide/ide.bxl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index 56b5a5f6e..94cb0ca2b 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -293,7 +293,8 @@ def _assembleSolution(ctx, linkStyle, result): flags.append(hli.name) ctx.output.ensure_multiple(hli.stub_dirs) ctx.output.ensure_multiple(hli.libs) - ctx.output.ensure_multiple(hli.import_dirs.values()) + import_dirs = [d for ds in hli.import_dirs.values() for d in ds] + ctx.output.ensure_multiple(import_dirs) package_dbs[hli.db] = () for lib in result["toolchain_libs"]: flags.append("-package") From 909df871dae4288b07772786930ec0d8f1e8ae11 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 21 Mar 2024 12:09:18 +0100 Subject: [PATCH 0099/1133] [buck2] Use link style "shared" for ide --- 
prelude/haskell/ide/ide.bxl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index 94cb0ca2b..2566175cc 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -34,7 +34,7 @@ _HASKELL_BIN = "prelude//rules.bzl:haskell_binary" _HASKELL_IDE = "prelude//rules.bzl:haskell_ide" _HASKELL_LIB = "prelude//rules.bzl:haskell_library" -linkStyle = LinkStyle("static") +linkStyle = LinkStyle("shared") def _impl_target(ctx): target = ctx.cli_args.target From 6e2405f94c5cdc3fb52e2d58782c685fd1b700fe Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 22 Mar 2024 09:20:01 +0100 Subject: [PATCH 0100/1133] [buck2] Add `-dynamic` to flags for IDE This is required to let ghcide being able to load the .dyn_hi files from local dependent packages. --- prelude/haskell/ide/ide.bxl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index 2566175cc..edb506f56 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -182,6 +182,7 @@ def _solution_for_haskell_lib(ctx, target, exclude): flags = [ "-this-unit-id", "fbcode_fake_unit_id", + "-dynamic", "-optP-undef", "-optP-traditional-cpp", "-I.", From 781ba0e4b467f0626040646ef2f2b589fb3b7f4d Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 25 Mar 2024 09:30:00 +0100 Subject: [PATCH 0101/1133] [buck2] Clear search path for IDE --- prelude/haskell/ide/ide.bxl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index edb506f56..aaf528b91 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -188,6 +188,7 @@ def _solution_for_haskell_lib(ctx, target, exclude): "-I.", "-no-user-package-db", "-hide-all-packages", + "-i", ] if toolchain.ghci_binutils_path: From 4f13480d4366b0b9100ab1ea59b2fc8f93b414d1 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 25 Mar 2024 09:32:43 +0100 Subject: 
[PATCH 0102/1133] [buck2] Do not use the same unit id for each component Not sure this is a problem per se, but it looks a bit fishy to me and it helps with debugging haskell-language-server output / cache directory entries since it is easier to identify the package. This should probably take cell and package in account too. --- prelude/haskell/ide/ide.bxl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index aaf528b91..843250d09 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -181,7 +181,7 @@ def _solution_for_haskell_lib(ctx, target, exclude): flags = [ "-this-unit-id", - "fbcode_fake_unit_id", + "{}-fake_unit_id".format(target.label.name), "-dynamic", "-optP-undef", "-optP-traditional-cpp", From 2be09d0cb4691e8b09e25d44bfdc9300b7d5adbd Mon Sep 17 00:00:00 2001 From: Wilfred Hughes Date: Tue, 9 Jan 2024 09:32:22 -0800 Subject: [PATCH 0103/1133] rust-project: Configure OUT_DIR for a single protobuf target Summary: Previously, we would add OUT_DIR for every target that depended on protobuf. If we tried to use rust-target with multiple targets using protobuf, buck would error on duplicate outputs. 
Reviewed By: davidbarsky Differential Revision: D52613945 fbshipit-source-id: 253d0a7a33e46e42fc96f25b2f3aef999ff7bed5 --- prelude/rust/rust-analyzer/resolve_deps.bxl | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/prelude/rust/rust-analyzer/resolve_deps.bxl b/prelude/rust/rust-analyzer/resolve_deps.bxl index 1d031ca09..5ee8e8f24 100644 --- a/prelude/rust/rust-analyzer/resolve_deps.bxl +++ b/prelude/rust/rust-analyzer/resolve_deps.bxl @@ -100,21 +100,28 @@ def cquery_deps(ctx, top_targets, workspaces, actions): cfg["crate_root"] = thrift["artifact"] out[target.label.raw_target()] = cfg elif "generated_protobuf_library_rust" in labels.value(): - protobuf = materialize_generated_protobufs(ctx, target, actions) - out[target.label.raw_target()] = _process_target_config(ctx, target, in_workspace, out_dir = protobuf.abs_path()) + protobuf_out_dir = materialize_generated_protobufs(ctx, target, actions, seen) + out[target.label.raw_target()] = _process_target_config(ctx, target, in_workspace, protobuf_out_dir) else: out[target.label.raw_target()] = _process_target_config(ctx, target, in_workspace) return out -def materialize_generated_protobufs(ctx, target, actions): +def materialize_generated_protobufs(ctx, target, actions, seen): + """If `target` has a dependency that generates code from protobufs, + materialize the generated code and return the path to the output directory. 
+ """ prost_target = target.attrs_lazy().get("named_deps").value().get("generated_prost_target") t = prost_target.raw_target() analysis = ctx.analysis(t) output = analysis.providers()[DefaultInfo].default_outputs[0] outfile = "{}/{}/{}".format(t.cell, t.package, t.name) - copied = ctx.output.ensure(actions.copy_file(outfile, output)) - return copied + if outfile in seen: + return None + seen[outfile] = () + + copied = ctx.output.ensure(actions.copy_file(outfile, output)) + return copied.abs_path() def materialize_generated_thrift(ctx, target, actions, seen): mapped_srcs = target.attrs_lazy().get("mapped_srcs").value() From f13a8e4be285e0c950e050f6e4fe655973a8eda3 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Tue, 9 Jan 2024 22:25:11 -0800 Subject: [PATCH 0104/1133] rules: `preferred_linkage` attribute on `prebuilt_rust_library` Summary: The `link_style` attribute that was here before makes little sense - that attribute doesn't even exist on regular rust libraries. The `preferred_linkage` attribute also has limited use, since we don't give prebuilt libraries a way to specify DSOs, but it does have extra semantics that may be useful (later in the stack) Reviewed By: dtolnay Differential Revision: D52648699 fbshipit-source-id: cfc726475ff9ac6470a925620c0d52ece0db7e9d --- prelude/decls/rust_rules.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/decls/rust_rules.bzl b/prelude/decls/rust_rules.bzl index 4ac22cbe9..703436c81 100644 --- a/prelude/decls/rust_rules.bzl +++ b/prelude/decls/rust_rules.bzl @@ -9,7 +9,7 @@ load("@prelude//cxx/user:link_group_map.bzl", "link_group_map_attr") load("@prelude//rust:link_info.bzl", "RustProcMacroPlugin") load("@prelude//rust:rust_binary.bzl", "rust_binary_impl", "rust_test_impl") load("@prelude//rust:rust_library.bzl", "prebuilt_rust_library_impl", "rust_library_impl") -load(":common.bzl", "LinkableDepType", "Linkage", "buck", "prelude_rule") +load(":common.bzl", "Linkage", "buck", "prelude_rule") 
load(":native_common.bzl", "native_common") load(":re_test_common.bzl", "re_test_common") load(":rust_common.bzl", "rust_common", "rust_target_dep") @@ -51,6 +51,7 @@ prebuilt_rust_library = prelude_rule( 'libfoo-abc123def456.rlib' if it has symbol versioning metadata. """), } | + native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage, default = "any")) | rust_common.crate(crate_type = attrs.string(default = "")) | rust_common.deps_arg(is_binary = False) | { @@ -58,7 +59,6 @@ prebuilt_rust_library = prelude_rule( "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), - "link_style": attrs.option(attrs.enum(LinkableDepType), default = None), "proc_macro": attrs.bool(default = False), } | rust_common.cxx_toolchain_arg() | From 9981c67a038f835e7bf792e0ee226c4eee3f332b Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Wed, 10 Jan 2024 06:49:55 -0800 Subject: [PATCH 0105/1133] Move the resource broker to prelude under apple/tools Summary: We need it for testing. Move it from xplat to prelude//apple/tools. Make the necessary reference updates in the platform, etc. 
Reviewed By: blackm00n Differential Revision: D52636612 fbshipit-source-id: 75e59fbf644ab9d18edf2d15720e14af08fd23e4 --- prelude/apple/tools/resource_broker/BUCK.v2 | 31 +++ .../tools/resource_broker/idb_companion.py | 22 ++ .../apple/tools/resource_broker/idb_target.py | 40 ++++ prelude/apple/tools/resource_broker/ios.py | 204 ++++++++++++++++++ prelude/apple/tools/resource_broker/macos.py | 41 ++++ prelude/apple/tools/resource_broker/main.py | 101 +++++++++ .../tools/resource_broker/simctl_runtime.py | 64 ++++++ .../apple/tools/resource_broker/timeouts.py | 12 ++ prelude/apple/tools/resource_broker/utils.py | 140 ++++++++++++ 9 files changed, 655 insertions(+) create mode 100644 prelude/apple/tools/resource_broker/BUCK.v2 create mode 100644 prelude/apple/tools/resource_broker/idb_companion.py create mode 100644 prelude/apple/tools/resource_broker/idb_target.py create mode 100644 prelude/apple/tools/resource_broker/ios.py create mode 100644 prelude/apple/tools/resource_broker/macos.py create mode 100644 prelude/apple/tools/resource_broker/main.py create mode 100644 prelude/apple/tools/resource_broker/simctl_runtime.py create mode 100644 prelude/apple/tools/resource_broker/timeouts.py create mode 100644 prelude/apple/tools/resource_broker/utils.py diff --git a/prelude/apple/tools/resource_broker/BUCK.v2 b/prelude/apple/tools/resource_broker/BUCK.v2 new file mode 100644 index 000000000..6c14216df --- /dev/null +++ b/prelude/apple/tools/resource_broker/BUCK.v2 @@ -0,0 +1,31 @@ +python_binary( + name = "resource_broker", + main = "main.py", + visibility = ["PUBLIC"], + deps = [ + ":main", + ], +) + +python_library( + name = "main", + srcs = ["main.py"], + deps = [ + ":lib", + ], +) + +python_library( + name = "lib", + srcs = glob( + [ + "*.py", + ], + exclude = [ + "main.py", + ], + ), + deps = [ + "fbsource//third-party/pypi/dataclasses-json:dataclasses-json", + ], +) diff --git a/prelude/apple/tools/resource_broker/idb_companion.py 
b/prelude/apple/tools/resource_broker/idb_companion.py new file mode 100644 index 000000000..f831e32cc --- /dev/null +++ b/prelude/apple/tools/resource_broker/idb_companion.py @@ -0,0 +1,22 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import os +import signal +from dataclasses import dataclass +from io import TextIOWrapper + + +@dataclass +class IdbCompanion: + socket_address: str + pid: int + stderr: TextIOWrapper + + def cleanup(self) -> None: + os.kill(self.pid, signal.SIGTERM) + self.stderr.close() diff --git a/prelude/apple/tools/resource_broker/idb_target.py b/prelude/apple/tools/resource_broker/idb_target.py new file mode 100644 index 000000000..37de481dc --- /dev/null +++ b/prelude/apple/tools/resource_broker/idb_target.py @@ -0,0 +1,40 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +import json +from dataclasses import dataclass +from enum import Enum +from typing import List, Optional + +from dataclasses_json import dataclass_json + + +class SimState(str, Enum): + booted = "Booted" + shutdown = "Shutdown" + + +@dataclass_json +@dataclass +class IdbTarget: + name: str + os_version: str + udid: str + state: SimState + host: str = "" + port: int = 0 + + +def managed_simulators_from_stdout(stdout: Optional[str]) -> List[IdbTarget]: + if not stdout: + return [] + targets = map( + # pyre-ignore[16]: `from_dict` is dynamically provided by `dataclass_json` + IdbTarget.from_dict, + json.loads(stdout), + ) + return list(targets) diff --git a/prelude/apple/tools/resource_broker/ios.py b/prelude/apple/tools/resource_broker/ios.py new file mode 100644 index 000000000..a3ae35d19 --- /dev/null +++ b/prelude/apple/tools/resource_broker/ios.py @@ -0,0 +1,204 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +import os + +from distutils.version import StrictVersion +from typing import List, Optional + +from .idb_companion import IdbCompanion + +from .idb_target import IdbTarget, managed_simulators_from_stdout, SimState + +from .simctl_runtime import list_ios_runtimes, XCSimRuntime + +from .timeouts import SIMULATOR_BOOT_TIMEOUT + +from .utils import ( + execute_generic_text_producing_command, + spawn_companion, + wait_for_idb_companions, +) + + +def _device_set_path() -> str: + return os.path.expanduser("~/Library/Developer/Buck2IdbDeviceSet") + + +def _list_managed_simulators_command(simulator_manager: str) -> List[str]: + return [ + simulator_manager, + "list", + "--device-set-path", + _device_set_path(), + "--only", + "simulator", + ] + + +def _create_simulator_command(simulator_manager: str, sim_spec: str) -> List[str]: + return [ + simulator_manager, + "create", + "--device-set-path", + _device_set_path(), + "--configuration", + sim_spec, + ] + + +def _boot_simulator_command(simulator_manager: str, udid: str) -> List[str]: + return [ + simulator_manager, + "boot", + "--device-set-path", + _device_set_path(), + udid, + ] + + +def _compatible_device_type_from_runtime(runtime: XCSimRuntime) -> Optional[str]: + iphones = filter( + lambda t: t.product_family == "iPhone", runtime.supported_device_types + ) + if not iphones: + return None + default = next(iphones) + return next( + (device_type.name for device_type in iphones if device_type.name == "iPhone 8"), + default.name, + ) + + +def _select_latest_simulator_spec(runtimes: List[XCSimRuntime]) -> str: + runtimes.sort(key=lambda x: StrictVersion(x.version), reverse=True) + for runtime in runtimes: + device_type = _compatible_device_type_from_runtime(runtime) + if device_type: + return f"{device_type},{runtime.name}" + raise RuntimeError( + "No XCode simctl compatible iOS runtime and device available. Try to `sudo xcode-select -s ` and *open Xcode to install all required components*." 
+ ) + + +def _spawn_companion_for_simulator_command( + udid: str, grpc_domain_sock: str +) -> List[str]: + return [ + "idb_companion", + "--device-set-path", + _device_set_path(), + "--udid", + udid, + "--only", + "simulator", + "--grpc-domain-sock", + grpc_domain_sock, + ] + + +async def _generic_managed_simulators_command( + name: str, cmd: List[str] +) -> List[IdbTarget]: + stdout = await execute_generic_text_producing_command(name=name, cmd=cmd) + return managed_simulators_from_stdout(stdout) + + +async def _list_managed_simulators(simulator_manager: str) -> List[IdbTarget]: + list_cmd = _list_managed_simulators_command(simulator_manager=simulator_manager) + return await _generic_managed_simulators_command( + name="list managed simulators", cmd=list_cmd + ) + + +async def _create_simulator(simulator_manager: str) -> List[IdbTarget]: + runtimes = await list_ios_runtimes() + spec = _select_latest_simulator_spec(runtimes) + create_cmd = _create_simulator_command( + simulator_manager=simulator_manager, sim_spec=spec + ) + return await _generic_managed_simulators_command( + name="create simulators", cmd=create_cmd + ) + + +async def _get_managed_simulators_create_if_needed( + simulator_manager: str, +) -> List[IdbTarget]: + managed_simulators = await _list_managed_simulators( + simulator_manager=simulator_manager + ) + if managed_simulators: + return managed_simulators + + managed_simulators = await _create_simulator(simulator_manager=simulator_manager) + if managed_simulators: + return managed_simulators + + raise RuntimeError( + "Failed to create an iOS simulator. Try to `sudo xcode-select -s ` and *open Xcode to install all required components*." 
+ ) + + +def _select_simulator( + only_booted: bool, all_simulators: List[IdbTarget] +) -> Optional[IdbTarget]: + return next( + filter( + lambda s: s.state == SimState.booted if only_booted else True, + iter(all_simulators), + ), + None, + ) + + +def _select_simulator_with_preference( + prefer_booted: bool, all_simulators: List[IdbTarget] +) -> IdbTarget: + simulator = _select_simulator( + only_booted=prefer_booted, all_simulators=all_simulators + ) + if not simulator and prefer_booted: + simulator = _select_simulator(only_booted=False, all_simulators=all_simulators) + if not simulator: + raise RuntimeError("Expected at least unbooted simulator entity to be selected") + return simulator + + +async def _ios_simulator(simulator_manager: str, booted: bool) -> List[IdbCompanion]: + managed_simulators = await _get_managed_simulators_create_if_needed( + simulator_manager=simulator_manager + ) + simulator = _select_simulator_with_preference( + prefer_booted=booted, all_simulators=managed_simulators + ) + if simulator.state != SimState.booted and booted: + boot_cmd = _boot_simulator_command( + simulator_manager=simulator_manager, udid=simulator.udid + ) + await execute_generic_text_producing_command( + name="boot simulator", + cmd=boot_cmd, + timeout=SIMULATOR_BOOT_TIMEOUT, + ) + + grpc_domain_sock = f"/tmp/buck2_idb_companion_{simulator.udid}" + process = await spawn_companion( + command=_spawn_companion_for_simulator_command( + simulator.udid, grpc_domain_sock + ), + log_file_suffix=f"companion_launch_logs_for_{simulator.udid}.log", + ) + return await wait_for_idb_companions([process]) + + +async def ios_unbooted_simulator(simulator_manager: str) -> List[IdbCompanion]: + return await _ios_simulator(simulator_manager=simulator_manager, booted=False) + + +async def ios_booted_simulator(simulator_manager: str) -> List[IdbCompanion]: + return await _ios_simulator(simulator_manager=simulator_manager, booted=True) diff --git a/prelude/apple/tools/resource_broker/macos.py 
b/prelude/apple/tools/resource_broker/macos.py new file mode 100644 index 000000000..d3aeaa4f9 --- /dev/null +++ b/prelude/apple/tools/resource_broker/macos.py @@ -0,0 +1,41 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import asyncio +from typing import cast, List + +from .idb_companion import IdbCompanion + +from .utils import IdbCompanionProcess, spawn_companion, wait_for_idb_companions + + +def _boot_macos_companion_command(grpc_domain_sock: str) -> List[str]: + return [ + "idb_companion", + "--udid", + "mac", + "--grpc-domain-sock", + grpc_domain_sock, + ] + + +async def macos_idb_companions() -> List[IdbCompanion]: + addresses = [(i, f"/tmp/buck2_idb_companion_mac_{i}") for i in range(10)] + awaitables = [ + spawn_companion( + command=_boot_macos_companion_command(addr), + log_file_suffix=f"macos_companion_{i}.log", + ) + for i, addr in addresses + ] + results = await asyncio.gather(*awaitables, return_exceptions=True) + + if exception := next(filter(lambda r: isinstance(r, BaseException), results), None): + [r.cleanup() for r in results if isinstance(r, IdbCompanionProcess)] + raise cast(BaseException, exception) + + return await wait_for_idb_companions(cast(List[IdbCompanionProcess], results)) diff --git a/prelude/apple/tools/resource_broker/main.py b/prelude/apple/tools/resource_broker/main.py new file mode 100644 index 000000000..01f1f940b --- /dev/null +++ b/prelude/apple/tools/resource_broker/main.py @@ -0,0 +1,101 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import argparse +import asyncio +import json +import os +import signal +import sys +from enum import Enum +from time import sleep +from typing import List, Optional + +from .idb_companion import IdbCompanion + +from .ios import ios_booted_simulator, ios_unbooted_simulator + +from .macos import macos_idb_companions + +idb_companions: List[IdbCompanion] = [] + + +def _args_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser( + description="Utility which helps to set up IDB companions which are used later by buck2 it runs tests locally." + ) + parser.add_argument( + "--simulator-manager", + required=False, + type=str, + help="Tool to manage simulators and their lifecycle. Required for iOS testing", + ) + parser.add_argument( + "--type", + metavar="", + type=_ResourceType, + choices=[e.value for e in _ResourceType], + required=True, + help=f""" + Type of required resources. + Pass `{_ResourceType.iosUnbootedSimulator}` to get a companion for iOS unbooted simulator. + Pass `{_ResourceType.iosBootedSimulator}` to get a companion for iOS booted simulator. + Pass `{_ResourceType.macosIdbCompanion}` to get MacOS companions. 
+ """, + ) + return parser + + +class _ResourceType(str, Enum): + iosUnbootedSimulator = "ios_unbooted_simulator" + iosBootedSimulator = "ios_booted_simulator" + macosIdbCompanion = "macos_idb_companion" + + +def _exit_gracefully(*args): + for idb_companion in idb_companions: + idb_companion.cleanup() + exit(0) + + +def _check_simulator_manager_exists(simulator_manager: Optional[str]) -> None: + if not simulator_manager: + raise Exception("Simulator manager is not specified") + + +def main(): + args = _args_parser().parse_args() + if args.type == _ResourceType.iosBootedSimulator: + _check_simulator_manager_exists(args.simulator_manager) + idb_companions.extend(asyncio.run(ios_booted_simulator(args.simulator_manager))) + elif args.type == _ResourceType.iosUnbootedSimulator: + _check_simulator_manager_exists(args.simulator_manager) + idb_companions.extend( + asyncio.run(ios_unbooted_simulator(args.simulator_manager)) + ) + elif args.type == _ResourceType.macosIdbCompanion: + idb_companions.extend(asyncio.run(macos_idb_companions())) + pid = os.fork() + if pid == 0: + # child + signal.signal(signal.SIGINT, _exit_gracefully) + signal.signal(signal.SIGTERM, _exit_gracefully) + while True: + sleep(0.1) + else: + # Do not leak open FDs in parent + for c in idb_companions: + c.stderr.close() + result = { + "pid": pid, + "resources": [{"socket_address": c.socket_address} for c in idb_companions], + } + json.dump(result, sys.stdout) + + +if __name__ == "__main__": + main() diff --git a/prelude/apple/tools/resource_broker/simctl_runtime.py b/prelude/apple/tools/resource_broker/simctl_runtime.py new file mode 100644 index 000000000..55a5740d2 --- /dev/null +++ b/prelude/apple/tools/resource_broker/simctl_runtime.py @@ -0,0 +1,64 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import json +from dataclasses import dataclass, field +from typing import List, Optional + +from dataclasses_json import config, dataclass_json + +from .utils import execute_generic_text_producing_command + + +@dataclass_json +@dataclass +class XCSimDevice: + name: str + product_family: str = field(metadata=config(field_name="productFamily")) + + +@dataclass_json +@dataclass +class XCSimRuntime: + name: str + version: str + supported_device_types: List[XCSimDevice] = field( + metadata=config(field_name="supportedDeviceTypes") + ) + + +@dataclass_json +@dataclass +class _XCSimRuntimes: + runtimes: List[XCSimRuntime] + + +def _list_ios_runtimes_command() -> List[str]: + return [ + "xcrun", + "simctl", + "list", + "runtimes", + "iOS", + "available", + "--json", + ] + + +def _simctl_runtimes_from_stdout(stdout: Optional[str]) -> List[XCSimRuntime]: + if not stdout: + return [] + data = json.loads(stdout) + # pyre-ignore[16]: `from_dict` is dynamically provided by `dataclass_json` + return _XCSimRuntimes.from_dict(data).runtimes + + +async def list_ios_runtimes() -> List[XCSimRuntime]: + stdout = await execute_generic_text_producing_command( + name="list iOS runtimes", cmd=_list_ios_runtimes_command() + ) + return _simctl_runtimes_from_stdout(stdout) diff --git a/prelude/apple/tools/resource_broker/timeouts.py b/prelude/apple/tools/resource_broker/timeouts.py new file mode 100644 index 000000000..018044687 --- /dev/null +++ b/prelude/apple/tools/resource_broker/timeouts.py @@ -0,0 +1,12 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +DEFAULT_OPERATION_TIMEOUT = 10 + +# Simulator boot is an expensive command and can take a long time to complete +# depending on machine configuration and current machine load. +SIMULATOR_BOOT_TIMEOUT = 90 diff --git a/prelude/apple/tools/resource_broker/utils.py b/prelude/apple/tools/resource_broker/utils.py new file mode 100644 index 000000000..4e6119bdb --- /dev/null +++ b/prelude/apple/tools/resource_broker/utils.py @@ -0,0 +1,140 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import asyncio +import json +import shlex +from dataclasses import dataclass +from io import TextIOWrapper +from pathlib import Path +from typing import Any, List, Tuple + +from dataclasses_json import dataclass_json + +from .idb_companion import IdbCompanion +from .timeouts import DEFAULT_OPERATION_TIMEOUT + + +@dataclass_json +@dataclass +class _IdbStdout: + grpc_path: str + + +@dataclass +class IdbCompanionProcess: + process: asyncio.subprocess.Process + stderr: TextIOWrapper + stderr_path: Path + + def cleanup(self) -> None: + self.process.terminate() + self.stderr.close() + + +async def _read_until_valid_json(stream: asyncio.StreamReader) -> Any: + buffer = b"" + while True: + data = await stream.readuntil(b"}") + buffer += data + try: + return json.loads(buffer.decode()) + except json.JSONDecodeError: + pass + raise RuntimeError( + "Should not be reachable since either the valid JSON is there or `asyncio.IncompleteReadError` is raised." 
+ ) + + +async def _read_stdout(p: IdbCompanionProcess) -> Tuple[int, TextIOWrapper, Any]: + if not p.process.stdout: + raise ValueError("Expected stdout to be set for idb companion launch process.") + try: + json = await _read_until_valid_json(p.process.stdout) + except asyncio.IncompleteReadError as e: + if not e.partial: + with open(p.stderr_path) as f: + lines = f.readlines() + raise RuntimeError( + f"idb companion terminated unexpectedly with the following stderr:\n{lines}" + ) from e + else: + raise + return p.process.pid, p.stderr, json + + +async def wait_for_idb_companions( + processes: List[IdbCompanionProcess], + timeout: float = DEFAULT_OPERATION_TIMEOUT, +) -> List[IdbCompanion]: + reads = [asyncio.Task(_read_stdout(p)) for p in processes] + done, pending = await asyncio.wait( + reads, + timeout=timeout, + ) + if not pending: + results = [task.result() for task in done] + return [ + IdbCompanion( + # pyre-ignore[16]: `from_dict` is dynamically provided by `dataclass_json` + socket_address=_IdbStdout.from_dict(json_dict).grpc_path, + pid=pid, + stderr=stderr, + ) + for pid, stderr, json_dict in results + ] + + process_index = {reads[i]: processes[i] for i in range(len(processes))} + + stderr_paths = [] + + for task in pending: + task.cancel() + process_info = process_index[task] + stderr_paths.append(str(process_info.stderr_path)) + process_info.process.terminate() + + raise RuntimeError( + f"Timeout when trying to launch idb companions. 
List of files with stderr for pending companions: {stderr_paths}" + ) + + +async def execute_generic_text_producing_command( + name: str, cmd: List[str], timeout: float = DEFAULT_OPERATION_TIMEOUT +) -> str: + process = await asyncio.create_subprocess_exec( + *cmd, + stdin=asyncio.subprocess.DEVNULL, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + stdout, stderr = await asyncio.wait_for(process.communicate(), timeout=timeout) + if process.returncode != 0: + raise RuntimeError( + f"Failed to {name} with command:\n```\n{shlex.join(cmd)}\n```\nstdout:\n```\n{stdout.decode(errors='ignore')}\n```\nstdout:\n```\n{stderr.decode(errors='ignore')}\n```\n" + ) + return stdout.decode() + + +async def spawn_companion( + command: List[str], + log_file_suffix: str, +) -> IdbCompanionProcess: + stderr_path = Path("/tmp/buck2_idb_companion_logs") / f"stderr-{log_file_suffix}" + stderr_path.parent.mkdir(parents=True, exist_ok=True) + stderr = stderr_path.open(mode="w") + process = await asyncio.create_subprocess_exec( + *command, + stdin=asyncio.subprocess.DEVNULL, + stdout=asyncio.subprocess.PIPE, + stderr=stderr, + ) + return IdbCompanionProcess( + process=process, + stderr=stderr, + stderr_path=stderr_path, + ) From 1bc621b7a41b2cdd289630ea947d0981b2ad475f Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 10 Jan 2024 07:10:50 -0800 Subject: [PATCH 0106/1133] Back out "Pass ldflags to cgo tool" Summary: Original commit changeset: 7b94be3db582 Original Phabricator Diff: D52099825 Reviewed By: andrewjcg Differential Revision: D52658443 fbshipit-source-id: d4b431092fe2bdb12f42b22c9424a8cb5f4ca84b --- prelude/go/cgo_library.bzl | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 2b2ab13ed..666bbb9b8 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -102,12 +102,14 @@ def _cgo( args.add(cmd_args(go_toolchain.cgo, format = 
"--cgo={}")) c_compiler = cxx_toolchain.c_compiler_info - linker = cxx_toolchain.linker_info + # linker = cxx_toolchain.linker_info - ldflags = cmd_args( - linker.linker_flags, - go_toolchain.external_linker_flags, - ) + # Passing fbcode-platform ldflags may create S365277, so I would + # comment this change until we really need to do it. + # ldflags = cmd_args( + # linker.linker_flags, + # go_toolchain.external_linker_flags, + # ) # Construct the full C/C++ command needed to preprocess/compile sources. cxx_cmd = cmd_args() @@ -137,7 +139,6 @@ def _cgo( is_executable = True, ) args.add(cmd_args(cxx_wrapper, format = "--env-cc={}")) - args.add(cmd_args(ldflags, format = "--env-ldflags={}")) args.hidden(cxx_cmd) # TODO(agallagher): cgo outputs a dir with generated sources, but I'm not From bacbe3b219e2121c68f609b11083243be13c43bf Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Wed, 10 Jan 2024 19:48:27 -0800 Subject: [PATCH 0107/1133] rules: Fix pre-stripped artifacts (take 2) Summary: Attempt to reland D52499870 Reviewed By: dtolnay Differential Revision: D52683424 fbshipit-source-id: 6e6bd359cfc9d0b6fbb0543cc3cb1d4558e454c0 --- prelude/rust/rust_library.bzl | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index cfca154be..f39a39629 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -666,6 +666,16 @@ def _native_providers( linkables = [SharedLibLinkable(lib = lib.output)], external_debug_info = external_debug_info, ), + ) + else: + link_infos[output_style] = LinkInfos( + default = LinkInfo( + linkables = [ArchiveLinkable( + archive = Archive(artifact = lib.output), + linker_type = linker_type, + )], + external_debug_info = external_debug_info, + ), stripped = LinkInfo( linkables = [ArchiveLinkable( archive = Archive( @@ -679,16 +689,6 @@ def _native_providers( )], ), ) - else: - link_infos[output_style] = LinkInfos( - default = 
LinkInfo( - linkables = [ArchiveLinkable( - archive = Archive(artifact = lib.output), - linker_type = linker_type, - )], - external_debug_info = external_debug_info, - ), - ) preferred_linkage = Linkage(ctx.attrs.preferred_linkage) From 51c11f77e0f35e5398898c82af14f0d80635e666 Mon Sep 17 00:00:00 2001 From: Anna Kukliansky Date: Wed, 10 Jan 2024 20:31:42 -0800 Subject: [PATCH 0108/1133] enable package regex matching for group_mapping Summary: We would like to be able to define link group map by regex matching on a target package. Today we can either do regex on labels or match target pattern by exact target or directory prefix. But for cases such as dsi/logger/configs there are a lot of directories (~100K) under it which we would like to be able to split into finer granularity groups. The trigger for this is user reloc error that is coming from the logger.so itself after it was split into a separate general link group : [post](https://fb.workplace.com/groups/linker.support/posts/1092376945262480/?comment_id=1093963691770472&reply_comment_id=1094532725046902) Reviewed By: chatura-atapattu, athmasagar Differential Revision: D51926276 fbshipit-source-id: a5ad537f84696c9d98a573e0efefcb009a8cc0c6 --- prelude/cxx/groups.bzl | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index 818786991..1237e783a 100644 --- a/prelude/cxx/groups.bzl +++ b/prelude/cxx/groups.bzl @@ -44,6 +44,8 @@ FilterType = enum( "label", # Filters for targets for the build target pattern defined after "pattern:". "pattern", + # Filters for targets matching the regex pattern defined after "target_regex:". 
+ "target_regex", ) BuildTargetFilter = record( @@ -56,6 +58,11 @@ LabelFilter = record( _type = field(FilterType, FilterType("label")), ) +TargetRegexFilter = record( + regex = regex, + _type = field(FilterType, FilterType("target_regex")), +) + # Label for special group mapping which makes every target associated with it to be included in all groups MATCH_ALL_LABEL = "MATCH_ALL" @@ -70,7 +77,7 @@ GroupMapping = record( # The type of traversal to use. traversal = field(Traversal, Traversal("tree")), # Optional filter type to apply to the traversal. - filters = field(list[[BuildTargetFilter, LabelFilter]], []), + filters = field(list[[BuildTargetFilter, LabelFilter, TargetRegexFilter]], []), # Preferred linkage for this target when added to a link group. preferred_linkage = field([Linkage, None], None), ) @@ -191,7 +198,7 @@ def _parse_traversal_from_mapping(entry: str) -> Traversal: else: fail("Unrecognized group traversal type: " + entry) -def _parse_filter(entry: str) -> [BuildTargetFilter, LabelFilter]: +def _parse_filter(entry: str) -> [BuildTargetFilter, LabelFilter, TargetRegexFilter]: for prefix in ("label:", "tag:"): label_regex = strip_prefix(prefix, entry) if label_regex != None: @@ -203,15 +210,19 @@ def _parse_filter(entry: str) -> [BuildTargetFilter, LabelFilter]: regex = regex("^{}$".format(label_regex), fancy = True), ) + target_regex = strip_prefix("target_regex:", entry) + if target_regex != None: + return TargetRegexFilter(regex = regex("^{}$".format(target_regex), fancy = True)) + pattern = strip_prefix("pattern:", entry) if pattern != None: return BuildTargetFilter( pattern = parse_build_target_pattern(pattern), ) - fail("Invalid group mapping filter: {}\nFilter must begin with `label:`, `tag:`, or `pattern:`.".format(entry)) + fail("Invalid group mapping filter: {}\nFilter must begin with `label:`, `tag:`, `target_regex` or `pattern:`.".format(entry)) -def _parse_filter_from_mapping(entry: [list[str], str, None]) -> list[[BuildTargetFilter, 
LabelFilter]]: +def _parse_filter_from_mapping(entry: [list[str], str, None]) -> list[[BuildTargetFilter, LabelFilter, TargetRegexFilter]]: if type(entry) == type([]): return [_parse_filter(e) for e in entry] if type(entry) == type(""): @@ -266,6 +277,9 @@ def _find_targets_in_mapping( if filter._type == FilterType("label"): if not any_labels_match(filter.regex, labels): return False + elif filter._type == FilterType("target_regex"): + target_str = str(target.raw_target()) + return filter.regex.match(target_str) elif not filter.pattern.matches(target): return False return True From 69cbf303df1332605192745ffd28b25495734a83 Mon Sep 17 00:00:00 2001 From: Scott Cao Date: Wed, 10 Jan 2024 23:56:19 -0800 Subject: [PATCH 0109/1133] Add configuration_alias rule Summary: Add a `configuration_alias` rule, which is an alias rule that sets `is_configuration_rule = True`. We need this because modifiers can only be of configuration rules, so right now an alias of a constraint target cannot be used as a modifier. Reviewed By: JakobDegen Differential Revision: D52305659 fbshipit-source-id: 2c1131a6ce019ba97c3d11559c445ff5dc8f19bf --- prelude/configurations/rules.bzl | 4 ++++ prelude/decls/core_rules.bzl | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/prelude/configurations/rules.bzl b/prelude/configurations/rules.bzl index 323560833..66536b833 100644 --- a/prelude/configurations/rules.bzl +++ b/prelude/configurations/rules.bzl @@ -67,6 +67,9 @@ def platform_impl(ctx): ), ] +def configuration_alias_impl(ctx: AnalysisContext) -> list[Provider]: + return ctx.attrs.actual.providers + # TODO(cjhopman): Update the attributes for these ruletypes to declare the types of providers that they expect in their references. 
extra_attributes = { "platform": { @@ -76,6 +79,7 @@ extra_attributes = { implemented_rules = { "config_setting": config_setting_impl, + "configuration_alias": configuration_alias_impl, "constraint_setting": constraint_setting_impl, "constraint_value": constraint_value_impl, "platform": platform_impl, diff --git a/prelude/decls/core_rules.bzl b/prelude/decls/core_rules.bzl index 08e5528bc..2831ac2cf 100644 --- a/prelude/decls/core_rules.bzl +++ b/prelude/decls/core_rules.bzl @@ -221,6 +221,23 @@ config_setting = prelude_rule( ), ) +configuration_alias = prelude_rule( + name = "configuration_alias", + docs = "", + examples = None, + further = None, + attrs = ( + # @unsorted-dict-items + { + # configuration_alias acts like alias but for configuration rules. + + # The configuration_alias itself is a configuration rule and the `actual` argument is + # expected to be a configuration rule as well. + "actual": attrs.dep(pulls_and_pushes_plugins = plugins.All), + } + ), +) + configured_alias = prelude_rule( name = "configured_alias", docs = "", @@ -1486,6 +1503,7 @@ core_rules = struct( alias = alias, command_alias = command_alias, config_setting = config_setting, + configuration_alias = configuration_alias, configured_alias = configured_alias, constraint_setting = constraint_setting, constraint_value = constraint_value, From b44161acc6e923479964cdd06c99061b579d68c6 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Thu, 11 Jan 2024 03:16:00 -0800 Subject: [PATCH 0110/1133] Add `validation_deps` field Summary: ## Context In Buck1, `genrule()` target which appear in the `deps` field of targets would get executed. This was utilised to attach various "validation" genrules which would run some logic. In Buck2, such genrule targets would not be executed unless their outputs are explicitly requested, which for validation rules does not happen. ## Genrule Deps We added the ability to execute all genrule `deps` of `apple_*` rules. 
While this approach works in making sure validation rules run, it has several downsides: - **Performance**: it ends up slowing down builds because *not all* genrules are used for validation. Thus we end up with overbuilding and creating additional waiting on dependents. - **Long Term State**: replicating buck1 behaviour is undesirable as a long-term state because of performance and usability implications. - For example, clearly separating validation deps allows us to handle them differently in the future (e.g., skipping validation in certain use cases, providing explicit `[validation]` subtargets etc). - Prevents unification of `apple_*` rules with `cxx_*` rules due to dependence on the behaviour. ## Overview The solution is to have a separate `validation_deps` field on `apple_binary()`, `apple_library()`, `apple_bundle()` and `apple_test()`. Any targets in the `validation_deps` fields are *guaranteed* to have been built as part of the outputs of the respective rules. This is effectively an opt-in approach to transitioning the pre-existing validation targets. 
Reviewed By: blackm00n Differential Revision: D52656976 fbshipit-source-id: 80e3f4d3865dccea6b6096c23beeeef5b4180530 --- prelude/apple/apple_binary.bzl | 7 ++++--- prelude/apple/apple_bundle.bzl | 7 ++++--- prelude/apple/apple_library.bzl | 7 ++++--- prelude/apple/apple_rules_impl.bzl | 4 ++++ prelude/apple/apple_rules_impl_utility.bzl | 4 ++++ prelude/apple/apple_validation_deps.bzl | 17 +++++++++++++++++ 6 files changed, 37 insertions(+), 9 deletions(-) create mode 100644 prelude/apple/apple_validation_deps.bzl diff --git a/prelude/apple/apple_binary.bzl b/prelude/apple/apple_binary.bzl index 514ec2c23..7b0b9d501 100644 --- a/prelude/apple/apple_binary.bzl +++ b/prelude/apple/apple_binary.bzl @@ -66,6 +66,7 @@ load(":apple_frameworks.bzl", "get_framework_search_path_flags") load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_attr_value", "get_apple_genrule_deps_outputs") load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags") load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_apple_stripped_attr_value_with_default_fallback") +load(":apple_validation_deps.bzl", "get_apple_validation_deps_outputs") load(":debug.bzl", "AppleDebuggableInfo") load(":resource_groups.bzl", "create_resource_graph") load(":xcode.bzl", "apple_populate_xcode_attributes") @@ -111,16 +112,16 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: swift_compile, ) - genrule_deps_outputs = [] + validation_deps_outputs = get_apple_validation_deps_outputs(ctx) if get_apple_build_genrule_deps_attr_value(ctx): - genrule_deps_outputs = get_apple_genrule_deps_outputs(cxx_attr_deps(ctx)) + validation_deps_outputs += get_apple_genrule_deps_outputs(cxx_attr_deps(ctx)) stripped = get_apple_stripped_attr_value_with_default_fallback(ctx) constructor_params = CxxRuleConstructorParams( rule_type = "apple_binary", headers_layout = 
get_apple_cxx_headers_layout(ctx), extra_link_flags = extra_link_flags, - extra_hidden = genrule_deps_outputs, + extra_hidden = validation_deps_outputs, srcs = cxx_srcs, additional = CxxRuleAdditionalParams( srcs = swift_srcs, diff --git a/prelude/apple/apple_bundle.bzl b/prelude/apple/apple_bundle.bzl index c517763ed..74afaf964 100644 --- a/prelude/apple/apple_bundle.bzl +++ b/prelude/apple/apple_bundle.bzl @@ -64,6 +64,7 @@ load(":apple_dsym.bzl", "DSYM_INFO_SUBTARGET", "DSYM_SUBTARGET", "get_apple_dsym load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_attr_value", "get_apple_genrule_deps_outputs") load(":apple_sdk.bzl", "get_apple_sdk_name") load(":apple_universal_binaries.bzl", "create_universal_binary") +load(":apple_validation_deps.bzl", "get_apple_validation_deps_outputs") load( ":debug.bzl", "AggregatedAppleDebugInfo", @@ -320,9 +321,9 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: primary_binary_rel_path = get_apple_bundle_part_relative_destination_path(ctx, primary_binary_part) - genrule_deps_outputs = [] + validation_deps_outputs = get_apple_validation_deps_outputs(ctx) if get_apple_build_genrule_deps_attr_value(ctx): - genrule_deps_outputs = get_apple_genrule_deps_outputs(ctx.attrs.deps) + validation_deps_outputs += get_apple_genrule_deps_outputs(ctx.attrs.deps) sub_targets = assemble_bundle( ctx, @@ -330,7 +331,7 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: apple_bundle_part_list_output.parts, apple_bundle_part_list_output.info_plist_part, SwiftStdlibArguments(primary_binary_rel_path = primary_binary_rel_path), - genrule_deps_outputs, + validation_deps_outputs, ) sub_targets.update(aggregated_debug_info.sub_targets) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index ce7cf4704..520f2a305 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -75,6 +75,7 @@ load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_attr_value", "get_ 
load(":apple_modular_utility.bzl", "MODULE_CACHE_PATH") load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags") load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_apple_stripped_attr_value_with_default_fallback", "get_module_name") +load(":apple_validation_deps.bzl", "get_apple_validation_deps_outputs") load( ":debug.bzl", "AppleDebuggableInfo", @@ -221,16 +222,16 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte relative_args = CPreprocessorArgs(args = [framework_search_paths_flags]), ) - genrule_deps_outputs = [] + validation_deps_outputs = get_apple_validation_deps_outputs(ctx) if get_apple_build_genrule_deps_attr_value(ctx): - genrule_deps_outputs = get_apple_genrule_deps_outputs(cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx)) + validation_deps_outputs += get_apple_genrule_deps_outputs(cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx)) return CxxRuleConstructorParams( rule_type = params.rule_type, is_test = (params.rule_type == "apple_test"), headers_layout = get_apple_cxx_headers_layout(ctx), extra_exported_link_flags = params.extra_exported_link_flags, - extra_hidden = genrule_deps_outputs, + extra_hidden = validation_deps_outputs, extra_link_flags = [_get_linker_flags(ctx)], extra_link_input = swift_object_files, extra_link_input_has_external_debug_info = True, diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 3674ed732..fc6a5345f 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -34,6 +34,8 @@ load(":apple_resource.bzl", "apple_resource_impl") load( ":apple_rules_impl_utility.bzl", "APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME", + "APPLE_VALIDATION_DEPS_ATTR_NAME", + "APPLE_VALIDATION_DEPS_ATTR_TYPE", "apple_bundle_extra_attrs", "apple_dsymutil_attrs", "apple_test_extra_attrs", @@ -99,6 +101,7 @@ def 
_apple_binary_extra_attrs(): APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE, APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE, BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, + APPLE_VALIDATION_DEPS_ATTR_NAME: APPLE_VALIDATION_DEPS_ATTR_TYPE, } attribs.update(apple_dsymutil_attrs()) return attribs @@ -127,6 +130,7 @@ def _apple_library_extra_attrs(): APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE, APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE, BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, + APPLE_VALIDATION_DEPS_ATTR_NAME: APPLE_VALIDATION_DEPS_ATTR_TYPE, } attribs.update(apple_dsymutil_attrs()) return attribs diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index f8d157ba4..e3f48f29c 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -47,6 +47,9 @@ APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME = "_archive_objects_locally_ove APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME = "_use_entitlements_when_adhoc_code_signing" APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME = "use_entitlements_when_adhoc_code_signing" +APPLE_VALIDATION_DEPS_ATTR_NAME = "validation_deps" +APPLE_VALIDATION_DEPS_ATTR_TYPE = attrs.set(attrs.dep(), sorted = True, default = []) + def apple_dsymutil_attrs(): return { "_dsymutil_extra_flags": attrs.list(attrs.string()), @@ -75,6 +78,7 @@ def _apple_bundle_like_common_attrs(): APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME: attrs.bool(default = False), BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, + APPLE_VALIDATION_DEPS_ATTR_NAME: 
APPLE_VALIDATION_DEPS_ATTR_TYPE, } attribs.update(get_apple_info_plist_build_system_identification_attrs()) attribs.update(apple_dsymutil_attrs()) diff --git a/prelude/apple/apple_validation_deps.bzl b/prelude/apple/apple_validation_deps.bzl new file mode 100644 index 000000000..fa948a4f6 --- /dev/null +++ b/prelude/apple/apple_validation_deps.bzl @@ -0,0 +1,17 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +_VALIDATION_DEPS_ATTR_NAME = "validation_deps" + +def get_apple_validation_deps_outputs(ctx: AnalysisContext) -> list[Artifact]: + artifacts = [] + if hasattr(ctx.attrs, _VALIDATION_DEPS_ATTR_NAME): + validation_deps = getattr(ctx.attrs, _VALIDATION_DEPS_ATTR_NAME) + for dep in validation_deps: + default_info = dep[DefaultInfo] + artifacts += default_info.default_outputs + return artifacts From 58db57395472d50da5083aa49ed0519eaffaaaff Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Thu, 11 Jan 2024 07:29:59 -0800 Subject: [PATCH 0111/1133] add apple_xcuitest rule Summary: Add basic support for XCUITest runner application bundles. These need to package a test bundle in the PlugIns folder, along with their required frameworks in Frameworks. For now we are duplicating the frameworks across the test runner and the test application, removing them would require some refactoring of `assemble_bundle` to take in an exclude list or similar. 
Reviewed By: blackm00n Differential Revision: D52547900 fbshipit-source-id: 523986ff39c556e4c07ae991882bef7a18be56f6 --- prelude/apple/apple_rules_impl.bzl | 4 ++ prelude/apple/apple_rules_impl_utility.bzl | 20 +++++++ prelude/apple/apple_xcuitest.bzl | 70 ++++++++++++++++++++++ 3 files changed, 94 insertions(+) create mode 100644 prelude/apple/apple_xcuitest.bzl diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index fc6a5345f..e593c13b9 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -39,6 +39,7 @@ load( "apple_bundle_extra_attrs", "apple_dsymutil_attrs", "apple_test_extra_attrs", + "apple_xcuitest_extra_attrs", "get_apple_bundle_toolchain_attr", "get_apple_toolchain_attr", "get_apple_xctoolchain_attr", @@ -48,6 +49,7 @@ load(":apple_test.bzl", "apple_test_impl") load(":apple_toolchain.bzl", "apple_toolchain_impl") load(":apple_toolchain_types.bzl", "AppleToolsInfo") load(":apple_universal_executable.bzl", "apple_universal_executable_impl") +load(":apple_xcuitest.bzl", "apple_xcuitest_impl") load(":prebuilt_apple_framework.bzl", "prebuilt_apple_framework_impl") load(":scene_kit_assets.bzl", "scene_kit_assets_impl") load(":xcode_postbuild_script.bzl", "xcode_postbuild_script_impl") @@ -63,6 +65,7 @@ implemented_rules = { "apple_test": apple_test_impl, "apple_toolchain": apple_toolchain_impl, "apple_universal_executable": apple_universal_executable_impl, + "apple_xcuitest": apple_xcuitest_impl, "core_data_model": apple_core_data_impl, "prebuilt_apple_framework": prebuilt_apple_framework_impl, "scene_kit_assets": scene_kit_assets_impl, @@ -219,6 +222,7 @@ extra_attributes = { "_internal_sdk_path": attrs.option(attrs.string(), default = None), }, "apple_universal_executable": _apple_universal_executable_extra_attrs(), + "apple_xcuitest": apple_xcuitest_extra_attrs(), "core_data_model": { "path": attrs.source(allow_directory = True), }, diff --git 
a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index e3f48f29c..6a50588fb 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -120,6 +120,26 @@ def apple_test_extra_attrs(): }) return attribs +def apple_xcuitest_extra_attrs(): + attribs = { + # This is ignored, but required for info plist processing. + "binary": attrs.option(attrs.source(), default = None), + "codesign_identity": attrs.option(attrs.string(), default = None), + "entitlements_file": attrs.option(attrs.source(), default = None), + "extension": attrs.default_only(attrs.string(default = "app")), + "incremental_bundling_enabled": attrs.bool(default = False), + "info_plist": attrs.source(), + "info_plist_substitutions": attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False, default = {}), + "target_sdk_version": attrs.option(attrs.string(), default = None), + # The test bundle to package in the UI test runner app. + "test_bundle": attrs.dep(), + "_apple_toolchain": get_apple_toolchain_attr(), + } + attribs.update(_apple_bundle_like_common_attrs()) + attribs.pop("_dsymutil_extra_flags", None) + + return attribs + def apple_bundle_extra_attrs(): attribs = { "binary": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), diff --git a/prelude/apple/apple_xcuitest.bzl b/prelude/apple/apple_xcuitest.bzl new file mode 100644 index 000000000..14941bfd5 --- /dev/null +++ b/prelude/apple/apple_xcuitest.bzl @@ -0,0 +1,70 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//:paths.bzl", "paths") +load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") +load(":apple_bundle_destination.bzl", "AppleBundleDestination") +load(":apple_bundle_part.bzl", "AppleBundlePart", "assemble_bundle") +load(":apple_info_plist.bzl", "process_info_plist") + +def apple_xcuitest_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: + # The XCUITest runner app bundle copies the application from the platform + # directory, and includes the UI test bundle in the PlugIns folder. + output_bundle = ctx.actions.declare_output(ctx.attrs.name + "." + ctx.attrs.extension) + bundle_parts = [ + _get_xctrunner_binary(ctx), + _get_uitest_bundle(ctx), + ] + _get_xctrunner_frameworks(ctx) + assemble_bundle( + ctx = ctx, + bundle = output_bundle, + info_plist_part = process_info_plist(ctx, override_input = None), + parts = bundle_parts, + swift_stdlib_args = None, + ) + + return [DefaultInfo(default_output = output_bundle)] + +def _get_uitest_bundle(ctx: AnalysisContext) -> AppleBundlePart: + return AppleBundlePart( + source = ctx.attrs.test_bundle[DefaultInfo].default_outputs[0], + destination = AppleBundleDestination("plugins"), + ) + +def _get_xctrunner_binary(ctx: AnalysisContext) -> AppleBundlePart: + platform_path = ctx.attrs._apple_toolchain[AppleToolchainInfo].platform_path + copied_binary = ctx.actions.declare_output(ctx.attrs.name) + xctrunner_path = cmd_args(platform_path, "Developer/Library/Xcode/Agents/XCTRunner.app/XCTRunner", delimiter = "/") + ctx.actions.run(["cp", "-PR", xctrunner_path, copied_binary.as_output()], category = "copy_xctrunner") + return AppleBundlePart( + source = copied_binary, + destination = AppleBundleDestination("executables"), + ) + +def _get_xctrunner_frameworks(ctx: AnalysisContext) -> list[AppleBundlePart]: + # We need to copy the framework as AppleBundlePart requires an artifact. + # It would be nicer to make this an arglike and avoid the copies. 
+ # It would also be nicer to exclude the headers. + def copy_platform_framework(platform_relative_path: str) -> AppleBundlePart: + copied_framework = ctx.actions.declare_output(paths.basename(platform_relative_path)) + path = cmd_args(ctx.attrs._apple_toolchain[AppleToolchainInfo].platform_path, platform_relative_path, delimiter = "/") + ctx.actions.run(["cp", "-PR", path, copied_framework.as_output()], category = "copy_framework", identifier = platform_relative_path) + return AppleBundlePart( + source = copied_framework, + destination = AppleBundleDestination("frameworks"), + codesign_on_copy = True, + ) + + runner_frameworks = [ + "Developer/Library/Frameworks/XCTest.framework", + "Developer/Library/PrivateFrameworks/XCTAutomationSupport.framework", + "Developer/Library/PrivateFrameworks/XCTestCore.framework", + "Developer/Library/PrivateFrameworks/XCTestSupport.framework", + "Developer/Library/PrivateFrameworks/XCUIAutomation.framework", + "Developer/Library/PrivateFrameworks/XCUnit.framework", + ] + return [copy_platform_framework(p) for p in runner_frameworks] From ae4bfb7d14a4424b2c81b16d451be0086c43ae10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20L=C3=B6scher?= Date: Thu, 11 Jan 2024 09:29:09 -0800 Subject: [PATCH 0112/1133] add attribute to add extra flags to erl invocation for tests Summary: We want to be able to specify erl flags per test suite. This adds an extra attribute that gets forwarded to the test node start call. 
Reviewed By: jcpetruzza Differential Revision: D52663292 fbshipit-source-id: 88224f328881d487e12e9336ae32b5132bed5a12 --- prelude/decls/erlang_rules.bzl | 4 ++ .../common/include/buck_ct_records.hrl | 2 + .../test_binary/src/test_binary.erl | 2 + .../test_binary/src/test_runner.erl | 1 + .../test_exec/src/ct_daemon_node.erl | 1 + .../common_test/test_exec/src/ct_runner.erl | 39 ++++++++++++++++--- prelude/erlang/erlang_tests.bzl | 1 + 7 files changed, 44 insertions(+), 6 deletions(-) diff --git a/prelude/decls/erlang_rules.bzl b/prelude/decls/erlang_rules.bzl index 11171cd09..86154b034 100644 --- a/prelude/decls/erlang_rules.bzl +++ b/prelude/decls/erlang_rules.bzl @@ -242,6 +242,10 @@ rules_attributes = { "extra_ct_hooks": attrs.list(attrs.string(), default = [], doc = """ List of additional Common Test hooks. The strings are interpreted as Erlang terms. """), + "extra_erl_flags": attrs.list(attrs.string(), default = [], doc = """ + List of additional command line arguments given to the erl command invocation. These + arguments are added to the front of the argument list. + """), "preamble": attrs.string(default = read_root_config("erlang", "erlang_test_preamble", "test:info(),test:ensure_initialized(),test:start_shell()."), doc = """ """), "property_tests": attrs.list(attrs.dep(), default = [], doc = """ diff --git a/prelude/erlang/common_test/common/include/buck_ct_records.hrl b/prelude/erlang/common_test/common/include/buck_ct_records.hrl index 071d64354..1f1ebdd3e 100644 --- a/prelude/erlang/common_test/common/include/buck_ct_records.hrl +++ b/prelude/erlang/common_test/common/include/buck_ct_records.hrl @@ -14,6 +14,7 @@ providers :: [{atom(), [term()]}], ct_opts :: [term()], erl_cmd :: string(), + extra_flags :: [string()], common_app_env :: #{string() => string()}, artifact_annotation_mfa :: artifact_annotations:annotation_function() }). 
@@ -31,6 +32,7 @@ ct_opts :: [term()], common_app_env :: #{string() => string()}, erl_cmd :: string(), + extra_flags :: [string()], artifact_annotation_mfa :: artifact_annotations:annotation_function() }). diff --git a/prelude/erlang/common_test/test_binary/src/test_binary.erl b/prelude/erlang/common_test/test_binary/src/test_binary.erl index ea35b5133..08d2cf114 100644 --- a/prelude/erlang/common_test/test_binary/src/test_binary.erl +++ b/prelude/erlang/common_test/test_binary/src/test_binary.erl @@ -105,6 +105,7 @@ load_test_info(TestInfoFile) -> "ct_opts" := CtOpts, "extra_ct_hooks" := ExtraCtHooks, "erl_cmd" := ErlCmd, + "extra_flags" := ExtraFlags, "artifact_annotation_mfa" := ArtifactAnnotationMFA, "common_app_env" := CommonAppEnv } @@ -122,6 +123,7 @@ load_test_info(TestInfoFile) -> artifact_annotation_mfa = parse_mfa(ArtifactAnnotationMFA), ct_opts = CtOpts1, erl_cmd = ErlCmd, + extra_flags = ExtraFlags, common_app_env = CommonAppEnv }. diff --git a/prelude/erlang/common_test/test_binary/src/test_runner.erl b/prelude/erlang/common_test/test_binary/src/test_runner.erl index a85c13f70..a022ec418 100644 --- a/prelude/erlang/common_test/test_binary/src/test_runner.erl +++ b/prelude/erlang/common_test/test_binary/src/test_runner.erl @@ -41,6 +41,7 @@ run_tests(Tests, #test_info{} = TestInfo, OutputDir, Listing) -> ct_opts = TestInfo#test_info.ct_opts, common_app_env = TestInfo#test_info.common_app_env, erl_cmd = TestInfo#test_info.erl_cmd, + extra_flags = TestInfo#test_info.extra_flags, artifact_annotation_mfa = TestInfo#test_info.artifact_annotation_mfa }) end. 
diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl index d260bc249..bbe7810c5 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl @@ -73,6 +73,7 @@ start( % see T129435667 Port = ct_runner:start_test_node( os:find_executable("erl"), + [], CodePaths, ConfigFiles, OutputDir, diff --git a/prelude/erlang/common_test/test_exec/src/ct_runner.erl b/prelude/erlang/common_test/test_exec/src/ct_runner.erl index 1b5dacf40..5240f28b1 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_runner.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_runner.erl @@ -28,8 +28,8 @@ ]). -export([ - start_test_node/5, start_test_node/6, + start_test_node/7, cookie/0, generate_arg_tuple/2, project_root/0 @@ -144,6 +144,7 @@ run_test( providers = Providers, suite = Suite, erl_cmd = ErlCmd, + extra_flags = ExtraFlags, common_app_env = CommonAppEnv } = _TestEnv, PortEpmd @@ -159,6 +160,7 @@ run_test( start_test_node( ErlCmd, + ExtraFlags, CodePath, ConfigFiles, OutputDir, @@ -209,25 +211,50 @@ common_app_env_args(Env) -> -spec start_test_node( Erl :: string(), + ExtraFlags :: [string()], CodePath :: [file:filename_all()], ConfigFiles :: [file:filename_all()], OutputDir :: file:filename_all(), PortSettings :: port_settings() ) -> port(). -start_test_node(ErlCmd, CodePath, ConfigFiles, OutputDir, PortSettings0) -> - start_test_node(ErlCmd, CodePath, ConfigFiles, OutputDir, PortSettings0, false). +start_test_node( + ErlCmd, + ExtraFlags, + CodePath, + ConfigFiles, + OutputDir, + PortSettings0 +) -> + start_test_node( + ErlCmd, + ExtraFlags, + CodePath, + ConfigFiles, + OutputDir, + PortSettings0, + false + ). 
-spec start_test_node( Erl :: string(), + ExtraFlags :: [string()], CodePath :: [file:filename_all()], ConfigFiles :: [file:filename_all()], OutputDir :: file:filename_all(), PortSettings :: port_settings(), ReplayIo :: boolean() ) -> port(). -start_test_node(ErlCmd, CodePath, ConfigFiles, OutputDir, PortSettings0, ReplayIo) -> +start_test_node( + ErlCmd, + ExtraFlags, + CodePath, + ConfigFiles, + OutputDir, + PortSettings0, + ReplayIo +) -> % split of args from Erl which can contain emulator flags - [_Executable | ExtraFlags] = string:split(ErlCmd, " ", all), + [_Executable | Flags] = string:split(ErlCmd, " ", all), % we ignore the executable we got, and use the erl command from the % toolchain that executes this code ErlExecutable = os:find_executable("erl"), @@ -237,7 +264,7 @@ start_test_node(ErlCmd, CodePath, ConfigFiles, OutputDir, PortSettings0, ReplayI %% merge args, enc, cd settings LaunchArgs = - ExtraFlags ++ + Flags ++ ExtraFlags ++ build_common_args(CodePath, ConfigFiles) ++ proplists:get_value(args, PortSettings0, []), diff --git a/prelude/erlang/erlang_tests.bzl b/prelude/erlang/erlang_tests.bzl index 7a7d8a364..2fda91993 100644 --- a/prelude/erlang/erlang_tests.bzl +++ b/prelude/erlang/erlang_tests.bzl @@ -272,6 +272,7 @@ def _write_test_info_file( "dependencies": _list_code_paths(dependencies), "erl_cmd": cmd_args(['"', cmd_args(erl_cmd, delimiter = " "), '"'], delimiter = ""), "extra_ct_hooks": ctx.attrs.extra_ct_hooks, + "extra_flags": ctx.attrs.extra_erl_flags, "providers": ctx.attrs._providers, "test_dir": test_dir, "test_suite": test_suite, From c9fdb8e94a4096b9d9d3d9a5a6dc3e2cc22484eb Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Thu, 11 Jan 2024 10:30:35 -0800 Subject: [PATCH 0113/1133] rules: Handle stripped outputs a bit better Summary: This is a little cleaner since now its available for all output styles and has a filename in the expected format, but also it's kind of important because `strip_debug_info` is side-effecting (it defines 
actions) and so shouldn't be called just anywhere Reviewed By: dtolnay Differential Revision: D52499872 fbshipit-source-id: 312d3da63827d0182f49b09ac579d2b7d65d4f18 --- prelude/rust/build.bzl | 13 +++++++++++++ prelude/rust/rust_library.bzl | 13 +++++-------- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index bc20f74dd..0b24a6d49 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -39,6 +39,7 @@ load( "merge_shared_libraries", "traverse_shared_library_info", ) +load("@prelude//linking:strip.bzl", "strip_debug_info") load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script") load("@prelude//utils:set.bzl", "set") @@ -87,6 +88,7 @@ load(":rust_toolchain.bzl", "PanicRuntime", "RustToolchainInfo") RustcOutput = record( output = field(Artifact), + stripped_output = field(Artifact), diag = field(dict[str, Artifact]), pdb = field([Artifact, None]), dwp_output = field([Artifact, None]), @@ -617,8 +619,19 @@ def rust_compile( else: dwp_output = None + stripped_output = strip_debug_info( + ctx, + paths.join(common_args.subdir, "stripped", output_filename( + attr_simple_crate_for_filenames(ctx), + Emit("link"), + params, + )), + filtered_output, + ) + return RustcOutput( output = filtered_output, + stripped_output = stripped_output, diag = diag, pdb = pdb_artifact, dwp_output = dwp_output, diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index f39a39629..ff894bb56 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -10,7 +10,6 @@ load( "ArtifactTSet", "make_artifact_tset", ) -load("@prelude//:paths.bzl", "paths") load("@prelude//:resources.bzl", "ResourceInfo", "gather_resources") load( "@prelude//android:android_providers.bzl", @@ -666,6 +665,10 @@ def _native_providers( linkables = [SharedLibLinkable(lib = lib.output)], external_debug_info = external_debug_info, ), + stripped = 
LinkInfo( + linkables = [SharedLibLinkable(lib = lib.stripped_output)], + external_debug_info = external_debug_info, + ), ) else: link_infos[output_style] = LinkInfos( @@ -678,13 +681,7 @@ def _native_providers( ), stripped = LinkInfo( linkables = [ArchiveLinkable( - archive = Archive( - artifact = strip_debug_info( - ctx, - paths.join(output_style.value, lib.output.short_path), - lib.output, - ), - ), + archive = Archive(artifact = lib.stripped_output), linker_type = linker_type, )], ), From 5a95b49f6c7c5b3693b1fc6ef21f788c502a0167 Mon Sep 17 00:00:00 2001 From: Jacob Rodal Date: Thu, 11 Jan 2024 11:48:18 -0800 Subject: [PATCH 0114/1133] combine_native_libraries_dirs always setup relative symlinks Summary: If `combine_native_libs` is run locally on windows, symlinks with absolute paths are created. If `create_aar` is then run remotely, we end up with dangling symlinks, causing flaky build failures. This diff makes it such that `combine_native_libs` always creates symlinks with relative paths, which seems to fix the issue. 
Reviewed By: IanChilds Differential Revision: D52682805 fbshipit-source-id: 7f6f3fe52b8fb53d9dc30ce5f0e1208c414b4310 --- prelude/android/tools/combine_native_library_dirs.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/prelude/android/tools/combine_native_library_dirs.py b/prelude/android/tools/combine_native_library_dirs.py index a7aa7e589..01fb3df82 100644 --- a/prelude/android/tools/combine_native_library_dirs.py +++ b/prelude/android/tools/combine_native_library_dirs.py @@ -52,7 +52,11 @@ def main() -> None: ) output_path.parent.mkdir(exist_ok=True) - output_path.symlink_to(os.readlink(lib)) + relative_path_to_lib = os.path.relpath( + os.path.realpath(lib), + start=os.path.realpath(os.path.dirname(output_path)), + ) + output_path.symlink_to(relative_path_to_lib) if args.metadata_file: with open(lib, "rb") as f: From 6dba322c036236f0bddc133af10ab9794db64c88 Mon Sep 17 00:00:00 2001 From: Wilfred Hughes Date: Thu, 11 Jan 2024 12:57:25 -0800 Subject: [PATCH 0115/1133] rust-project: Be resilient against broken thrift targets Summary: `resolve_deps.bxl` was assuming that the build artifacts was always non-empty. This isn't true in a target where dependencies are failing to build. Carry on in this case, so we can still provide IDE services. 
Reviewed By: davidbarsky Differential Revision: D52704689 fbshipit-source-id: 621e9e7570e35fcd1a800bf5a0e9734cbc3507c6 --- prelude/rust/rust-analyzer/resolve_deps.bxl | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/prelude/rust/rust-analyzer/resolve_deps.bxl b/prelude/rust/rust-analyzer/resolve_deps.bxl index 5ee8e8f24..ef07a15e0 100644 --- a/prelude/rust/rust-analyzer/resolve_deps.bxl +++ b/prelude/rust/rust-analyzer/resolve_deps.bxl @@ -139,14 +139,16 @@ def materialize_generated_thrift(ctx, target, actions, seen): else: label = label.raw_target() - copied = actions.copy_file(outfile, artifacts.artifacts()[0]) - copied = ctx.output.ensure(copied) - artifact = { - "artifact": copied.abs_path(), - "label": label, - "mapped_src": mapped_src, - } - out.append(artifact) + if len(artifacts.artifacts()) > 0: + copied = actions.copy_file(outfile, artifacts.artifacts()[0]) + copied = ctx.output.ensure(copied) + artifact = { + "artifact": copied.abs_path(), + "label": label, + "mapped_src": mapped_src, + } + out.append(artifact) + seen[outfile] = () return out From 0d7d28c9e05f9bcafbabb9bc050ea6536d4544d5 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Thu, 11 Jan 2024 13:30:53 -0800 Subject: [PATCH 0116/1133] rules: Shuffle around `LinkInfos` code Summary: I need this in `_rust_providers` in the next diff, so need to pull some of this out into a function Reviewed By: dtolnay Differential Revision: D52499865 fbshipit-source-id: 5a0744acf99217a4cc9561b30bb9506d79e76c20 --- prelude/rust/rust_library.bzl | 145 +++++++++++++++++++--------------- 1 file changed, 83 insertions(+), 62 deletions(-) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index ff894bb56..9393333f0 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -131,6 +131,34 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: panic_runtime = rust_toolchain.panic_runtime, ) + linker_info = 
get_cxx_toolchain_info(ctx).linker_info + + archive_info = LinkInfos( + default = LinkInfo( + linkables = [ + ArchiveLinkable( + archive = Archive(artifact = ctx.attrs.rlib), + linker_type = linker_info.type, + ), + ], + ), + stripped = LinkInfo( + linkables = [ + ArchiveLinkable( + archive = Archive( + artifact = strip_debug_info( + ctx = ctx, + out = ctx.attrs.rlib.short_path, + obj = ctx.attrs.rlib, + ), + ), + linker_type = linker_info.type, + ), + ], + ), + ) + link_infos = {LibOutputStyle("archive"): archive_info, LibOutputStyle("pic_archive"): archive_info} + # Rust link provider. crate = attr_crate(ctx) strategies = {} @@ -162,38 +190,12 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: ), ) - linker_info = get_cxx_toolchain_info(ctx).linker_info - # Native link provier. - link = LinkInfos( - default = LinkInfo( - linkables = [ - ArchiveLinkable( - archive = Archive(artifact = ctx.attrs.rlib), - linker_type = linker_info.type, - ), - ], - ), - stripped = LinkInfo( - linkables = [ - ArchiveLinkable( - archive = Archive( - artifact = strip_debug_info( - ctx = ctx, - out = ctx.attrs.rlib.short_path, - obj = ctx.attrs.rlib, - ), - ), - linker_type = linker_info.type, - ), - ], - ), - ) providers.append( create_merged_link_info( ctx, PicBehavior("supported"), - {output_style: link for output_style in LibOutputStyle}, + link_infos, exported_deps = [d[MergedLinkInfo] for d in ctx.attrs.deps], # TODO(agallagher): This matches v1 behavior, but some of these libs # have prebuilt DSOs which might be usable. 
@@ -210,7 +212,7 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx = ctx, preferred_linkage = Linkage("static"), exported_deps = ctx.attrs.deps, - link_infos = {output_style: link for output_style in LibOutputStyle}, + link_infos = link_infos, default_soname = get_default_shared_library_name(linker_info, ctx.label), ), ), @@ -340,6 +342,13 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: providers = [] + link_infos = _link_infos( + ctx = ctx, + compile_ctx = compile_ctx, + lang_style_param = lang_style_param, + param_artifact = native_param_artifact, + ) + providers += _default_providers( lang_style_param = lang_style_param, param_artifact = rust_param_artifact, @@ -361,6 +370,7 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: compile_ctx = compile_ctx, lang_style_param = lang_style_param, param_artifact = native_param_artifact, + link_infos = link_infos, ) deps = [dep.dep for dep in resolve_deps(ctx, compile_ctx.dep_ctx)] @@ -616,49 +626,28 @@ def _rust_providers( return providers -def _native_providers( +def _link_infos( ctx: AnalysisContext, compile_ctx: CompileContext, lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], - param_artifact: dict[BuildParams, RustcOutput]) -> list[Provider]: - """ - Return the set of providers needed to link Rust as a dependency for native - (ie C/C++) code, along with relevant dependencies. - """ + param_artifact: dict[BuildParams, RustcOutput]) -> dict[LibOutputStyle, LinkInfos]: + if ctx.attrs.proc_macro: + # Don't need any of this for proc macros + return {} - # If advanced_unstable_linking is set on the the rust toolchain, then build this artifact - # using the "native-unbundled" linkage language. 
See LinkageLang docs for more details advanced_unstable_linking = compile_ctx.toolchain_info.advanced_unstable_linking lang = LinkageLang("native-unbundled") if advanced_unstable_linking else LinkageLang("native") + linker_type = compile_ctx.cxx_toolchain_info.linker_info.type - inherited_link_infos = inherited_merged_link_infos(ctx, compile_ctx.dep_ctx) - inherited_shlibs = inherited_shared_libs(ctx, compile_ctx.dep_ctx) - inherited_link_graphs = inherited_linkable_graphs(ctx, compile_ctx.dep_ctx) - linker_info = compile_ctx.cxx_toolchain_info.linker_info - linker_type = linker_info.type - - providers = [] - - if ctx.attrs.proc_macro: - # Proc-macros never have a native form - return providers - - libraries = {} link_infos = {} - external_debug_infos = {} for output_style in LibOutputStyle: - params = lang_style_param[(lang, output_style)] - lib = param_artifact[params] - libraries[output_style] = lib - + lib = param_artifact[lang_style_param[(lang, output_style)]] external_debug_info = make_artifact_tset( actions = ctx.actions, label = ctx.label, artifacts = filter(None, [lib.dwo_output_directory]), children = lib.extra_external_debug_info, ) - external_debug_infos[output_style] = external_debug_info - if output_style == LibOutputStyle("shared_lib"): link_infos[output_style] = LinkInfos( default = LinkInfo( @@ -686,6 +675,39 @@ def _native_providers( )], ), ) + return link_infos + +def _native_providers( + ctx: AnalysisContext, + compile_ctx: CompileContext, + lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], + param_artifact: dict[BuildParams, RustcOutput], + link_infos: dict[LibOutputStyle, LinkInfos]) -> list[Provider]: + """ + Return the set of providers needed to link Rust as a dependency for native + (ie C/C++) code, along with relevant dependencies. + """ + + # If advanced_unstable_linking is set on the the rust toolchain, then build this artifact + # using the "native-unbundled" linkage language. 
See LinkageLang docs for more details + advanced_unstable_linking = compile_ctx.toolchain_info.advanced_unstable_linking + lang = LinkageLang("native-unbundled") if advanced_unstable_linking else LinkageLang("native") + + inherited_link_infos = inherited_merged_link_infos(ctx, compile_ctx.dep_ctx) + inherited_shlibs = inherited_shared_libs(ctx, compile_ctx.dep_ctx) + inherited_link_graphs = inherited_linkable_graphs(ctx, compile_ctx.dep_ctx) + + linker_info = compile_ctx.cxx_toolchain_info.linker_info + linker_type = linker_info.type + + providers = [] + + if ctx.attrs.proc_macro: + # Proc-macros never have a native form + return providers + + shared_lib_params = lang_style_param[(lang, LibOutputStyle("shared_lib"))] + shared_lib_output = param_artifact[shared_lib_params].output preferred_linkage = Linkage(ctx.attrs.preferred_linkage) @@ -701,7 +723,6 @@ def _native_providers( solibs = {} # Add the shared library to the list of shared libs. - linker_info = compile_ctx.cxx_toolchain_info.linker_info shlib_name = get_default_shared_library_name(linker_info, ctx.label) # Only add a shared library if we generated one. @@ -710,9 +731,9 @@ def _native_providers( # to remove the SharedLibraries provider, maybe just wait for that to resolve this. if get_lib_output_style(LinkStrategy("shared"), preferred_linkage, compile_ctx.cxx_toolchain_info.pic_behavior) == LibOutputStyle("shared_lib"): solibs[shlib_name] = LinkedObject( - output = libraries[LibOutputStyle("shared_lib")].output, - unstripped_output = libraries[LibOutputStyle("shared_lib")].output, - external_debug_info = external_debug_infos[LibOutputStyle("shared_lib")], + output = shared_lib_output, + unstripped_output = shared_lib_output, + external_debug_info = link_infos[LibOutputStyle("shared_lib")].default.external_debug_info, ) # Native shared library provider. 
@@ -729,12 +750,12 @@ def _native_providers( default = LinkInfo( linkables = [ArchiveLinkable( archive = Archive( - artifact = libraries[LibOutputStyle("shared_lib")].output, + artifact = shared_lib_output, ), linker_type = linker_type, link_whole = True, )], - external_debug_info = external_debug_infos[LibOutputStyle("pic_archive")], + external_debug_info = link_infos[LibOutputStyle("pic_archive")].default.external_debug_info, ), ), deps = inherited_link_graphs, From bbcee5450413e9f08d89131338cadc6933c9fb5d Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Thu, 11 Jan 2024 13:30:53 -0800 Subject: [PATCH 0117/1133] linkable graph: Support explicitly setting label Summary: Need this in the next diff Reviewed By: dtolnay Differential Revision: D52499883 fbshipit-source-id: 17af29ff994ce3c43fb3d4d3a2db3e6743fa6ed9 --- prelude/linking/linkable_graph.bzl | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index db5ef6882..2ed062ae3 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -171,9 +171,12 @@ def create_linkable_graph_node( ctx: AnalysisContext, linkable_node: [LinkableNode, None] = None, roots: dict[Label, LinkableRootInfo] = {}, - excluded: dict[Label, None] = {}) -> LinkableGraphNode: + excluded: dict[Label, None] = {}, + label: Label | None = None) -> LinkableGraphNode: + if not label: + label = ctx.label return LinkableGraphNode( - label = ctx.label, + label = label, linkable = linkable_node, roots = roots, excluded = excluded, @@ -208,8 +211,11 @@ def create_linkable_graph( } if node: kwargs["value"] = node + label = node.label + else: + label = ctx.label return LinkableGraph( - label = ctx.label, + label = label, nodes = ctx.actions.tset(LinkableGraphTSet, **kwargs), ) From c14e444ca2e0914b61f5b620dc4a9616c6dadfc8 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Thu, 11 Jan 2024 13:30:53 -0800 Subject: [PATCH 0118/1133] 
rules: Reimplement native unbundled deps to better support `FORCE_RLIB` Summary: The current implementation of native unbundled deps works by never using the Rust -> Rust link providers, instead using the native link providers even on Rust -> Rust edges. The result of this is that when a Rust binary depends on a library, building the binary ends up building the library both as a dylib and as an rlib - as an rlib because the Rust dependency handling requires that, and as a dylib because producing the link args for the Rust binary uses the regular, native version of the library's providers. This diff removes that behavior, instead choosing to fix up the Rust -> Rust link providers in such a way that they include a correct "force rlib"-like representation of the Rust library This is necessary because: 1. we need FORCE_RLIB to work, there's a reason it exist, 2. the alternative effectively disables pipelined builds, and so would be a major build speed loss, 3. the alternative results in static initializer duplication Reviewed By: dtolnay Differential Revision: D52499881 fbshipit-source-id: c3a889ac7abd67fa10b33c4ea140338111526958 --- prelude/rust/link_info.bzl | 36 ++++++---------- prelude/rust/rust_library.bzl | 77 ++++++++++++++++++++++++++++++----- 2 files changed, 79 insertions(+), 34 deletions(-) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 4b3492003..a7032d8b9 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -139,9 +139,6 @@ RustLinkInfo = provider( # `FORCE_RLIB` behavior, in which Rust -> Rust dependency edges are always statically # linked. The native link provider is identical, except that it does not respect the # `FORCE_RLIB` behavior. - # - # FIXME(JakobDegen): The `advanced_unstable_linking` case is currently aspirational and not - # how things are actually implemented. 
"merged_link_info": MergedLinkInfo, "shared_libs": SharedLibraryInfo, # Because of the weird representation of `LinkableGraph`, there is no @@ -353,14 +350,11 @@ def _native_link_dependencies( """ first_order_deps = [dep.dep for dep in resolve_deps(ctx, dep_ctx)] - if dep_ctx.advanced_unstable_linking: - return [d for d in first_order_deps if MergedLinkInfo in d] - else: - return [ - d - for d in first_order_deps - if RustLinkInfo not in d and MergedLinkInfo in d - ] + return [ + d + for d in first_order_deps + if RustLinkInfo not in d and MergedLinkInfo in d + ] # Returns the rust link infos for non-proc macro deps. # @@ -374,10 +368,9 @@ def inherited_exported_link_deps(ctx: AnalysisContext, dep_ctx: DepCollectionCon deps = {} for dep in _native_link_dependencies(ctx, dep_ctx): deps[dep.label] = dep - if not dep_ctx.advanced_unstable_linking: - for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): - for dep in info.exported_link_deps: - deps[dep.label] = dep + for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): + for dep in info.exported_link_deps: + deps[dep.label] = dep return deps.values() def inherited_rust_cxx_link_group_info( @@ -467,8 +460,7 @@ def inherited_merged_link_infos( dep_ctx: DepCollectionContext) -> list[MergedLinkInfo]: infos = [] infos.extend([d[MergedLinkInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) - if not dep_ctx.advanced_unstable_linking: - infos.extend([d.merged_link_info for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx) if d.merged_link_info]) + infos.extend([d.merged_link_info for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx) if d.merged_link_info]) return infos def inherited_shared_libs( @@ -476,8 +468,7 @@ def inherited_shared_libs( dep_ctx: DepCollectionContext) -> list[SharedLibraryInfo]: infos = [] infos.extend([d[SharedLibraryInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) - if not dep_ctx.advanced_unstable_linking: - infos.extend([d.shared_libs for d in 
_rust_non_proc_macro_link_infos(ctx, dep_ctx)]) + infos.extend([d.shared_libs for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx)]) return infos def inherited_linkable_graphs(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[LinkableGraph]: @@ -486,10 +477,9 @@ def inherited_linkable_graphs(ctx: AnalysisContext, dep_ctx: DepCollectionContex g = d.get(LinkableGraph) if g: deps[g.label] = g - if not dep_ctx.advanced_unstable_linking: - for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): - for g in info.linkable_graphs: - deps[g.label] = g + for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): + for g in info.linkable_graphs: + deps[g.label] = g return deps.values() def inherited_link_group_lib_infos(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[LinkGroupLibInfo]: diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 9393333f0..1af9245d8 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -19,7 +19,7 @@ load( "@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo", "PicBehavior") load( "@prelude//cxx:linker.bzl", "PDB_SUB_TARGET", @@ -131,7 +131,8 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: panic_runtime = rust_toolchain.panic_runtime, ) - linker_info = get_cxx_toolchain_info(ctx).linker_info + cxx_toolchain = get_cxx_toolchain_info(ctx) + linker_info = cxx_toolchain.linker_info archive_info = LinkInfos( default = LinkInfo( @@ -178,7 +179,7 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: external_debug_info = external_debug_info, ) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, dep_ctx) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, dep_ctx, cxx_toolchain, link_infos) 
providers.append( RustLinkInfo( crate = crate, @@ -364,6 +365,7 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: compile_ctx = compile_ctx, lang_style_param = lang_style_param, param_artifact = rust_param_artifact, + link_infos = link_infos, ) providers += _native_providers( ctx = ctx, @@ -568,7 +570,14 @@ def _default_providers( def _rust_link_providers( ctx: AnalysisContext, - dep_ctx: DepCollectionContext) -> (MergedLinkInfo, SharedLibraryInfo, list[LinkableGraph], list[Dependency]): + dep_ctx: DepCollectionContext, + cxx_toolchain: CxxToolchainInfo, + link_infos: dict[LibOutputStyle, LinkInfos]) -> ( + MergedLinkInfo, + SharedLibraryInfo, + list[LinkableGraph], + list[Dependency], +): # These are never accessed in the case of proc macros, so just return some dummy # values if ctx.attrs.proc_macro: @@ -584,18 +593,64 @@ def _rust_link_providers( inherited_graphs = inherited_linkable_graphs(ctx, dep_ctx) inherited_link_deps = inherited_exported_link_deps(ctx, dep_ctx) - merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) - shared_libs = merge_shared_libraries( - ctx.actions, - deps = inherited_shlibs, - ) + if dep_ctx.advanced_unstable_linking: + # We have to produce a version of the providers that are defined in such + # a way that native rules looking at these providers will also pick up + # the `FORCE_RLIB` behavior. The general approach to that will be to + # claim that we have `preferred_linkage = "static"`. + # + # Note that all of this code is FORCE_RLIB specific. 
Disabling that + # setting requires replacing this with the "real" native providers + merged_link_info = create_merged_link_info( + ctx, + cxx_toolchain.pic_behavior, + link_infos, + exported_deps = inherited_link_infos, + preferred_linkage = Linkage("static"), + ) + shared_libs = merge_shared_libraries( + # We never actually have any shared libraries to add + ctx.actions, + deps = inherited_shlibs, + ) + + # The link graph representation is a little bit weird, since instead of + # just building up a graph via tsets, it uses a flat list of labeled + # nodes, each with a list of labels for dependency edges. The node that + # we create here cannot just use this target's label, since that would + # conflict with the node created for the native providers. As a result, + # we make up a fake subtarget to get a distinct label + new_label = ctx.label.configured_target().with_sub_target((ctx.label.sub_target or []) + ["fake_force_rlib_subtarget"]) + linkable_graph = create_linkable_graph( + ctx, + node = create_linkable_graph_node( + ctx, + linkable_node = create_linkable_node( + ctx = ctx, + preferred_linkage = Linkage("static"), + exported_deps = inherited_graphs, + link_infos = link_infos, + default_soname = "", + ), + label = new_label, + ), + deps = inherited_graphs, + ) + inherited_graphs = [linkable_graph] + else: + merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) + shared_libs = merge_shared_libraries( + ctx.actions, + deps = inherited_shlibs, + ) return (merged_link_info, shared_libs, inherited_graphs, inherited_link_deps) def _rust_providers( ctx: AnalysisContext, compile_ctx: CompileContext, lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], - param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)]) -> list[Provider]: + param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)], + link_infos: dict[LibOutputStyle, LinkInfos]) -> list[Provider]: """ Return the set of providers for Rust linkage. 
""" @@ -610,7 +665,7 @@ def _rust_providers( link, meta = param_artifact[params] strategy_info[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, params.crate_type, link_strategy, link, meta) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos) providers = [] From 536a26ddc7a455c204f0c7f4b6ffd474311616fa Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Thu, 11 Jan 2024 13:30:53 -0800 Subject: [PATCH 0119/1133] rules: Add a FIXME Summary: No idea who understands this code, cc christolliday or IanChilds maybe? Reviewed By: dtolnay Differential Revision: D52507600 fbshipit-source-id: afe1ad6472eed57057b186d6df257b55a43d5900 --- prelude/android/android_binary_native_library_rules.bzl | 3 +++ prelude/rust/rust_library.bzl | 2 ++ 2 files changed, 5 insertions(+) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 79f0857a1..4b606d506 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -681,6 +681,9 @@ def encode_linkable_graph_for_mergemap(graph_node_map_by_platform: dict[str, dic platform: { target: _LinkableSharedNode( raw_target = str(target.raw_target()), + # FIXME(JakobDegen): The definition of `LinkableNode` claims that it's ok for this + # to be `None` (I assume in the case of static preferred linkage), so either that is + # wrong or this is. 
See the diff that added this FIXME for how to reproduce soname = node.default_soname, labels = node.labels, deps = node.deps + node.exported_deps, diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 1af9245d8..f39b2c93b 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -630,6 +630,8 @@ def _rust_link_providers( preferred_linkage = Linkage("static"), exported_deps = inherited_graphs, link_infos = link_infos, + # FIXME(JakobDegen): It should be ok to set this to `None`, + # but that breaks arc focus default_soname = "", ), label = new_label, From a49f55ba22647e7d0b4152f6168587654c185460 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Thu, 11 Jan 2024 13:30:53 -0800 Subject: [PATCH 0120/1133] rules: Drop native providers from `prebuilt_rust_library` Summary: Without `advanced_unstable_linking`, it's not actually possible to correctly output native providers for prebuilt Rust libraries because we have no cdylib/staticlib build. With `advanced_unstable_linking`, it would be possible - however, the last diff removed what is afaik the only way in which that was ever used, and keeping this around would require extra work from me on the next few diffs. 
If someone wants to bring it back (or if it turns out that this is needed), we should refactor to share the code with the `rust_library` native provider creation Reviewed By: capickett, dtolnay Differential Revision: D52499875 fbshipit-source-id: 4648bbf0b510012aacf40faf62ccbb4beb975f42 --- prelude/rust/rust_library.bzl | 52 ++--------------------------------- 1 file changed, 3 insertions(+), 49 deletions(-) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index f39b2c93b..6cc06110b 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -19,7 +19,7 @@ load( "@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo", "PicBehavior") +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") load( "@prelude//cxx:linker.bzl", "PDB_SUB_TARGET", @@ -44,7 +44,7 @@ load( "LinkStrategy", "Linkage", "LinkedObject", - "MergedLinkInfo", + "MergedLinkInfo", # @unused Used as a type "SharedLibLinkable", "create_merged_link_info", "create_merged_link_info_for_propagation", @@ -61,7 +61,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibraryInfo", + "SharedLibraryInfo", # @unused Used as a type "create_shared_libraries", "merge_shared_libraries", ) @@ -191,52 +191,6 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: ), ) - # Native link provier. - providers.append( - create_merged_link_info( - ctx, - PicBehavior("supported"), - link_infos, - exported_deps = [d[MergedLinkInfo] for d in ctx.attrs.deps], - # TODO(agallagher): This matches v1 behavior, but some of these libs - # have prebuilt DSOs which might be usable. - preferred_linkage = Linkage("static"), - ), - ) - - # Native link graph setup. 
- linkable_graph = create_linkable_graph( - ctx, - node = create_linkable_graph_node( - ctx, - linkable_node = create_linkable_node( - ctx = ctx, - preferred_linkage = Linkage("static"), - exported_deps = ctx.attrs.deps, - link_infos = link_infos, - default_soname = get_default_shared_library_name(linker_info, ctx.label), - ), - ), - deps = ctx.attrs.deps, - ) - providers.append(linkable_graph) - - providers.append(merge_link_group_lib_info(children = inherited_link_group_lib_infos(ctx, dep_ctx))) - - # FIXME(JakobDegen): I am about 85% confident that this matches what C++ - # does for prebuilt libraries if they don't have a shared variant and have - # preferred linkage static. C++ doesn't require static preferred linkage on - # their prebuilt libraries, and so they incur extra complexity here that we - # don't have to deal with. - # - # However, Rust linking is not the same as C++ linking. If Rust were - # disciplined about its use of `LibOutputStyle`, `Linkage` and - # `LinkStrategy`, then this would at least be no more wrong than what C++ - # does. In the meantime however... - providers.append(SharedLibraryInfo(set = None)) - - providers.append(merge_android_packageable_info(ctx.label, ctx.actions, ctx.attrs.deps)) - return providers def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: From 7a753869fca795ba9ce2b177847fda2560f8f9be Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Thu, 11 Jan 2024 13:30:53 -0800 Subject: [PATCH 0121/1133] rules: Better handling of exported deps Summary: The Rust rules currently treat all dependencies as being exported. Without native unbundled deps this didn't matter. With native unbundled deps it does though; consider that we have a dependency chain like C++ -> Rust(a) -> Rust(b). Because the Rust -> Rust dependency now shows up in the native link graph as a `Linkage("static")` node and it's exported from Rust(a), Rust(b) ends up getting linked into the C++ library in `LinkStrategy("shared")`. 
This is a size/build speed regression. This diff updates the rules to use the previously unused `exported_link_deps` field to track which deps actually need to be exported. Reviewed By: dtolnay Differential Revision: D52499874 fbshipit-source-id: d2e8947d39bdf696b916a84f8ce26a5dc248bf81 --- prelude/rust/link_info.bzl | 11 +++++++++-- prelude/rust/rust_library.bzl | 21 ++++++++++++++------- 2 files changed, 23 insertions(+), 9 deletions(-) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index a7032d8b9..d2780cefc 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -148,8 +148,15 @@ RustLinkInfo = provider( "linkable_graphs": list[LinkableGraph], # LinkGroupLibInfo intentionally omitted because the Rust -> Rust version # never needs to be different from the Rust -> native version - # The native dependencies reachable from this Rust library through other - # Rust libraries + # + # Rust currently treats all native dependencies as being exported, in + # the sense of C++ `exported_deps`. However, they are not only exported + # from the Rust library that directly depends on them, they are also + # exported through any further chains of Rust libraries. This list + # tracks those dependencies + # + # FIXME(JakobDegen): We should not default to treating all native deps + # as exported. 
"exported_link_deps": list[Dependency], }, ) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 6cc06110b..e8f18467b 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -545,7 +545,7 @@ def _rust_link_providers( inherited_link_infos = inherited_merged_link_infos(ctx, dep_ctx) inherited_shlibs = inherited_shared_libs(ctx, dep_ctx) inherited_graphs = inherited_linkable_graphs(ctx, dep_ctx) - inherited_link_deps = inherited_exported_link_deps(ctx, dep_ctx) + inherited_exported_deps = inherited_exported_link_deps(ctx, dep_ctx) if dep_ctx.advanced_unstable_linking: # We have to produce a version of the providers that are defined in such @@ -555,11 +555,15 @@ def _rust_link_providers( # # Note that all of this code is FORCE_RLIB specific. Disabling that # setting requires replacing this with the "real" native providers + # + # As an optimization, we never bother reporting exported deps here. + # Whichever dependent uses the providers created here will take care of + # that for us. 
merged_link_info = create_merged_link_info( ctx, cxx_toolchain.pic_behavior, link_infos, - exported_deps = inherited_link_infos, + deps = inherited_link_infos, preferred_linkage = Linkage("static"), ) shared_libs = merge_shared_libraries( @@ -582,7 +586,7 @@ def _rust_link_providers( linkable_node = create_linkable_node( ctx = ctx, preferred_linkage = Linkage("static"), - exported_deps = inherited_graphs, + deps = inherited_graphs, link_infos = link_infos, # FIXME(JakobDegen): It should be ok to set this to `None`, # but that breaks arc focus @@ -599,7 +603,7 @@ def _rust_link_providers( ctx.actions, deps = inherited_shlibs, ) - return (merged_link_info, shared_libs, inherited_graphs, inherited_link_deps) + return (merged_link_info, shared_libs, inherited_graphs, inherited_exported_deps) def _rust_providers( ctx: AnalysisContext, @@ -707,6 +711,7 @@ def _native_providers( inherited_link_infos = inherited_merged_link_infos(ctx, compile_ctx.dep_ctx) inherited_shlibs = inherited_shared_libs(ctx, compile_ctx.dep_ctx) inherited_link_graphs = inherited_linkable_graphs(ctx, compile_ctx.dep_ctx) + inherited_exported_deps = inherited_exported_link_deps(ctx, compile_ctx.dep_ctx) linker_info = compile_ctx.cxx_toolchain_info.linker_info linker_type = linker_info.type @@ -727,7 +732,8 @@ def _native_providers( ctx, compile_ctx.cxx_toolchain_info.pic_behavior, link_infos, - exported_deps = inherited_link_infos, + deps = inherited_link_infos, + exported_deps = filter(None, [d.get(MergedLinkInfo) for d in inherited_exported_deps]), preferred_linkage = preferred_linkage, )) @@ -784,13 +790,14 @@ def _native_providers( linkable_node = create_linkable_node( ctx = ctx, preferred_linkage = preferred_linkage, - exported_deps = inherited_link_graphs, + deps = inherited_link_graphs, + exported_deps = inherited_exported_deps, link_infos = link_infos, shared_libs = solibs, default_soname = shlib_name, ), ), - deps = inherited_link_graphs, + deps = inherited_link_graphs + 
inherited_exported_deps, ) providers.append(linkable_graph) From 225f92776f03617db883731edf1b584defdca66e Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Thu, 11 Jan 2024 13:30:53 -0800 Subject: [PATCH 0122/1133] link groups: Add `ignore_force_static_follows_dependents` option Summary: Mostly explained in comments in the diff. I'm somewhat confident that this is correct from the Rust side, but much less confident on the link groups side This should resolve the dev-lg artifact size regressions from v4 of D51303584 Reviewed By: andrewjcg Differential Revision: D52536616 fbshipit-source-id: e40eb39c2c851ad9095ac365aa0a88d9403ef323 --- prelude/cxx/link_groups.bzl | 6 ++++-- prelude/linking/linkable_graph.bzl | 6 +++++- prelude/rust/rust_library.bzl | 20 +++++++++++++++++--- 3 files changed, 26 insertions(+), 6 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index fa353e20a..d037a6fdd 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -419,7 +419,9 @@ def get_filtered_labels_to_links_map( target_link_group = link_group_mappings.get(target) # Always add force-static libs to the link. - if force_static_follows_dependents and node.preferred_linkage == Linkage("static"): + if (force_static_follows_dependents and + node.preferred_linkage == Linkage("static") and + not node.ignore_force_static_follows_dependents): add_link(target, output_style) elif not target_link_group and not link_group: # Ungrouped linkable targets belong to the unlabeled executable @@ -535,7 +537,7 @@ def find_relevant_roots( # link group. 
def collect_and_traverse_roots(roots, node_target): node = linkable_graph_node_map.get(node_target) - if node.preferred_linkage == Linkage("static"): + if node.preferred_linkage == Linkage("static") and not node.ignore_force_static_follows_dependents: return node.deps + node.exported_deps node_link_group = link_group_mappings.get(node_target) if node_link_group == MATCH_ALL_LABEL: diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index 2ed062ae3..1f73160ed 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -87,6 +87,8 @@ LinkableNode = record( # Whether the node should appear in the android mergemap (which provides information about the original # soname->final merged lib mapping) include_in_android_mergemap = field(bool), + # Don't follow dependents on this node even if has preferred linkage static + ignore_force_static_follows_dependents = field(bool), # Only allow constructing within this file. _private = _DisallowConstruction, @@ -144,7 +146,8 @@ def create_linkable_node( shared_libs: dict[str, LinkedObject] = {}, can_be_asset: bool = True, include_in_android_mergemap: bool = True, - linker_flags: [LinkerFlags, None] = None) -> LinkableNode: + linker_flags: [LinkerFlags, None] = None, + ignore_force_static_follows_dependents: bool = False) -> LinkableNode: for output_style in _get_required_outputs_for_linkage(preferred_linkage): expect( output_style in link_infos, @@ -164,6 +167,7 @@ def create_linkable_node( include_in_android_mergemap = include_in_android_mergemap, default_soname = default_soname, linker_flags = linker_flags, + ignore_force_static_follows_dependents = ignore_force_static_follows_dependents, _private = _DisallowConstruction(), ) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index e8f18467b..36770bc7c 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -179,7 +179,13 @@ def prebuilt_rust_library_impl(ctx: 
AnalysisContext) -> list[Provider]: external_debug_info = external_debug_info, ) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, dep_ctx, cxx_toolchain, link_infos) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers( + ctx, + dep_ctx, + cxx_toolchain, + link_infos, + Linkage(ctx.attrs.preferred_linkage), + ) providers.append( RustLinkInfo( crate = crate, @@ -526,7 +532,8 @@ def _rust_link_providers( ctx: AnalysisContext, dep_ctx: DepCollectionContext, cxx_toolchain: CxxToolchainInfo, - link_infos: dict[LibOutputStyle, LinkInfos]) -> ( + link_infos: dict[LibOutputStyle, LinkInfos], + preferred_linkage: Linkage) -> ( MergedLinkInfo, SharedLibraryInfo, list[LinkableGraph], @@ -591,6 +598,13 @@ def _rust_link_providers( # FIXME(JakobDegen): It should be ok to set this to `None`, # but that breaks arc focus default_soname = "", + # Link groups have a heuristic in which they assume that a + # preferred_linkage = "static" library needs to be linked + # into every single link group, instead of just one. + # Applying that same heuristic to Rust seems right, but only + # if this target actually requested that. Opt ourselves out + # if it didn't. 
+ ignore_force_static_follows_dependents = preferred_linkage != Linkage("static"), ), label = new_label, ), @@ -625,7 +639,7 @@ def _rust_providers( link, meta = param_artifact[params] strategy_info[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, params.crate_type, link_strategy, link, meta) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos, Linkage(ctx.attrs.preferred_linkage)) providers = [] From a62fdf5c91cfe30cae93275592428aeceafe0082 Mon Sep 17 00:00:00 2001 From: Liang Qiu Date: Thu, 11 Jan 2024 17:04:00 -0800 Subject: [PATCH 0123/1133] Revert D52536616: link groups: Add `ignore_force_static_follows_dependents` option Differential Revision: D52536616 Original commit changeset: e40eb39c2c85 Original Phabricator Diff: D52536616 fbshipit-source-id: 023f9f7c3f584dbdda31c978592aca2b84ecebe3 --- prelude/cxx/link_groups.bzl | 6 ++---- prelude/linking/linkable_graph.bzl | 6 +----- prelude/rust/rust_library.bzl | 20 +++----------------- 3 files changed, 6 insertions(+), 26 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index d037a6fdd..fa353e20a 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -419,9 +419,7 @@ def get_filtered_labels_to_links_map( target_link_group = link_group_mappings.get(target) # Always add force-static libs to the link. 
- if (force_static_follows_dependents and - node.preferred_linkage == Linkage("static") and - not node.ignore_force_static_follows_dependents): + if force_static_follows_dependents and node.preferred_linkage == Linkage("static"): add_link(target, output_style) elif not target_link_group and not link_group: # Ungrouped linkable targets belong to the unlabeled executable @@ -537,7 +535,7 @@ def find_relevant_roots( # link group. def collect_and_traverse_roots(roots, node_target): node = linkable_graph_node_map.get(node_target) - if node.preferred_linkage == Linkage("static") and not node.ignore_force_static_follows_dependents: + if node.preferred_linkage == Linkage("static"): return node.deps + node.exported_deps node_link_group = link_group_mappings.get(node_target) if node_link_group == MATCH_ALL_LABEL: diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index 1f73160ed..2ed062ae3 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -87,8 +87,6 @@ LinkableNode = record( # Whether the node should appear in the android mergemap (which provides information about the original # soname->final merged lib mapping) include_in_android_mergemap = field(bool), - # Don't follow dependents on this node even if has preferred linkage static - ignore_force_static_follows_dependents = field(bool), # Only allow constructing within this file. 
_private = _DisallowConstruction, @@ -146,8 +144,7 @@ def create_linkable_node( shared_libs: dict[str, LinkedObject] = {}, can_be_asset: bool = True, include_in_android_mergemap: bool = True, - linker_flags: [LinkerFlags, None] = None, - ignore_force_static_follows_dependents: bool = False) -> LinkableNode: + linker_flags: [LinkerFlags, None] = None) -> LinkableNode: for output_style in _get_required_outputs_for_linkage(preferred_linkage): expect( output_style in link_infos, @@ -167,7 +164,6 @@ def create_linkable_node( include_in_android_mergemap = include_in_android_mergemap, default_soname = default_soname, linker_flags = linker_flags, - ignore_force_static_follows_dependents = ignore_force_static_follows_dependents, _private = _DisallowConstruction(), ) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 36770bc7c..e8f18467b 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -179,13 +179,7 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: external_debug_info = external_debug_info, ) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers( - ctx, - dep_ctx, - cxx_toolchain, - link_infos, - Linkage(ctx.attrs.preferred_linkage), - ) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, dep_ctx, cxx_toolchain, link_infos) providers.append( RustLinkInfo( crate = crate, @@ -532,8 +526,7 @@ def _rust_link_providers( ctx: AnalysisContext, dep_ctx: DepCollectionContext, cxx_toolchain: CxxToolchainInfo, - link_infos: dict[LibOutputStyle, LinkInfos], - preferred_linkage: Linkage) -> ( + link_infos: dict[LibOutputStyle, LinkInfos]) -> ( MergedLinkInfo, SharedLibraryInfo, list[LinkableGraph], @@ -598,13 +591,6 @@ def _rust_link_providers( # FIXME(JakobDegen): It should be ok to set this to `None`, # but that breaks arc focus default_soname = "", - # Link groups have a heuristic in which they assume that a - # 
preferred_linkage = "static" library needs to be linked - # into every single link group, instead of just one. - # Applying that same heuristic to Rust seems right, but only - # if this target actually requested that. Opt ourselves out - # if it didn't. - ignore_force_static_follows_dependents = preferred_linkage != Linkage("static"), ), label = new_label, ), @@ -639,7 +625,7 @@ def _rust_providers( link, meta = param_artifact[params] strategy_info[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, params.crate_type, link_strategy, link, meta) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos, Linkage(ctx.attrs.preferred_linkage)) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos) providers = [] From 703a7791c9210c65094c45262b87027a8dbaa9ee Mon Sep 17 00:00:00 2001 From: Liang Qiu Date: Thu, 11 Jan 2024 17:04:00 -0800 Subject: [PATCH 0124/1133] Revert D52499874: rules: Better handling of exported deps Differential Revision: D52499874 Original commit changeset: d2e8947d39bd Original Phabricator Diff: D52499874 fbshipit-source-id: 0a89ff543b1a0057b87e29c79cc173f0b76ca6b1 --- prelude/rust/link_info.bzl | 11 ++--------- prelude/rust/rust_library.bzl | 21 +++++++-------------- 2 files changed, 9 insertions(+), 23 deletions(-) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index d2780cefc..a7032d8b9 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -148,15 +148,8 @@ RustLinkInfo = provider( "linkable_graphs": list[LinkableGraph], # LinkGroupLibInfo intentionally omitted because the Rust -> Rust version # never needs to be different from the Rust -> native version - # - # Rust currently treats all native dependencies as being exported, in - # the sense of C++ `exported_deps`. 
However, they are not only exported - # from the Rust library that directly depends on them, they are also - # exported through any further chains of Rust libraries. This list - # tracks those dependencies - # - # FIXME(JakobDegen): We should not default to treating all native deps - # as exported. + # The native dependencies reachable from this Rust library through other + # Rust libraries "exported_link_deps": list[Dependency], }, ) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index e8f18467b..6cc06110b 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -545,7 +545,7 @@ def _rust_link_providers( inherited_link_infos = inherited_merged_link_infos(ctx, dep_ctx) inherited_shlibs = inherited_shared_libs(ctx, dep_ctx) inherited_graphs = inherited_linkable_graphs(ctx, dep_ctx) - inherited_exported_deps = inherited_exported_link_deps(ctx, dep_ctx) + inherited_link_deps = inherited_exported_link_deps(ctx, dep_ctx) if dep_ctx.advanced_unstable_linking: # We have to produce a version of the providers that are defined in such @@ -555,15 +555,11 @@ def _rust_link_providers( # # Note that all of this code is FORCE_RLIB specific. Disabling that # setting requires replacing this with the "real" native providers - # - # As an optimization, we never bother reporting exported deps here. - # Whichever dependent uses the providers created here will take care of - # that for us. 
merged_link_info = create_merged_link_info( ctx, cxx_toolchain.pic_behavior, link_infos, - deps = inherited_link_infos, + exported_deps = inherited_link_infos, preferred_linkage = Linkage("static"), ) shared_libs = merge_shared_libraries( @@ -586,7 +582,7 @@ def _rust_link_providers( linkable_node = create_linkable_node( ctx = ctx, preferred_linkage = Linkage("static"), - deps = inherited_graphs, + exported_deps = inherited_graphs, link_infos = link_infos, # FIXME(JakobDegen): It should be ok to set this to `None`, # but that breaks arc focus @@ -603,7 +599,7 @@ def _rust_link_providers( ctx.actions, deps = inherited_shlibs, ) - return (merged_link_info, shared_libs, inherited_graphs, inherited_exported_deps) + return (merged_link_info, shared_libs, inherited_graphs, inherited_link_deps) def _rust_providers( ctx: AnalysisContext, @@ -711,7 +707,6 @@ def _native_providers( inherited_link_infos = inherited_merged_link_infos(ctx, compile_ctx.dep_ctx) inherited_shlibs = inherited_shared_libs(ctx, compile_ctx.dep_ctx) inherited_link_graphs = inherited_linkable_graphs(ctx, compile_ctx.dep_ctx) - inherited_exported_deps = inherited_exported_link_deps(ctx, compile_ctx.dep_ctx) linker_info = compile_ctx.cxx_toolchain_info.linker_info linker_type = linker_info.type @@ -732,8 +727,7 @@ def _native_providers( ctx, compile_ctx.cxx_toolchain_info.pic_behavior, link_infos, - deps = inherited_link_infos, - exported_deps = filter(None, [d.get(MergedLinkInfo) for d in inherited_exported_deps]), + exported_deps = inherited_link_infos, preferred_linkage = preferred_linkage, )) @@ -790,14 +784,13 @@ def _native_providers( linkable_node = create_linkable_node( ctx = ctx, preferred_linkage = preferred_linkage, - deps = inherited_link_graphs, - exported_deps = inherited_exported_deps, + exported_deps = inherited_link_graphs, link_infos = link_infos, shared_libs = solibs, default_soname = shlib_name, ), ), - deps = inherited_link_graphs + inherited_exported_deps, + deps = 
inherited_link_graphs, ) providers.append(linkable_graph) From 81ac6816286ce630679fbe59b5da80c6ea9b7719 Mon Sep 17 00:00:00 2001 From: Liang Qiu Date: Thu, 11 Jan 2024 17:04:00 -0800 Subject: [PATCH 0125/1133] Revert D52499875: rules: Drop native providers from `prebuilt_rust_library` Differential Revision: D52499875 Original commit changeset: 4648bbf0b510 Original Phabricator Diff: D52499875 fbshipit-source-id: 5c4943b409dc4972b0d571bb3bb90c7ce51745f1 --- prelude/rust/rust_library.bzl | 52 +++++++++++++++++++++++++++++++++-- 1 file changed, 49 insertions(+), 3 deletions(-) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 6cc06110b..f39b2c93b 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -19,7 +19,7 @@ load( "@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo", "PicBehavior") load( "@prelude//cxx:linker.bzl", "PDB_SUB_TARGET", @@ -44,7 +44,7 @@ load( "LinkStrategy", "Linkage", "LinkedObject", - "MergedLinkInfo", # @unused Used as a type + "MergedLinkInfo", "SharedLibLinkable", "create_merged_link_info", "create_merged_link_info_for_propagation", @@ -61,7 +61,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibraryInfo", # @unused Used as a type + "SharedLibraryInfo", "create_shared_libraries", "merge_shared_libraries", ) @@ -191,6 +191,52 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: ), ) + # Native link provier. + providers.append( + create_merged_link_info( + ctx, + PicBehavior("supported"), + link_infos, + exported_deps = [d[MergedLinkInfo] for d in ctx.attrs.deps], + # TODO(agallagher): This matches v1 behavior, but some of these libs + # have prebuilt DSOs which might be usable. + preferred_linkage = Linkage("static"), + ), + ) + + # Native link graph setup. 
+ linkable_graph = create_linkable_graph( + ctx, + node = create_linkable_graph_node( + ctx, + linkable_node = create_linkable_node( + ctx = ctx, + preferred_linkage = Linkage("static"), + exported_deps = ctx.attrs.deps, + link_infos = link_infos, + default_soname = get_default_shared_library_name(linker_info, ctx.label), + ), + ), + deps = ctx.attrs.deps, + ) + providers.append(linkable_graph) + + providers.append(merge_link_group_lib_info(children = inherited_link_group_lib_infos(ctx, dep_ctx))) + + # FIXME(JakobDegen): I am about 85% confident that this matches what C++ + # does for prebuilt libraries if they don't have a shared variant and have + # preferred linkage static. C++ doesn't require static preferred linkage on + # their prebuilt libraries, and so they incur extra complexity here that we + # don't have to deal with. + # + # However, Rust linking is not the same as C++ linking. If Rust were + # disciplined about its use of `LibOutputStyle`, `Linkage` and + # `LinkStrategy`, then this would at least be no more wrong than what C++ + # does. In the meantime however... 
+ providers.append(SharedLibraryInfo(set = None)) + + providers.append(merge_android_packageable_info(ctx.label, ctx.actions, ctx.attrs.deps)) + return providers def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: From 4bc7c087df6818736f5eacb45a5132cad04996f4 Mon Sep 17 00:00:00 2001 From: Liang Qiu Date: Thu, 11 Jan 2024 17:04:00 -0800 Subject: [PATCH 0126/1133] Revert D52507600: rules: Add a FIXME Differential Revision: D52507600 Original commit changeset: afe1ad6472ee Original Phabricator Diff: D52507600 fbshipit-source-id: 20730b7e6ba9dfa4eecb7321f101b9cfd5e7df1b --- prelude/android/android_binary_native_library_rules.bzl | 3 --- prelude/rust/rust_library.bzl | 2 -- 2 files changed, 5 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 4b606d506..79f0857a1 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -681,9 +681,6 @@ def encode_linkable_graph_for_mergemap(graph_node_map_by_platform: dict[str, dic platform: { target: _LinkableSharedNode( raw_target = str(target.raw_target()), - # FIXME(JakobDegen): The definition of `LinkableNode` claims that it's ok for this - # to be `None` (I assume in the case of static preferred linkage), so either that is - # wrong or this is. 
See the diff that added this FIXME for how to reproduce soname = node.default_soname, labels = node.labels, deps = node.deps + node.exported_deps, diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index f39b2c93b..1af9245d8 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -630,8 +630,6 @@ def _rust_link_providers( preferred_linkage = Linkage("static"), exported_deps = inherited_graphs, link_infos = link_infos, - # FIXME(JakobDegen): It should be ok to set this to `None`, - # but that breaks arc focus default_soname = "", ), label = new_label, From 45881deb49aee519632b5e80dcbf32d27fdc52fa Mon Sep 17 00:00:00 2001 From: Liang Qiu Date: Thu, 11 Jan 2024 17:04:00 -0800 Subject: [PATCH 0127/1133] Revert D52499881: rules: Reimplement native unbundled deps to better support `FORCE_RLIB` Differential Revision: D52499881 Original commit changeset: c3a889ac7abd Original Phabricator Diff: D52499881 fbshipit-source-id: 671eee394f4eff28331443db251edff70fafcf17 --- prelude/rust/link_info.bzl | 36 ++++++++++------ prelude/rust/rust_library.bzl | 77 +++++------------------------------ 2 files changed, 34 insertions(+), 79 deletions(-) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index a7032d8b9..4b3492003 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -139,6 +139,9 @@ RustLinkInfo = provider( # `FORCE_RLIB` behavior, in which Rust -> Rust dependency edges are always statically # linked. The native link provider is identical, except that it does not respect the # `FORCE_RLIB` behavior. + # + # FIXME(JakobDegen): The `advanced_unstable_linking` case is currently aspirational and not + # how things are actually implemented. 
"merged_link_info": MergedLinkInfo, "shared_libs": SharedLibraryInfo, # Because of the weird representation of `LinkableGraph`, there is no @@ -350,11 +353,14 @@ def _native_link_dependencies( """ first_order_deps = [dep.dep for dep in resolve_deps(ctx, dep_ctx)] - return [ - d - for d in first_order_deps - if RustLinkInfo not in d and MergedLinkInfo in d - ] + if dep_ctx.advanced_unstable_linking: + return [d for d in first_order_deps if MergedLinkInfo in d] + else: + return [ + d + for d in first_order_deps + if RustLinkInfo not in d and MergedLinkInfo in d + ] # Returns the rust link infos for non-proc macro deps. # @@ -368,9 +374,10 @@ def inherited_exported_link_deps(ctx: AnalysisContext, dep_ctx: DepCollectionCon deps = {} for dep in _native_link_dependencies(ctx, dep_ctx): deps[dep.label] = dep - for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): - for dep in info.exported_link_deps: - deps[dep.label] = dep + if not dep_ctx.advanced_unstable_linking: + for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): + for dep in info.exported_link_deps: + deps[dep.label] = dep return deps.values() def inherited_rust_cxx_link_group_info( @@ -460,7 +467,8 @@ def inherited_merged_link_infos( dep_ctx: DepCollectionContext) -> list[MergedLinkInfo]: infos = [] infos.extend([d[MergedLinkInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) - infos.extend([d.merged_link_info for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx) if d.merged_link_info]) + if not dep_ctx.advanced_unstable_linking: + infos.extend([d.merged_link_info for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx) if d.merged_link_info]) return infos def inherited_shared_libs( @@ -468,7 +476,8 @@ def inherited_shared_libs( dep_ctx: DepCollectionContext) -> list[SharedLibraryInfo]: infos = [] infos.extend([d[SharedLibraryInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) - infos.extend([d.shared_libs for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx)]) + if not 
dep_ctx.advanced_unstable_linking: + infos.extend([d.shared_libs for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx)]) return infos def inherited_linkable_graphs(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[LinkableGraph]: @@ -477,9 +486,10 @@ def inherited_linkable_graphs(ctx: AnalysisContext, dep_ctx: DepCollectionContex g = d.get(LinkableGraph) if g: deps[g.label] = g - for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): - for g in info.linkable_graphs: - deps[g.label] = g + if not dep_ctx.advanced_unstable_linking: + for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): + for g in info.linkable_graphs: + deps[g.label] = g return deps.values() def inherited_link_group_lib_infos(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[LinkGroupLibInfo]: diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 1af9245d8..9393333f0 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -19,7 +19,7 @@ load( "@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo", "PicBehavior") +load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load( "@prelude//cxx:linker.bzl", "PDB_SUB_TARGET", @@ -131,8 +131,7 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: panic_runtime = rust_toolchain.panic_runtime, ) - cxx_toolchain = get_cxx_toolchain_info(ctx) - linker_info = cxx_toolchain.linker_info + linker_info = get_cxx_toolchain_info(ctx).linker_info archive_info = LinkInfos( default = LinkInfo( @@ -179,7 +178,7 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: external_debug_info = external_debug_info, ) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, dep_ctx, cxx_toolchain, link_infos) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, dep_ctx) providers.append( 
RustLinkInfo( crate = crate, @@ -365,7 +364,6 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: compile_ctx = compile_ctx, lang_style_param = lang_style_param, param_artifact = rust_param_artifact, - link_infos = link_infos, ) providers += _native_providers( ctx = ctx, @@ -570,14 +568,7 @@ def _default_providers( def _rust_link_providers( ctx: AnalysisContext, - dep_ctx: DepCollectionContext, - cxx_toolchain: CxxToolchainInfo, - link_infos: dict[LibOutputStyle, LinkInfos]) -> ( - MergedLinkInfo, - SharedLibraryInfo, - list[LinkableGraph], - list[Dependency], -): + dep_ctx: DepCollectionContext) -> (MergedLinkInfo, SharedLibraryInfo, list[LinkableGraph], list[Dependency]): # These are never accessed in the case of proc macros, so just return some dummy # values if ctx.attrs.proc_macro: @@ -593,64 +584,18 @@ def _rust_link_providers( inherited_graphs = inherited_linkable_graphs(ctx, dep_ctx) inherited_link_deps = inherited_exported_link_deps(ctx, dep_ctx) - if dep_ctx.advanced_unstable_linking: - # We have to produce a version of the providers that are defined in such - # a way that native rules looking at these providers will also pick up - # the `FORCE_RLIB` behavior. The general approach to that will be to - # claim that we have `preferred_linkage = "static"`. - # - # Note that all of this code is FORCE_RLIB specific. Disabling that - # setting requires replacing this with the "real" native providers - merged_link_info = create_merged_link_info( - ctx, - cxx_toolchain.pic_behavior, - link_infos, - exported_deps = inherited_link_infos, - preferred_linkage = Linkage("static"), - ) - shared_libs = merge_shared_libraries( - # We never actually have any shared libraries to add - ctx.actions, - deps = inherited_shlibs, - ) - - # The link graph representation is a little bit weird, since instead of - # just building up a graph via tsets, it uses a flat list of labeled - # nodes, each with a list of labels for dependency edges. 
The node that - # we create here cannot just use this target's label, since that would - # conflict with the node created for the native providers. As a result, - # we make up a fake subtarget to get a distinct label - new_label = ctx.label.configured_target().with_sub_target((ctx.label.sub_target or []) + ["fake_force_rlib_subtarget"]) - linkable_graph = create_linkable_graph( - ctx, - node = create_linkable_graph_node( - ctx, - linkable_node = create_linkable_node( - ctx = ctx, - preferred_linkage = Linkage("static"), - exported_deps = inherited_graphs, - link_infos = link_infos, - default_soname = "", - ), - label = new_label, - ), - deps = inherited_graphs, - ) - inherited_graphs = [linkable_graph] - else: - merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) - shared_libs = merge_shared_libraries( - ctx.actions, - deps = inherited_shlibs, - ) + merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) + shared_libs = merge_shared_libraries( + ctx.actions, + deps = inherited_shlibs, + ) return (merged_link_info, shared_libs, inherited_graphs, inherited_link_deps) def _rust_providers( ctx: AnalysisContext, compile_ctx: CompileContext, lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], - param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)], - link_infos: dict[LibOutputStyle, LinkInfos]) -> list[Provider]: + param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)]) -> list[Provider]: """ Return the set of providers for Rust linkage. 
""" @@ -665,7 +610,7 @@ def _rust_providers( link, meta = param_artifact[params] strategy_info[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, params.crate_type, link_strategy, link, meta) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx) providers = [] From 83c09d1d915324c5e95506b76562b0d70c15894b Mon Sep 17 00:00:00 2001 From: Itamar Oren Date: Thu, 11 Jan 2024 22:43:10 -0800 Subject: [PATCH 0128/1133] Fix Python 3.12 compatibility Summary: use the C API patched in 3.12 in D52642782 when compiling for 3.12 instead of the no-longer-available `_Py_PackageContext` symbol. also stop using `_PyImport_FindExtensionObject` in favor of the public `PyImport_GetModule`. Reviewed By: zsol Differential Revision: D52643539 fbshipit-source-id: a1955f93fd565a7891cc6160cbd1760a83319eb4 --- prelude/python/tools/static_extension_utils.cpp | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/prelude/python/tools/static_extension_utils.cpp b/prelude/python/tools/static_extension_utils.cpp index f35e2a682..1470561cb 100644 --- a/prelude/python/tools/static_extension_utils.cpp +++ b/prelude/python/tools/static_extension_utils.cpp @@ -24,15 +24,13 @@ namespace { static PyObject* _create_module(PyObject* self, PyObject* spec) { PyObject* name; PyObject* mod; - const char* oldcontext; name = PyObject_GetAttrString(spec, "name"); if (name == nullptr) { return nullptr; } - // TODO private api usage - mod = _PyImport_FindExtensionObject(name, name); + mod = PyImport_GetModule(name); if (mod || PyErr_Occurred()) { Py_DECREF(name); Py_XINCREF(mod); @@ -58,7 +56,15 @@ static PyObject* _create_module(PyObject* self, PyObject* spec) { PyObject* modules = nullptr; PyModuleDef* def; - oldcontext = _Py_PackageContext; + 
+#if PY_VERSION_HEX >= 0x030C0000 + // Use our custom Python 3.12 C-API to call the statically linked module init + // function + mod = _Ci_PyImport_CallInitFuncWithContext(namestr.c_str(), initfunc); +#else + // In Python 3.10 (and earlier) we need to handle package context swapping + // ourselves + const char* oldcontext = _Py_PackageContext; _Py_PackageContext = namestr.c_str(); if (_Py_PackageContext == nullptr) { _Py_PackageContext = oldcontext; @@ -67,6 +73,7 @@ static PyObject* _create_module(PyObject* self, PyObject* spec) { } mod = initfunc(); _Py_PackageContext = oldcontext; +#endif if (mod == nullptr) { Py_DECREF(name); return nullptr; From 6b4e1779ea369e13daf290ca8a515bc97e9ae309 Mon Sep 17 00:00:00 2001 From: Ken Ahn Date: Fri, 12 Jan 2024 09:28:04 -0800 Subject: [PATCH 0129/1133] BE | Pipe M1 simulator option to apple_test Summary: ## Problem https://fb.workplace.com/groups/735885229793428/permalink/25587488444206429/ ## Solution Adding a flag to run the screenshot tests on dedicated m1 platform instead of mixed platform build[fbsource//fbobjc/Apps/Internal/AREngineSnapshotTests:AREngineSnapshotTests_scene_object_transform_value - AREngineSnapshotTests/test_base_metal_jsc_vEffect_uncompressed ] Reviewed By: blackm00n Differential Revision: D52527132 fbshipit-source-id: 6e0105ff7dd764434a9b2efa81146d4622f78079 --- prelude/apple/apple_rules_impl_utility.bzl | 1 + prelude/apple/apple_test.bzl | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 6a50588fb..2d21fc485 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -108,6 +108,7 @@ def apple_test_extra_attrs(): "resource_group_map": attrs.option(attrs.string(), default = None), "stripped": attrs.bool(default = False), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), + "use_m1_simulator": 
attrs.bool(default = False), "_apple_toolchain": get_apple_toolchain_attr(), "_ios_booted_simulator": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:ios_booted_simulator", providers = [LocalResourceInfo]), "_ios_unbooted_simulator": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:ios_unbooted_simulator", providers = [LocalResourceInfo]), diff --git a/prelude/apple/apple_test.bzl b/prelude/apple/apple_test.bzl index ea5a765fd..f3da39d96 100644 --- a/prelude/apple/apple_test.bzl +++ b/prelude/apple/apple_test.bzl @@ -199,10 +199,10 @@ def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_ else: # @oss-disable: requires_ios_booted_simulator = ctx.attrs.test_host_app != None or ctx.attrs.ui_test_target_app != None - # @oss-disable: remote_execution_properties = ios_test_re_capabilities(use_unbooted_simulator = not requires_ios_booted_simulator) + # @oss-disable: remote_execution_properties = ios_test_re_capabilities(use_unbooted_simulator = not requires_ios_booted_simulator, use_m1_simulator = ctx.attrs.use_m1_simulator) remote_execution_properties = None # @oss-enable - # @oss-disable: remote_execution_use_case = apple_test_re_use_case(macos_test = sdk_name == MacOSXSdkMetadata.name) + # @oss-disable: remote_execution_use_case = apple_test_re_use_case(macos_test = sdk_name == MacOSXSdkMetadata.name, use_m1_simulator = ctx.attrs.use_m1_simulator) remote_execution_use_case = None # @oss-enable local_enabled = remote_execution_use_case == None From c8b52f600a81ede0c47632039119450509411035 Mon Sep 17 00:00:00 2001 From: Thomas David Cuvillier Date: Fri, 12 Jan 2024 09:40:21 -0800 Subject: [PATCH 0130/1133] Fix long hanging execution time. Summary: init:stop() takes 200s to returns ... 
Surprisingly, I tried to the following code spawn(fun() -> init:stop() end), receive after ?INIT_STOP_TIMEOUT -> erlang:halt(0) but this didn't solve the issue neither ???? Reviewed By: TheGeorge Differential Revision: D52734518 fbshipit-source-id: ded55b1be650e390222f0078e1bb5013f62d8c79 --- .../common_test/test_exec/src/ct_executor.erl | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/prelude/erlang/common_test/test_exec/src/ct_executor.erl b/prelude/erlang/common_test/test_exec/src/ct_executor.erl index fa61612be..f9caec0d8 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_executor.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_executor.erl @@ -88,21 +88,7 @@ run(Args) when is_list(Args) -> io:format("~ts\n", [erl_error:format_exception(Class1, Reason1, Stack1)]), 1 end, - case ExitCode of - 0 -> - init:stop(0), - receive - after ?INIT_STOP_TIMEOUT -> - ?LOG_ERROR( - io_lib:format("~p failed to terminate within ~c millisecond", [ - ?MODULE, ?INIT_STOP_TIMEOUT - ]) - ), - erlang:halt(0) - end; - _ -> - erlang:halt(ExitCode) - end. + erlang:halt(ExitCode). -spec parse_arguments([string()]) -> {proplists:proplist(), [term()]}. parse_arguments(Args) -> From 6d0df6cdb41d608860b471350e51acec11b13904 Mon Sep 17 00:00:00 2001 From: Tianyu Li Date: Fri, 12 Jan 2024 10:16:50 -0800 Subject: [PATCH 0131/1133] Make default platform configurable through buckconfig Summary: "config//platform/android:x86_32-fbsource" is very specific to fbsource. In order to prepare for buck2 android OSS, default platform should be configurable so projects do not have to rely on it. 
Reviewed By: IanChilds Differential Revision: D52717201 fbshipit-source-id: ed275c5852962e64274df92d6408c287f4a860bd --- prelude/android/configuration.bzl | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/prelude/android/configuration.bzl b/prelude/android/configuration.bzl index 3a6229003..f383e99f0 100644 --- a/prelude/android/configuration.bzl +++ b/prelude/android/configuration.bzl @@ -23,13 +23,14 @@ load("@prelude//utils:expect.bzl", "expect") # platforms). We only use the "arm64" native libraries if it is one of the specified platforms. We # "throw away" the non-native libraries for all other configured sub-graphs. +_DEFAULT_PLATFORM = "config//platform/android:x86_32-fbsource" + _REFS = { "arm64": "config//cpu/constraints:arm64", "armv7": "config//cpu/constraints:arm32", "build_only_native_code": "prelude//android/constraints:build_only_native_code", "building_android_binary": "prelude//os:building_android_binary", "cpu": "config//cpu/constraints:cpu", - "default_platform": "config//platform/android:x86_32-fbsource", "maybe_build_only_native_code": "prelude//android/constraints:maybe_build_only_native_code", "maybe_building_android_binary": "prelude//os:maybe_building_android_binary", "min_sdk_version": "prelude//android/constraints:min_sdk_version", @@ -40,6 +41,8 @@ for min_sdk in get_min_sdk_version_range(): constraint_value_name = get_min_sdk_version_constraint_value_name(min_sdk) _REFS[constraint_value_name] = "prelude//android/constraints:{}".format(constraint_value_name) +_REFS["default_platform"] = read_root_config("build", "default_platform", _DEFAULT_PLATFORM) + def _cpu_split_transition_impl( platform: PlatformInfo, refs: struct, From fa6927a13be25105ec5872b2fbe52913c24e8aa6 Mon Sep 17 00:00:00 2001 From: Jacob Rodal Date: Fri, 12 Jan 2024 11:26:56 -0800 Subject: [PATCH 0132/1133] fix apk_genrule on windows Summary: `apk_genrule` doesn't set `_exec_os_type`, which causes `process_genrule` to use bash instead of cmd.exe on 
windows. This diff fixes that. Differential Revision: D52716771 fbshipit-source-id: 8e7c14800b1b91f226eec8c918d31208c4af8190 --- prelude/android/android.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 48cec50fb..ab1eb4259 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -187,6 +187,7 @@ extra_attributes = { "apk_genrule": genrule_attributes() | { "type": attrs.string(default = "apk"), "_android_toolchain": toolchains_common.android(), + "_exec_os_type": buck.exec_os_type_arg(), }, "gen_aidl": { "import_paths": attrs.list(attrs.arg(), default = []), From 91651832ac6fcf6bb703415bb4ced37dccd620d6 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Fri, 12 Jan 2024 18:14:44 -0800 Subject: [PATCH 0133/1133] Back out "Revert D52499881: [rust] rules: Reimplement native unbundled deps to better support `FORCE_RLIB`" Summary: Change wrt to previous version: ``` diff --git a/fbcode/buck2/prelude/rust/rust_library.bzl b/fbcode/buck2/prelude/rust/rust_library.bzl --- a/fbcode/buck2/prelude/rust/rust_library.bzl +++ b/fbcode/buck2/prelude/rust/rust_library.bzl @@ -632,3 +632,3 @@ link_infos = link_infos, - default_soname = "", + default_soname = get_default_shared_library_name(cxx_toolchain.linker_info, ctx.label), ), ``` Reviewed By: dtolnay Differential Revision: D52749222 fbshipit-source-id: 4d6a7fb481daf94b9daa9fabd2631f9c567960a7 --- prelude/rust/link_info.bzl | 36 ++++++---------- prelude/rust/rust_library.bzl | 77 ++++++++++++++++++++++++++++++----- 2 files changed, 79 insertions(+), 34 deletions(-) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 4b3492003..a7032d8b9 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -139,9 +139,6 @@ RustLinkInfo = provider( # `FORCE_RLIB` behavior, in which Rust -> Rust dependency edges are always statically # linked. 
The native link provider is identical, except that it does not respect the # `FORCE_RLIB` behavior. - # - # FIXME(JakobDegen): The `advanced_unstable_linking` case is currently aspirational and not - # how things are actually implemented. "merged_link_info": MergedLinkInfo, "shared_libs": SharedLibraryInfo, # Because of the weird representation of `LinkableGraph`, there is no @@ -353,14 +350,11 @@ def _native_link_dependencies( """ first_order_deps = [dep.dep for dep in resolve_deps(ctx, dep_ctx)] - if dep_ctx.advanced_unstable_linking: - return [d for d in first_order_deps if MergedLinkInfo in d] - else: - return [ - d - for d in first_order_deps - if RustLinkInfo not in d and MergedLinkInfo in d - ] + return [ + d + for d in first_order_deps + if RustLinkInfo not in d and MergedLinkInfo in d + ] # Returns the rust link infos for non-proc macro deps. # @@ -374,10 +368,9 @@ def inherited_exported_link_deps(ctx: AnalysisContext, dep_ctx: DepCollectionCon deps = {} for dep in _native_link_dependencies(ctx, dep_ctx): deps[dep.label] = dep - if not dep_ctx.advanced_unstable_linking: - for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): - for dep in info.exported_link_deps: - deps[dep.label] = dep + for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): + for dep in info.exported_link_deps: + deps[dep.label] = dep return deps.values() def inherited_rust_cxx_link_group_info( @@ -467,8 +460,7 @@ def inherited_merged_link_infos( dep_ctx: DepCollectionContext) -> list[MergedLinkInfo]: infos = [] infos.extend([d[MergedLinkInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) - if not dep_ctx.advanced_unstable_linking: - infos.extend([d.merged_link_info for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx) if d.merged_link_info]) + infos.extend([d.merged_link_info for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx) if d.merged_link_info]) return infos def inherited_shared_libs( @@ -476,8 +468,7 @@ def inherited_shared_libs( dep_ctx: DepCollectionContext) -> 
list[SharedLibraryInfo]: infos = [] infos.extend([d[SharedLibraryInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) - if not dep_ctx.advanced_unstable_linking: - infos.extend([d.shared_libs for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx)]) + infos.extend([d.shared_libs for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx)]) return infos def inherited_linkable_graphs(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[LinkableGraph]: @@ -486,10 +477,9 @@ def inherited_linkable_graphs(ctx: AnalysisContext, dep_ctx: DepCollectionContex g = d.get(LinkableGraph) if g: deps[g.label] = g - if not dep_ctx.advanced_unstable_linking: - for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): - for g in info.linkable_graphs: - deps[g.label] = g + for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): + for g in info.linkable_graphs: + deps[g.label] = g return deps.values() def inherited_link_group_lib_infos(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[LinkGroupLibInfo]: diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 9393333f0..74053364d 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -19,7 +19,7 @@ load( "@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo", "PicBehavior") load( "@prelude//cxx:linker.bzl", "PDB_SUB_TARGET", @@ -131,7 +131,8 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: panic_runtime = rust_toolchain.panic_runtime, ) - linker_info = get_cxx_toolchain_info(ctx).linker_info + cxx_toolchain = get_cxx_toolchain_info(ctx) + linker_info = cxx_toolchain.linker_info archive_info = LinkInfos( default = LinkInfo( @@ -178,7 +179,7 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: external_debug_info = external_debug_info, ) - merged_link_info, shared_libs, inherited_graphs, 
inherited_link_deps = _rust_link_providers(ctx, dep_ctx) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, dep_ctx, cxx_toolchain, link_infos) providers.append( RustLinkInfo( crate = crate, @@ -364,6 +365,7 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: compile_ctx = compile_ctx, lang_style_param = lang_style_param, param_artifact = rust_param_artifact, + link_infos = link_infos, ) providers += _native_providers( ctx = ctx, @@ -568,7 +570,14 @@ def _default_providers( def _rust_link_providers( ctx: AnalysisContext, - dep_ctx: DepCollectionContext) -> (MergedLinkInfo, SharedLibraryInfo, list[LinkableGraph], list[Dependency]): + dep_ctx: DepCollectionContext, + cxx_toolchain: CxxToolchainInfo, + link_infos: dict[LibOutputStyle, LinkInfos]) -> ( + MergedLinkInfo, + SharedLibraryInfo, + list[LinkableGraph], + list[Dependency], +): # These are never accessed in the case of proc macros, so just return some dummy # values if ctx.attrs.proc_macro: @@ -584,18 +593,64 @@ def _rust_link_providers( inherited_graphs = inherited_linkable_graphs(ctx, dep_ctx) inherited_link_deps = inherited_exported_link_deps(ctx, dep_ctx) - merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) - shared_libs = merge_shared_libraries( - ctx.actions, - deps = inherited_shlibs, - ) + if dep_ctx.advanced_unstable_linking: + # We have to produce a version of the providers that are defined in such + # a way that native rules looking at these providers will also pick up + # the `FORCE_RLIB` behavior. The general approach to that will be to + # claim that we have `preferred_linkage = "static"`. + # + # Note that all of this code is FORCE_RLIB specific. 
Disabling that + # setting requires replacing this with the "real" native providers + merged_link_info = create_merged_link_info( + ctx, + cxx_toolchain.pic_behavior, + link_infos, + exported_deps = inherited_link_infos, + preferred_linkage = Linkage("static"), + ) + shared_libs = merge_shared_libraries( + # We never actually have any shared libraries to add + ctx.actions, + deps = inherited_shlibs, + ) + + # The link graph representation is a little bit weird, since instead of + # just building up a graph via tsets, it uses a flat list of labeled + # nodes, each with a list of labels for dependency edges. The node that + # we create here cannot just use this target's label, since that would + # conflict with the node created for the native providers. As a result, + # we make up a fake subtarget to get a distinct label + new_label = ctx.label.configured_target().with_sub_target((ctx.label.sub_target or []) + ["fake_force_rlib_subtarget"]) + linkable_graph = create_linkable_graph( + ctx, + node = create_linkable_graph_node( + ctx, + linkable_node = create_linkable_node( + ctx = ctx, + preferred_linkage = Linkage("static"), + exported_deps = inherited_graphs, + link_infos = link_infos, + default_soname = get_default_shared_library_name(cxx_toolchain.linker_info, ctx.label), + ), + label = new_label, + ), + deps = inherited_graphs, + ) + inherited_graphs = [linkable_graph] + else: + merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) + shared_libs = merge_shared_libraries( + ctx.actions, + deps = inherited_shlibs, + ) return (merged_link_info, shared_libs, inherited_graphs, inherited_link_deps) def _rust_providers( ctx: AnalysisContext, compile_ctx: CompileContext, lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], - param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)]) -> list[Provider]: + param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)], + link_infos: dict[LibOutputStyle, LinkInfos]) -> 
list[Provider]: """ Return the set of providers for Rust linkage. """ @@ -610,7 +665,7 @@ def _rust_providers( link, meta = param_artifact[params] strategy_info[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, params.crate_type, link_strategy, link, meta) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos) providers = [] From 55535b315658acbefcf826abdd2b43f9ae5f333f Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Fri, 12 Jan 2024 19:12:07 -0800 Subject: [PATCH 0134/1133] system toolchain: Enable pipelining by default Summary: I actually want to get rid of this config entirely. But at the very least, having it off seems like the wrong default. It works great, makes builds faster, and as far as I know cargo doesn't even support disabling it. Let's see if anyone complains Reviewed By: dtolnay Differential Revision: D52756363 fbshipit-source-id: 3b8e57f6f70ba4eff51a1bbcec7fa61b58876bad --- prelude/toolchains/rust.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/toolchains/rust.bzl b/prelude/toolchains/rust.bzl index 3b4972daa..9f5a85251 100644 --- a/prelude/toolchains/rust.bzl +++ b/prelude/toolchains/rust.bzl @@ -74,7 +74,7 @@ system_rust_toolchain = rule( "deny_lints": attrs.list(attrs.string(), default = []), "doctests": attrs.bool(default = False), "extern_html_root_url_prefix": attrs.option(attrs.string(), default = None), - "pipelined": attrs.bool(default = False), + "pipelined": attrs.bool(default = True), "report_unused_deps": attrs.bool(default = False), "rustc_binary_flags": attrs.list(attrs.string(), default = []), "rustc_check_flags": attrs.list(attrs.string(), default = []), From 01a1b4fc01c6bcf6d806334e8395fd666c8bc9b6 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Fri, 12 Jan 
2024 22:11:01 -0800 Subject: [PATCH 0135/1133] Add support for dynamically computed crate names Summary: As part of D44933146, the crate name for Thrift-generated Rust crates becomes determined by a `namespace rust` statement within the Thrift source file, rather than by `rust_crate_name` in a TARGETS file as before. Parsing these `namespace rust` statements involves running the Thrift compiler. The same Thrift compiler invocation that writes out Rust generated sources also writes out a file containing the crate name. Resolve_deps.bxl already involves running the Thrift compiler to materialize thrift-generated Rust sources, so additionally materializing the crate name file should be effectively free. Reviewed By: diliop Differential Revision: D52758870 fbshipit-source-id: 3b4e5bf031300ab7379892956b89b10c55b1ce54 --- prelude/rust/rust-analyzer/resolve_deps.bxl | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/prelude/rust/rust-analyzer/resolve_deps.bxl b/prelude/rust/rust-analyzer/resolve_deps.bxl index ef07a15e0..61dd48b89 100644 --- a/prelude/rust/rust-analyzer/resolve_deps.bxl +++ b/prelude/rust/rust-analyzer/resolve_deps.bxl @@ -47,6 +47,12 @@ def _process_target_config(ctx, target, in_workspace, out_dir = None): for test in resolved_attrs.tests: tests.append(test.raw_target()) + # materialize a file containing the dynamic crate name + crate_dynamic = getattr(resolved_attrs, "crate_dynamic", None) + if crate_dynamic: + cratename_artifact = crate_dynamic.get(DefaultInfo).default_outputs[0] + crate_dynamic = ctx.output.ensure(cratename_artifact).abs_path() + # copy over the absolute paths and raw targets into the output copy = {} attrs = target.attrs_eager() @@ -61,6 +67,8 @@ def _process_target_config(ctx, target, in_workspace, out_dir = None): copy["named_deps"] = named_deps elif k == "tests": copy["tests"] = tests + elif k == "crate_dynamic": + copy["crate_dynamic"] = crate_dynamic else: copy[k] = getattr(attrs, k) From 
59e09f4e9514bc56302b877bfc2d9f6c5422ad0e Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Sat, 13 Jan 2024 10:38:30 -0800 Subject: [PATCH 0136/1133] Work around command line length overflow in transitive_dependency_symlinks Summary: Previously, transitive dependencies were passed to this script in the form: ``` transitive_dependency_symlinks.py \ --out-dir path/to/out \ --artifact path/to/cratename ../../libprovisional.rlib \ --artifact ... ``` This doesn't work because there can be a lot of them, overflowing the platform's command line length limit. This diff passes transitive dependencies through a JSON file: ``` transitive_dependency_symlinks.py \ --out-dir path/to/out \ --artifacts path/to/artifacts.json ``` where artifact.json is an array of pairs, each an rlib and a file containing the crate name for it. ``` [ ["../../libprovisional.rlib", "path/to/cratename"], ... ] ``` Reviewed By: diliop Differential Revision: D52759696 fbshipit-source-id: d7c81c8cedc1ed31cb76fc4a8cda3b1c175ff7d7 --- prelude/rust/build.bzl | 29 +++++++++++++---- .../tools/transitive_dependency_symlinks.py | 32 +++++++++++-------- 2 files changed, 41 insertions(+), 20 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 0b24a6d49..d1ef33a3e 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -739,18 +739,33 @@ def dynamic_symlinked_dirs( artifacts: dict[Artifact, CrateName]) -> cmd_args: name = "{}-dyn".format(prefix) transitive_dependency_dir = ctx.actions.declare_output(name, dir = True) - do_symlinks = cmd_args( - compile_ctx.toolchain_info.transitive_dependency_symlinks_tool, - cmd_args(transitive_dependency_dir.as_output(), format = "--out-dir={}"), + + # Pass the list of rlibs to transitive_dependency_symlinks.py through a file + # because there can be a lot of them. This avoids running out of command + # line length, particularly on Windows. 
+ relative_path = lambda artifact: (cmd_args(artifact, delimiter = "") + .relative_to(transitive_dependency_dir.project("i")) + .ignore_artifacts()) + artifacts_json = ctx.actions.write_json( + ctx.actions.declare_output("{}-dyn.json".format(prefix)), + [ + (relative_path(artifact), crate.dynamic) + for artifact, crate in artifacts.items() + ], + with_inputs = True, + pretty = True, ) - for artifact, crate in artifacts.items(): - relative_path = cmd_args(artifact).relative_to(transitive_dependency_dir.project("i")) - do_symlinks.add("--artifact", crate.dynamic, relative_path.ignore_artifacts()) + ctx.actions.run( - do_symlinks, + [ + compile_ctx.toolchain_info.transitive_dependency_symlinks_tool, + cmd_args(transitive_dependency_dir.as_output(), format = "--out-dir={}"), + cmd_args(artifacts_json, format = "--artifacts={}"), + ], category = "tdep_symlinks", identifier = str(len(compile_ctx.transitive_dependency_dirs)), ) + compile_ctx.transitive_dependency_dirs[transitive_dependency_dir] = None return cmd_args(transitive_dependency_dir, format = "@{}/dirs").hidden(artifacts.keys()) diff --git a/prelude/rust/tools/transitive_dependency_symlinks.py b/prelude/rust/tools/transitive_dependency_symlinks.py index 247d683fc..77959079b 100755 --- a/prelude/rust/tools/transitive_dependency_symlinks.py +++ b/prelude/rust/tools/transitive_dependency_symlinks.py @@ -29,22 +29,31 @@ # # transitive_dependency_symlinks.py \ # --out-dir path/to/out \ -# --artifact path/to/cratename ../../libprovisional.rlib \ -# --artifact ... +# --artifacts path/to/artifacts.json # -# The tool reads the crate name from the file at "path/to/out". Suppose it's +# The input file artifact.json is an array of pairs, each an rlib and a file +# containing a crate name for it. +# +# [ +# ["../../libprovisional.rlib", "path/to/cratename"], +# ... +# ] +# +# The tool reads the crate name from the file at "path/to/cratename". Suppose it's # "thriftgenerated". 
It symlinks the given artifact as "0/libthriftgenerated.rlib" # within the specified output directory. In the event of collisions, there might # be multiple dirs created, just as we do for analysis-time named crates. import argparse +import json +import os from pathlib import Path -from typing import List, NamedTuple, Tuple +from typing import IO, NamedTuple class Args(NamedTuple): out_dir: Path - artifact: List[Tuple[Path, Path]] + artifacts: IO[str] def main(): @@ -55,11 +64,8 @@ def main(): required=True, ) parser.add_argument( - "--artifact", - action="append", - nargs=2, - type=Path, - metavar=("CRATENAME", "ARTIFACT"), + "--artifacts", + type=argparse.FileType(), required=True, ) args = Args(**vars(parser.parse_args())) @@ -69,9 +75,9 @@ def main(): # Add as many -Ldependency dirs as we need to avoid name conflicts deps_dirs = [{}] - for crate_name, artifact in args.artifact: - crate_name = crate_name.read_text().strip() - original_filename = artifact.name + for artifact, crate_name in json.load(args.artifacts): + crate_name = Path(crate_name).read_text().strip() + original_filename = os.path.basename(artifact) new_filename = "lib{}-{}".format( crate_name, original_filename.rsplit("-", 1)[1], From d3e28c45b738bb766e94f798a6efa42f5dd03cb0 Mon Sep 17 00:00:00 2001 From: John Haitas Date: Sat, 13 Jan 2024 20:57:09 -0800 Subject: [PATCH 0137/1133] Revert D52749222: Back out "Revert D52499881: [rust] rules: Reimplement native unbundled deps to better support `FORCE_RLIB`" Differential Revision: D52749222 Original commit changeset: 4d6a7fb481da Original Phabricator Diff: D52749222 fbshipit-source-id: cab7adb01550bfc85342f9797748b66438ef5319 --- prelude/rust/link_info.bzl | 36 ++++++++++------ prelude/rust/rust_library.bzl | 77 +++++------------------------------ 2 files changed, 34 insertions(+), 79 deletions(-) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index a7032d8b9..4b3492003 100644 --- a/prelude/rust/link_info.bzl +++ 
b/prelude/rust/link_info.bzl @@ -139,6 +139,9 @@ RustLinkInfo = provider( # `FORCE_RLIB` behavior, in which Rust -> Rust dependency edges are always statically # linked. The native link provider is identical, except that it does not respect the # `FORCE_RLIB` behavior. + # + # FIXME(JakobDegen): The `advanced_unstable_linking` case is currently aspirational and not + # how things are actually implemented. "merged_link_info": MergedLinkInfo, "shared_libs": SharedLibraryInfo, # Because of the weird representation of `LinkableGraph`, there is no @@ -350,11 +353,14 @@ def _native_link_dependencies( """ first_order_deps = [dep.dep for dep in resolve_deps(ctx, dep_ctx)] - return [ - d - for d in first_order_deps - if RustLinkInfo not in d and MergedLinkInfo in d - ] + if dep_ctx.advanced_unstable_linking: + return [d for d in first_order_deps if MergedLinkInfo in d] + else: + return [ + d + for d in first_order_deps + if RustLinkInfo not in d and MergedLinkInfo in d + ] # Returns the rust link infos for non-proc macro deps. 
# @@ -368,9 +374,10 @@ def inherited_exported_link_deps(ctx: AnalysisContext, dep_ctx: DepCollectionCon deps = {} for dep in _native_link_dependencies(ctx, dep_ctx): deps[dep.label] = dep - for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): - for dep in info.exported_link_deps: - deps[dep.label] = dep + if not dep_ctx.advanced_unstable_linking: + for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): + for dep in info.exported_link_deps: + deps[dep.label] = dep return deps.values() def inherited_rust_cxx_link_group_info( @@ -460,7 +467,8 @@ def inherited_merged_link_infos( dep_ctx: DepCollectionContext) -> list[MergedLinkInfo]: infos = [] infos.extend([d[MergedLinkInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) - infos.extend([d.merged_link_info for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx) if d.merged_link_info]) + if not dep_ctx.advanced_unstable_linking: + infos.extend([d.merged_link_info for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx) if d.merged_link_info]) return infos def inherited_shared_libs( @@ -468,7 +476,8 @@ def inherited_shared_libs( dep_ctx: DepCollectionContext) -> list[SharedLibraryInfo]: infos = [] infos.extend([d[SharedLibraryInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) - infos.extend([d.shared_libs for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx)]) + if not dep_ctx.advanced_unstable_linking: + infos.extend([d.shared_libs for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx)]) return infos def inherited_linkable_graphs(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[LinkableGraph]: @@ -477,9 +486,10 @@ def inherited_linkable_graphs(ctx: AnalysisContext, dep_ctx: DepCollectionContex g = d.get(LinkableGraph) if g: deps[g.label] = g - for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): - for g in info.linkable_graphs: - deps[g.label] = g + if not dep_ctx.advanced_unstable_linking: + for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): + for g in info.linkable_graphs: + 
deps[g.label] = g return deps.values() def inherited_link_group_lib_infos(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[LinkGroupLibInfo]: diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 74053364d..9393333f0 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -19,7 +19,7 @@ load( "@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo", "PicBehavior") +load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load( "@prelude//cxx:linker.bzl", "PDB_SUB_TARGET", @@ -131,8 +131,7 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: panic_runtime = rust_toolchain.panic_runtime, ) - cxx_toolchain = get_cxx_toolchain_info(ctx) - linker_info = cxx_toolchain.linker_info + linker_info = get_cxx_toolchain_info(ctx).linker_info archive_info = LinkInfos( default = LinkInfo( @@ -179,7 +178,7 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: external_debug_info = external_debug_info, ) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, dep_ctx, cxx_toolchain, link_infos) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, dep_ctx) providers.append( RustLinkInfo( crate = crate, @@ -365,7 +364,6 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: compile_ctx = compile_ctx, lang_style_param = lang_style_param, param_artifact = rust_param_artifact, - link_infos = link_infos, ) providers += _native_providers( ctx = ctx, @@ -570,14 +568,7 @@ def _default_providers( def _rust_link_providers( ctx: AnalysisContext, - dep_ctx: DepCollectionContext, - cxx_toolchain: CxxToolchainInfo, - link_infos: dict[LibOutputStyle, LinkInfos]) -> ( - MergedLinkInfo, - SharedLibraryInfo, - list[LinkableGraph], - list[Dependency], -): + dep_ctx: DepCollectionContext) -> (MergedLinkInfo, 
SharedLibraryInfo, list[LinkableGraph], list[Dependency]): # These are never accessed in the case of proc macros, so just return some dummy # values if ctx.attrs.proc_macro: @@ -593,64 +584,18 @@ def _rust_link_providers( inherited_graphs = inherited_linkable_graphs(ctx, dep_ctx) inherited_link_deps = inherited_exported_link_deps(ctx, dep_ctx) - if dep_ctx.advanced_unstable_linking: - # We have to produce a version of the providers that are defined in such - # a way that native rules looking at these providers will also pick up - # the `FORCE_RLIB` behavior. The general approach to that will be to - # claim that we have `preferred_linkage = "static"`. - # - # Note that all of this code is FORCE_RLIB specific. Disabling that - # setting requires replacing this with the "real" native providers - merged_link_info = create_merged_link_info( - ctx, - cxx_toolchain.pic_behavior, - link_infos, - exported_deps = inherited_link_infos, - preferred_linkage = Linkage("static"), - ) - shared_libs = merge_shared_libraries( - # We never actually have any shared libraries to add - ctx.actions, - deps = inherited_shlibs, - ) - - # The link graph representation is a little bit weird, since instead of - # just building up a graph via tsets, it uses a flat list of labeled - # nodes, each with a list of labels for dependency edges. The node that - # we create here cannot just use this target's label, since that would - # conflict with the node created for the native providers. 
As a result, - # we make up a fake subtarget to get a distinct label - new_label = ctx.label.configured_target().with_sub_target((ctx.label.sub_target or []) + ["fake_force_rlib_subtarget"]) - linkable_graph = create_linkable_graph( - ctx, - node = create_linkable_graph_node( - ctx, - linkable_node = create_linkable_node( - ctx = ctx, - preferred_linkage = Linkage("static"), - exported_deps = inherited_graphs, - link_infos = link_infos, - default_soname = get_default_shared_library_name(cxx_toolchain.linker_info, ctx.label), - ), - label = new_label, - ), - deps = inherited_graphs, - ) - inherited_graphs = [linkable_graph] - else: - merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) - shared_libs = merge_shared_libraries( - ctx.actions, - deps = inherited_shlibs, - ) + merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) + shared_libs = merge_shared_libraries( + ctx.actions, + deps = inherited_shlibs, + ) return (merged_link_info, shared_libs, inherited_graphs, inherited_link_deps) def _rust_providers( ctx: AnalysisContext, compile_ctx: CompileContext, lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], - param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)], - link_infos: dict[LibOutputStyle, LinkInfos]) -> list[Provider]: + param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)]) -> list[Provider]: """ Return the set of providers for Rust linkage. 
""" @@ -665,7 +610,7 @@ def _rust_providers( link, meta = param_artifact[params] strategy_info[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, params.crate_type, link_strategy, link, meta) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx) providers = [] From 12722477af6645d1d4faa074b18d4b90ee965c63 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Sun, 14 Jan 2024 11:18:04 -0800 Subject: [PATCH 0138/1133] Delete support for None values in doc_env Summary: {D49004163} already deleted support for this (unintentionally) so there hasn't been any use case for this. Before that diff, `doc_env` used to be `dict[str, ArgLike | None]`. https://www.internalfb.com/code/fbsource/[b93ac49e230d49bb62b17f8c114c80972c2d645f]/fbcode/buck2/prelude/decls/rust_rules.bzl?lines=242 while `env` is `dict[str, ArgLike]`. https://www.internalfb.com/code/fbsource/[b93ac49e230d49bb62b17f8c114c80972c2d645f]/fbcode/buck2/prelude/decls/rust_common.bzl?lines=88 After that diff, `None` in `doc_env` was no longer accepted. ``` error: Error coercing attribute `doc_env` of `fbcode//common/rust/shed/codegen_includer_proc_macro:_codegen_includer_proc_macro` --> fbcode/buck2/prelude/rust/rust_common.bzl:13:9 | 13 | rust_rule(_workspaces = workspaces, **kwargs) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | 1: Error coercing attribute `doc_env` of type `attrs.dict(attrs.string(), attrs.arg(), sorted=False, default={})` 2: Error coercing {"OUT_DIR": "$(location //common/rust/shed/codegen_includer_proc_macro/example:doc_test_out_dir)", "REPRO": None} 3: Error coercing None 4: Expected value of type `string`, got value with type `NoneType` (value was `None`) ``` This diff deletes related dead code. The code is from {D45633262}. 
Reviewed By: zertosh Differential Revision: D52764609 fbshipit-source-id: 90ca9199ce6ea5adfbe6bce6f0ec924c5315567f --- prelude/rust/build.bzl | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index d1ef33a3e..0b28f559c 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -340,13 +340,8 @@ def generate_rustdoc_test( plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) rustdoc_env = plain_env | path_env - - # Pass everything in env + doc_env, except ones with value None in doc_env. for k, v in ctx.attrs.doc_env.items(): - if v == None: - rustdoc_env.pop(k, None) - else: - rustdoc_env[k] = cmd_args(v) + rustdoc_env[k] = cmd_args(v) rustdoc_env["RUSTC_BOOTSTRAP"] = cmd_args("1") # for `-Zunstable-options` return (rustdoc_cmd, rustdoc_env) From 40babbe73f5a6869efafea8917a3e31cc39ed36e Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sun, 14 Jan 2024 18:10:28 -0800 Subject: [PATCH 0139/1133] rules: Reimplement native unbundled deps to better support `FORCE_RLIB`: Take 3 Summary: Reland of D52749222 , the only thing that has changed is that D52719606 is squashed in Reviewed By: dtolnay Differential Revision: D52771691 fbshipit-source-id: 40054a8ee634bbc753e2c57637ecc8df87181915 --- prelude/rust/link_info.bzl | 48 ++++++-------- prelude/rust/rust_library.bzl | 116 ++++++++++++++++++++++++++-------- 2 files changed, 111 insertions(+), 53 deletions(-) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 4b3492003..3cc056460 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -121,9 +121,11 @@ RustLinkInfo = provider( # from `:A` (concrete differences discussed below). # # This distinction is implemented by effectively having each Rust library provide two sets - # of link providers. The first set is the one that is used by C++ and other non-Rust - # dependents, and is returned from the rule like normal. 
The second is the link providers - # used across Rust -> Rust dependency edges - this is what the fields below are. + # of link providers. The first is the link providers used across Rust -> Rust dependency + # edges - this is what the fields below are. The second set is the one that is used by C++ + # and other non-Rust dependents, and is returned from the rule like normal. The second set + # is a superset of the first, that is it includes anything that the first link providers + # added. # # The way in which the native link providers and Rust link providers differ depends on # whether `advanced_unstable_linking` is set on the toolchain. @@ -137,11 +139,8 @@ RustLinkInfo = provider( # include a linkable from `:A`, however that linkable is always the rlib (a static # library), regardless of `:A`'s preferred linkage or the link strategy. This matches the # `FORCE_RLIB` behavior, in which Rust -> Rust dependency edges are always statically - # linked. The native link provider is identical, except that it does not respect the - # `FORCE_RLIB` behavior. - # - # FIXME(JakobDegen): The `advanced_unstable_linking` case is currently aspirational and not - # how things are actually implemented. + # linked. The native link provider then depends on that, and only adds a linkable for the + # `shared_lib` case. "merged_link_info": MergedLinkInfo, "shared_libs": SharedLibraryInfo, # Because of the weird representation of `LinkableGraph`, there is no @@ -353,14 +352,11 @@ def _native_link_dependencies( """ first_order_deps = [dep.dep for dep in resolve_deps(ctx, dep_ctx)] - if dep_ctx.advanced_unstable_linking: - return [d for d in first_order_deps if MergedLinkInfo in d] - else: - return [ - d - for d in first_order_deps - if RustLinkInfo not in d and MergedLinkInfo in d - ] + return [ + d + for d in first_order_deps + if RustLinkInfo not in d and MergedLinkInfo in d + ] # Returns the rust link infos for non-proc macro deps. 
# @@ -374,10 +370,9 @@ def inherited_exported_link_deps(ctx: AnalysisContext, dep_ctx: DepCollectionCon deps = {} for dep in _native_link_dependencies(ctx, dep_ctx): deps[dep.label] = dep - if not dep_ctx.advanced_unstable_linking: - for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): - for dep in info.exported_link_deps: - deps[dep.label] = dep + for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): + for dep in info.exported_link_deps: + deps[dep.label] = dep return deps.values() def inherited_rust_cxx_link_group_info( @@ -467,8 +462,7 @@ def inherited_merged_link_infos( dep_ctx: DepCollectionContext) -> list[MergedLinkInfo]: infos = [] infos.extend([d[MergedLinkInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) - if not dep_ctx.advanced_unstable_linking: - infos.extend([d.merged_link_info for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx) if d.merged_link_info]) + infos.extend([d.merged_link_info for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx) if d.merged_link_info]) return infos def inherited_shared_libs( @@ -476,8 +470,7 @@ def inherited_shared_libs( dep_ctx: DepCollectionContext) -> list[SharedLibraryInfo]: infos = [] infos.extend([d[SharedLibraryInfo] for d in _native_link_dependencies(ctx, dep_ctx)]) - if not dep_ctx.advanced_unstable_linking: - infos.extend([d.shared_libs for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx)]) + infos.extend([d.shared_libs for d in _rust_non_proc_macro_link_infos(ctx, dep_ctx)]) return infos def inherited_linkable_graphs(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[LinkableGraph]: @@ -486,10 +479,9 @@ def inherited_linkable_graphs(ctx: AnalysisContext, dep_ctx: DepCollectionContex g = d.get(LinkableGraph) if g: deps[g.label] = g - if not dep_ctx.advanced_unstable_linking: - for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): - for g in info.linkable_graphs: - deps[g.label] = g + for info in _rust_non_proc_macro_link_infos(ctx, dep_ctx): + for g in info.linkable_graphs: + 
deps[g.label] = g return deps.values() def inherited_link_group_lib_infos(ctx: AnalysisContext, dep_ctx: DepCollectionContext) -> list[LinkGroupLibInfo]: diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 9393333f0..729e8f498 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -19,7 +19,7 @@ load( "@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo", "PicBehavior") load( "@prelude//cxx:linker.bzl", "PDB_SUB_TARGET", @@ -131,7 +131,8 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: panic_runtime = rust_toolchain.panic_runtime, ) - linker_info = get_cxx_toolchain_info(ctx).linker_info + cxx_toolchain = get_cxx_toolchain_info(ctx) + linker_info = cxx_toolchain.linker_info archive_info = LinkInfos( default = LinkInfo( @@ -178,7 +179,7 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: external_debug_info = external_debug_info, ) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, dep_ctx) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, dep_ctx, cxx_toolchain, link_infos) providers.append( RustLinkInfo( crate = crate, @@ -359,18 +360,21 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: expand = expand.output, sources = compile_ctx.symlinked_srcs, ) - providers += _rust_providers( + rust_link_info = _rust_providers( ctx = ctx, compile_ctx = compile_ctx, lang_style_param = lang_style_param, param_artifact = rust_param_artifact, + link_infos = link_infos, ) + providers.append(rust_link_info) providers += _native_providers( ctx = ctx, compile_ctx = compile_ctx, lang_style_param = lang_style_param, param_artifact = native_param_artifact, link_infos = link_infos, + rust_link_info = rust_link_info, ) deps = [dep.dep for dep 
in resolve_deps(ctx, compile_ctx.dep_ctx)] @@ -568,7 +572,14 @@ def _default_providers( def _rust_link_providers( ctx: AnalysisContext, - dep_ctx: DepCollectionContext) -> (MergedLinkInfo, SharedLibraryInfo, list[LinkableGraph], list[Dependency]): + dep_ctx: DepCollectionContext, + cxx_toolchain: CxxToolchainInfo, + link_infos: dict[LibOutputStyle, LinkInfos]) -> ( + MergedLinkInfo, + SharedLibraryInfo, + list[LinkableGraph], + list[Dependency], +): # These are never accessed in the case of proc macros, so just return some dummy # values if ctx.attrs.proc_macro: @@ -584,18 +595,64 @@ def _rust_link_providers( inherited_graphs = inherited_linkable_graphs(ctx, dep_ctx) inherited_link_deps = inherited_exported_link_deps(ctx, dep_ctx) - merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) - shared_libs = merge_shared_libraries( - ctx.actions, - deps = inherited_shlibs, - ) + if dep_ctx.advanced_unstable_linking: + # We have to produce a version of the providers that are defined in such + # a way that native rules looking at these providers will also pick up + # the `FORCE_RLIB` behavior. The general approach to that will be to + # claim that we have `preferred_linkage = "static"`. + # + # Note that all of this code is FORCE_RLIB specific. Disabling that + # setting requires replacing this with the "real" native providers + merged_link_info = create_merged_link_info( + ctx, + cxx_toolchain.pic_behavior, + link_infos, + exported_deps = inherited_link_infos, + preferred_linkage = Linkage("static"), + ) + shared_libs = merge_shared_libraries( + # We never actually have any shared libraries to add + ctx.actions, + deps = inherited_shlibs, + ) + + # The link graph representation is a little bit weird, since instead of + # just building up a graph via tsets, it uses a flat list of labeled + # nodes, each with a list of labels for dependency edges. 
The node that + # we create here cannot just use this target's label, since that would + # conflict with the node created for the native providers. As a result, + # we make up a fake subtarget to get a distinct label + new_label = ctx.label.configured_target().with_sub_target((ctx.label.sub_target or []) + ["fake_force_rlib_subtarget"]) + linkable_graph = create_linkable_graph( + ctx, + node = create_linkable_graph_node( + ctx, + linkable_node = create_linkable_node( + ctx = ctx, + preferred_linkage = Linkage("static"), + exported_deps = inherited_graphs, + link_infos = link_infos, + default_soname = get_default_shared_library_name(cxx_toolchain.linker_info, ctx.label), + ), + label = new_label, + ), + deps = inherited_graphs, + ) + inherited_graphs = [linkable_graph] + else: + merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) + shared_libs = merge_shared_libraries( + ctx.actions, + deps = inherited_shlibs, + ) return (merged_link_info, shared_libs, inherited_graphs, inherited_link_deps) def _rust_providers( ctx: AnalysisContext, compile_ctx: CompileContext, lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], - param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)]) -> list[Provider]: + param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)], + link_infos: dict[LibOutputStyle, LinkInfos]) -> RustLinkInfo: """ Return the set of providers for Rust linkage. """ @@ -610,21 +667,19 @@ def _rust_providers( link, meta = param_artifact[params] strategy_info[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, params.crate_type, link_strategy, link, meta) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx) - - providers = [] + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos) # Create rust library provider. 
- providers.append(RustLinkInfo( + rust_link_info = RustLinkInfo( crate = crate, strategies = strategy_info, merged_link_info = merged_link_info, exported_link_deps = inherited_link_deps, shared_libs = shared_libs, linkable_graphs = inherited_graphs, - )) + ) - return providers + return rust_link_info def _link_infos( ctx: AnalysisContext, @@ -682,30 +737,41 @@ def _native_providers( compile_ctx: CompileContext, lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], param_artifact: dict[BuildParams, RustcOutput], - link_infos: dict[LibOutputStyle, LinkInfos]) -> list[Provider]: + link_infos: dict[LibOutputStyle, LinkInfos], + rust_link_info: RustLinkInfo) -> list[Provider]: """ Return the set of providers needed to link Rust as a dependency for native (ie C/C++) code, along with relevant dependencies. """ + if ctx.attrs.proc_macro: + # Proc-macros never have a native form + return [] + # If advanced_unstable_linking is set on the the rust toolchain, then build this artifact # using the "native-unbundled" linkage language. 
See LinkageLang docs for more details advanced_unstable_linking = compile_ctx.toolchain_info.advanced_unstable_linking lang = LinkageLang("native-unbundled") if advanced_unstable_linking else LinkageLang("native") - inherited_link_infos = inherited_merged_link_infos(ctx, compile_ctx.dep_ctx) - inherited_shlibs = inherited_shared_libs(ctx, compile_ctx.dep_ctx) - inherited_link_graphs = inherited_linkable_graphs(ctx, compile_ctx.dep_ctx) + if advanced_unstable_linking: + # The rust link providers already contain the linkables for the `archive` and `pic_archive` + # cases + link_infos = { + LibOutputStyle("shared_lib"): link_infos[LibOutputStyle("shared_lib")], + LibOutputStyle("archive"): LinkInfos(default = LinkInfo()), + LibOutputStyle("pic_archive"): LinkInfos(default = LinkInfo()), + } + + # We collected transitive deps in the Rust link providers + inherited_link_infos = [rust_link_info.merged_link_info] + inherited_shlibs = [rust_link_info.shared_libs] + inherited_link_graphs = rust_link_info.linkable_graphs linker_info = compile_ctx.cxx_toolchain_info.linker_info linker_type = linker_info.type providers = [] - if ctx.attrs.proc_macro: - # Proc-macros never have a native form - return providers - shared_lib_params = lang_style_param[(lang, LibOutputStyle("shared_lib"))] shared_lib_output = param_artifact[shared_lib_params].output From 446b3db3d9a51109af8aeadf94f9afe7296404d4 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Mon, 15 Jan 2024 17:40:16 -0800 Subject: [PATCH 0140/1133] Backout "Revert D52507600: [buck2] rules: Add a FIXME" Summary: ^ Reviewed By: dtolnay Differential Revision: D52749219 fbshipit-source-id: 5d203a9e342bd524d95c5f6483dff914b615a6bb --- prelude/android/android_binary_native_library_rules.bzl | 3 +++ prelude/rust/rust_library.bzl | 3 +++ 2 files changed, 6 insertions(+) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 79f0857a1..4b606d506 100644 --- 
a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -681,6 +681,9 @@ def encode_linkable_graph_for_mergemap(graph_node_map_by_platform: dict[str, dic platform: { target: _LinkableSharedNode( raw_target = str(target.raw_target()), + # FIXME(JakobDegen): The definition of `LinkableNode` claims that it's ok for this + # to be `None` (I assume in the case of static preferred linkage), so either that is + # wrong or this is. See the diff that added this FIXME for how to reproduce soname = node.default_soname, labels = node.labels, deps = node.deps + node.exported_deps, diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 729e8f498..4dfec3a33 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -632,6 +632,9 @@ def _rust_link_providers( preferred_linkage = Linkage("static"), exported_deps = inherited_graphs, link_infos = link_infos, + # FIXME(JakobDegen): It should be ok to set this to `None`, + # but that breaks arc focus, and setting it to "" breaks + # somerge default_soname = get_default_shared_library_name(cxx_toolchain.linker_info, ctx.label), ), label = new_label, From 0222b5e548493662d60ce25093031903155090e1 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Mon, 15 Jan 2024 17:40:16 -0800 Subject: [PATCH 0141/1133] Back out "Revert D52499874: [rust] rules: Better handling of exported deps" Summary: Original commit changeset: 0a89ff543b1a Original Phabricator Diff: D52499874 Reviewed By: dtolnay Differential Revision: D52749218 fbshipit-source-id: dbef3c9f7b65197134bce3bfe9b6119c99ee042c --- prelude/rust/link_info.bzl | 11 +++++++++-- prelude/rust/rust_library.bzl | 21 ++++++++++++++------- 2 files changed, 23 insertions(+), 9 deletions(-) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 3cc056460..e392a7405 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -150,8 +150,15 @@ RustLinkInfo 
= provider( "linkable_graphs": list[LinkableGraph], # LinkGroupLibInfo intentionally omitted because the Rust -> Rust version # never needs to be different from the Rust -> native version - # The native dependencies reachable from this Rust library through other - # Rust libraries + # + # Rust currently treats all native dependencies as being exported, in + # the sense of C++ `exported_deps`. However, they are not only exported + # from the Rust library that directly depends on them, they are also + # exported through any further chains of Rust libraries. This list + # tracks those dependencies + # + # FIXME(JakobDegen): We should not default to treating all native deps + # as exported. "exported_link_deps": list[Dependency], }, ) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 4dfec3a33..5780a7b15 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -593,7 +593,7 @@ def _rust_link_providers( inherited_link_infos = inherited_merged_link_infos(ctx, dep_ctx) inherited_shlibs = inherited_shared_libs(ctx, dep_ctx) inherited_graphs = inherited_linkable_graphs(ctx, dep_ctx) - inherited_link_deps = inherited_exported_link_deps(ctx, dep_ctx) + inherited_exported_deps = inherited_exported_link_deps(ctx, dep_ctx) if dep_ctx.advanced_unstable_linking: # We have to produce a version of the providers that are defined in such @@ -603,11 +603,15 @@ def _rust_link_providers( # # Note that all of this code is FORCE_RLIB specific. Disabling that # setting requires replacing this with the "real" native providers + # + # As an optimization, we never bother reporting exported deps here. + # Whichever dependent uses the providers created here will take care of + # that for us. 
merged_link_info = create_merged_link_info( ctx, cxx_toolchain.pic_behavior, link_infos, - exported_deps = inherited_link_infos, + deps = inherited_link_infos, preferred_linkage = Linkage("static"), ) shared_libs = merge_shared_libraries( @@ -630,7 +634,7 @@ def _rust_link_providers( linkable_node = create_linkable_node( ctx = ctx, preferred_linkage = Linkage("static"), - exported_deps = inherited_graphs, + deps = inherited_graphs, link_infos = link_infos, # FIXME(JakobDegen): It should be ok to set this to `None`, # but that breaks arc focus, and setting it to "" breaks @@ -648,7 +652,7 @@ def _rust_link_providers( ctx.actions, deps = inherited_shlibs, ) - return (merged_link_info, shared_libs, inherited_graphs, inherited_link_deps) + return (merged_link_info, shared_libs, inherited_graphs, inherited_exported_deps) def _rust_providers( ctx: AnalysisContext, @@ -769,6 +773,7 @@ def _native_providers( inherited_link_infos = [rust_link_info.merged_link_info] inherited_shlibs = [rust_link_info.shared_libs] inherited_link_graphs = rust_link_info.linkable_graphs + inherited_exported_deps = rust_link_info.exported_link_deps linker_info = compile_ctx.cxx_toolchain_info.linker_info linker_type = linker_info.type @@ -785,7 +790,8 @@ def _native_providers( ctx, compile_ctx.cxx_toolchain_info.pic_behavior, link_infos, - exported_deps = inherited_link_infos, + deps = inherited_link_infos, + exported_deps = filter(None, [d.get(MergedLinkInfo) for d in inherited_exported_deps]), preferred_linkage = preferred_linkage, )) @@ -842,13 +848,14 @@ def _native_providers( linkable_node = create_linkable_node( ctx = ctx, preferred_linkage = preferred_linkage, - exported_deps = inherited_link_graphs, + deps = inherited_link_graphs, + exported_deps = inherited_exported_deps, link_infos = link_infos, shared_libs = solibs, default_soname = shlib_name, ), ), - deps = inherited_link_graphs, + deps = inherited_link_graphs + inherited_exported_deps, ) providers.append(linkable_graph) From 
9e840d942a113c38bc93e4701e9ce507b0570ec4 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Mon, 15 Jan 2024 17:40:16 -0800 Subject: [PATCH 0142/1133] Back out "Revert D52536616: [buck2] link groups: Add `ignore_force_static_follows_dependents` option" Summary: ^ Reviewed By: dtolnay Differential Revision: D52749220 fbshipit-source-id: 41a0b6d3c5639d54901c79d91cd6887f20c81c4d --- prelude/cxx/link_groups.bzl | 6 ++++-- prelude/linking/linkable_graph.bzl | 6 +++++- prelude/rust/rust_library.bzl | 20 +++++++++++++++++--- 3 files changed, 26 insertions(+), 6 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index fa353e20a..d037a6fdd 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -419,7 +419,9 @@ def get_filtered_labels_to_links_map( target_link_group = link_group_mappings.get(target) # Always add force-static libs to the link. - if force_static_follows_dependents and node.preferred_linkage == Linkage("static"): + if (force_static_follows_dependents and + node.preferred_linkage == Linkage("static") and + not node.ignore_force_static_follows_dependents): add_link(target, output_style) elif not target_link_group and not link_group: # Ungrouped linkable targets belong to the unlabeled executable @@ -535,7 +537,7 @@ def find_relevant_roots( # link group. 
def collect_and_traverse_roots(roots, node_target): node = linkable_graph_node_map.get(node_target) - if node.preferred_linkage == Linkage("static"): + if node.preferred_linkage == Linkage("static") and not node.ignore_force_static_follows_dependents: return node.deps + node.exported_deps node_link_group = link_group_mappings.get(node_target) if node_link_group == MATCH_ALL_LABEL: diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index 2ed062ae3..1f73160ed 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -87,6 +87,8 @@ LinkableNode = record( # Whether the node should appear in the android mergemap (which provides information about the original # soname->final merged lib mapping) include_in_android_mergemap = field(bool), + # Don't follow dependents on this node even if has preferred linkage static + ignore_force_static_follows_dependents = field(bool), # Only allow constructing within this file. _private = _DisallowConstruction, @@ -144,7 +146,8 @@ def create_linkable_node( shared_libs: dict[str, LinkedObject] = {}, can_be_asset: bool = True, include_in_android_mergemap: bool = True, - linker_flags: [LinkerFlags, None] = None) -> LinkableNode: + linker_flags: [LinkerFlags, None] = None, + ignore_force_static_follows_dependents: bool = False) -> LinkableNode: for output_style in _get_required_outputs_for_linkage(preferred_linkage): expect( output_style in link_infos, @@ -164,6 +167,7 @@ def create_linkable_node( include_in_android_mergemap = include_in_android_mergemap, default_soname = default_soname, linker_flags = linker_flags, + ignore_force_static_follows_dependents = ignore_force_static_follows_dependents, _private = _DisallowConstruction(), ) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 5780a7b15..4fd9e4b03 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -179,7 +179,13 @@ def prebuilt_rust_library_impl(ctx: 
AnalysisContext) -> list[Provider]: external_debug_info = external_debug_info, ) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, dep_ctx, cxx_toolchain, link_infos) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers( + ctx, + dep_ctx, + cxx_toolchain, + link_infos, + Linkage(ctx.attrs.preferred_linkage), + ) providers.append( RustLinkInfo( crate = crate, @@ -574,7 +580,8 @@ def _rust_link_providers( ctx: AnalysisContext, dep_ctx: DepCollectionContext, cxx_toolchain: CxxToolchainInfo, - link_infos: dict[LibOutputStyle, LinkInfos]) -> ( + link_infos: dict[LibOutputStyle, LinkInfos], + preferred_linkage: Linkage) -> ( MergedLinkInfo, SharedLibraryInfo, list[LinkableGraph], @@ -640,6 +647,13 @@ def _rust_link_providers( # but that breaks arc focus, and setting it to "" breaks # somerge default_soname = get_default_shared_library_name(cxx_toolchain.linker_info, ctx.label), + # Link groups have a heuristic in which they assume that a + # preferred_linkage = "static" library needs to be linked + # into every single link group, instead of just one. + # Applying that same heuristic to Rust seems right, but only + # if this target actually requested that. Opt ourselves out + # if it didn't. 
+ ignore_force_static_follows_dependents = preferred_linkage != Linkage("static"), ), label = new_label, ), @@ -674,7 +688,7 @@ def _rust_providers( link, meta = param_artifact[params] strategy_info[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, params.crate_type, link_strategy, link, meta) - merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos) + merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos, Linkage(ctx.attrs.preferred_linkage)) # Create rust library provider. rust_link_info = RustLinkInfo( From a2f43a85408bd35603cd141e7b2b0cf92d73ae1e Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Tue, 16 Jan 2024 04:41:37 -0800 Subject: [PATCH 0143/1133] Add noncgo stdlib to the toolchain Summary: - Added `prebuilt_stdlib_noncgo` to the toolchain - Added `non_cgo` arg to `stdlib_pkg_artifacts` to expose non-cgo libraries for using in actions Reviewed By: leoleovich Differential Revision: D52332749 fbshipit-source-id: 68359efc6ca2477471d8787b3b9f629e1a7e9c45 --- prelude/go/packages.bzl | 13 +++++++++++-- prelude/go/toolchain.bzl | 1 + 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/prelude/go/packages.bzl b/prelude/go/packages.bzl index 68d14aec4..b8d40a1b6 100644 --- a/prelude/go/packages.bzl +++ b/prelude/go/packages.bzl @@ -65,12 +65,21 @@ def pkg_artifacts(pkgs: dict[str, GoPkg], shared: bool, coverage_mode: [GoCovera for name, pkg in pkgs.items() } -def stdlib_pkg_artifacts(toolchain: GoToolchainInfo, shared: bool = False) -> dict[str, Artifact]: +def stdlib_pkg_artifacts(toolchain: GoToolchainInfo, shared: bool = False, non_cgo: bool = False) -> dict[str, Artifact]: """ Return a map package name to a `shared` or `static` package artifact of stdlib. 
""" - prebuilt_stdlib = toolchain.prebuilt_stdlib_shared if shared else toolchain.prebuilt_stdlib + # shared == True && non_cgo == True is not supported yet, + # we'll temporarily use non_cgo if both flags are true, this will be wixed with on-demand building of stdlib. + + if non_cgo: + prebuilt_stdlib = toolchain.prebuilt_stdlib_noncgo + elif shared: + prebuilt_stdlib = toolchain.prebuilt_stdlib_shared + else: + prebuilt_stdlib = toolchain.prebuilt_stdlib + stdlib_pkgs = prebuilt_stdlib[ArtifactGroupInfo].artifacts if len(stdlib_pkgs) == 0: diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index 3f00630e3..ae014cee0 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -31,6 +31,7 @@ GoToolchainInfo = provider( "packer": provider_field(typing.Any, default = None), "prebuilt_stdlib": provider_field(typing.Any, default = None), "prebuilt_stdlib_shared": provider_field(typing.Any, default = None), + "prebuilt_stdlib_noncgo": provider_field(typing.Any, default = None), "tags": provider_field(typing.Any, default = None), }, ) From 6c4c9c07e28253d500331153acab21d9baa6bf4a Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Tue, 16 Jan 2024 04:41:37 -0800 Subject: [PATCH 0144/1133] Add cgo_enabled attribute to buck rules Summary: - Added `cgo_enabled` property to top-level rules `go_binary/go_library/go_test/go_exported_library` - Added "derived" `_cgo_enabled` property to `go_library` rule. It's being propagated to `go_library` from one of top-level rules via [Configuration Transition](https://buck2.build/docs/rule_authors/configuration_transitions/) This enables users to specify an analog of "CGO_ENEBLED" env-var per target. If cgo_enabled=False user will always get static binary. At the moment it only affects which standard library is used. 
But eventually will enable us to unify go_library and cgo_library rules Reviewed By: awalterschulze Differential Revision: D52695207 fbshipit-source-id: 2ce999c8cc80cfea3850dd102184576ac2485b55 --- prelude/decls/go_common.bzl | 9 ++++++ prelude/decls/go_rules.bzl | 9 ++++++ prelude/go/cgo_library.bzl | 3 ++ prelude/go/compile.bzl | 6 ++-- prelude/go/constraints/BUCK.v2 | 22 +++++++++++++++ prelude/go/go_binary.bzl | 6 ++++ prelude/go/go_exported_library.bzl | 6 ++++ prelude/go/go_library.bzl | 13 +++++++-- prelude/go/go_test.bzl | 8 +++++- prelude/go/link.bzl | 3 +- prelude/go/toolchain.bzl | 13 +++++++++ prelude/go/transitions/cgo_enabled.bzl | 39 ++++++++++++++++++++++++++ prelude/rules_impl.bzl | 4 +++ 13 files changed, 134 insertions(+), 7 deletions(-) create mode 100644 prelude/go/constraints/BUCK.v2 create mode 100644 prelude/go/transitions/cgo_enabled.bzl diff --git a/prelude/decls/go_common.bzl b/prelude/decls/go_common.bzl index 845f3861b..46e5305d4 100644 --- a/prelude/decls/go_common.bzl +++ b/prelude/decls/go_common.bzl @@ -124,6 +124,14 @@ def _embedcfg_arg(): """), } +def _cgo_enabled_arg(): + return { + "cgo_enabled": attrs.option(attrs.bool(), default = None, doc = """ + Experimental: Analog of CGO_ENABLED environment-variable. + None will be coverted to True if cxx_toolchain availabe for current configuration, otherwiese False. 
+"""), + } + go_common = struct( deps_arg = _deps_arg, srcs_arg = _srcs_arg, @@ -136,4 +144,5 @@ go_common = struct( linker_flags_arg = _linker_flags_arg, external_linker_flags_arg = _external_linker_flags_arg, embedcfg_arg = _embedcfg_arg, + cgo_enabled_arg = _cgo_enabled_arg, ) diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index 2ec5f5ebd..bedb9e94d 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -182,6 +182,7 @@ go_binary = prelude_rule( go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | go_common.embedcfg_arg() | + go_common.cgo_enabled_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files to be symlinked into the working directory of the test. You can access these in your @@ -266,6 +267,7 @@ go_exported_library = prelude_rule( go_common.assembler_flags_arg() | go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | + go_common.cgo_enabled_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files to be symlinked into the working directory of the test. 
You can access these in your @@ -321,6 +323,12 @@ go_library = prelude_rule( "exported_deps": attrs.list(attrs.dep(), default = []), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), + "_cgo_enabled": attrs.default_only(attrs.option(attrs.bool(), default = select({ + "DEFAULT": None, + "prelude//go/constraints:cgo_enabled_auto": None, + "prelude//go/constraints:cgo_enabled_false": False, + "prelude//go/constraints:cgo_enabled_true": True, + }))), } ), ) @@ -415,6 +423,7 @@ go_test = prelude_rule( go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | go_common.embedcfg_arg() | + go_common.cgo_enabled_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files that are symlinked into the working directory of the diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 666bbb9b8..3a146e622 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -168,6 +168,7 @@ def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, ctx, pkg_name, srcs = srcs, + cgo_enabled = True, deps = ctx.attrs.deps + ctx.attrs.exported_deps, coverage_mode = coverage_mode, shared = shared, @@ -245,6 +246,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx, pkg_name, all_srcs, + cgo_enabled = True, deps = ctx.attrs.deps + ctx.attrs.exported_deps, shared = False, ) @@ -252,6 +254,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx, pkg_name, all_srcs, + cgo_enabled = True, deps = ctx.attrs.deps + ctx.attrs.exported_deps, shared = True, ) diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index dde83289e..3c9c46cf6 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -92,6 +92,7 @@ def _assemble_cmd( def _compile_cmd( ctx: AnalysisContext, pkg_name: str, + cgo_enabled: bool, pkgs: dict[str, Artifact] = {}, deps: list[Dependency] = [], flags: list[str] = [], @@ -118,7 +119,7 @@ 
def _compile_cmd( all_pkgs = merge_pkgs([ pkgs, pkg_artifacts(get_inherited_compile_pkgs(deps), shared = shared, coverage_mode = coverage_mode), - stdlib_pkg_artifacts(go_toolchain, shared = shared), + stdlib_pkg_artifacts(go_toolchain, shared = shared, non_cgo = not cgo_enabled), ]) importcfg_content = [] @@ -144,6 +145,7 @@ def compile( ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, + cgo_enabled: bool, pkgs: dict[str, Artifact] = {}, deps: list[Dependency] = [], compile_flags: list[str] = [], @@ -157,7 +159,7 @@ def compile( cmd = get_toolchain_cmd_args(go_toolchain) cmd.add(go_toolchain.compile_wrapper[RunInfo]) cmd.add(cmd_args(output.as_output(), format = "--output={}")) - cmd.add(cmd_args(_compile_cmd(ctx, pkg_name, pkgs, deps, compile_flags, shared = shared, coverage_mode = coverage_mode), format = "--compiler={}")) + cmd.add(cmd_args(_compile_cmd(ctx, pkg_name, cgo_enabled, pkgs, deps, compile_flags, shared = shared, coverage_mode = coverage_mode), format = "--compiler={}")) cmd.add(cmd_args(_assemble_cmd(ctx, pkg_name, assemble_flags, shared = shared), format = "--assembler={}")) cmd.add(cmd_args(go_toolchain.packer, format = "--packer={}")) if ctx.attrs.embedcfg: diff --git a/prelude/go/constraints/BUCK.v2 b/prelude/go/constraints/BUCK.v2 new file mode 100644 index 000000000..0fbffe4c8 --- /dev/null +++ b/prelude/go/constraints/BUCK.v2 @@ -0,0 +1,22 @@ +constraint_setting( + name = "cgo_enabled", + visibility = ["PUBLIC"], +) + +constraint_value( + name = "cgo_enabled_auto", + constraint_setting = ":cgo_enabled", + visibility = ["PUBLIC"], +) + +constraint_value( + name = "cgo_enabled_true", + constraint_setting = ":cgo_enabled", + visibility = ["PUBLIC"], +) + +constraint_value( + name = "cgo_enabled_false", + constraint_setting = ":cgo_enabled", + visibility = ["PUBLIC"], +) diff --git a/prelude/go/go_binary.bzl b/prelude/go/go_binary.bzl index 2ce58d917..a6ac99e33 100644 --- a/prelude/go/go_binary.bzl +++ b/prelude/go/go_binary.bzl @@ -18,18 
+18,24 @@ load( ) load(":compile.bzl", "compile", "get_filtered_srcs") load(":link.bzl", "link") +load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled") def go_binary_impl(ctx: AnalysisContext) -> list[Provider]: + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs.cgo_enabled) + lib = compile( ctx, "main", get_filtered_srcs(ctx, ctx.attrs.srcs), + cgo_enabled = cgo_enabled, deps = ctx.attrs.deps, compile_flags = ctx.attrs.compiler_flags, ) (bin, runtime_files, external_debug_info) = link( ctx, lib, + cgo_enabled = cgo_enabled, deps = ctx.attrs.deps, link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static")), linker_flags = ctx.attrs.linker_flags, diff --git a/prelude/go/go_exported_library.bzl b/prelude/go/go_exported_library.bzl index 28101cf52..f8daa8708 100644 --- a/prelude/go/go_exported_library.bzl +++ b/prelude/go/go_exported_library.bzl @@ -16,12 +16,17 @@ load( ) load(":compile.bzl", "compile", "get_filtered_srcs") load(":link.bzl", "GoBuildMode", "link") +load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled") def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs.cgo_enabled) + lib = compile( ctx, "main", get_filtered_srcs(ctx, ctx.attrs.srcs), + cgo_enabled = cgo_enabled, deps = ctx.attrs.deps, compile_flags = ctx.attrs.compiler_flags, shared = True, @@ -29,6 +34,7 @@ def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: (bin, runtime_files, _external_debug_info) = link( ctx, lib, + cgo_enabled = cgo_enabled, deps = ctx.attrs.deps, build_mode = GoBuildMode(ctx.attrs.build_mode), link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static_pic")), diff --git a/prelude/go/go_library.bzl b/prelude/go/go_library.bzl index 33a265339..04515caad 100644 --- 
a/prelude/go/go_library.bzl +++ b/prelude/go/go_library.bzl @@ -27,8 +27,9 @@ load(":compile.bzl", "GoPkgCompileInfo", "GoTestInfo", "compile", "get_filtered_ load(":coverage.bzl", "GoCoverageMode", "cover_srcs") load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs") load(":packages.bzl", "GoPkg", "go_attr_pkg_name", "merge_pkgs") +load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled") -def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, coverage_mode: GoCoverageMode, shared: bool = False) -> (Artifact, cmd_args): +def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, coverage_mode: GoCoverageMode, cgo_enabled: bool, shared: bool = False) -> (Artifact, cmd_args): cov_res = cover_srcs(ctx, pkg_name, coverage_mode, srcs, shared) srcs = cov_res.srcs coverage_vars = cov_res.variables @@ -36,6 +37,7 @@ def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, ctx, pkg_name, srcs = srcs, + cgo_enabled = cgo_enabled, deps = ctx.attrs.deps + ctx.attrs.exported_deps, compile_flags = ctx.attrs.compiler_flags, coverage_mode = coverage_mode, @@ -44,6 +46,8 @@ def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, return (coverage_pkg, coverage_vars) def go_library_impl(ctx: AnalysisContext) -> list[Provider]: + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + pkgs = {} default_output = None pkg_name = None @@ -52,11 +56,13 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. 
srcs = get_filtered_srcs(ctx, ctx.attrs.srcs, force_disable_cgo = True) + cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs._cgo_enabled) static_pkg = compile( ctx, pkg_name, srcs = srcs, + cgo_enabled = cgo_enabled, deps = ctx.attrs.deps + ctx.attrs.exported_deps, compile_flags = ctx.attrs.compiler_flags, assemble_flags = ctx.attrs.assembler_flags, @@ -67,14 +73,15 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx, pkg_name, srcs = srcs, + cgo_enabled = cgo_enabled, deps = ctx.attrs.deps + ctx.attrs.exported_deps, compile_flags = ctx.attrs.compiler_flags, assemble_flags = ctx.attrs.assembler_flags, shared = True, ) - coverage_shared = {mode: _compile_with_coverage(ctx, pkg_name, srcs, mode, True) for mode in GoCoverageMode} - coverage_static = {mode: _compile_with_coverage(ctx, pkg_name, srcs, mode, False) for mode in GoCoverageMode} + coverage_shared = {mode: _compile_with_coverage(ctx, pkg_name, srcs, mode, cgo_enabled, True) for mode in GoCoverageMode} + coverage_static = {mode: _compile_with_coverage(ctx, pkg_name, srcs, mode, cgo_enabled, False) for mode in GoCoverageMode} default_output = static_pkg pkgs[pkg_name] = GoPkg( diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index d54cc658e..67200ffeb 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -19,6 +19,7 @@ load(":compile.bzl", "GoTestInfo", "compile", "get_filtered_srcs", "get_inherite load(":coverage.bzl", "GoCoverageMode", "cover_srcs") load(":link.bzl", "link") load(":packages.bzl", "go_attr_pkg_name", "pkg_artifact", "pkg_coverage_vars") +load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled") def _gen_test_main( ctx: AnalysisContext, @@ -83,11 +84,15 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: coverage_vars[name] = vars pkgs[name] = artifact + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs.cgo_enabled) + # Compile all tests into a package. 
tests = compile( ctx, pkg_name, srcs, + cgo_enabled = cgo_enabled, deps = deps, pkgs = pkgs, compile_flags = ctx.attrs.compiler_flags, @@ -98,13 +103,14 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: # package. gen_main = _gen_test_main(ctx, pkg_name, coverage_mode, coverage_vars, srcs) pkgs[pkg_name] = tests - main = compile(ctx, "main", cmd_args(gen_main), pkgs = pkgs, coverage_mode = coverage_mode) + main = compile(ctx, "main", cmd_args(gen_main), cgo_enabled = cgo_enabled, pkgs = pkgs, coverage_mode = coverage_mode) # Link the above into a Go binary. (bin, runtime_files, external_debug_info) = link( ctx = ctx, main = main, pkgs = pkgs, + cgo_enabled = cgo_enabled, deps = deps, link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static")), linker_flags = ctx.attrs.linker_flags, diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 778321c4d..9427a73e1 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -106,6 +106,7 @@ def _process_shared_dependencies( def link( ctx: AnalysisContext, main: Artifact, + cgo_enabled: bool, pkgs: dict[str, Artifact] = {}, deps: list[Dependency] = [], build_mode: GoBuildMode = GoBuildMode("executable"), @@ -141,7 +142,7 @@ def link( all_pkgs = merge_pkgs([ pkgs, pkg_artifacts(get_inherited_link_pkgs(deps), shared = shared, coverage_mode = coverage_mode), - stdlib_pkg_artifacts(go_toolchain, shared = shared), + stdlib_pkg_artifacts(go_toolchain, shared = shared, non_cgo = not cgo_enabled), ]) importcfg_content = [] diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index ae014cee0..dd888bcff 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -61,3 +61,16 @@ def get_toolchain_cmd_args(toolchain: GoToolchainInfo, go_root = True, force_dis cmd.add("CGO_ENABLED=1") return cmd + +# Sets default value of cgo_enabled attribute based on the presence of C++ toolchain. 
+def evaluate_cgo_enabled(toolchain: GoToolchainInfo, cgo_enabled: [bool, None]) -> bool: + cxx_toolchain_available = toolchain.cxx_toolchain_for_linking != None + + if cgo_enabled and not cxx_toolchain_available: + fail("Cgo requires a C++ toolchain. Set cgo_enabled=None|False.") + + if cgo_enabled != None: + return cgo_enabled + + # Return True if cxx_toolchain availabe for current configuration, otherwiese to False. + return cxx_toolchain_available diff --git a/prelude/go/transitions/cgo_enabled.bzl b/prelude/go/transitions/cgo_enabled.bzl new file mode 100644 index 000000000..c1adf3b10 --- /dev/null +++ b/prelude/go/transitions/cgo_enabled.bzl @@ -0,0 +1,39 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +def _cgo_enabled_transition(platform, refs, attrs): + if attrs.cgo_enabled == None: + cgo_enabled_ref = refs.cgo_enabled_auto + elif attrs.cgo_enabled == True: + cgo_enabled_ref = refs.cgo_enabled_true + else: + cgo_enabled_ref = refs.cgo_enabled_false + + cgo_enabled_value = cgo_enabled_ref[ConstraintValueInfo] + constraints = platform.configuration.constraints + constraints[cgo_enabled_value.setting.label] = cgo_enabled_value + + new_cfg = ConfigurationInfo( + constraints = constraints, + values = platform.configuration.values, + ) + + return PlatformInfo( + label = platform.label, + configuration = new_cfg, + ) + +cgo_enabled_transition = transition( + impl = _cgo_enabled_transition, + refs = { + "cgo_enabled": "prelude//go/constraints:cgo_enabled", + "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", + "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", + "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", + }, + attrs = ["cgo_enabled"], +) diff --git 
a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 9a9eb1239..d7b97b47d 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -27,6 +27,7 @@ load("@prelude//go:go_library.bzl", "go_library_impl") load("@prelude//go:go_test.bzl", "go_test_impl") load("@prelude//haskell:compile.bzl", "HaskellLibraryProvider") load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl", "haskell_toolchain_library_impl") +load("@prelude//go/transitions:cgo_enabled.bzl", "cgo_enabled_transition") load("@prelude//haskell:haskell_ghci.bzl", "haskell_ghci_impl") load("@prelude//haskell:haskell_haddock.bzl", "haskell_haddock_impl") load("@prelude//haskell:haskell_ide.bzl", "haskell_ide_impl") @@ -588,6 +589,9 @@ extra_attributes = struct(**all_extra_attributes) transitions = { "android_binary": constraint_overrides_transition, "apple_resource": apple_resource_transition, + "go_binary": cgo_enabled_transition, + "go_exported_library": cgo_enabled_transition, + "go_test": cgo_enabled_transition, "python_binary": constraint_overrides_transition, "python_test": constraint_overrides_transition, } From da576b40d5091eed69768ff8c81619ac485b00b3 Mon Sep 17 00:00:00 2001 From: Alessandro Liparoti Date: Tue, 16 Jan 2024 07:30:26 -0800 Subject: [PATCH 0145/1133] pass new depedencies parameter to test utils Summary: Allow tests using RE to pass a list of dependencies. 
It can be used in tests as in D52787760 ``` remote_execution = re_test_utils.remote_execution( ..., dependencies = [ re_test_utils.re_dependency( smc_tier = "asic_workload.re_grid", id = "Limit:vcs_batch") ]) ) ``` Differential Revision: D50883231 fbshipit-source-id: 337012c63211d17204e8534c63cfe495e7de2936 --- prelude/decls/re_test_common.bzl | 2 ++ prelude/tests/re_utils.bzl | 2 ++ 2 files changed, 4 insertions(+) diff --git a/prelude/decls/re_test_common.bzl b/prelude/decls/re_test_common.bzl index 97d8f4e42..c4c4cd934 100644 --- a/prelude/decls/re_test_common.bzl +++ b/prelude/decls/re_test_common.bzl @@ -17,6 +17,7 @@ def _opts_for_tests_arg() -> Attr: # "listing_capabilities": Dict | None # "use_case": str | None # "remote_cache_enabled": bool | None + # "dependencies": list> | [] # } return attrs.dict( key = attrs.string(), @@ -29,6 +30,7 @@ def _opts_for_tests_arg() -> Attr: ), attrs.string(), attrs.bool(), + attrs.list(attrs.dict(key = attrs.string(), value = attrs.string()), default = []), ), # TODO(cjhopman): I think this default does nothing, it should be deleted default = None, diff --git a/prelude/tests/re_utils.bzl b/prelude/tests/re_utils.bzl index 93c884752..514396604 100644 --- a/prelude/tests/re_utils.bzl +++ b/prelude/tests/re_utils.bzl @@ -44,6 +44,7 @@ def get_re_executors_from_props(ctx: AnalysisContext) -> ([CommandExecutorConfig use_case = re_props_copy.pop("use_case") listing_capabilities = re_props_copy.pop("listing_capabilities", None) remote_cache_enabled = re_props_copy.pop("remote_cache_enabled", None) + re_dependencies = re_props_copy.pop("dependencies", []) if re_props_copy: unexpected_props = ", ".join(re_props_copy.keys()) fail("found unexpected re props: " + unexpected_props) @@ -60,6 +61,7 @@ def get_re_executors_from_props(ctx: AnalysisContext) -> ([CommandExecutorConfig remote_execution_use_case = use_case or "tpx-default", remote_cache_enabled = remote_cache_enabled, remote_execution_action_key = remote_execution_action_key, 
+ remote_execution_dependencies = re_dependencies, ) listing_executor = default_executor if listing_capabilities: From db9df857469766e6800f8f3094c6cdf9f962ef60 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Tue, 16 Jan 2024 07:56:01 -0800 Subject: [PATCH 0146/1133] Pass rustdoc test command through rustc_action Summary: Rustdoc does not support nested `@...` arguments. ``` $ echo 'pub fn f() {}' > lib.rs $ rustdoc --edition=2021 lib.rs # success $ echo '--edition=2021' > args1 $ rustdoc args1 lib.rs # success $ echo 'args1' > args2 $ rustdoc args2 lib.rs # FAIL error: too many file operands $ buck2/prelude/rust/tools/rustc_action.py rustdoc args2 lib.rs # success ``` The rustc_action.py wrapper takes care of opening and flattening every argument that starts with `@`: https://www.internalfb.com/code/fbsource/[7c821fbc604bca5d1b736f136eb77d72de8ea6a7]/fbcode/buck2/prelude/rust/tools/rustc_action.py?lines=67 and then re-writing them back out as a single flat argsfile to run the command against: https://www.internalfb.com/code/fbsource/[7c821fbc604bca5d1b736f136eb77d72de8ea6a7]/fbcode/buck2/prelude/rust/tools/rustc_action.py?lines=289-296%2C298-300%2C306 Reviewed By: zertosh Differential Revision: D52764001 fbshipit-source-id: 3d4774086e579e70bf85e4ecc6ed4e477235983e --- prelude/rust/build.bzl | 27 ++++++++++++++++----------- prelude/rust/rust_library.bzl | 6 ++---- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 0b28f559c..4634a22a6 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -231,7 +231,7 @@ def generate_rustdoc_test( link_strategy: LinkStrategy, rlib: Artifact, params: BuildParams, - default_roots: list[str]) -> (cmd_args, dict[str, cmd_args]): + default_roots: list[str]) -> cmd_args: exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" toolchain_info = compile_ctx.toolchain_info @@ -307,7 +307,20 @@ def generate_rustdoc_test( else: runtool = 
["--runtool=/usr/bin/env"] + plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) + doc_plain_env, doc_path_env = _process_env(compile_ctx, ctx.attrs.doc_env, exec_is_windows) + for k, v in doc_plain_env.items(): + path_env.pop(k, None) + plain_env[k] = v + for k, v in doc_path_env.items(): + plain_env.pop(k, None) + path_env[k] = v + plain_env["RUSTC_BOOTSTRAP"] = cmd_args("1") # for `-Zunstable-options` + rustdoc_cmd = cmd_args( + [cmd_args("--env=", k, "=", v, delimiter = "") for k, v in plain_env.items()], + [cmd_args("--path-env=", k, "=", v, delimiter = "") for k, v in path_env.items()], + toolchain_info.rustdoc, "--test", "-Zunstable-options", cmd_args("--test-builder=", toolchain_info.compiler, delimiter = ""), @@ -331,21 +344,13 @@ def generate_rustdoc_test( executable_args.runtime_files, ) - rustdoc_cmd = _long_command( + return _long_command( ctx = ctx, - exe = toolchain_info.rustdoc, + exe = toolchain_info.rustc_action, args = rustdoc_cmd, argfile_name = "{}.args".format(common_args.subdir), ) - plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) - rustdoc_env = plain_env | path_env - for k, v in ctx.attrs.doc_env.items(): - rustdoc_env[k] = cmd_args(v) - rustdoc_env["RUSTC_BOOTSTRAP"] = cmd_args("1") # for `-Zunstable-options` - - return (rustdoc_cmd, rustdoc_env) - # Generate multiple compile artifacts so that distinct sets of artifacts can be # generated concurrently. 
def rust_compile_multi( diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 4fd9e4b03..b724f85b0 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -518,7 +518,7 @@ def _default_providers( lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)], rustdoc: Artifact, - rustdoc_test: (cmd_args, dict[str, cmd_args]), + rustdoc_test: cmd_args, doctests_enabled: bool, check_artifacts: dict[str, Artifact], expand: Artifact, @@ -554,12 +554,10 @@ def _default_providers( providers = [] - (rustdoc_cmd, rustdoc_env) = rustdoc_test rustdoc_test_info = ExternalRunnerTestInfo( type = "rustdoc", - command = [rustdoc_cmd], + command = [rustdoc_test], run_from_project_root = True, - env = rustdoc_env, ) # Always let the user run doctests via `buck2 test :crate[doc]` From a464d4202469b71f669ab7cf48684a594f916b7a Mon Sep 17 00:00:00 2001 From: Alvaro Leiva Geisse Date: Tue, 16 Jan 2024 13:45:43 -0800 Subject: [PATCH 0147/1133] sort STARTUP_ variables at sitecustomize Summary: this just sort the env variables, so they are read in a predictable way, and not at the wimp of the os. Reviewed By: itamaro, cxxxs Differential Revision: D52807038 fbshipit-source-id: e239a57133846372256fd0beea6826847022fe48 --- .../python/tools/make_par/sitecustomize.py | 35 +++++++++++-------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/prelude/python/tools/make_par/sitecustomize.py b/prelude/python/tools/make_par/sitecustomize.py index 5b29b8225..1ee948107 100644 --- a/prelude/python/tools/make_par/sitecustomize.py +++ b/prelude/python/tools/make_par/sitecustomize.py @@ -74,20 +74,27 @@ def __clear_env(patch_spawn=True): # pyre-fixme[3]: Return type must be annotated. 
def __startup__(): - for name, var in os.environ.items(): - if name.startswith("STARTUP_"): - name, sep, func = var.partition(":") - if sep: - try: - module = importlib.import_module(name) - getattr(module, func)() - except Exception as e: - # TODO: Ignoring errors for now. The way to properly fix this should be to make - # sure we are still at the same binary that configured `STARTUP_` before importing. - print( - "Error running startup function %s:%s: %s" % (name, func, e), - file=sys.stderr, - ) + # ALL STARTUP_* methods will be called here in lexicographic order. + startup_functions = sorted( + [ + (name, var) + for name, var in os.environ.items() + if name.startswith("STARTUP_") + ], + ) + for name, var in startup_functions: + name, sep, func = var.partition(":") + if sep: + try: + module = importlib.import_module(name) + getattr(module, func)() + except Exception as e: + # TODO: Ignoring errors for now. The way to properly fix this should be to make + # sure we are still at the same binary that configured `STARTUP_` before importing. + print( + "Error running startup function %s:%s: %s" % (name, func, e), + file=sys.stderr, + ) # pyre-fixme[3]: Return type must be annotated. From 1253c63bf51808b92538426c311342cb4d9d582d Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Tue, 16 Jan 2024 20:20:35 -0800 Subject: [PATCH 0148/1133] Preserve execute mode when unzipping Summary: Some `remote_file`s with `type='exploded_zip'` contain executables, this makes them executable if they are executable in the zip file metadata. 
Equivalent buck1 code: https://www.internalfb.com/code/fbsource/[3f2e4cbf194208e64ff460dafa786e347f2da822]/xplat/build_infra/buck_client/src/com/facebook/buck/util/unarchive/Unzip.java?lines=148-154 Reviewed By: IanChilds Differential Revision: D52685246 fbshipit-source-id: 91169345d962971a8d5df07e8d054686107ebf82 --- prelude/zip_file/tools/unzip.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/prelude/zip_file/tools/unzip.py b/prelude/zip_file/tools/unzip.py index e571c3987..3ec289156 100644 --- a/prelude/zip_file/tools/unzip.py +++ b/prelude/zip_file/tools/unzip.py @@ -28,6 +28,11 @@ def do_unzip(archive, output_dir): # That way we don't need to pass `target_is_directory` argument to `os.symlink` function. for info in (i for i in z.infolist() if not _is_symlink(i)): z.extract(info, path=output_dir) + if _is_executable(info): + os.chmod( + os.path.join(output_dir, info.filename), + _file_attributes(info) | stat.S_IXUSR, + ) for info in (i for i in z.infolist() if _is_symlink(i)): symlink_path = os.path.join(output_dir, info.filename) symlink_dst = z.read(info).decode("utf-8") @@ -54,6 +59,10 @@ def _is_symlink(zip_info): return stat.S_ISLNK(_file_attributes(zip_info)) +def _is_executable(zip_info): + return stat.S_IMODE(_file_attributes(zip_info)) & stat.S_IXUSR + + def main(): args = _parse_args() print("Source zip is: {}".format(args.src), file=sys.stderr) From 04b7f454f5fb55aee4d51c8af8e385fdb2c45e93 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 17 Jan 2024 00:31:23 -0800 Subject: [PATCH 0149/1133] Revert dummy change Summary: This is from {D48469092} and was ~~not intended to be landed~~ intended to be reverted a long time ago. 
Reviewed By: lmvasquezg Differential Revision: D52827383 fbshipit-source-id: 1f474badf8d3520a9deadfe45469ce6568baddd2 --- prelude/prelude.bzl | 2 -- 1 file changed, 2 deletions(-) diff --git a/prelude/prelude.bzl b/prelude/prelude.bzl index 6ef06c1ea..ac15950e8 100644 --- a/prelude/prelude.bzl +++ b/prelude/prelude.bzl @@ -10,5 +10,3 @@ load("@prelude//:native.bzl", _native = "native") # Public symbols in this file become globals everywhere except `bzl` files in prelude. # Additionally, members of `native` struct also become globals in `BUCK` files. native = _native - -# This is a test to get CI to notice me From 7710b7b1c6ad9ca34a4debff60b99f10a402b0a2 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Wed, 17 Jan 2024 08:48:54 -0800 Subject: [PATCH 0150/1133] use versioning from packaging instead of distutils Summary: Seeing message: ``` DeprecationWarning: distutils Version classes are deprecated. Use packaging.version instead. ``` Reviewed By: milend Differential Revision: D52836865 fbshipit-source-id: f8815eae93f50dabb906d250040275c69bc1b787 --- prelude/apple/tools/resource_broker/BUCK.v2 | 1 + prelude/apple/tools/resource_broker/ios.py | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/prelude/apple/tools/resource_broker/BUCK.v2 b/prelude/apple/tools/resource_broker/BUCK.v2 index 6c14216df..5aef86d06 100644 --- a/prelude/apple/tools/resource_broker/BUCK.v2 +++ b/prelude/apple/tools/resource_broker/BUCK.v2 @@ -27,5 +27,6 @@ python_library( ), deps = [ "fbsource//third-party/pypi/dataclasses-json:dataclasses-json", + "fbsource//third-party/pypi/packaging:packaging", ], ) diff --git a/prelude/apple/tools/resource_broker/ios.py b/prelude/apple/tools/resource_broker/ios.py index a3ae35d19..379367c08 100644 --- a/prelude/apple/tools/resource_broker/ios.py +++ b/prelude/apple/tools/resource_broker/ios.py @@ -6,10 +6,10 @@ # of this source tree. 
import os - -from distutils.version import StrictVersion from typing import List, Optional +from packaging.version import Version + from .idb_companion import IdbCompanion from .idb_target import IdbTarget, managed_simulators_from_stdout, SimState @@ -75,7 +75,7 @@ def _compatible_device_type_from_runtime(runtime: XCSimRuntime) -> Optional[str] def _select_latest_simulator_spec(runtimes: List[XCSimRuntime]) -> str: - runtimes.sort(key=lambda x: StrictVersion(x.version), reverse=True) + runtimes.sort(key=lambda x: Version(x.version), reverse=True) for runtime in runtimes: device_type = _compatible_device_type_from_runtime(runtime) if device_type: From a529fdcae4fe089924ac34835ea3e60ce8603ab6 Mon Sep 17 00:00:00 2001 From: Ivan Balaksha Date: Wed, 17 Jan 2024 10:26:47 -0800 Subject: [PATCH 0151/1133] Set default primary dex patterns for android_instrumentation_apk Summary: Buck1 allows building android_instrumentation_apk with enabled multidex without specifying `primary_dex_patterns`, but fails with ``` Exception in thread "main" java.lang.IllegalStateException: No primary dex classes were specified! Please add primary_dex_patterns to ensure that at least one class exists in the primary dex. ``` This diff introduce default value for primary_dex_patterns and adds same patterns as regular apk. Now it is possible to skip `primary_dex_patterns`. ``` fb_native.android_instrumentation_apk( name = "test_apk_multidex_no_primary_dex_patterns", aapt_mode = "aapt2", apk = ":apk_under_test", labels = ["buck2-only"], manifest_skeleton = "TestApkAndroidManifest.xml", use_split_dex = True, deps = [ "fbsource//fbandroid/buck2/tests/good/apk:android_prebuilt_aar_with_native_libs1", "fbsource//fbandroid/buck2/tests/good/apk:cxx_library", ":resources", ":resources_with_different_package", ":split_apk_test_class", ":test_activity", ], ) ``` This diff based on suggestion in D52804766. 
Reviewed By: IanChilds Differential Revision: D52838715 fbshipit-source-id: e6bd9866cceb589a018b3d2b51a1921c07544510 --- prelude/native.bzl | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/prelude/native.bzl b/prelude/native.bzl index 9c37d87c2..1bbadd2ab 100644 --- a/prelude/native.bzl +++ b/prelude/native.bzl @@ -203,9 +203,17 @@ def _android_binary_macro_stub( def _android_instrumentation_apk_macro_stub( cpu_filters = None, + primary_dex_patterns = [], **kwargs): + primary_dex_patterns = primary_dex_patterns + [ + "/R^", + "/R$", + # Pin this to the primary for apps with no primary dex classes. + "^com/facebook/buck_generated/AppWithoutResourcesStub^", + ] __rules__["android_instrumentation_apk"]( cpu_filters = _get_valid_cpu_filters(cpu_filters), + primary_dex_patterns = primary_dex_patterns, **kwargs ) From 94782b3d078846d1310974148ecebb4e197b7075 Mon Sep 17 00:00:00 2001 From: Maxwell Heiber Date: Thu, 18 Jan 2024 05:53:54 -0800 Subject: [PATCH 0152/1133] buck2-ocaml: Don't link when generating IDE support files Summary: it's faster! 
see fb-only section for more context Reviewed By: ndmitchell Differential Revision: D52840634 fbshipit-source-id: 0ccfe7ff409467bc906241982737065f1296da8a --- prelude/ocaml/ocaml.bzl | 8 +++----- prelude/ocaml/ocaml_toolchain_types.bzl | 6 ++++++ 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/prelude/ocaml/ocaml.bzl b/prelude/ocaml/ocaml.bzl index abd652b83..99694ff4a 100644 --- a/prelude/ocaml/ocaml.bzl +++ b/prelude/ocaml/ocaml.bzl @@ -96,7 +96,7 @@ load("@prelude//utils:graph_utils.bzl", "breadth_first_traversal", "post_order_t load("@prelude//utils:platform_flavors_util.bzl", "by_platform") load("@prelude//utils:utils.bzl", "filter_and_map_idx", "flatten") load(":makefile.bzl", "parse_makefile") -load(":ocaml_toolchain_types.bzl", "OCamlLibraryInfo", "OCamlLinkInfo", "OCamlToolchainInfo", "OtherOutputsInfo", "merge_ocaml_link_infos", "merge_other_outputs_info") +load(":ocaml_toolchain_types.bzl", "OCamlLibraryInfo", "OCamlLinkInfo", "OCamlToolchainInfo", "OcamlIdeInfo", "OtherOutputsInfo", "merge_ocaml_link_infos", "merge_other_outputs_info") BuildMode = enum("native", "bytecode", "expand") @@ -688,9 +688,8 @@ def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: other_outputs_info = merge_other_outputs_info(ctx, other_outputs, _attr_deps_other_outputs_infos(ctx)) info_ide = [ - DefaultInfo( - default_output = cmxa, - other_outputs = [cmd_args(other_outputs_info.info.project_as_args("ide"))], + OcamlIdeInfo( + outputs = [cmd_args(other_outputs_info.info.project_as_args("ide"))], ), ] info_byt = [ @@ -784,7 +783,6 @@ def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: info_ide = [ DefaultInfo( - default_output = binary_nat, other_outputs = [cmd_args(other_outputs_info.info.project_as_args("ide"))], ), ] diff --git a/prelude/ocaml/ocaml_toolchain_types.bzl b/prelude/ocaml/ocaml_toolchain_types.bzl index de38f9e8f..f301a29de 100644 --- a/prelude/ocaml/ocaml_toolchain_types.bzl +++ b/prelude/ocaml/ocaml_toolchain_types.bzl @@ 
-58,6 +58,12 @@ OCamlLinkInfo = provider( fields = {"info": provider_field(typing.Any, default = None)}, ) +OcamlIdeInfo = provider( + fields = { + "outputs": provider_field(typing.Any, default = []), + }, +) + # A record of an OCaml library. OCamlLibraryInfo = record( # The library target name: e.g. "`foo`" From d9cbf0383f42cbd5e4096b1f44330410bc14a89e Mon Sep 17 00:00:00 2001 From: Daniel James Date: Thu, 18 Jan 2024 06:29:06 -0800 Subject: [PATCH 0153/1133] buck: make objcopy available via cxx_genrule Summary: following advice in: https://fb.workplace.com/groups/930797200910874/permalink/1352691642054759/ this diff makes it possible to use a reference to the toolchain's objcopy via, e.g. ``` cxx_genrule( name = "embed_default_sensor_data", srcs = [ "sensor_data", "embed.sh", ], outs = { "object": ["default_sensor_data_lib"], }, bash = "./embed.sh $(ld) $(objcopy)", visibility = ["PUBLIC"], ) ``` Reviewed By: dtolnay Differential Revision: D52817209 fbshipit-source-id: 92ae52a595a185641310c8c1fe200fdaa1dcc54b --- prelude/cxx/cxx_toolchain_types.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index d920fb38e..1456cad11 100644 --- a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -306,6 +306,7 @@ def cxx_toolchain_infos( "ldflags-shared": _shell_quote(linker_info.linker_flags), "ldflags-static": _shell_quote(linker_info.linker_flags), "ldflags-static-pic": _shell_quote(linker_info.linker_flags), + "objcopy": binary_utilities_info.objcopy, # TODO(T110378148): $(platform-name) is almost unusued. Should we remove it? "platform-name": platform_name, } From 53f7c8ced952a64b2e0955f818e0d7277dc4745c Mon Sep 17 00:00:00 2001 From: Jacob Rodal Date: Thu, 18 Jan 2024 14:17:26 -0800 Subject: [PATCH 0154/1133] fix http archive extraction for compatible bsdtar binaries Summary: - The `-P` flag seems necessary to extract through the UNC long path syntax. 
There is precedent for this, e.g. D51482034, D50643354 - cygwin tar also doesn't work, so we can opt to use the system tar instead. There is precedent for this, e.g. D51531469 Reviewed By: 8Keep Differential Revision: D52856804 fbshipit-source-id: d8b5188657f54d902f1d95803cf948614e0c13d5 --- prelude/http_archive/http_archive.bzl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/http_archive/http_archive.bzl b/prelude/http_archive/http_archive.bzl index 0c1601675..599e271d6 100644 --- a/prelude/http_archive/http_archive.bzl +++ b/prelude/http_archive/http_archive.bzl @@ -66,8 +66,9 @@ def _unarchive_cmd( archive, "--stdout", "|", - "tar", + "%WINDIR%\\System32\\tar.exe", "-x", + "-P", "-f", "-", _tar_strip_prefix_flags(strip_prefix), From bb92fc0e8890b0ae7c976708cb47e67f646d885c Mon Sep 17 00:00:00 2001 From: Josiah Gaskin Date: Thu, 18 Jan 2024 18:07:47 -0800 Subject: [PATCH 0155/1133] When uploading main binary to cache, also upload auto link groups Summary: Even when a top level binary is marked as cacheable, the link groups are not cached. This diff propogates the allow_cache_upload flag from cxx_executable down to the auto link group map to enable caching of the shared libs as well as the top level executable. 
Reviewed By: athmasagar Differential Revision: D52891530 fbshipit-source-id: 4f22a5d0209402fcc87ff35723863f2984bf8e95 --- prelude/cxx/cxx_executable.bzl | 1 + prelude/cxx/link_groups.bzl | 8 ++++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index bf99f1346..eb3577123 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -325,6 +325,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, other_roots = link_group_extra_link_roots, prefer_stripped_objects = impl_params.prefer_stripped_objects, anonymous = ctx.attrs.anonymous_link_groups, + allow_cache_upload = impl_params.exe_allow_cache_upload, ) for name, linked_link_group in linked_link_groups.libs.items(): auto_link_groups[name] = linked_link_group.artifact diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index d037a6fdd..8bd273b64 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -575,7 +575,8 @@ def _create_link_group( link_group_libs: dict[str, ([Label, None], LinkInfos)] = {}, prefer_stripped_objects: bool = False, category_suffix: [str, None] = None, - anonymous: bool = False) -> [LinkedObject, None]: + anonymous: bool = False, + allow_cache_upload = False) -> [LinkedObject, None]: """ Link a link group library, described by a `LinkGroupLibSpec`. This is intended to handle regular shared libs and e.g. Python extensions. 
@@ -664,6 +665,7 @@ def _create_link_group( # TODO: anonymous targets cannot be used with dynamic output yet enable_distributed_thinlto = False if anonymous else spec.group.attrs.enable_distributed_thinlto, link_execution_preference = LinkExecutionPreference("any"), + allow_cache_upload = allow_cache_upload, ), anonymous = anonymous, ) @@ -784,7 +786,8 @@ def create_link_groups( linkable_graph_node_map: dict[Label, LinkableNode] = {}, link_group_preferred_linkage: dict[Label, Linkage] = {}, link_group_mappings: [dict[Label, str], None] = None, - anonymous: bool = False) -> _LinkedLinkGroups: + anonymous: bool = False, + allow_cache_upload = False) -> _LinkedLinkGroups: # Generate stubs first, so that subsequent links can link against them. link_group_shared_links = {} specs = [] @@ -842,6 +845,7 @@ def create_link_groups( prefer_stripped_objects = prefer_stripped_objects, category_suffix = "link_group", anonymous = anonymous, + allow_cache_upload = allow_cache_upload, ) if link_group_lib == None: From 146be234cf23a10cf6fb429bdeb2fcfe9a9dc097 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Fri, 19 Jan 2024 01:58:21 -0800 Subject: [PATCH 0156/1133] Remove genrule validation functionality Summary: Removes the genrule validation functionality which is no longer needed. 
Reviewed By: d16r, blackm00n Differential Revision: D52869821 fbshipit-source-id: 7c15f96c5ec7c019bbb56042024b7ace305f6eb0 --- prelude/apple/apple_binary.bzl | 4 -- prelude/apple/apple_bundle.bzl | 4 -- prelude/apple/apple_genrule_deps.bzl | 47 ---------------------- prelude/apple/apple_library.bzl | 4 -- prelude/apple/apple_macro_layer.bzl | 5 --- prelude/apple/apple_rules_impl.bzl | 11 ----- prelude/apple/apple_rules_impl_utility.bzl | 13 ------ prelude/genrule.bzl | 9 ----- prelude/genrule_types.bzl | 12 ------ 9 files changed, 109 deletions(-) delete mode 100644 prelude/apple/apple_genrule_deps.bzl delete mode 100644 prelude/genrule_types.bzl diff --git a/prelude/apple/apple_binary.bzl b/prelude/apple/apple_binary.bzl index 7b0b9d501..f07121842 100644 --- a/prelude/apple/apple_binary.bzl +++ b/prelude/apple/apple_binary.bzl @@ -63,7 +63,6 @@ load(":apple_code_signing_types.bzl", "AppleEntitlementsInfo") load(":apple_dsym.bzl", "DSYM_SUBTARGET", "get_apple_dsym") load(":apple_entitlements.bzl", "entitlements_link_flags") load(":apple_frameworks.bzl", "get_framework_search_path_flags") -load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_attr_value", "get_apple_genrule_deps_outputs") load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags") load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_apple_stripped_attr_value_with_default_fallback") load(":apple_validation_deps.bzl", "get_apple_validation_deps_outputs") @@ -113,9 +112,6 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: ) validation_deps_outputs = get_apple_validation_deps_outputs(ctx) - if get_apple_build_genrule_deps_attr_value(ctx): - validation_deps_outputs += get_apple_genrule_deps_outputs(cxx_attr_deps(ctx)) - stripped = get_apple_stripped_attr_value_with_default_fallback(ctx) constructor_params = CxxRuleConstructorParams( 
rule_type = "apple_binary", diff --git a/prelude/apple/apple_bundle.bzl b/prelude/apple/apple_bundle.bzl index 74afaf964..c7ecb179e 100644 --- a/prelude/apple/apple_bundle.bzl +++ b/prelude/apple/apple_bundle.bzl @@ -61,7 +61,6 @@ load( ) load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_default_binary_dep", "get_flattened_binary_deps", "get_product_name") load(":apple_dsym.bzl", "DSYM_INFO_SUBTARGET", "DSYM_SUBTARGET", "get_apple_dsym", "get_apple_dsym_ext", "get_apple_dsym_info") -load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_attr_value", "get_apple_genrule_deps_outputs") load(":apple_sdk.bzl", "get_apple_sdk_name") load(":apple_universal_binaries.bzl", "create_universal_binary") load(":apple_validation_deps.bzl", "get_apple_validation_deps_outputs") @@ -322,9 +321,6 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: primary_binary_rel_path = get_apple_bundle_part_relative_destination_path(ctx, primary_binary_part) validation_deps_outputs = get_apple_validation_deps_outputs(ctx) - if get_apple_build_genrule_deps_attr_value(ctx): - validation_deps_outputs += get_apple_genrule_deps_outputs(ctx.attrs.deps) - sub_targets = assemble_bundle( ctx, bundle, diff --git a/prelude/apple/apple_genrule_deps.bzl b/prelude/apple/apple_genrule_deps.bzl deleted file mode 100644 index 756a5bdd3..000000000 --- a/prelude/apple/apple_genrule_deps.bzl +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. 
- -load("@prelude//:genrule_types.bzl", "GENRULE_MARKER_SUBTARGET_NAME") - -def get_apple_genrule_deps_outputs(deps: list[Dependency]) -> list[Artifact]: - artifacts = [] - for dep in deps: - default_info = dep[DefaultInfo] - if GENRULE_MARKER_SUBTARGET_NAME in default_info.sub_targets: - artifacts += default_info.default_outputs - return artifacts - -def get_apple_build_genrule_deps_attr_value(ctx: AnalysisContext) -> bool: - build_genrule_deps = ctx.attrs.build_genrule_deps - if build_genrule_deps != None: - # `build_genrule_deps` present on a target takes priority - return build_genrule_deps - - # Fallback to the default value which is driven by buckconfig + select() - return ctx.attrs._build_genrule_deps - -def get_apple_build_genrule_deps_default_kwargs() -> dict[str, typing.Any]: - return { - APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: _build_genrule_deps_default_enabled(), - } - -def _build_genrule_deps_default_enabled() -> typing.Any: - buckconfig_value = read_root_config("apple", "build_genrule_deps", None) - if buckconfig_value != None: - return buckconfig_value.lower() == "true" - - return select({ - "DEFAULT": False, - # TODO(mgd): Make `config//` references possible from macro layer - "ovr_config//features/apple/constraints:build_genrule_deps_enabled": True, - }) - -APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME = "_build_genrule_deps" -APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE = attrs.bool(default = False) - -APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME = "build_genrule_deps" -APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE = attrs.option(attrs.bool(), default = None) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index 520f2a305..530bfc336 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -71,7 +71,6 @@ load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") load(":apple_bundle_types.bzl", "AppleBundleLinkerMapInfo", "AppleMinDeploymentVersionInfo") 
load(":apple_frameworks.bzl", "get_framework_search_path_flags") -load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_attr_value", "get_apple_genrule_deps_outputs") load(":apple_modular_utility.bzl", "MODULE_CACHE_PATH") load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags") load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_apple_stripped_attr_value_with_default_fallback", "get_module_name") @@ -223,9 +222,6 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte ) validation_deps_outputs = get_apple_validation_deps_outputs(ctx) - if get_apple_build_genrule_deps_attr_value(ctx): - validation_deps_outputs += get_apple_genrule_deps_outputs(cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx)) - return CxxRuleConstructorParams( rule_type = params.rule_type, is_test = (params.rule_type == "apple_test"), diff --git a/prelude/apple/apple_macro_layer.bzl b/prelude/apple/apple_macro_layer.bzl index faf2fab3d..3d39921e1 100644 --- a/prelude/apple/apple_macro_layer.bzl +++ b/prelude/apple/apple_macro_layer.bzl @@ -7,7 +7,6 @@ load(":apple_bundle_config.bzl", "apple_bundle_config") load(":apple_dsym_config.bzl", "apple_dsym_config") -load(":apple_genrule_deps.bzl", "get_apple_build_genrule_deps_default_kwargs") load(":apple_info_plist_substitutions_parsing.bzl", "parse_codesign_entitlements") load(":apple_package_config.bzl", "apple_package_config") load(":apple_resource_bundle.bzl", "make_resource_bundle_rule") @@ -76,7 +75,6 @@ def apple_test_macro_impl(apple_test_rule, apple_resource_bundle_rule, **kwargs) kwargs.update(apple_bundle_config()) kwargs.update(apple_dsym_config()) kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_TEST_LOCAL_EXECUTION_OVERRIDES)) - kwargs.update(get_apple_build_genrule_deps_default_kwargs()) # `extension` is used both by `apple_test` and 
`apple_resource_bundle`, so provide default here kwargs["extension"] = kwargs.pop("extension", "xctest") @@ -89,7 +87,6 @@ def apple_bundle_macro_impl(apple_bundle_rule, apple_resource_bundle_rule, **kwa info_plist_substitutions = kwargs.get("info_plist_substitutions") kwargs.update(apple_bundle_config()) kwargs.update(apple_dsym_config()) - kwargs.update(get_apple_build_genrule_deps_default_kwargs()) apple_bundle_rule( _codesign_entitlements = parse_codesign_entitlements(info_plist_substitutions), _resource_bundle = make_resource_bundle_rule(apple_resource_bundle_rule, **kwargs), @@ -100,7 +97,6 @@ def apple_library_macro_impl(apple_library_rule = None, **kwargs): kwargs.update(apple_dsym_config()) kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_LIBRARY_LOCAL_EXECUTION_OVERRIDES)) kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config([APPLE_STRIPPED_DEFAULT])) - kwargs.update(get_apple_build_genrule_deps_default_kwargs()) apple_library_rule(**kwargs) def apple_binary_macro_impl(apple_binary_rule = None, apple_universal_executable = None, **kwargs): @@ -108,7 +104,6 @@ def apple_binary_macro_impl(apple_binary_rule = None, apple_universal_executable kwargs.update(dsym_args) kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_BINARY_LOCAL_EXECUTION_OVERRIDES)) kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config([APPLE_STRIPPED_DEFAULT])) - kwargs.update(get_apple_build_genrule_deps_default_kwargs()) original_binary_name = kwargs.pop("name") diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index e593c13b9..a3cef8fa4 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -6,13 +6,6 @@ # of this source tree. 
load("@prelude//apple:apple_buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE") -load( - "@prelude//apple:apple_genrule_deps.bzl", - "APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME", - "APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE", - "APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME", - "APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE", -) load("@prelude//apple/swift:swift_incremental_support.bzl", "SwiftCompilationMode") load("@prelude//apple/swift:swift_toolchain.bzl", "swift_toolchain_impl") load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftObjectFormat") @@ -101,8 +94,6 @@ def _apple_binary_extra_attrs(): "_apple_xctoolchain": get_apple_xctoolchain_attr(), "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), "_stripped_default": attrs.bool(default = False), - APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE, - APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE, BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, APPLE_VALIDATION_DEPS_ATTR_NAME: APPLE_VALIDATION_DEPS_ATTR_TYPE, } @@ -130,8 +121,6 @@ def _apple_library_extra_attrs(): "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), "_stripped_default": attrs.bool(default = False), APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), - APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE, - APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE, BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, APPLE_VALIDATION_DEPS_ATTR_NAME: APPLE_VALIDATION_DEPS_ATTR_TYPE, } diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 2d21fc485..fcdc77269 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -9,13 +9,6 @@ 
load("@prelude//apple:apple_buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRI load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_system_identification_attrs") load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo", "AppleBundleTypeAttributeType") load("@prelude//apple:apple_code_signing_types.bzl", "CodeSignType") -load( - "@prelude//apple:apple_genrule_deps.bzl", - "APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME", - "APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE", - "APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME", - "APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE", -) load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") load("@prelude//apple/swift:swift_incremental_support.bzl", "SwiftCompilationMode") load("@prelude//apple/user:apple_selective_debugging.bzl", "AppleSelectiveDebuggingInfo") @@ -115,10 +108,6 @@ def apple_test_extra_attrs(): "_macos_idb_companion": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:macos_idb_companion", providers = [LocalResourceInfo]), } attribs.update(_apple_bundle_like_common_attrs()) - attribs.update({ - APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE, - APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE, - }) return attribs def apple_xcuitest_extra_attrs(): @@ -151,8 +140,6 @@ def apple_bundle_extra_attrs(): "universal": attrs.option(attrs.bool(), default = None), "_apple_toolchain": get_apple_bundle_toolchain_attr(), "_codesign_entitlements": attrs.option(attrs.source(), default = None), - APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_DEFAULT_ATTRIB_TYPE, - APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_NAME: APPLE_BUILD_GENRULE_DEPS_TARGET_ATTRIB_TYPE, } attribs.update(_apple_bundle_like_common_attrs()) return attribs diff --git a/prelude/genrule.bzl b/prelude/genrule.bzl index 638add6b1..d999f92be 
100644 --- a/prelude/genrule.bzl +++ b/prelude/genrule.bzl @@ -10,11 +10,9 @@ load("@prelude//:cache_mode.bzl", "CacheModeInfo") load("@prelude//:genrule_local_labels.bzl", "genrule_labels_require_local") load("@prelude//:genrule_toolchain.bzl", "GenruleToolchainInfo") -load("@prelude//:genrule_types.bzl", "GENRULE_MARKER_SUBTARGET_NAME", "GenruleMarkerInfo") load("@prelude//:is_full_meta_repo.bzl", "is_full_meta_repo") load("@prelude//android:build_only_native_code.bzl", "is_build_only_native_code") load("@prelude//os_lookup:defs.bzl", "OsLookup") -load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:utils.bzl", "flatten", "value_or") GENRULE_OUT_DIR = "out" @@ -348,14 +346,7 @@ def process_genrule( **metadata_args ) - # Use a subtarget to insert a marker, as callsites make assumptions about - # the providers of `process_genrule()`. We want to have the marker in - # `DefaultInfo` rather than in `genrule_impl()` because we want to identify - # all classes of genrule-like rules. sub_targets = {k: [DefaultInfo(default_outputs = v)] for (k, v) in named_outputs.items()} - expect(GENRULE_MARKER_SUBTARGET_NAME not in sub_targets, "Conflicting private `{}` subtarget and named output".format(GENRULE_MARKER_SUBTARGET_NAME)) - sub_targets[GENRULE_MARKER_SUBTARGET_NAME] = [GenruleMarkerInfo()] - providers = [DefaultInfo( default_outputs = default_outputs, sub_targets = sub_targets, diff --git a/prelude/genrule_types.bzl b/prelude/genrule_types.bzl deleted file mode 100644 index 0793c705d..000000000 --- a/prelude/genrule_types.bzl +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. 
- -# A provider that's used as a marker for `genrule()`, allows dependents -# to distinguish such outputs -GenruleMarkerInfo = provider(fields = {}) - -GENRULE_MARKER_SUBTARGET_NAME = "genrule_marker" From 444092dd7ce0b64ceeec93b2ca7751801b34dbba Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 19 Jan 2024 04:31:51 -0800 Subject: [PATCH 0157/1133] delay site import Summary: `import site` might run some user code as part of `site_customize` that needs access to compiled-in modules. This diff defers running `site_customize` for native python-enabled binaries until the static extension finder is set up, so imports to compiled-in modules in `site_customize` can work. Reviewed By: jbower-fb, aleivag, cxxxs Differential Revision: D52877026 fbshipit-source-id: c5b3e116555fddc190a0910b8d61e34e4c1cd2d9 --- prelude/python/tools/embedded_main.cpp | 33 ++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/prelude/python/tools/embedded_main.cpp b/prelude/python/tools/embedded_main.cpp index a8fa98369..2f0426683 100644 --- a/prelude/python/tools/embedded_main.cpp +++ b/prelude/python/tools/embedded_main.cpp @@ -51,6 +51,32 @@ std::optional MaybeGetExitCode(PyStatus* status, PyConfig* config) { extern struct _inittab _static_extension_info[]; PyMODINIT_FUNC PyInit__static_extension_utils(); +void call_site_main() { + PyObject* siteModule = PyImport_ImportModule("site"); + if (siteModule == nullptr) { + PyErr_Print(); + fprintf(stderr, "Error: could not import module 'site'\n"); + } else { + PyObject* siteMain = PyObject_GetAttrString(siteModule, "main"); + Py_DECREF(siteModule); + if (siteMain == nullptr || !PyCallable_Check(siteMain)) { + PyErr_Print(); + fprintf( + stderr, "Error: could not find function 'main' in module 'site'\n"); + Py_DECREF(siteMain); + } else { + PyObject* siteMainResult = PyObject_CallObject(siteMain, nullptr); + Py_DECREF(siteMain); + if (siteMainResult == nullptr) { + PyErr_Print(); + fprintf( + stderr, "Error: could not 
call function 'main' in module 'site'\n"); + } + Py_DECREF(siteMainResult); + } + } +} + int main(int argc, char* argv[]) { PyStatus status; PyConfig config; @@ -88,6 +114,10 @@ int main(int argc, char* argv[]) { } } + // Defer importing `site` until after static extension finder is initialized. + auto siteImport = config.site_import; + config.site_import = 0; + // Check if we're using par_style="native", if so, modify sys.path to include // the executable-zipfile to it, and set a main module to run when invoked. #ifdef NATIVE_PAR_STYLE @@ -188,5 +218,8 @@ int main(int argc, char* argv[]) { } PyConfig_Clear(&config); + if (siteImport) { + call_site_main(); + } return Py_RunMain(); } From 89f2ad213c19b8da095103899ff7e3bf650721a8 Mon Sep 17 00:00:00 2001 From: generatedunixname89002005232357 Date: Fri, 19 Jan 2024 10:47:10 -0800 Subject: [PATCH 0158/1133] Revert D52877026: Multisect successfully blamed "D52877026: delay site import" for otest failure Summary: This diff is reverting D52877026 D52877026: delay site import by zsol has been identified to be causing the following test failure: Tests affected: - [tools/make_par:environment_tests - test_ld_preload_xar_forkserver (tools.make_par.environment_tests.EnvironmentTests)](https://www.internalfb.com/intern/test/281475066289502/) Here's the Multisect link: https://www.internalfb.com/multisect/4026549 Here are the tasks that are relevant to this breakage: We're generating a revert to back out the changes in this diff, please note the backout may land if someone accepts it. If you believe this diff has been generated in error you may Commandeer and Abandon it. 
Reviewed By: zsol Differential Revision: D52906644 fbshipit-source-id: d26c06e22921a4337f35572a73fcabf7dfad84bb --- prelude/python/tools/embedded_main.cpp | 33 -------------------------- 1 file changed, 33 deletions(-) diff --git a/prelude/python/tools/embedded_main.cpp b/prelude/python/tools/embedded_main.cpp index 2f0426683..a8fa98369 100644 --- a/prelude/python/tools/embedded_main.cpp +++ b/prelude/python/tools/embedded_main.cpp @@ -51,32 +51,6 @@ std::optional MaybeGetExitCode(PyStatus* status, PyConfig* config) { extern struct _inittab _static_extension_info[]; PyMODINIT_FUNC PyInit__static_extension_utils(); -void call_site_main() { - PyObject* siteModule = PyImport_ImportModule("site"); - if (siteModule == nullptr) { - PyErr_Print(); - fprintf(stderr, "Error: could not import module 'site'\n"); - } else { - PyObject* siteMain = PyObject_GetAttrString(siteModule, "main"); - Py_DECREF(siteModule); - if (siteMain == nullptr || !PyCallable_Check(siteMain)) { - PyErr_Print(); - fprintf( - stderr, "Error: could not find function 'main' in module 'site'\n"); - Py_DECREF(siteMain); - } else { - PyObject* siteMainResult = PyObject_CallObject(siteMain, nullptr); - Py_DECREF(siteMain); - if (siteMainResult == nullptr) { - PyErr_Print(); - fprintf( - stderr, "Error: could not call function 'main' in module 'site'\n"); - } - Py_DECREF(siteMainResult); - } - } -} - int main(int argc, char* argv[]) { PyStatus status; PyConfig config; @@ -114,10 +88,6 @@ int main(int argc, char* argv[]) { } } - // Defer importing `site` until after static extension finder is initialized. - auto siteImport = config.site_import; - config.site_import = 0; - // Check if we're using par_style="native", if so, modify sys.path to include // the executable-zipfile to it, and set a main module to run when invoked. 
#ifdef NATIVE_PAR_STYLE @@ -218,8 +188,5 @@ int main(int argc, char* argv[]) { } PyConfig_Clear(&config); - if (siteImport) { - call_site_main(); - } return Py_RunMain(); } From 5012adddca920339ecae189e9172b3aa74057a6d Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Mon, 22 Jan 2024 02:56:02 -0800 Subject: [PATCH 0159/1133] python_binary: add a [debuginfo] subtarget Summary: This subtarget produces a json file with all debug information associated with the binary in the following format: 1. the file is a single list 2. each entry in the list is a pair of (location in buck-out, logical location), represented as a 2 element list 3. logical location refers to the place where the file would be in the par (next to the .so file), which (I think) is the same as the relative location in the `#debuginfo` folder Reviewed By: itamaro Differential Revision: D52905959 fbshipit-source-id: d0ee79d0a0d6b8ce7ccc2c6c9fea32aa33f99151 --- prelude/python/make_py_package.bzl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 96faa653d..983901f0b 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -190,6 +190,8 @@ def make_py_package( allow_cache_upload = allow_cache_upload, ) default.sub_targets[style] = make_py_package_providers(pex_providers) + + default.sub_targets["debuginfo"] = _debuginfo_subtarget(ctx, debug_artifacts) return default def _make_py_package_impl( @@ -325,6 +327,10 @@ def _make_py_package_impl( run_cmd = cmd_args(run_args).hidden([a for a, _ in runtime_files] + hidden_resources), ) +def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[(ArgLike, str)]) -> list[Provider]: + out = ctx.actions.write_json("debuginfo.manifest.json", debug_artifacts) + return [DefaultInfo(default_output = out, other_outputs = [a for a, _ in debug_artifacts])] + def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: dict[str, 
(LinkedObject, bool)]) -> cmd_args: preload_libraries_path = ctx.actions.write( "__preload_libraries.txt", From 13e097b8263f2db38efdcc470e182c2c3acdbad1 Mon Sep 17 00:00:00 2001 From: Thomas David Cuvillier Date: Mon, 22 Jan 2024 07:11:14 -0800 Subject: [PATCH 0160/1133] halting instead of stopping the test binary Summary: We have experienced some issues where the ct_executor would take a long time to stop. From that we have learnt the following lessons: 1) init:stop can take a long time to stop. 2) The code ``` init:stop(ExitCode), receive after ?INIT_STOP_TIMEOUT -> ?LOG_ERROR( io_lib:format("~p failed to terminate within ~c millisecond", [ ?MODULE, ?INIT_STOP_TIMEOUT ]) ), erlang:halt(ExitCode) end; ``` That was supposed to prevent against that didn't do its job properly. To prevent future problem like this one happening in the test binary, we do the same mitigation as with the ct_executor, that is, using halt/ instead of stop/ Reviewed By: TheGeorge Differential Revision: D52877538 fbshipit-source-id: d5105a81d6d29f9395098629a53b87d9fd20e4de --- .../test_binary/src/test_binary.erl | 22 ++----------------- 1 file changed, 2 insertions(+), 20 deletions(-) diff --git a/prelude/erlang/common_test/test_binary/src/test_binary.erl b/prelude/erlang/common_test/test_binary/src/test_binary.erl index 08d2cf114..2403db9f2 100644 --- a/prelude/erlang/common_test/test_binary/src/test_binary.erl +++ b/prelude/erlang/common_test/test_binary/src/test_binary.erl @@ -31,16 +31,7 @@ main([TestInfoFile, "list", OutputDir]) -> after test_logger:flush() end, - init:stop(ExitCode), - receive - after ?INIT_STOP_TIMEOUT -> - ?LOG_ERROR( - io_lib:format("~p failed to terminate within ~c millisecond", [ - ?MODULE, ?INIT_STOP_TIMEOUT - ]) - ), - erlang:halt(ExitCode) - end; + erlang:halt(ExitCode); main([TestInfoFile, "run", OutputDir | Tests]) -> test_logger:set_up_logger(OutputDir, test_runner), ExitCode = @@ -55,16 +46,7 @@ main([TestInfoFile, "run", OutputDir | Tests]) -> after 
test_logger:flush() end, - init:stop(ExitCode), - receive - after ?INIT_STOP_TIMEOUT -> - ?LOG_ERROR( - io_lib:format("~p failed to terminate within ~c millisecond", [ - ?MODULE, ?INIT_STOP_TIMEOUT - ]) - ), - erlang:halt(ExitCode) - end; + erlang:halt(ExitCode); main([TestInfoFile]) -> %% without test runner support we run all tests and need to create our own test dir OutputDir = string:trim(os:cmd("mktemp -d")), From 093c30533194436eba3bf861820a8fc4e3da6d84 Mon Sep 17 00:00:00 2001 From: Lukasz Indyk Date: Mon, 22 Jan 2024 13:26:56 -0800 Subject: [PATCH 0161/1133] android screenshot tests on real devices - 1/n Summary: for context: https://fb.workplace.com/groups/1867196870217466/posts/7416524221704523 this diff allows to specify android devices for screenshot tests. Reviewed By: IanChilds Differential Revision: D52350626 fbshipit-source-id: 7a6327e60c2916b5312cbef7cb17012625f1a5dd --- .../android/android_instrumentation_test.bzl | 25 +++++++++++++------ 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/prelude/android/android_instrumentation_test.bzl b/prelude/android/android_instrumentation_test.bzl index 27d56c21f..87bb007db 100644 --- a/prelude/android/android_instrumentation_test.bzl +++ b/prelude/android/android_instrumentation_test.bzl @@ -14,6 +14,7 @@ load("@prelude//utils:expect.bzl", "expect") load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") DEFAULT_ANDROID_SUBPLATFORM = "android-30" +DEFAULT_ANDROID_PLATFORM = "android-emulator" def android_instrumentation_test_impl(ctx: AnalysisContext): android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] @@ -91,8 +92,8 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): local_enabled = android_toolchain.instrumentation_test_can_run_locally, remote_enabled = True, remote_execution_properties = { - "platform": "android-emulator", - "subplatform": _compute_emulator_target(ctx.attrs.labels or []), + "platform": _compute_emulator_platform(ctx.attrs.labels 
or []), + "subplatform": _compute_emulator_subplatform(ctx.attrs.labels or []), }, remote_execution_use_case = "instrumentation-tests", ), @@ -117,10 +118,18 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): ] + classmap_source_info # replicating the logic in https://fburl.com/code/1fqowxu4 to match buck1's behavior -def _compute_emulator_target(labels: list[str]) -> str: - emulator_target_labels = [label for label in labels if label.startswith("re_emulator_")] - expect(len(emulator_target_labels) <= 1, "multiple 're_emulator_' labels were found:[{}], there must be only one!".format(", ".join(emulator_target_labels))) - if len(emulator_target_labels) == 0: +def _compute_emulator_subplatform(labels: list[str]) -> str: + emulator_subplatform_labels = [label for label in labels if label.startswith("re_emulator_")] + expect(len(emulator_subplatform_labels) <= 1, "multiple 're_emulator_' labels were found:[{}], there must be only one!".format(", ".join(emulator_subplatform_labels))) + if len(emulator_subplatform_labels) == 0: return DEFAULT_ANDROID_SUBPLATFORM - else: # len(emulator_target_labels) == 1: - return emulator_target_labels[0].replace("re_emulator_", "") + else: # len(emulator_subplatform_labels) == 1: + return emulator_subplatform_labels[0].replace("re_emulator_", "") + +def _compute_emulator_platform(labels: list[str]) -> str: + emulator_platform_labels = [label for label in labels if label.startswith("re_platform")] + expect(len(emulator_platform_labels) <= 1, "multiple 're_platform' labels were found:[{}], there must be only one!".format(", ".join(emulator_platform_labels))) + if len(emulator_platform_labels) == 0: + return DEFAULT_ANDROID_PLATFORM + else: # len(emulator_platform_labels) == 1: + return emulator_platform_labels[0].replace("re_platform", "") From e9439b26012b87defd099bf6c9a6663fcf38e4fd Mon Sep 17 00:00:00 2001 From: Maxwell Heiber Date: Tue, 23 Jan 2024 02:41:50 -0800 Subject: [PATCH 0162/1133] buck2 ocaml ide output: enable 
`--show-output` and be more consistent Summary: ## What Use `DefaultInfo` instead of a custom provider type for the `.cmt` and `.cmti` files we produce for IDE support. ## Why D52840634 introduced the ability to build IDE outputs without linking, but had the following downsides: - `--show-output` and `--show-full-output` didn't show anything for IDE targets, which impaired debuggability - (warning: long explanation, skippable:) I didn't make the change to use `OCamlIdeInfo` all OCaml rules, which was inconsistent and probably didn't avoid all linking. When I did try to make the change for all OCaml IDE rules, the expected .cmt files were not generated. I suspect a target needs to have a default_output in order for this line of our BXL for generating build artifacts to do anything: https://www.internalfb.com/code/fbsource/[3905ae9c88fb4245b97cbac20c5a387c1f816655]/fbcode/common/ocaml/gen_merlin.bxl?lines=18-21 Reviewed By: ndmitchell Differential Revision: D52952776 fbshipit-source-id: 9409ef710d6f4def1a7a4ed5ebf280850fa4d169 --- prelude/ocaml/ocaml.bzl | 12 +++++++----- prelude/ocaml/ocaml_toolchain_types.bzl | 6 ------ 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/prelude/ocaml/ocaml.bzl b/prelude/ocaml/ocaml.bzl index 99694ff4a..7afc804b0 100644 --- a/prelude/ocaml/ocaml.bzl +++ b/prelude/ocaml/ocaml.bzl @@ -96,7 +96,7 @@ load("@prelude//utils:graph_utils.bzl", "breadth_first_traversal", "post_order_t load("@prelude//utils:platform_flavors_util.bzl", "by_platform") load("@prelude//utils:utils.bzl", "filter_and_map_idx", "flatten") load(":makefile.bzl", "parse_makefile") -load(":ocaml_toolchain_types.bzl", "OCamlLibraryInfo", "OCamlLinkInfo", "OCamlToolchainInfo", "OcamlIdeInfo", "OtherOutputsInfo", "merge_ocaml_link_infos", "merge_other_outputs_info") +load(":ocaml_toolchain_types.bzl", "OCamlLibraryInfo", "OCamlLinkInfo", "OCamlToolchainInfo", "OtherOutputsInfo", "merge_ocaml_link_infos", "merge_other_outputs_info") BuildMode = enum("native", 
"bytecode", "expand") @@ -688,8 +688,9 @@ def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: other_outputs_info = merge_other_outputs_info(ctx, other_outputs, _attr_deps_other_outputs_infos(ctx)) info_ide = [ - OcamlIdeInfo( - outputs = [cmd_args(other_outputs_info.info.project_as_args("ide"))], + DefaultInfo( + default_output = cmts_nat[0] if cmts_nat else None, + other_outputs = [cmd_args(other_outputs_info.info.project_as_args("ide"))], ), ] info_byt = [ @@ -783,6 +784,7 @@ def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: info_ide = [ DefaultInfo( + default_output = cmts_nat[0] if cmts_nat else None, other_outputs = [cmd_args(other_outputs_info.info.project_as_args("ide"))], ), ] @@ -874,7 +876,7 @@ def ocaml_object_impl(ctx: AnalysisContext) -> list[Provider]: info_ide = [ DefaultInfo( - default_output = obj, + default_output = cmts[0] if cmts else None, other_outputs = [cmd_args(other_outputs_info.info.project_as_args("ide"))], ), ] @@ -956,7 +958,7 @@ def ocaml_shared_impl(ctx: AnalysisContext) -> list[Provider]: info_ide = [ DefaultInfo( - default_output = binary_nat, + default_output = cmts_nat[0] if cmts_nat else None, other_outputs = [cmd_args(other_outputs_info.info.project_as_args("ide"))], ), ] diff --git a/prelude/ocaml/ocaml_toolchain_types.bzl b/prelude/ocaml/ocaml_toolchain_types.bzl index f301a29de..de38f9e8f 100644 --- a/prelude/ocaml/ocaml_toolchain_types.bzl +++ b/prelude/ocaml/ocaml_toolchain_types.bzl @@ -58,12 +58,6 @@ OCamlLinkInfo = provider( fields = {"info": provider_field(typing.Any, default = None)}, ) -OcamlIdeInfo = provider( - fields = { - "outputs": provider_field(typing.Any, default = []), - }, -) - # A record of an OCaml library. OCamlLibraryInfo = record( # The library target name: e.g. 
"`foo`" From ecaad588d5cd4315590547e8536f3ba13aa6f87f Mon Sep 17 00:00:00 2001 From: Alvaro Leiva Geisse Date: Tue, 23 Jan 2024 08:29:48 -0800 Subject: [PATCH 0163/1133] early load of static extension finder v2 Summary: if static extension is on, we load it as early as possible NOTE: if this end up breaking a cogwheel, or a untested binary, just disable native python on it! Reviewed By: zsol Differential Revision: D52860272 fbshipit-source-id: 994e48ad60fc33e80c08badf5a0ff620a25f6c99 --- prelude/python/tools/static_extension_finder.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/prelude/python/tools/static_extension_finder.py b/prelude/python/tools/static_extension_finder.py index 9b278d3b7..f3be8f919 100644 --- a/prelude/python/tools/static_extension_finder.py +++ b/prelude/python/tools/static_extension_finder.py @@ -5,8 +5,6 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -import sys -from importlib.machinery import ModuleSpec # Add a try except to force eager importing try: @@ -17,6 +15,9 @@ class StaticExtensionFinder: + # pyre-fixme + ModuleSpec = None + @classmethod # pyre-fixme[3]: Return type must be annotated. # pyre-fixme[2]: Parameter must be annotated. @@ -25,16 +26,22 @@ def find_spec(cls, fullname, path, target=None): Use fullname to look up the PyInit function in the main binary. Returns None if not present. This allows importing CExtensions that have been statically linked in. """ + if not fullname: return None if not _check_module(fullname): return None - spec = ModuleSpec( + spec = cls.ModuleSpec( fullname, StaticExtensionLoader, origin="static-extension", is_package=False ) return spec -# pyre-fixme[3]: Return type must be annotated. -def _initialize(): +def _initialize() -> None: + # This imports are here to avoid tricking circular dependencies. 
see S389486 + import sys + from importlib.machinery import ModuleSpec + + StaticExtensionFinder.ModuleSpec = ModuleSpec + sys.meta_path.insert(0, StaticExtensionFinder) From 7e7309510a48578917a3132ba669dca3ccf420d7 Mon Sep 17 00:00:00 2001 From: Justin Trudell Date: Tue, 23 Jan 2024 15:31:05 -0800 Subject: [PATCH 0164/1133] python_binary: rename [debuginfo] subtarget to [par-debuginfo] Summary: Follow-up of D52905959, see in-code comments: cpp binaries already emit a `debuginfo` subtarget with a format like P1085371139, quite different than the py one. As such, the easiest short-term option here is to rename. Reviewed By: zsol Differential Revision: D53010272 fbshipit-source-id: b411572c80aa84e6e5512e41ed41eb68e6a9eb82 --- prelude/python/make_py_package.bzl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 983901f0b..3f227b334 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -191,7 +191,9 @@ def make_py_package( ) default.sub_targets[style] = make_py_package_providers(pex_providers) - default.sub_targets["debuginfo"] = _debuginfo_subtarget(ctx, debug_artifacts) + # cpp binaries already emit a `debuginfo` subtarget with a different format, + # so we opt to use a more specific subtarget + default.sub_targets["par-debuginfo"] = _debuginfo_subtarget(ctx, debug_artifacts) return default def _make_py_package_impl( From ab40f7b5c704c21e04176ef5895920a3640bf2cd Mon Sep 17 00:00:00 2001 From: Tianyu Li Date: Wed, 24 Jan 2024 14:27:43 -0800 Subject: [PATCH 0165/1133] Pass through allow_cache_upload in cxx library Summary: WhatsApp cxx library rules do not run on RE right now because it uses toolchains from NDK installed locally. This diff passes through allow_cache_upload which allows cxx library to be uploaded to RE cache. 
Differential Revision: D53033381 fbshipit-source-id: 8b6c7fc100fc0b76bdb4fbb0e2a208b62a05b2f8 --- prelude/cxx/compile.bzl | 4 +++- prelude/cxx/cxx_library.bzl | 4 ++-- prelude/decls/cxx_rules.bzl | 3 ++- prelude/decls/go_rules.bzl | 3 ++- prelude/decls/ios_rules.bzl | 6 ++++-- 5 files changed, 13 insertions(+), 7 deletions(-) diff --git a/prelude/cxx/compile.bzl b/prelude/cxx/compile.bzl index 025f09919..03a5f4a44 100644 --- a/prelude/cxx/compile.bzl +++ b/prelude/cxx/compile.bzl @@ -360,7 +360,8 @@ def create_compile_cmds( def compile_cxx( ctx: AnalysisContext, src_compile_cmds: list[CxxSrcCompileCommand], - pic: bool = False) -> list[CxxCompileOutput]: + pic: bool = False, + allow_cache_upload: bool = False) -> list[CxxCompileOutput]: """ For a given list of src_compile_cmds, generate output artifacts. """ @@ -461,6 +462,7 @@ def compile_cxx( category = src_compile_cmd.cxx_compile_cmd.category, identifier = identifier, dep_files = action_dep_files, + allow_cache_upload = allow_cache_upload, ) # If we're building with split debugging, where the debug info is in the diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 714aaf1b3..0b1f3fdf2 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -906,12 +906,12 @@ def cxx_compile_srcs( ) # Define object files. 
- pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = True) + pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = True, allow_cache_upload = ctx.attrs.allow_cache_upload) pic = _get_library_compile_output(ctx, pic_cxx_outs, impl_params.extra_link_input) non_pic = None if preferred_linkage != Linkage("shared"): - non_pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = False) + non_pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = False, allow_cache_upload = ctx.attrs.allow_cache_upload) non_pic = _get_library_compile_output(ctx, non_pic_cxx_outs, impl_params.extra_link_input) return _CxxCompiledSourcesOutput( diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index a942522ab..ebc326fce 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -588,7 +588,8 @@ cxx_library = prelude_rule( "weak_framework_names": attrs.list(attrs.string(), default = []), "xcode_private_headers_symlinks": attrs.option(attrs.bool(), default = None), "xcode_public_headers_symlinks": attrs.option(attrs.bool(), default = None), - } + } | + buck.allow_cache_upload_arg() ), ) diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index bedb9e94d..8b3fbc982 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -125,7 +125,8 @@ cgo_library = prelude_rule( "thin_lto": attrs.bool(default = False), "version_universe": attrs.option(attrs.string(), default = None), "weak_framework_names": attrs.list(attrs.string(), default = []), - } + } | + buck.allow_cache_upload_arg() ), ) diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index a42dbfd9b..7bfbf477d 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -513,7 +513,8 @@ apple_library = prelude_rule( "uses_modules": attrs.bool(default = False), "xcode_private_headers_symlinks": attrs.option(attrs.bool(), default = None), 
"xcode_public_headers_symlinks": attrs.option(attrs.bool(), default = None), - } + } | + buck.allow_cache_upload_arg() ), ) @@ -781,7 +782,8 @@ apple_test = prelude_rule( "xcode_private_headers_symlinks": attrs.option(attrs.bool(), default = None), "xcode_product_type": attrs.option(attrs.string(), default = None), "xcode_public_headers_symlinks": attrs.option(attrs.bool(), default = None), - } + } | + buck.allow_cache_upload_arg() ), ) From 649e45b37d9f7e3fb342ba814d5e72a972412bf0 Mon Sep 17 00:00:00 2001 From: Anna Kukliansky Date: Wed, 24 Jan 2024 14:49:53 -0800 Subject: [PATCH 0166/1133] support GO tests Summary: as title Reviewed By: ndmitchell Differential Revision: D53019875 fbshipit-source-id: 8a9f001c40aa3d50cb41e54fce98b9963256cbf6 --- prelude/decls/go_rules.bzl | 4 +++- prelude/go/go_test.bzl | 12 +++++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index 8b3fbc982..525f0b9b7 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -14,6 +14,7 @@ load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode" load(":cxx_common.bzl", "cxx_common") load(":go_common.bzl", "go_common") load(":native_common.bzl", "native_common") +load(":re_test_common.bzl", "re_test_common") BuildMode = ["executable", "c_shared", "c_archive"] @@ -447,7 +448,8 @@ go_test = prelude_rule( "platform": attrs.option(attrs.string(), default = None), "runner": attrs.option(attrs.dep(), default = None), "specs": attrs.option(attrs.arg(json = True), default = None), - } + } | + re_test_common.test_args() ), ) diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index 67200ffeb..cd15e0458 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -9,6 +9,10 @@ load( "@prelude//linking:link_info.bzl", "LinkStyle", ) +load( + "@prelude//tests:re_utils.bzl", + "get_re_executors_from_props", +) load( "@prelude//utils:utils.bzl", "map_val", @@ -124,6 +128,9 @@ 
def go_test_impl(ctx: AnalysisContext) -> list[Provider]: for resource in ctx.attrs.resources: run_cmd.hidden(ctx.actions.copy_file(resource.short_path, resource)) + # Setup RE executors based on the `remote_execution` param. + re_executor, executor_overrides = get_re_executors_from_props(ctx) + return inject_test_run_info( ctx, ExternalRunnerTestInfo( @@ -132,8 +139,11 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: env = ctx.attrs.env, labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, + default_executor = re_executor, + executor_overrides = executor_overrides, # FIXME: Consider setting to true - run_from_project_root = False, + run_from_project_root = re_executor != None, + use_project_relative_paths = re_executor != None, ), ) + [ DefaultInfo( From 1127cf87f0c498d168cf05dbf645f16104d4b65a Mon Sep 17 00:00:00 2001 From: Nate Stedman Date: Thu, 25 Jan 2024 07:43:57 -0800 Subject: [PATCH 0167/1133] Encode a pluginProperties for each processor Reviewed By: IanChilds Differential Revision: D53010672 fbshipit-source-id: 63254c90ec7f2691076a8af43b8004812647a164 --- prelude/jvm/cd_jar_creator_util.bzl | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl index 18966fb32..30db9ddce 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl +++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -250,22 +250,27 @@ def encode_ap_params(annotation_processor_properties: AnnotationProcessorPropert return encoded_ap_params def encode_plugin_params(plugin_params: [PluginParams, None]) -> [struct, None]: - # TODO(cjhopman): We should change plugins to not be merged together just like APs. 
encoded_plugin_params = None if plugin_params: encoded_plugin_params = struct( parameters = [], - pluginProperties = [struct( - canReuseClassLoader = False, - doesNotAffectAbi = False, - supportsAbiGenerationFromSource = False, - processorNames = plugin_params.processors, - classpath = plugin_params.deps.project_as_json("javacd_json") if plugin_params.deps else [], - pathParams = {}, - )], + pluginProperties = [ + encode_plugin_properties(processor, plugin_params) + for processor in plugin_params.processors + ], ) return encoded_plugin_params +def encode_plugin_properties(processor: str, plugin_params: PluginParams) -> struct: + return struct( + canReuseClassLoader = False, + doesNotAffectAbi = False, + supportsAbiGenerationFromSource = False, + processorNames = [processor], + classpath = plugin_params.deps.project_as_json("javacd_json") if plugin_params.deps else [], + pathParams = {}, + ) + def encode_base_jar_command( javac_tool: [str, RunInfo, Artifact, None], target_type: TargetType, From d55aa05796a498e3bc72b2f9b4284b822432cc8d Mon Sep 17 00:00:00 2001 From: Lukasz Indyk Date: Thu, 25 Jan 2024 21:09:14 -0800 Subject: [PATCH 0168/1133] android screenshot tests on real devices - 2/n Summary: changed platform and emulator constants to re_platform_ and re_emulator_ since without this following the same pattern of some_name_ (with trailing _) it is awfully easy to use it wrong, as i already did in next diff. 
Reviewed By: IanChilds Differential Revision: D53007893 fbshipit-source-id: bc915177a67a67b4eb6c3424a22404765d94145c --- prelude/android/android_instrumentation_test.bzl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prelude/android/android_instrumentation_test.bzl b/prelude/android/android_instrumentation_test.bzl index 87bb007db..1fd2e61be 100644 --- a/prelude/android/android_instrumentation_test.bzl +++ b/prelude/android/android_instrumentation_test.bzl @@ -127,9 +127,9 @@ def _compute_emulator_subplatform(labels: list[str]) -> str: return emulator_subplatform_labels[0].replace("re_emulator_", "") def _compute_emulator_platform(labels: list[str]) -> str: - emulator_platform_labels = [label for label in labels if label.startswith("re_platform")] - expect(len(emulator_platform_labels) <= 1, "multiple 're_platform' labels were found:[{}], there must be only one!".format(", ".join(emulator_platform_labels))) + emulator_platform_labels = [label for label in labels if label.startswith("re_platform_")] + expect(len(emulator_platform_labels) <= 1, "multiple 're_platform_' labels were found:[{}], there must be only one!".format(", ".join(emulator_platform_labels))) if len(emulator_platform_labels) == 0: return DEFAULT_ANDROID_PLATFORM else: # len(emulator_platform_labels) == 1: - return emulator_platform_labels[0].replace("re_platform", "") + return emulator_platform_labels[0].replace("re_platform_", "") From d13495040d0a7aedf194c5140391b33c01a2c389 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Fri, 26 Jan 2024 01:04:46 -0800 Subject: [PATCH 0169/1133] Backout D48548854: buck2/rust: use early-kill approach to generating metadata for pipelined builds Summary: Backout {D48548854}. 
See next diff Reviewed By: dtolnay Differential Revision: D52537366 fbshipit-source-id: d56d0a8d303bac005da11b566203660f66a67706 --- prelude/rust/build.bzl | 46 ++++++++++++++---------------- prelude/rust/tools/rustc_action.py | 37 ++++-------------------- 2 files changed, 28 insertions(+), 55 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 4634a22a6..ed4d8aea6 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -422,7 +422,6 @@ def rust_compile( lints, # Report unused --extern crates in the notification stream. ["--json=unused-externs-silent", "-Wunused-crate-dependencies"] if toolchain_info.report_unused_deps else [], - "--json=artifacts", # only needed for pipeline but no harm in always leaving it enabled common_args.args, cmd_args("--remap-path-prefix=", compile_ctx.symlinked_srcs, path_sep, "=", ctx.label.path, path_sep, delimiter = ""), compile_ctx.linker_args, @@ -510,7 +509,6 @@ def rust_compile( is_binary = is_binary, allow_cache_upload = allow_cache_upload, crate_map = common_args.crate_map, - only_artifact = "metadata" if toolchain_info.pipelined and emit == Emit("metadata") else None, ) # Add clippy diagnostic targets for check builds @@ -1061,16 +1059,16 @@ def _rustc_emit( simple_crate = attr_simple_crate_for_filenames(ctx) crate_type = params.crate_type - # Metadata for pipelining needs has enough info to be used as an input for - # dependents. To do this reliably, follow Cargo's pattern of always doing - # --emit metadata,link, but only using the output we actually need. + # Metadata for pipelining needs has enough info to be used as an input + # for dependents. To do this reliably, we actually emit "link" but + # suppress actual codegen with -Zno-codegen. # # We don't bother to do this with "codegen" crates - ie, ones which are - # linked into an artifact like binaries and dylib, since they're not used as - # a pipelined dependency input. 
- pipeline_artifact = toolchain_info.pipelined and \ - emit in (Emit("metadata"), Emit("link")) and \ - not crate_type_codegen(crate_type) + # linked into an artifact like binaries and dylib, since they're not + # used as a pipelined dependency input. + pipeline_meta = emit == Emit("metadata") and \ + toolchain_info.pipelined and \ + not crate_type_codegen(crate_type) emit_args = cmd_args() if emit in predeclared_outputs: @@ -1078,19 +1076,23 @@ def _rustc_emit( else: extra_hash = "-" + _metadata(ctx.label, False)[1] emit_args.add("-Cextra-filename={}".format(extra_hash)) - filename = subdir + "/" + output_filename(simple_crate, emit, params, extra_hash) + if pipeline_meta: + # Make sure hollow rlibs are distinct from real ones + filename = subdir + "/hollow/" + output_filename(simple_crate, Emit("link"), params, extra_hash) + else: + filename = subdir + "/" + output_filename(simple_crate, emit, params, extra_hash) emit_output = ctx.actions.declare_output(filename) - # For pipelined builds if we're emitting either metadata or link then make - # sure we generate both and take the one we want. - if pipeline_artifact: - metaext = "" if emit == Emit("metadata") else "_unwanted" - linkext = "" if emit == Emit("link") else "_unwanted" - + if pipeline_meta: + # If we're doing a pipelined build, instead of emitting an actual rmeta + # we emit a "hollow" .rlib - ie, it only contains lib.rmeta and no object + # code. It should contain full information needed by any dependent + # crate which is generating code (MIR, etc). 
+ # Requires https://github.com/rust-lang/rust/pull/86045 emit_args.add( - cmd_args("--emit=metadata=", emit_output.as_output(), metaext, delimiter = ""), - cmd_args("--emit=link=", emit_output.as_output(), linkext, delimiter = ""), + cmd_args(emit_output.as_output(), format = "--emit=link={}"), + "-Zno-codegen", ) elif emit == Emit("expand"): emit_args.add( @@ -1130,8 +1132,7 @@ def _rustc_invoke( is_binary: bool, allow_cache_upload: bool, crate_map: list[(CrateName, Label)], - env: dict[str, [ResolvedStringWithMacros, Artifact]] = {}, - only_artifact: [None, str] = None) -> (dict[str, Artifact], [Artifact, None]): + env: dict[str, [ResolvedStringWithMacros, Artifact]] = {}) -> (dict[str, Artifact], [Artifact, None]): exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" toolchain_info = compile_ctx.toolchain_info @@ -1153,9 +1154,6 @@ def _rustc_invoke( "--buck-target={}".format(ctx.label.raw_target()), ) - if only_artifact: - compile_cmd.add("--only-artifact=" + only_artifact) - for k, v in crate_map: compile_cmd.add(crate_map_arg(ctx, compile_ctx, k, v)) for k, v in plain_env.items(): diff --git a/prelude/rust/tools/rustc_action.py b/prelude/rust/tools/rustc_action.py index a4097076b..32c055e02 100755 --- a/prelude/rust/tools/rustc_action.py +++ b/prelude/rust/tools/rustc_action.py @@ -58,7 +58,6 @@ class Args(NamedTuple): buck_target: Optional[str] failure_filter: Optional[IO[bytes]] required_output: Optional[List[Tuple[str, str]]] - only_artifact: Optional[str] rustc: List[str] @@ -119,12 +118,6 @@ def arg_parse() -> Args: help="Required output path we expect rustc to generate " "(and filled with a placeholder on a filtered failure)", ) - parser.add_argument( - "--only-artifact", - metavar="TYPE", - help="Terminate rustc after requested artifact type (metadata, link, etc) has been emitted. 
" - "(Assumes compiler is invoked with --error-format=json --json=artifacts)", - ) parser.add_argument( "rustc", nargs=argparse.REMAINDER, @@ -139,14 +132,13 @@ async def handle_output( # noqa: C901 proc: asyncio.subprocess.Process, args: Args, crate_map: Dict[str, str], -) -> Tuple[bool, bool]: +) -> bool: got_error_diag = False - shutdown = False proc_stderr = proc.stderr assert proc_stderr is not None - while not shutdown: + while True: line = await proc_stderr.readline() if line is None or line == b"": @@ -161,12 +153,7 @@ async def handle_output( # noqa: C901 if DEBUG: print(f"diag={repr(diag)}", end="\n") - # We have to sniff the shape of diag record based on what fields it has set. - if "artifact" in diag and "emit" in diag: - if diag["emit"] == args.only_artifact: - shutdown = True - continue - elif "unused_extern_names" in diag: + if "unused_extern_names" in diag: unused_names = diag["unused_extern_names"] # Empty unused_extern_names is just noise. @@ -219,7 +206,7 @@ async def handle_output( # noqa: C901 if args.diag_txt: args.diag_txt.close() - return (got_error_diag, shutdown) + return got_error_diag async def main() -> int: @@ -304,24 +291,12 @@ async def main() -> int: stderr=subprocess.PIPE, limit=1_000_000, ) - (got_error_diag, shutdown) = await handle_output(proc, args, crate_map) - - if shutdown: - # We got what we want so shut down early - try: - proc.terminate() - except ProcessLookupError: - # The process already terminated on its own. 
- pass - await proc.wait() - res = 0 - else: - res = await proc.wait() + got_error_diag = await handle_output(proc, args, crate_map) + res = await proc.wait() if DEBUG: print( f"res={repr(res)} " - f"shutdown={shutdown} " f"got_error_diag={got_error_diag} " f"args.failure_filter {args.failure_filter}", end="\n", From af687f086e02971912b47990ecff649592f3472f Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Fri, 26 Jan 2024 01:07:29 -0800 Subject: [PATCH 0170/1133] deduplicate bundle spec for code signing too Summary: If there are 2 identical entries in bundle specwith codesign on copy enabled, signing will fail unless we deduplicate. Reviewed By: chatura-atapattu Differential Revision: D52920950 fbshipit-source-id: 9539997c6d2ccc249134df842c13ba914cc8f5bf --- .../apple/tools/bundling/assemble_bundle.py | 23 ++++--------------- prelude/apple/tools/bundling/main.py | 17 ++++++++++++++ 2 files changed, 22 insertions(+), 18 deletions(-) diff --git a/prelude/apple/tools/bundling/assemble_bundle.py b/prelude/apple/tools/bundling/assemble_bundle.py index 174840015..86ef7d409 100644 --- a/prelude/apple/tools/bundling/assemble_bundle.py +++ b/prelude/apple/tools/bundling/assemble_bundle.py @@ -27,36 +27,23 @@ def assemble_bundle( incremental_context: Optional[IncrementalContext], check_conflicts: bool, ) -> Optional[List[IncrementalStateItem]]: - # It's possible to have the same spec multiple times as different - # apple_resource() targets can refer to the _same_ resource file. - # - # On RE, we're not allowed to overwrite files, so prevent doing - # identical file copies. - # - # Do not reorder spec items to achieve determinism. - # Rely on the fact that `dict` preserves key order. - deduplicated_spec = list(dict.fromkeys(spec)) - # Force same sorting as in Buck1 for `SourcePathWithAppleBundleDestination` - # WARNING: This logic is tightly coupled with how spec filtering is done in `_filter_conflicting_paths` method during incremental bundling. 
Don't change unless you fully understand what is going on here. - deduplicated_spec.sort() - incremental_result = None if incremental_context: - if should_assemble_incrementally(deduplicated_spec, incremental_context): + if should_assemble_incrementally(spec, incremental_context): incremental_result = _assemble_incrementally( bundle_path, - deduplicated_spec, + spec, incremental_context.metadata, cast(IncrementalState, incremental_context.state), check_conflicts, ) else: - _assemble_non_incrementally(bundle_path, deduplicated_spec, check_conflicts) + _assemble_non_incrementally(bundle_path, spec, check_conflicts) incremental_result = calculate_incremental_state( - deduplicated_spec, incremental_context.metadata + spec, incremental_context.metadata ) else: - _assemble_non_incrementally(bundle_path, deduplicated_spec, check_conflicts) + _assemble_non_incrementally(bundle_path, spec, check_conflicts) # External tooling (e.g., Xcode) might depend on the timestamp of the bundle bundle_path.touch() diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index 0bf0cb1d6..752ed2d31 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -289,6 +289,7 @@ def _main() -> None: with args.spec.open(mode="rb") as spec_file: spec = json.load(spec_file, object_hook=lambda d: BundleSpecItem(**d)) + spec = _deduplicate_spec(spec) incremental_context = _incremental_context( incremenatal_state_path=args.incremental_state, @@ -470,6 +471,22 @@ def _write_incremental_state( raise +def _deduplicate_spec(spec: List[BundleSpecItem]) -> List[BundleSpecItem]: + # It's possible to have the same spec multiple times as different + # apple_resource() targets can refer to the _same_ resource file. + # + # On RE, we're not allowed to overwrite files, so prevent doing + # identical file copies. + # + # Do not reorder spec items to achieve determinism. + # Rely on the fact that `dict` preserves key order. 
+ deduplicated_spec = list(dict.fromkeys(spec)) + # Force same sorting as in Buck1 for `SourcePathWithAppleBundleDestination` + # WARNING: This logic is tightly coupled with how spec filtering is done in `_filter_conflicting_paths` method during incremental bundling. Don't change unless you fully understand what is going on here. + deduplicated_spec.sort() + return deduplicated_spec + + def _setup_logging( stderr_level: int, file_level: int, log_path: Optional[Path] ) -> None: From 7938448d9e2cc04df0911d64c82e828e64e5c015 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Fri, 26 Jan 2024 06:32:16 -0800 Subject: [PATCH 0171/1133] Add option to exclude java deps from android_aar Summary: Requested here, seems reasonable: https://fb.workplace.com/groups/kotlinandroidqa/permalink/3681846588761224 Reviewed By: navidqar Differential Revision: D53084551 fbshipit-source-id: 0b11dcdb60ea8d3cb437812722e77d49662f0783 --- prelude/android/android_aar.bzl | 5 ++++- prelude/decls/android_rules.bzl | 1 + 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/prelude/android/android_aar.bzl b/prelude/android/android_aar.bzl index 1b07a4ed8..b0c736f2c 100644 --- a/prelude/android/android_aar.bzl +++ b/prelude/android/android_aar.bzl @@ -16,13 +16,16 @@ load("@prelude//android:cpu_filters.bzl", "CPU_FILTER_FOR_DEFAULT_PLATFORM", "CP load("@prelude//android:util.bzl", "create_enhancement_context") load("@prelude//java:java_providers.bzl", "get_all_java_packaging_deps", "get_all_java_packaging_deps_from_packaging_infos") load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") +load("@prelude//utils:set.bzl", "set") def android_aar_impl(ctx: AnalysisContext) -> list[Provider]: deps_by_platform = get_deps_by_platform(ctx) primary_platform = CPU_FILTER_FOR_PRIMARY_PLATFORM if CPU_FILTER_FOR_PRIMARY_PLATFORM in deps_by_platform else CPU_FILTER_FOR_DEFAULT_PLATFORM deps = deps_by_platform[primary_platform] - java_packaging_deps = [packaging_dep for packaging_dep in 
get_all_java_packaging_deps(ctx, deps)] + excluded_java_packaging_deps = get_all_java_packaging_deps(ctx, ctx.attrs.excluded_java_deps) + excluded_java_packaging_deps_targets = set([excluded_dep.label.raw_target() for excluded_dep in excluded_java_packaging_deps]) + java_packaging_deps = [packaging_dep for packaging_dep in get_all_java_packaging_deps(ctx, deps) if not excluded_java_packaging_deps_targets.contains(packaging_dep.label.raw_target())] android_packageable_info = merge_android_packageable_info(ctx.label, ctx.actions, deps) android_manifest = get_manifest(ctx, android_packageable_info, manifest_entries = {}) diff --git a/prelude/decls/android_rules.bzl b/prelude/decls/android_rules.bzl index 892a916f9..55d11529b 100644 --- a/prelude/decls/android_rules.bzl +++ b/prelude/decls/android_rules.bzl @@ -116,6 +116,7 @@ android_aar = prelude_rule( "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), "enable_relinker": attrs.bool(default = False), + "excluded_java_deps": attrs.list(attrs.dep(), default = []), "extra_arguments": attrs.list(attrs.string(), default = []), "extra_kotlinc_arguments": attrs.list(attrs.string(), default = []), "extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []), From 5d71fe38103c210e1b0b7d54aa45e35c07734abf Mon Sep 17 00:00:00 2001 From: Adrian Catana Date: Fri, 26 Jan 2024 08:21:55 -0800 Subject: [PATCH 0172/1133] Only xz-compress native libs in modules if base apk does the same Summary: As in the title. We will get rid of this compression option (with XZ) altogether in the last diff on the stack. The aim is to remove the concatenation hack + XZ compression, and rely on superpack only to compress everything for us. Otherwise, asset libraries will be shipped uncompressed. Since D52296279, all major apps should have voltron libs compression turned on. 
Without this change, once we switch from xzso to noxz/xzdex, native libraries in voltron modules will still be packaged as libs.xzs. We don't want that, since with D52389378 we're expecting individual libraries to run superpack on top of. Reviewed By: IanChilds Differential Revision: D53000687 fbshipit-source-id: 4844e24cfeecf4be46166f24f513420ae678d565 --- .../android_binary_native_library_rules.bzl | 39 ++++++++++++++----- .../tools/combine_native_library_dirs.py | 2 +- 2 files changed, 30 insertions(+), 11 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 4b606d506..b4aba849a 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -88,8 +88,8 @@ load("@prelude//utils:utils.bzl", "dedupe_by_value") # # Any native library that is not part of the root module (i.e. it is part of some other Voltron # module) is automatically packaged as an asset, and the assets for each module are compressed -# to a single `assets//libs.xz`. Similarly, the metadata for each module is stored -# at `assets//libs.txt`. +# to a single `assets//libs.xz` only if `compress_asset_libraries` is set to True. +# Similarly, the metadata for each module is stored at `assets//libs.txt`. 
def get_android_binary_native_library_info( enhance_ctx: EnhancementContext, @@ -138,7 +138,7 @@ def get_android_binary_native_library_info( root_module_metadata_assets = ctx.actions.declare_output("root_module_metadata_assets_symlink") root_module_compressed_lib_assets = ctx.actions.declare_output("root_module_compressed_lib_assets_symlink") non_root_module_metadata_assets = ctx.actions.declare_output("non_root_module_metadata_assets_symlink") - non_root_module_compressed_lib_assets = ctx.actions.declare_output("non_root_module_compressed_lib_assets_symlink") + non_root_module_lib_assets = ctx.actions.declare_output("non_root_module_lib_assets_symlink") unstripped_native_libraries = ctx.actions.declare_output("unstripped_native_libraries") unstripped_native_libraries_json = ctx.actions.declare_output("unstripped_native_libraries_json") @@ -156,7 +156,7 @@ def get_android_binary_native_library_info( root_module_metadata_assets, root_module_compressed_lib_assets, non_root_module_metadata_assets, - non_root_module_compressed_lib_assets, + non_root_module_lib_assets, ] fake_input = ctx.actions.write("dynamic.trigger", "") @@ -345,7 +345,7 @@ def get_android_binary_native_library_info( ctx.actions.symlink_file(outputs[root_module_metadata_assets], dynamic_info.root_module_metadata_assets) ctx.actions.symlink_file(outputs[root_module_compressed_lib_assets], dynamic_info.root_module_compressed_lib_assets) ctx.actions.symlink_file(outputs[non_root_module_metadata_assets], dynamic_info.non_root_module_metadata_assets) - ctx.actions.symlink_file(outputs[non_root_module_compressed_lib_assets], dynamic_info.non_root_module_compressed_lib_assets) + ctx.actions.symlink_file(outputs[non_root_module_lib_assets], dynamic_info.non_root_module_lib_assets) ctx.actions.dynamic_output(dynamic = dynamic_inputs, inputs = [], outputs = dynamic_outputs, f = dynamic_native_libs_info) all_native_libs = ctx.actions.symlinked_dir("debug_all_native_libs", {"others": native_libs, "primary": 
native_libs_always_in_primary_apk}) @@ -364,7 +364,7 @@ def get_android_binary_native_library_info( native_libs_for_primary_apk = native_libs_for_primary_apk, exopackage_info = exopackage_info, root_module_native_lib_assets = [native_lib_assets_for_primary_apk, stripped_native_linkable_assets_for_primary_apk, root_module_metadata_assets, root_module_compressed_lib_assets], - non_root_module_native_lib_assets = [non_root_module_metadata_assets, non_root_module_compressed_lib_assets], + non_root_module_native_lib_assets = [non_root_module_metadata_assets, non_root_module_lib_assets], generated_java_code = generated_java_code, ) @@ -381,7 +381,7 @@ _NativeLibsAndAssetsInfo = record( root_module_metadata_assets = Artifact, root_module_compressed_lib_assets = Artifact, non_root_module_metadata_assets = Artifact, - non_root_module_compressed_lib_assets = Artifact, + non_root_module_lib_assets = Artifact, ) def _get_exopackage_info( @@ -465,6 +465,7 @@ def _get_native_libs_and_assets( root_module_compressed_lib_srcs = {} non_root_module_metadata_srcs = {} non_root_module_compressed_lib_srcs = {} + non_root_module_uncompressed_libs = [] assets_for_primary_apk = filter(None, [native_lib_assets_for_primary_apk, stripped_linkables.linkable_assets_for_primary_apk]) stripped_linkable_assets_for_primary_apk = stripped_linkables.linkable_assets_for_primary_apk if assets_for_primary_apk: @@ -481,8 +482,26 @@ def _get_native_libs_and_assets( for module, native_lib_assets in native_lib_module_assets_map.items(): metadata_file, native_library_paths = _get_native_libs_as_assets_metadata(ctx, native_lib_assets, module) non_root_module_metadata_srcs[paths.join(_get_native_libs_as_assets_dir(module), "libs.txt")] = metadata_file - compressed_lib_dir = _get_compressed_native_libs_as_assets(ctx, native_lib_assets, native_library_paths, module) - non_root_module_compressed_lib_srcs[_get_native_libs_as_assets_dir(module)] = compressed_lib_dir + if ctx.attrs.compress_asset_libraries: + 
compressed_lib_dir = _get_compressed_native_libs_as_assets(ctx, native_lib_assets, native_library_paths, module) + non_root_module_compressed_lib_srcs[_get_native_libs_as_assets_dir(module)] = compressed_lib_dir + else: + non_root_module_uncompressed_libs.extend(native_lib_assets) + + if non_root_module_uncompressed_libs: + expect(not non_root_module_compressed_lib_srcs, "Cannot have both uncompressed and compressed native libraries for a non-root module") + non_root_module_libs = ctx.actions.declare_output("non_root_module_libs") + ctx.actions.run( + cmd_args([ + ctx.attrs._android_toolchain[AndroidToolchainInfo].combine_native_library_dirs[RunInfo], + "--output-dir", + non_root_module_libs.as_output(), + "--library-dirs", + ] + non_root_module_uncompressed_libs), + category = "combine_non_root_module_native_libs", + ) + else: + non_root_module_libs = ctx.actions.symlinked_dir("non_root_module_libs", non_root_module_compressed_lib_srcs) combined_native_libs = ctx.actions.declare_output("combined_native_libs", dir = True) native_libs_metadata = ctx.actions.declare_output("native_libs_metadata.txt") @@ -516,7 +535,7 @@ def _get_native_libs_and_assets( root_module_metadata_assets = ctx.actions.symlinked_dir("root_module_metadata_assets", root_module_metadata_srcs), root_module_compressed_lib_assets = ctx.actions.symlinked_dir("root_module_compressed_lib_assets", root_module_compressed_lib_srcs), non_root_module_metadata_assets = ctx.actions.symlinked_dir("non_root_module_metadata_assets", non_root_module_metadata_srcs), - non_root_module_compressed_lib_assets = ctx.actions.symlinked_dir("non_root_module_compressed_lib_assets", non_root_module_compressed_lib_srcs), + non_root_module_lib_assets = non_root_module_libs, ) def _filter_prebuilt_native_library_dir( diff --git a/prelude/android/tools/combine_native_library_dirs.py b/prelude/android/tools/combine_native_library_dirs.py index 01fb3df82..bbb52597e 100644 --- 
a/prelude/android/tools/combine_native_library_dirs.py +++ b/prelude/android/tools/combine_native_library_dirs.py @@ -51,7 +51,7 @@ def main() -> None: lib, ) - output_path.parent.mkdir(exist_ok=True) + output_path.parent.mkdir(exist_ok=True, parents=True) relative_path_to_lib = os.path.relpath( os.path.realpath(lib), start=os.path.realpath(os.path.dirname(output_path)), From 140aff0b7fc01a14e5997fc72354febd701cf8f9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Germ=C3=A1n=20M=C3=A9ndez=20Bravo?= Date: Fri, 26 Jan 2024 10:25:37 -0800 Subject: [PATCH 0173/1133] Avoid using Error word Summary: Avoid using the word "Error" for warnings about startup functions not being ran. This is missleading and makes people thing unrelated errors are coming from this warning. Reviewed By: itamaro Differential Revision: D53061718 fbshipit-source-id: 0cf3870b016ffc5276535932ee687321d8e60fb3 --- prelude/python/tools/make_par/sitecustomize.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/prelude/python/tools/make_par/sitecustomize.py b/prelude/python/tools/make_par/sitecustomize.py index 1ee948107..f7cb4a165 100644 --- a/prelude/python/tools/make_par/sitecustomize.py +++ b/prelude/python/tools/make_par/sitecustomize.py @@ -11,6 +11,7 @@ import os import sys import threading +import warnings from importlib.machinery import PathFinder from importlib.util import module_from_spec @@ -83,17 +84,18 @@ def __startup__(): ], ) for name, var in startup_functions: - name, sep, func = var.partition(":") + mod, sep, func = var.partition(":") if sep: try: - module = importlib.import_module(name) + module = importlib.import_module(mod) getattr(module, func)() except Exception as e: # TODO: Ignoring errors for now. The way to properly fix this should be to make # sure we are still at the same binary that configured `STARTUP_` before importing. 
- print( - "Error running startup function %s:%s: %s" % (name, func, e), - file=sys.stderr, + warnings.warn( + "Startup function %s (%s:%s) not executed: %s" + % (mod, name, func, e), + stacklevel=1, ) From e951d473701c3a3eca632ac4fbcd8e7cd3f54751 Mon Sep 17 00:00:00 2001 From: Navid Qaragozlou Date: Fri, 26 Jan 2024 12:09:21 -0800 Subject: [PATCH 0174/1133] Make the print message clearer Summary: TSIA Reviewed By: IanChilds Differential Revision: D53092372 fbshipit-source-id: 261507008db289d63c7bfec9d9a9e1f0883b7c15 --- prelude/jvm/cd_jar_creator_util.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl index 30db9ddce..24022c72a 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl +++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -211,7 +211,7 @@ def _get_source_only_abi_compiling_deps(compiling_deps_tset: [JavaCompilingDepsT for d in source_only_abi_deps: info = d.get(JavaLibraryInfo) if not info: - fail("source_only_abi_deps must produce a JavaLibraryInfo but {} does not, please remove it".format(d)) + fail("source_only_abi_deps must produce a JavaLibraryInfo but '{}' does not, please remove it".format(d.label)) if info.library_output: source_only_abi_deps_filter[info.library_output.abi] = True From 686e233fe94c5e1e2e5b593a48345915618f1923 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Fri, 26 Jan 2024 14:17:09 -0800 Subject: [PATCH 0175/1133] Move `_rustc_emit` return values into record Summary: I need to add `env` to this in D53140347 and do not want to use a 4-tuple. 
Reviewed By: diliop Differential Revision: D53140348 fbshipit-source-id: 704a5b513b79ea70bfaf3e6c3df85f1bc5db6d17 --- prelude/rust/build.bzl | 38 ++++++++++++++++++++++++-------------- 1 file changed, 24 insertions(+), 14 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index ed4d8aea6..65146bfcb 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -435,7 +435,7 @@ def rust_compile( # use the predeclared one as the output after the failure filter action # below. Otherwise we'll use the predeclared outputs directly. if toolchain_info.failure_filter: - emit_output, emit_args, extra_out = _rustc_emit( + emit_op = _rustc_emit( ctx = ctx, compile_ctx = compile_ctx, emit = emit, @@ -444,7 +444,7 @@ def rust_compile( params = params, ) else: - emit_output, emit_args, extra_out = _rustc_emit( + emit_op = _rustc_emit( ctx = ctx, compile_ctx = compile_ctx, emit = emit, @@ -485,7 +485,7 @@ def rust_compile( inherited_link_args, ], "{}-{}".format(subdir, tempfile), - output_short_path = emit_output.short_path, + output_short_path = emit_op.output.short_path, ) linker_argsfile, _ = ctx.actions.write( "{}/__{}_linker_args.txt".format(subdir, tempfile), @@ -502,9 +502,9 @@ def rust_compile( ctx = ctx, compile_ctx = compile_ctx, prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), - rustc_cmd = cmd_args(toolchain_info.compiler, rustc_cmd, emit_args), + rustc_cmd = cmd_args(toolchain_info.compiler, rustc_cmd, emit_op.args), diag = "diag", - required_outputs = [emit_output], + required_outputs = [emit_op.output], short_cmd = common_args.short_cmd, is_binary = is_binary, allow_cache_upload = allow_cache_upload, @@ -514,7 +514,7 @@ def rust_compile( # Add clippy diagnostic targets for check builds if common_args.is_check: # We don't really need the outputs from this build, just to keep the artifact accounting straight - clippy_out, clippy_emit_args, _extra_out = _rustc_emit( + clippy_emit_op = _rustc_emit( ctx = ctx, compile_ctx = 
compile_ctx, emit = emit, @@ -539,10 +539,10 @@ def rust_compile( compile_ctx = compile_ctx, prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), # Lints go first to allow other args to override them. - rustc_cmd = cmd_args(compile_ctx.clippy_wrapper, clippy_lints, rustc_cmd, clippy_emit_args), + rustc_cmd = cmd_args(compile_ctx.clippy_wrapper, clippy_lints, rustc_cmd, clippy_emit_op.args), env = clippy_env, diag = "clippy", - required_outputs = [clippy_out], + required_outputs = [clippy_emit_op.output], short_cmd = common_args.short_cmd, is_binary = False, allow_cache_upload = False, @@ -557,7 +557,7 @@ def rust_compile( stderr = diag["diag.txt"] filter_prov = RustFailureFilter( buildstatus = build_status, - required = emit_output, + required = emit_op.output, stderr = stderr, ) @@ -570,11 +570,11 @@ def rust_compile( short_cmd = common_args.short_cmd, ) else: - filtered_output = emit_output + filtered_output = emit_op.output split_debug_mode = compile_ctx.cxx_toolchain_info.split_debug_mode or SplitDebugMode("none") if emit == Emit("link") and split_debug_mode != SplitDebugMode("none"): - dwo_output_directory = extra_out + dwo_output_directory = emit_op.extra_out # staticlibs and cdylibs are "bundled" in the sense that they are used # without their dependencies by the rest of the rules. This is normally @@ -605,7 +605,7 @@ def rust_compile( dwp_output = dwp( ctx, compile_ctx.cxx_toolchain_info, - emit_output, + emit_op.output, identifier = "{}/__{}_{}_dwp".format(common_args.subdir, common_args.tempfile, str(emit)), category_suffix = "rust", # TODO(T110378142): Ideally, referenced objects are a list of @@ -1047,6 +1047,12 @@ def _crate_root( fail("Could not infer crate_root. candidates=%s\nAdd 'crate_root = \"src/example.rs\"' to your attributes to disambiguate." 
% candidates.list()) +EmitOperation = record( + output = field(Artifact), + args = field(cmd_args), + extra_out = field(Artifact | None), +) + # Take a desired output and work out how to convince rustc to generate it def _rustc_emit( ctx: AnalysisContext, @@ -1054,7 +1060,7 @@ def _rustc_emit( emit: Emit, predeclared_outputs: dict[Emit, Artifact], subdir: str, - params: BuildParams) -> (Artifact, cmd_args, [Artifact, None]): + params: BuildParams) -> EmitOperation: toolchain_info = compile_ctx.toolchain_info simple_crate = attr_simple_crate_for_filenames(ctx) crate_type = params.crate_type @@ -1118,7 +1124,11 @@ def _rustc_emit( incremental_cmd = cmd_args(incremental_out.as_output(), format = "-Cincremental={}") emit_args.add(incremental_cmd) - return (emit_output, emit_args, extra_out) + return EmitOperation( + output = emit_output, + args = emit_args, + extra_out = extra_out, + ) # Invoke rustc and capture outputs def _rustc_invoke( From 6f987f0b99c53354f42036abb8a06d1656d39ec7 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Fri, 26 Jan 2024 14:17:32 -0800 Subject: [PATCH 0176/1133] Consistently set RUSTC_BOOTSTRAP when using -Z options Summary: `cxx`'s Buck2 CI is broken by {D52537366}. 
``` Action failed: root//third-party:unicode-width-0.1.11 (prelude//platforms:default#524f8da68ea2a374) (rustc rlib-pic-static_pic-metadata/unicode_width-metadata rlib,pic,metadata [diag]) error: the option `Z` is only accepted on the nightly compiler help: consider switching to a nightly toolchain: `rustup default nightly` note: selecting a toolchain with `+toolchain` arguments require a rustup proxy; see note: for more information about Rust's stability policy, see error: 1 nightly option were parsed ``` https://github.com/dtolnay/cxx/actions/runs/7668824008/job/20901419639 Reviewed By: diliop Differential Revision: D53140347 fbshipit-source-id: e2b6d3bbde5112bee950dca0d41fea6e6c05cbe4 --- prelude/rust/build.bzl | 39 ++++++++++++++++++++++++++++++--------- 1 file changed, 30 insertions(+), 9 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 65146bfcb..bcf136967 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -176,11 +176,9 @@ def generate_rustdoc( output = ctx.actions.declare_output(subdir) plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) + plain_env["RUSTDOC_BUCK_TARGET"] = cmd_args(str(ctx.label.raw_target())) rustdoc_cmd = cmd_args( - [cmd_args("--env=", k, "=", v, delimiter = "") for k, v in plain_env.items()], - [cmd_args("--path-env=", k, "=", v, delimiter = "") for k, v in path_env.items()], - cmd_args(str(ctx.label.raw_target()), format = "--env=RUSTDOC_BUCK_TARGET={}"), toolchain_info.rustdoc, toolchain_info.rustdoc_flags, ctx.attrs.rustdoc_flags, @@ -194,6 +192,7 @@ def generate_rustdoc( url_prefix = toolchain_info.extern_html_root_url_prefix if url_prefix != None: # Flag --extern-html-root-url used below is only supported on nightly. 
+ plain_env["RUSTC_BOOTSTRAP"] = cmd_args("1") rustdoc_cmd.add("-Zunstable-options") for dep in resolve_rust_deps(ctx, compile_ctx.dep_ctx): @@ -214,10 +213,16 @@ def generate_rustdoc( rustdoc_cmd.hidden(toolchain_info.rustdoc, compile_ctx.symlinked_srcs) + rustdoc_cmd_action = cmd_args( + [cmd_args("--env=", k, "=", v, delimiter = "") for k, v in plain_env.items()], + [cmd_args("--path-env=", k, "=", v, delimiter = "") for k, v in path_env.items()], + rustdoc_cmd, + ) + rustdoc_cmd = _long_command( ctx = ctx, exe = toolchain_info.rustc_action, - args = rustdoc_cmd, + args = rustdoc_cmd_action, argfile_name = "{}.args".format(subdir), ) @@ -315,14 +320,17 @@ def generate_rustdoc_test( for k, v in doc_path_env.items(): plain_env.pop(k, None) path_env[k] = v - plain_env["RUSTC_BOOTSTRAP"] = cmd_args("1") # for `-Zunstable-options` + + # `--runtool` is unstable. + plain_env["RUSTC_BOOTSTRAP"] = cmd_args("1") + unstable_options = ["-Zunstable-options"] rustdoc_cmd = cmd_args( [cmd_args("--env=", k, "=", v, delimiter = "") for k, v in plain_env.items()], [cmd_args("--path-env=", k, "=", v, delimiter = "") for k, v in path_env.items()], toolchain_info.rustdoc, "--test", - "-Zunstable-options", + unstable_options, cmd_args("--test-builder=", toolchain_info.compiler, delimiter = ""), toolchain_info.rustdoc_flags, ctx.attrs.rustdoc_flags, @@ -509,6 +517,7 @@ def rust_compile( is_binary = is_binary, allow_cache_upload = allow_cache_upload, crate_map = common_args.crate_map, + env = emit_op.env, ) # Add clippy diagnostic targets for check builds @@ -522,7 +531,7 @@ def rust_compile( subdir = common_args.subdir + "-clippy", params = params, ) - clippy_env = dict() + clippy_env = clippy_emit_op.env if toolchain_info.clippy_toml: # Clippy wants to be given a path to a directory containing a # clippy.toml (or .clippy.toml). 
Our buckconfig accepts an arbitrary @@ -1050,6 +1059,7 @@ def _crate_root( EmitOperation = record( output = field(Artifact), args = field(cmd_args), + env = field(dict[str, str]), extra_out = field(Artifact | None), ) @@ -1077,6 +1087,8 @@ def _rustc_emit( not crate_type_codegen(crate_type) emit_args = cmd_args() + emit_env = {} + if emit in predeclared_outputs: emit_output = predeclared_outputs[emit] else: @@ -1096,16 +1108,24 @@ def _rustc_emit( # code. It should contain full information needed by any dependent # crate which is generating code (MIR, etc). # Requires https://github.com/rust-lang/rust/pull/86045 + emit_env["RUSTC_BOOTSTRAP"] = "1" emit_args.add( cmd_args(emit_output.as_output(), format = "--emit=link={}"), "-Zno-codegen", ) elif emit == Emit("expand"): + emit_env["RUSTC_BOOTSTRAP"] = "1" emit_args.add( "-Zunpretty=expanded", cmd_args(emit_output.as_output(), format = "-o{}"), ) else: + if toolchain_info.pipelined: + # Even though no unstable flag is set on this branch, we need an identical + # environment between the `-Zno-codegen` and non-`-Zno-codegen` command or + # else there are "found possibly newer version of crate" errors. 
+ emit_env["RUSTC_BOOTSTRAP"] = "1" + # Assume https://github.com/rust-lang/rust/issues/85356 is fixed (ie # https://github.com/rust-lang/rust/pull/85362 is applied) emit_args.add(cmd_args("--emit=", emit.value, "=", emit_output.as_output(), delimiter = "")) @@ -1127,6 +1147,7 @@ def _rustc_emit( return EmitOperation( output = emit_output, args = emit_args, + env = emit_env, extra_out = extra_out, ) @@ -1142,7 +1163,7 @@ def _rustc_invoke( is_binary: bool, allow_cache_upload: bool, crate_map: list[(CrateName, Label)], - env: dict[str, [ResolvedStringWithMacros, Artifact]] = {}) -> (dict[str, Artifact], [Artifact, None]): + env: dict[str, str | ResolvedStringWithMacros | Artifact]) -> (dict[str, Artifact], [Artifact, None]): exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" toolchain_info = compile_ctx.toolchain_info @@ -1234,7 +1255,7 @@ _ESCAPED_NEWLINE_RE = regex("\\n") # path and non-path content, but we'll burn that bridge when we get to it.) def _process_env( compile_ctx: CompileContext, - env: dict[str, [ResolvedStringWithMacros, Artifact]], + env: dict[str, str | ResolvedStringWithMacros | Artifact], exec_is_windows: bool) -> (dict[str, cmd_args], dict[str, cmd_args]): # Values with inputs (ie artifact references). 
path_env = {} From 02f0b44cd19466cdae9c8b91c42c6b0b35a28378 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Fri, 26 Jan 2024 14:17:53 -0800 Subject: [PATCH 0177/1133] Rearrange `-Zno-codegen` setup to keep environment in sync Reviewed By: diliop Differential Revision: D53140346 fbshipit-source-id: bcc341ae48fd134635583bf584e43a933265cbb5 --- prelude/rust/build.bzl | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index bcf136967..b45900347 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -1088,6 +1088,7 @@ def _rustc_emit( emit_args = cmd_args() emit_env = {} + extra_out = None if emit in predeclared_outputs: emit_output = predeclared_outputs[emit] @@ -1102,18 +1103,7 @@ def _rustc_emit( emit_output = ctx.actions.declare_output(filename) - if pipeline_meta: - # If we're doing a pipelined build, instead of emitting an actual rmeta - # we emit a "hollow" .rlib - ie, it only contains lib.rmeta and no object - # code. It should contain full information needed by any dependent - # crate which is generating code (MIR, etc). - # Requires https://github.com/rust-lang/rust/pull/86045 - emit_env["RUSTC_BOOTSTRAP"] = "1" - emit_args.add( - cmd_args(emit_output.as_output(), format = "--emit=link={}"), - "-Zno-codegen", - ) - elif emit == Emit("expand"): + if emit == Emit("expand"): emit_env["RUSTC_BOOTSTRAP"] = "1" emit_args.add( "-Zunpretty=expanded", @@ -1121,17 +1111,27 @@ def _rustc_emit( ) else: if toolchain_info.pipelined: - # Even though no unstable flag is set on this branch, we need an identical - # environment between the `-Zno-codegen` and non-`-Zno-codegen` command or - # else there are "found possibly newer version of crate" errors. 
+ # Even though the unstable flag only appears on one of the branches, we need + # an identical environment between the `-Zno-codegen` and non-`-Zno-codegen` + # command or else there are "found possibly newer version of crate" errors. emit_env["RUSTC_BOOTSTRAP"] = "1" - # Assume https://github.com/rust-lang/rust/issues/85356 is fixed (ie - # https://github.com/rust-lang/rust/pull/85362 is applied) - emit_args.add(cmd_args("--emit=", emit.value, "=", emit_output.as_output(), delimiter = "")) + if pipeline_meta: + # If we're doing a pipelined build, instead of emitting an actual rmeta + # we emit a "hollow" .rlib - ie, it only contains lib.rmeta and no object + # code. It should contain full information needed by any dependent + # crate which is generating code (MIR, etc). + # + # IMPORTANT: this flag is the only way that the Emit("metadata") and + # Emit("link") operations are allowed to diverge without causing them to + # get different crate hashes. + emit_args.add("-Zno-codegen") + effective_emit = Emit("link") + else: + effective_emit = emit + + emit_args.add(cmd_args("--emit=", effective_emit.value, "=", emit_output.as_output(), delimiter = "")) - extra_out = None - if emit != Emit("expand"): # Strip file extension from directory name. base, _ext = paths.split_extension(output_filename(simple_crate, emit, params)) extra_dir = subdir + "/extras/" + base From e611741e733d2fce435ae4cf58b0cd705d03c6d1 Mon Sep 17 00:00:00 2001 From: Wei Han Date: Fri, 26 Jan 2024 23:47:30 -0800 Subject: [PATCH 0178/1133] Add support for privacy manifest Summary: Adding the initial implementation for packaging privacy manifest into apple bundle. No special destination is documented in apple doc, so putting this at the top level of the bundle. see https://developer.apple.com/documentation/bundleresources/privacy_manifest_files?language=objc for more details. 
Reviewed By: chatura-atapattu Differential Revision: D53032637 fbshipit-source-id: ec09b4fcb78d08e1afa318fd3815c6fb755c9e1b --- prelude/apple/apple_bundle_resources.bzl | 14 ++++++++++++++ prelude/apple/apple_resource_bundle.bzl | 1 + prelude/apple/user/apple_resource_bundle.bzl | 1 + prelude/decls/apple_common.bzl | 8 ++++++++ prelude/decls/ios_rules.bzl | 1 + 5 files changed, 25 insertions(+) diff --git a/prelude/apple/apple_bundle_resources.bzl b/prelude/apple/apple_bundle_resources.bzl index 0dbc6e93f..baf448d54 100644 --- a/prelude/apple/apple_bundle_resources.bzl +++ b/prelude/apple/apple_bundle_resources.bzl @@ -58,6 +58,7 @@ def get_apple_bundle_resource_part_list(ctx: AnalysisContext) -> AppleBundleReso parts = [] parts.extend(_create_pkg_info_if_needed(ctx)) + parts.extend(_copy_privacy_manifest_if_needed(ctx)) (resource_specs, asset_catalog_specs, core_data_specs, scene_kit_assets_spec, cxx_resource_specs) = _select_resources(ctx) @@ -128,6 +129,19 @@ def _create_pkg_info_if_needed(ctx: AnalysisContext) -> list[AppleBundlePart]: artifact = ctx.actions.write("PkgInfo", "APPLWRUN\n") return [AppleBundlePart(source = artifact, destination = AppleBundleDestination("metadata"))] +def _copy_privacy_manifest_if_needed(ctx: AnalysisContext) -> list[AppleBundlePart]: + privacy_manifest = ctx.attrs.privacy_manifest + if privacy_manifest == None: + return [] + + # According to apple docs, privacy manifest has to be named as `PrivacyInfo.xcprivacy` + if privacy_manifest.short_path.split("/", 1)[-1] == "PrivacyInfo.xcprivacy": + artifact = privacy_manifest + else: + output = ctx.actions.declare_output("PrivacyInfo.xcprivacy") + artifact = ctx.actions.copy_file(output.as_output(), privacy_manifest) + return [AppleBundlePart(source = artifact, destination = AppleBundleDestination("metadata"))] + def _select_resources(ctx: AnalysisContext) -> ((list[AppleResourceSpec], list[AppleAssetCatalogSpec], list[AppleCoreDataSpec], list[SceneKitAssetsSpec], 
list[CxxResourceSpec])): resource_group_info = get_resource_group_info(ctx) if resource_group_info: diff --git a/prelude/apple/apple_resource_bundle.bzl b/prelude/apple/apple_resource_bundle.bzl index 0ed45dfb4..2ea6fae29 100644 --- a/prelude/apple/apple_resource_bundle.bzl +++ b/prelude/apple/apple_resource_bundle.bzl @@ -51,6 +51,7 @@ _RESOURCE_BUNDLE_FIELDS = [ "info_plist", "info_plist_substitutions", "product_name", + "privacy_manifest", "resource_group", "resource_group_map", "within_view", diff --git a/prelude/apple/user/apple_resource_bundle.bzl b/prelude/apple/user/apple_resource_bundle.bzl index 18b82ec23..66c902dfb 100644 --- a/prelude/apple/user/apple_resource_bundle.bzl +++ b/prelude/apple/user/apple_resource_bundle.bzl @@ -37,6 +37,7 @@ def _apple_resource_bundle_attrs(): "info_plist": attrs.source(), "info_plist_substitutions": attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False, default = {}), "labels": attrs.list(attrs.string(), default = []), + "privacy_manifest": attrs.option(attrs.source(), default = None), "product_name": attrs.option(attrs.string(), default = None), "resource_group": attrs.option(attrs.string(), default = None), "resource_group_map": resource_group_map_attr(), diff --git a/prelude/decls/apple_common.bzl b/prelude/decls/apple_common.bzl index 3c44e6c83..ff6820adb 100644 --- a/prelude/decls/apple_common.bzl +++ b/prelude/decls/apple_common.bzl @@ -128,6 +128,13 @@ def _extra_xcode_files(): """), } +def _privacy_manifest_arg(): + return { + "privacy_manifest": attrs.option(attrs.source(), default = None, doc = """ + A path to an `.xcprivacy` file that will be placed in the bundle. 
+"""), + } + apple_common = struct( headers_arg = _headers_arg, exported_headers_arg = _exported_headers_arg, @@ -138,4 +145,5 @@ apple_common = struct( info_plist_substitutions_arg = _info_plist_substitutions_arg, extra_xcode_sources = _extra_xcode_sources, extra_xcode_files = _extra_xcode_files, + privacy_manifest_arg = _privacy_manifest_arg, ) diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index 7bfbf477d..213bb5558 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -342,6 +342,7 @@ apple_bundle = prelude_rule( } | apple_common.info_plist_arg() | apple_common.info_plist_substitutions_arg() | + apple_common.privacy_manifest_arg() | { "asset_catalogs_compilation_options": attrs.dict(key = attrs.string(), value = attrs.any(), default = {}, doc = """ A dict holding parameters for asset catalogs compiler (actool). Its options include: From 7e2023d15beae589321abf124ac7b3c9f4a40252 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sun, 28 Jan 2024 01:31:33 -0800 Subject: [PATCH 0179/1133] rules: Hide the rlibs from rustc Summary: The underlying observation here is that `native_unbundle_deps` has already done all the hard work associated with getting link groups working; Rust dependencies are available in the usual form on the build graph at link time. So if we were to just turn on link groups now, it would almost work; the only problem is that rustc would keep passing the rlibs to the linker, which appear before the link groups in the link line, and so get linked in and negate all the benefits of link groups. 
Using hollow rlibs prevents this from being a problem Reviewed By: dtolnay Differential Revision: D52537370 fbshipit-source-id: 7ca11f7a4b0d208ebe1c4eb47b78f1e6e195c389 --- prelude/rust/build.bzl | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index b45900347..4dfb5bba3 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -677,7 +677,20 @@ def dependency_args( strategy = strategy_info(dep.info, dep_link_strategy) - use_rmeta = is_check or (compile_ctx.toolchain_info.pipelined and not crate_type_codegen(crate_type) and not is_rustdoc_test) + # With `advanced_unstable_linking`, we unconditionally pass the metadata + # artifacts. There are two things that work together to make this possible + # in the case of binaries: + # + # 1. The actual rlibs appear in the link providers, so they'll still be + # available for the linker to link in + # 2. The metadata artifacts aren't rmetas, but rather rlibs that just + # don't contain any generated code. Rustc can't distinguish these + # from real rlibs, and so doesn't throw an error + # + # The benefit of doing this is that there's no requirment that the + # dependency's generated code be provided to the linker via an rlib. It + # could be provided by other means, say, a link group + use_rmeta = is_check or compile_ctx.dep_ctx.advanced_unstable_linking or (compile_ctx.toolchain_info.pipelined and not crate_type_codegen(crate_type) and not is_rustdoc_test) # Use rmeta dependencies whenever possible because they # should be cheaper to produce. 
From 3f1b616696ae85f6b35587c39e7eb3b06e52cdda Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sun, 28 Jan 2024 15:51:19 -0800 Subject: [PATCH 0180/1133] rules: Add transitive deps to link graph Summary: Explained in comment Reviewed By: dtolnay Differential Revision: D52537367 fbshipit-source-id: 78ac34bc2311af76bf463e9817dbab93ceb4986a --- prelude/rust/rust_library.bzl | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index b724f85b0..003f117f3 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -657,14 +657,22 @@ def _rust_link_providers( ), deps = inherited_graphs, ) - inherited_graphs = [linkable_graph] + + # We've already reported transitive deps on the inherited graphs, so for + # most purposes it would be fine to just have `linkable_graph` here. + # However, link groups do an analysis that relies on each symbol + # reference having a matching edge in the link graph, and so reexports + # and generics mean that we have to report a dependency on all + # transitive Rust deps and their immediate non-Rust deps link_graphs = inherited_graphs + [linkable_graph] else: merged_link_info = create_merged_link_info_for_propagation(ctx, inherited_link_infos) shared_libs = merge_shared_libraries( ctx.actions, deps = inherited_shlibs, ) - return (merged_link_info, shared_libs, inherited_graphs, inherited_exported_deps) + link_graphs = inherited_graphs + return (merged_link_info, shared_libs, link_graphs, inherited_exported_deps) def _rust_providers( ctx: AnalysisContext, From 6260a33f06f88407beb85c4dbbed807f0b7b6fca Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Mon, 29 Jan 2024 05:54:55 -0800 Subject: [PATCH 0181/1133] Print identities when signing fails due to missing identities Summary: When debugging issues with missing certs, it is useful to know which identities were found.
Reviewed By: chatura-atapattu, stepancheg, fabiomassimo Differential Revision: D52936001 fbshipit-source-id: c1d861bde4bd4d9426ed50e72d259372de1d5e46 --- prelude/apple/tools/code_signing/codesign_bundle.py | 1 + .../code_signing/provisioning_profile_diagnostics.py | 9 +++++++++ 2 files changed, 10 insertions(+) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 9303d6a74..e1ec8425a 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -92,6 +92,7 @@ def _select_provisioning_profile( diagnostics=mismatches, bundle_id=info_plist_metadata.bundle_id, provisioning_profiles_dir=provisioning_profiles_dir, + identities=identities, log_file_path=log_file_path, ) ) diff --git a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py index a94b1da8d..4021c245f 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py @@ -11,6 +11,8 @@ from .apple_platform import ApplePlatform +from .identity import CodeSigningIdentity + from .provisioning_profile_metadata import ProvisioningProfileMetadata META_IOS_DEVELOPER_CERTIFICATE_LINK: str = "https://www.internalfb.com/intern/qa/5198/how-do-i-get-the-fb-ios-developer-certificate" @@ -147,6 +149,7 @@ def interpret_provisioning_profile_diagnostics( diagnostics: List[IProvisioningProfileDiagnostics], bundle_id: str, provisioning_profiles_dir: Path, + identities: List[CodeSigningIdentity], log_file_path: Optional[Path] = None, ) -> str: if not diagnostics: @@ -182,10 +185,16 @@ def find_mismatch(class_type: Type[_T]) -> Optional[_T]: ) if mismatch := find_mismatch(DeveloperCertificateMismatch): + identities_description = ( + "WARNING: NO SIGNING IDENTITIES FOUND!" 
+ if len(identities) == 0 + else f"List of signing identities: `{identities}`." + ) return "".join( [ header, f"The provisioning profile `{mismatch.profile.file_path.name}` satisfies all constraints, but no matching certificates were found in your keychain. ", + identities_description, f"Please download and install the latest certificate from {META_IOS_DEVELOPER_CERTIFICATE_LINK}.", footer, ] From 169495f2ba1b8f301b65c686affab0ee4495846e Mon Sep 17 00:00:00 2001 From: Alvaro Leiva Geisse Date: Mon, 29 Jan 2024 09:26:19 -0800 Subject: [PATCH 0182/1133] annotating sitecustomize.py Summary: shutting pyre up on sitecustomize Differential Revision: D53140733 fbshipit-source-id: 125d4a243a30892a3c9109c07559f54958dee0df --- .../python/tools/make_par/sitecustomize.py | 19 +++++++------------ 1 file changed, 7 insertions(+), 12 deletions(-) diff --git a/prelude/python/tools/make_par/sitecustomize.py b/prelude/python/tools/make_par/sitecustomize.py index f7cb4a165..310fcf0a3 100644 --- a/prelude/python/tools/make_par/sitecustomize.py +++ b/prelude/python/tools/make_par/sitecustomize.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +from __future__ import annotations + import importlib import multiprocessing.util as mp_util import os @@ -18,17 +20,14 @@ lock = threading.Lock() -# pyre-fixme[3]: Return type must be annotated. -# pyre-fixme[2]: Parameter must be annotated. -def __patch_spawn(var_names, saved_env): +def __patch_spawn(var_names: tuple[str, ...], saved_env: dict[str, str]) -> None: std_spawn = mp_util.spawnv_passfds # pyre-fixme[53]: Captured variable `std_spawn` is not annotated. # pyre-fixme[53]: Captured variable `saved_env` is not annotated. # pyre-fixme[53]: Captured variable `var_names` is not annotated. - # pyre-fixme[3]: Return type must be annotated. # pyre-fixme[2]: Parameter must be annotated. 
- def spawnv_passfds(path, args, passfds): + def spawnv_passfds(path, args, passfds) -> None | int: with lock: try: for var in var_names: @@ -45,9 +44,7 @@ def spawnv_passfds(path, args, passfds): mp_util.spawnv_passfds = spawnv_passfds -# pyre-fixme[3]: Return type must be annotated. -# pyre-fixme[2]: Parameter must be annotated. -def __clear_env(patch_spawn=True): +def __clear_env(patch_spawn: bool = True) -> None: saved_env = {} darwin_vars = ("DYLD_LIBRARY_PATH", "DYLD_INSERT_LIBRARIES") linux_vars = ("LD_LIBRARY_PATH", "LD_PRELOAD") @@ -73,8 +70,7 @@ def __clear_env(patch_spawn=True): __patch_spawn(var_names, saved_env) -# pyre-fixme[3]: Return type must be annotated. -def __startup__(): +def __startup__() -> None: # ALL STARTUP_* methods will be called here in lexicographic order. startup_functions = sorted( [ @@ -99,8 +95,7 @@ def __startup__(): ) -# pyre-fixme[3]: Return type must be annotated. -def __passthrough_exec_module(): +def __passthrough_exec_module() -> None: # Delegate this module execution to the next module in the path, if any, # effectively making this sitecustomize.py a passthrough module. spec = PathFinder.find_spec( From d2ce0b644f98d2e97edefbaabb6d05844a86147c Mon Sep 17 00:00:00 2001 From: Andres Suarez Date: Mon, 29 Jan 2024 10:10:44 -0800 Subject: [PATCH 0183/1133] rustc_action: Fix DeprecationWarning: There is no current event loop Summary: Fixes: ... /paragon/pods/232374116/.../buck-out/arc_rust_test/gen/prelude/1b3768255e2c9e91/rust/tools/__rustc_action__/rustc_action.py:349: DeprecationWarning: There is no current event loop sys.exit(asyncio.get_event_loop().run_until_complete(main())) ... 
`get_event_loop` is deprecated in Python 3.12: https://docs.python.org/3/library/asyncio-eventloop.html#asyncio.get_event_loop Reviewed By: dtolnay Differential Revision: D53187021 fbshipit-source-id: 984923399313cf7126af1f037226af4037ebe121 --- prelude/rust/tools/rustc_action.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/rust/tools/rustc_action.py b/prelude/rust/tools/rustc_action.py index 32c055e02..4a9641016 100755 --- a/prelude/rust/tools/rustc_action.py +++ b/prelude/rust/tools/rustc_action.py @@ -346,4 +346,4 @@ async def main() -> int: # There is a bug with asyncio.run() on Windows: # https://bugs.python.org/issue39232 -sys.exit(asyncio.get_event_loop().run_until_complete(main())) +sys.exit(asyncio.new_event_loop().run_until_complete(main())) From 253437576662982e533419452c5b4941b2d9720c Mon Sep 17 00:00:00 2001 From: Nate Stedman Date: Mon, 29 Jan 2024 10:27:03 -0800 Subject: [PATCH 0184/1133] initial support for passing arguments to javac plugins Reviewed By: IanChilds Differential Revision: D52988100 fbshipit-source-id: 40d66e7106e1c40a3f4f7bb97da56f70fadbd7ed --- prelude/decls/android_rules.bzl | 3 +++ prelude/decls/groovy_rules.bzl | 2 ++ prelude/decls/java_rules.bzl | 3 +++ prelude/decls/kotlin_rules.bzl | 2 ++ prelude/decls/scala_rules.bzl | 2 ++ prelude/java/java_library.bzl | 3 +-- prelude/java/plugins/java_plugin.bzl | 31 ++++++++++++++++++---------- prelude/jvm/cd_jar_creator_util.bzl | 7 ++++--- 8 files changed, 37 insertions(+), 16 deletions(-) diff --git a/prelude/decls/android_rules.bzl b/prelude/decls/android_rules.bzl index 55d11529b..3c15a772a 100644 --- a/prelude/decls/android_rules.bzl +++ b/prelude/decls/android_rules.bzl @@ -152,6 +152,7 @@ android_aar = prelude_rule( "srcs": attrs.list(attrs.source(), default = []), "target": attrs.option(attrs.string(), default = None), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), 
attrs.list(attrs.string()), default = {}), } ), ) @@ -766,6 +767,7 @@ android_library = prelude_rule( "runtime_deps": attrs.list(attrs.dep(), default = []), "source_abi_verification_mode": attrs.option(attrs.enum(SourceAbiVerificationMode), default = None), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -1452,6 +1454,7 @@ robolectric_test = prelude_rule( "used_as_dependency_deprecated_do_not_use": attrs.bool(default = False), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), "vm_args": attrs.list(attrs.arg(), default = []), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } | jvm_common.k2() | re_test_common.test_args() ), diff --git a/prelude/decls/groovy_rules.bzl b/prelude/decls/groovy_rules.bzl index 0e5aef98b..91f71860c 100644 --- a/prelude/decls/groovy_rules.bzl +++ b/prelude/decls/groovy_rules.bzl @@ -131,6 +131,7 @@ groovy_library = prelude_rule( "runtime_deps": attrs.list(attrs.dep(), default = []), "source_abi_verification_mode": attrs.option(attrs.enum(SourceAbiVerificationMode), default = None), "source_only_abi_deps": attrs.list(attrs.dep(), default = []), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -188,6 +189,7 @@ groovy_test = prelude_rule( "use_cxx_libraries": attrs.option(attrs.bool(), default = None), "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None), "vm_args": attrs.list(attrs.arg(), default = []), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) diff --git a/prelude/decls/java_rules.bzl b/prelude/decls/java_rules.bzl index abc89185e..2f8a0424c 100644 --- a/prelude/decls/java_rules.bzl +++ b/prelude/decls/java_rules.bzl @@ -290,6 +290,7 @@ java_library = prelude_rule( "proguard_config": 
attrs.option(attrs.source(), default = None), "runtime_deps": attrs.list(attrs.dep(), default = []), "source_abi_verification_mode": attrs.option(attrs.enum(SourceAbiVerificationMode), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -423,6 +424,7 @@ java_test = prelude_rule( "test_case_timeout_ms": attrs.option(attrs.int(), default = None), "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None), "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -467,6 +469,7 @@ java_test_runner = prelude_rule( "source_only_abi_deps": attrs.list(attrs.dep(), default = []), "srcs": attrs.list(attrs.source(), default = []), "target": attrs.option(attrs.string(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) diff --git a/prelude/decls/kotlin_rules.bzl b/prelude/decls/kotlin_rules.bzl index b8dce5a80..b1550992a 100644 --- a/prelude/decls/kotlin_rules.bzl +++ b/prelude/decls/kotlin_rules.bzl @@ -195,6 +195,7 @@ kotlin_library = prelude_rule( "source_only_abi_deps": attrs.list(attrs.dep(), default = []), "target": attrs.option(attrs.string(), default = None), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -300,6 +301,7 @@ kotlin_test = prelude_rule( "use_cxx_libraries": attrs.option(attrs.bool(), default = None), "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None), "use_jvm_abi_gen": attrs.option(attrs.bool(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) diff --git a/prelude/decls/scala_rules.bzl b/prelude/decls/scala_rules.bzl 
index 093ac8e5a..80c95edda 100644 --- a/prelude/decls/scala_rules.bzl +++ b/prelude/decls/scala_rules.bzl @@ -46,6 +46,7 @@ scala_library = prelude_rule( "source_only_abi_deps": attrs.list(attrs.dep(), default = []), "srcs": attrs.list(attrs.source(), default = []), "target": attrs.option(attrs.string(), default = None), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) @@ -103,6 +104,7 @@ scala_test = prelude_rule( "use_cxx_libraries": attrs.option(attrs.bool(), default = None), "use_dependency_order_classpath": attrs.option(attrs.bool(), default = None), "vm_args": attrs.list(attrs.arg(), default = []), + "_wip_java_plugin_arguments": attrs.dict(attrs.label(), attrs.list(attrs.string()), default = {}), } ), ) diff --git a/prelude/java/java_library.bzl b/prelude/java/java_library.bzl index 45525a411..512b6c503 100644 --- a/prelude/java/java_library.bzl +++ b/prelude/java/java_library.bzl @@ -88,8 +88,7 @@ def _process_plugins( # Process Javac Plugins if plugin_params: - plugin = plugin_params.processors[0] - args = plugin_params.args.get(plugin, cmd_args()) + plugin, args = plugin_params.processors[0] # Produces "-Xplugin:PluginName arg1 arg2 arg3", as a single argument plugin_and_args = cmd_args(plugin) diff --git a/prelude/java/plugins/java_plugin.bzl b/prelude/java/plugins/java_plugin.bzl index ac4129903..2636e137a 100644 --- a/prelude/java/plugins/java_plugin.bzl +++ b/prelude/java/plugins/java_plugin.bzl @@ -14,8 +14,7 @@ load( ) PluginParams = record( - processors = field(list[str]), - args = field(dict[str, cmd_args]), + processors = field(list[(str, cmd_args)]), deps = field([JavaPackagingDepTSet, None]), ) @@ -23,22 +22,32 @@ def create_plugin_params(ctx: AnalysisContext, plugins: list[Dependency]) -> [Pl processors = [] plugin_deps = [] + # _wip_java_plugin_arguments keys are providers_label, map to + # target_label to allow lookup with plugin.label.raw_target() + plugin_arguments = { + 
label.raw_target(): arguments + for label, arguments in ctx.attrs._wip_java_plugin_arguments.items() + } + # Compiler plugin derived from `plugins` attribute - for plugin in filter(None, [x.get(JavaProcessorsInfo) for x in plugins]): - if plugin.type == JavaProcessorsType("plugin"): - if len(plugin.processors) > 1: - fail("Only 1 java compiler plugin is expected. But received: {}".format(plugin.processors)) - processors.append(plugin.processors[0]) - if plugin.deps: - plugin_deps.append(plugin.deps) + for plugin in plugins: + processors_info = plugin.get(JavaProcessorsInfo) + if processors_info != None and processors_info.type == JavaProcessorsType("plugin"): + if len(processors_info.processors) > 1: + fail("Only 1 java compiler plugin is expected. But received: {}".format(processors_info.processors)) + processor = processors_info.processors[0] + if processors_info.deps: + plugin_deps.append(processors_info.deps) + + arguments = plugin_arguments.get(plugin.label.raw_target()) + processors.append((processor, cmd_args(arguments) if arguments != None else cmd_args())) if not processors: return None return PluginParams( - processors = dedupe(processors), + processors = processors, deps = ctx.actions.tset(JavaPackagingDepTSet, children = plugin_deps) if plugin_deps else None, - args = {}, ) def java_plugin_impl(ctx: AnalysisContext) -> list[Provider]: diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl index 24022c72a..e89fe1435 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl +++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -255,13 +255,13 @@ def encode_plugin_params(plugin_params: [PluginParams, None]) -> [struct, None]: encoded_plugin_params = struct( parameters = [], pluginProperties = [ - encode_plugin_properties(processor, plugin_params) - for processor in plugin_params.processors + encode_plugin_properties(processor, arguments, plugin_params) + for processor, arguments in plugin_params.processors ], ) return encoded_plugin_params 
-def encode_plugin_properties(processor: str, plugin_params: PluginParams) -> struct: +def encode_plugin_properties(processor: str, arguments: cmd_args, plugin_params: PluginParams) -> struct: return struct( canReuseClassLoader = False, doesNotAffectAbi = False, @@ -269,6 +269,7 @@ def encode_plugin_properties(processor: str, plugin_params: PluginParams) -> str processorNames = [processor], classpath = plugin_params.deps.project_as_json("javacd_json") if plugin_params.deps else [], pathParams = {}, + arguments = arguments, ) def encode_base_jar_command( From 237296172a3d02d8f75848c65bb46cd06c09934c Mon Sep 17 00:00:00 2001 From: Dan Zimmerman Date: Mon, 29 Jan 2024 12:33:35 -0800 Subject: [PATCH 0185/1133] Expose swift-ide-test in SwiftToolchainInfo Summary: We want to use the `swift-ide-test` tool to generate a `[swift-interface]` subtarget on `apple_library`s. The first step is to expose this tool in the swift toolchain. Reviewed By: drodriguez Differential Revision: D53005518 fbshipit-source-id: 954b614056570a2b6bd3ead1279ef00152c324a1 --- prelude/apple/apple_rules_impl.bzl | 1 + prelude/apple/swift/swift_toolchain.bzl | 1 + prelude/apple/swift/swift_toolchain_types.bzl | 1 + 3 files changed, 3 insertions(+) diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index a3cef8fa4..b5a619e16 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -237,6 +237,7 @@ extra_attributes = { "platform_path": attrs.option(attrs.source(), default = None), # Mark as optional until we remove `_internal_platform_path` "sdk_modules": attrs.list(attrs.exec_dep(), default = []), # A list or a root target that represent a graph of sdk modules (e.g Frameworks) "sdk_path": attrs.option(attrs.source(), default = None), # Mark as optional until we remove `_internal_sdk_path` + "swift_ide_test_tool": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "swift_stdlib_tool": attrs.exec_dep(providers = 
[RunInfo]), "swiftc": attrs.exec_dep(providers = [RunInfo]), # TODO(T111858757): Mirror of `platform_path` but treated as a string. It allows us to diff --git a/prelude/apple/swift/swift_toolchain.bzl b/prelude/apple/swift/swift_toolchain.bzl index c3a7a0a12..7d73e8cd7 100644 --- a/prelude/apple/swift/swift_toolchain.bzl +++ b/prelude/apple/swift/swift_toolchain.bzl @@ -69,6 +69,7 @@ def swift_toolchain_impl(ctx): sdk_path = ctx.attrs._internal_sdk_path or ctx.attrs.sdk_path, swift_stdlib_tool = ctx.attrs.swift_stdlib_tool[RunInfo], swift_stdlib_tool_flags = ctx.attrs.swift_stdlib_tool_flags, + swift_ide_test_tool = ctx.attrs.swift_ide_test_tool[RunInfo] if ctx.attrs.swift_ide_test_tool else None, supports_relative_resource_dir = ctx.attrs.supports_relative_resource_dir, supports_swift_cxx_interoperability_mode = ctx.attrs.supports_swift_cxx_interoperability_mode, supports_swift_importing_objc_forward_declarations = ctx.attrs.supports_swift_importing_obj_c_forward_declarations, diff --git a/prelude/apple/swift/swift_toolchain_types.bzl b/prelude/apple/swift/swift_toolchain_types.bzl index e2f7c8241..14ab3bf47 100644 --- a/prelude/apple/swift/swift_toolchain_types.bzl +++ b/prelude/apple/swift/swift_toolchain_types.bzl @@ -31,6 +31,7 @@ SwiftToolchainInfo = provider( "sdk_path": provider_field(typing.Any, default = None), "swift_stdlib_tool_flags": provider_field(typing.Any, default = None), "swift_stdlib_tool": provider_field(typing.Any, default = None), + "swift_ide_test_tool": provider_field(typing.Any, default = None), "runtime_run_paths": provider_field(typing.Any, default = None), # [str] "supports_relative_resource_dir": provider_field(typing.Any, default = None), # bool "supports_swift_cxx_interoperability_mode": provider_field(typing.Any, default = None), # bool From e8577e65f4e3f1ffdeb3395d725ff7f21b98cf11 Mon Sep 17 00:00:00 2001 From: Dan Zimmerman Date: Mon, 29 Jan 2024 12:33:35 -0800 Subject: [PATCH 0186/1133] Add missing None check in 
_add_mixed_library_flags_to_cmd Summary: `objc_modulemap_pp_info` is possibly `None` but isn't checked before using Reviewed By: drodriguez Differential Revision: D53135322 fbshipit-source-id: b90d067f10e3f036313ed3b3284eca3f08f1afd1 --- prelude/apple/swift/swift_compilation.bzl | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index e45f3d27f..5dd03ffc5 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -641,17 +641,18 @@ def _add_mixed_library_flags_to_cmd( if not objc_headers: return - # TODO(T99100029): We cannot use VFS overlays to mask this import from - # the debugger as they require absolute paths. Instead we will enforce - # that mixed libraries do not have serialized debugging info and rely on - # rdeps to serialize the correct paths. - for arg in objc_modulemap_pp_info.relative_args.args: - cmd.add("-Xcc") - cmd.add(arg) - - for arg in objc_modulemap_pp_info.modular_args: - cmd.add("-Xcc") - cmd.add(arg) + if objc_modulemap_pp_info: + # TODO(T99100029): We cannot use VFS overlays to mask this import from + # the debugger as they require absolute paths. Instead we will enforce + # that mixed libraries do not have serialized debugging info and rely on + # rdeps to serialize the correct paths. + for arg in objc_modulemap_pp_info.relative_args.args: + cmd.add("-Xcc") + cmd.add(arg) + + for arg in objc_modulemap_pp_info.modular_args: + cmd.add("-Xcc") + cmd.add(arg) cmd.add("-import-underlying-module") From 3b0c47da981a1f5d3605470dece3065f9059b740 Mon Sep 17 00:00:00 2001 From: Balaji S Date: Mon, 29 Jan 2024 13:27:26 -0800 Subject: [PATCH 0187/1133] Relist tests if there were changes Summary: * Earlier if a new test was added / existing test was removed from groups / any group/test list changes were not automatically picked up on `test:run()` from the shell. 
* This was because, we first listed the tests (this step is required to figure out the modules / deps to recompile), reload the module if required, then run **based on the previous listing step** * Now, if we detect some modules were changed, we list the tests again to pick up any test changes (we do this by calling `do_plain_test_run` which runs without reloading) Reviewed By: TheGeorge Differential Revision: D53181552 fbshipit-source-id: 2a0b330cfcc2ff34ed5b11958203a2deed11ca0b --- prelude/erlang/common_test/test_cli_lib/src/test.erl | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/prelude/erlang/common_test/test_cli_lib/src/test.erl b/prelude/erlang/common_test/test_cli_lib/src/test.erl index 88786b1d1..0b272bf14 100644 --- a/prelude/erlang/common_test/test_cli_lib/src/test.erl +++ b/prelude/erlang/common_test/test_cli_lib/src/test.erl @@ -177,8 +177,15 @@ run(RegExOrId) -> ok -> io:format("Reloading all changed modules... "), Loaded = ct_daemon:load_changed(), - io:format("reloaded ~p modules ~P~n", [erlang:length(Loaded), Loaded, 10]), - rerun(ToRun); + case erlang:length(Loaded) of + 0 -> + do_plain_test_run(ToRun); + ChangedCount -> + io:format("reloaded ~p modules ~P~n", [ChangedCount, Loaded, 10]), + % There were some changes, so list the tests again, then run but without recompiling changes + % Note that if called with the RegEx insted of ToRun test list like above, do_plain_test_run/1 will list the tests again + do_plain_test_run(RegExOrId) + end; Error -> Error end From d69d2f0b2b17a86cafe8c4cbcdd26425680f2d47 Mon Sep 17 00:00:00 2001 From: Dan Zimmerman Date: Mon, 29 Jan 2024 14:35:00 -0800 Subject: [PATCH 0188/1133] Move all flag calculation out of compile_swift Summary: Next up to implement the `[swift-interface]` subtarget is to refactor `swift_compile` so that flag calculation can be reused when invoking `swift-ide-test`. 
Reviewed By: maxovtsin Differential Revision: D53098989 fbshipit-source-id: c6d5470ee24a66bf3d99f22de1ef3c231ba90064 --- prelude/apple/swift/swift_compilation.bzl | 90 ++++++++++++++--------- 1 file changed, 56 insertions(+), 34 deletions(-) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 5dd03ffc5..2a94001ad 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -198,36 +198,6 @@ def compile_swift( if not srcs: return None - # If this target imports XCTest we need to pass the search path to its swiftmodule. - framework_search_paths = cmd_args() - framework_search_paths.add(_get_xctest_swiftmodule_search_path(ctx)) - - # Pass the framework search paths to the driver and clang importer. This is required - # for pcm compilation, which does not pass through driver search paths. - framework_search_paths.add(framework_search_paths_flags) - framework_search_paths.add(cmd_args(framework_search_paths_flags, prepend = "-Xcc")) - - # If a target exports ObjC headers and Swift explicit modules are enabled, - # we need to precompile a PCM of the underlying module and supply it to the Swift compilation. 
- if objc_modulemap_pp_info and ctx.attrs.uses_explicit_modules: - underlying_swift_pcm_uncompiled_info = get_swift_pcm_uncompile_info( - ctx, - None, - objc_modulemap_pp_info, - ) - if underlying_swift_pcm_uncompiled_info: - compiled_underlying_pcm = compile_underlying_pcm( - ctx, - underlying_swift_pcm_uncompiled_info, - deps_providers, - get_swift_cxx_flags(ctx), - framework_search_paths, - ) - else: - compiled_underlying_pcm = None - else: - compiled_underlying_pcm = None - toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info module_name = get_module_name(ctx) @@ -235,17 +205,16 @@ def compile_swift( output_swiftmodule = ctx.actions.declare_output(module_name + SWIFTMODULE_EXTENSION) - shared_flags = _get_shared_flags( + shared_flags = _get_swiftc_flags( ctx, deps_providers, parse_as_library, - compiled_underlying_pcm, module_name, exported_headers, objc_modulemap_pp_info, + framework_search_paths_flags, extra_search_paths_flags, ) - shared_flags.add(framework_search_paths) if toolchain.can_toolchain_emit_obj_c_header_textually: _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, output_header) @@ -442,7 +411,60 @@ def _compile_with_argsfile( # Swift correctly handles relative paths and we can utilize the relative argsfile for absolute paths. return CompileArgsfiles(relative = {extension: relative_argsfile}, absolute = {extension: relative_argsfile}) -def _get_shared_flags( +def _get_swiftc_flags( + ctx: AnalysisContext, + deps_providers: list, + parse_as_library: bool, + module_name: str, + exported_headers: list[CHeader], + objc_modulemap_pp_info: [CPreprocessor, None], + framework_search_paths_flags: cmd_args, + extra_search_paths_flags: list[ArgLike] = []) -> cmd_args: + # If this target imports XCTest we need to pass the search path to its swiftmodule. 
+ framework_search_paths = cmd_args() + framework_search_paths.add(_get_xctest_swiftmodule_search_path(ctx)) + + # Pass the framework search paths to the driver and clang importer. This is required + # for pcm compilation, which does not pass through driver search paths. + framework_search_paths.add(framework_search_paths_flags) + framework_search_paths.add(cmd_args(framework_search_paths_flags, prepend = "-Xcc")) + + # If a target exports ObjC headers and Swift explicit modules are enabled, + # we need to precompile a PCM of the underlying module and supply it to the Swift compilation. + if objc_modulemap_pp_info and ctx.attrs.uses_explicit_modules: + underlying_swift_pcm_uncompiled_info = get_swift_pcm_uncompile_info( + ctx, + None, + objc_modulemap_pp_info, + ) + if underlying_swift_pcm_uncompiled_info: + compiled_underlying_pcm = compile_underlying_pcm( + ctx, + underlying_swift_pcm_uncompiled_info, + deps_providers, + get_swift_cxx_flags(ctx), + framework_search_paths, + ) + else: + compiled_underlying_pcm = None + else: + compiled_underlying_pcm = None + + shared_flags = _get_base_flags( + ctx, + deps_providers, + parse_as_library, + compiled_underlying_pcm, + module_name, + exported_headers, + objc_modulemap_pp_info, + extra_search_paths_flags, + ) + shared_flags.add(framework_search_paths) + + return shared_flags + +def _get_base_flags( ctx: AnalysisContext, deps_providers: list, parse_as_library: bool, From ccb84e86de92c983d81f11e4764adc6930719041 Mon Sep 17 00:00:00 2001 From: Dan Zimmerman Date: Mon, 29 Jan 2024 14:35:00 -0800 Subject: [PATCH 0189/1133] Move serialize_debugging_options warnings into compile_swift Summary: The previous diff simply moves all `swiftc` flag calculation into its own `get_swift_flags` function, but this introduced a superfluous warning. This diff moves that warning into `compile_swift` so that it correctly shows, only when compiling actual swift code. 
Reviewed By: drodriguez Differential Revision: D53098986 fbshipit-source-id: 842f47e55ea3da9e0edd9200d8f109a2ee0bda6c --- prelude/apple/swift/swift_compilation.bzl | 28 ++++++++++++----------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 2a94001ad..edfa28800 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -201,6 +201,19 @@ def compile_swift( toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info module_name = get_module_name(ctx) + + # See _get_base_flags for where this flag is actually set. We move the + # warnings in here because _get_swiftc_flags is called even if srcs is None + # (for the [swift-interface] subtarget), and we only want to warn if we're + # actually compiling swift files. + if ctx.attrs.serialize_debugging_options: + if exported_headers: + # TODO(T99100029): We cannot use VFS overlays with Buck2, so we have to disable + # serializing debugging options for mixed libraries to debug successfully + warning("Mixed libraries cannot serialize debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) + elif not toolchain.prefix_serialized_debugging_options: + warning("The current toolchain does not support prefixing serialized debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) + output_header = ctx.actions.declare_output(module_name + "-Swift.h") output_swiftmodule = ctx.actions.declare_output(module_name + SWIFTMODULE_EXTENSION) @@ -527,19 +540,8 @@ def _get_base_flags( else: cmd.add(["-enable-experimental-cxx-interop"]) - serialize_debugging_options = False - if ctx.attrs.serialize_debugging_options: - if objc_headers: - # TODO(T99100029): We cannot use VFS overlays with Buck2, so we have to disable - # serializing debugging options for mixed libraries to debug successfully - warning("Mixed 
libraries cannot serialize debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) - elif not toolchain.prefix_serialized_debugging_options: - warning("The current toolchain does not support prefixing serialized debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) - else: - # Apply the debug prefix map to Swift serialized debugging info. - # This will allow for debugging remotely built swiftmodule files. - serialize_debugging_options = True - + serialize_debugging_options = ctx.attrs.serialize_debugging_options and \ + not objc_headers and toolchain.prefix_serialized_debugging_options if serialize_debugging_options: cmd.add([ "-Xfrontend", From 460828de40d3f8c0d912ec00929f21241eb29a9e Mon Sep 17 00:00:00 2001 From: Dan Zimmerman Date: Mon, 29 Jan 2024 14:35:00 -0800 Subject: [PATCH 0190/1133] Expose new make_swift_interface.py tool Summary: In order to implement the `[swift-interface]` subtarget, we need to convert flags meant for `swiftc` into flags that `swift-ide-test` understands. We also need a driver for `swift-ide-test` which writes the `stdout` output to a file (as `swift-ide-test` has no `--out` flag). This diff introduces a new tool which accomplishes both tasks: it accepts the `swiftc` arguments meant to shuffle over to `swift-ide-test` & a path to `swift-ide-test` and invokes `swift-ide-test` writing `stdout` to the specified out file. 
Reviewed By: drodriguez Differential Revision: D53098983 fbshipit-source-id: 4512b1cd959ad5cec7659b9583e8a3dd104988bd --- prelude/apple/apple_rules_impl.bzl | 1 + prelude/apple/swift/swift_toolchain.bzl | 1 + prelude/apple/swift/swift_toolchain_types.bzl | 1 + prelude/apple/tools/BUCK.v2 | 6 + prelude/apple/tools/make_swift_interface.py | 282 ++++++++++++++++++ 5 files changed, 291 insertions(+) create mode 100755 prelude/apple/tools/make_swift_interface.py diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index b5a619e16..5372b0da8 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -229,6 +229,7 @@ extra_attributes = { "swift_toolchain": { "architecture": attrs.option(attrs.string(), default = None), # TODO(T115173356): Make field non-optional "make_swift_comp_db": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//apple/tools:make_swift_comp_db")), + "make_swift_interface": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//apple/tools:make_swift_interface")), "object_format": attrs.enum(SwiftObjectFormat.values(), default = "object"), # A placeholder tool that can be used to set up toolchain constraints. 
# Useful when fat and thin toolchahins share the same underlying tools via `command_alias()`, diff --git a/prelude/apple/swift/swift_toolchain.bzl b/prelude/apple/swift/swift_toolchain.bzl index 7d73e8cd7..7e89e7743 100644 --- a/prelude/apple/swift/swift_toolchain.bzl +++ b/prelude/apple/swift/swift_toolchain.bzl @@ -70,6 +70,7 @@ def swift_toolchain_impl(ctx): swift_stdlib_tool = ctx.attrs.swift_stdlib_tool[RunInfo], swift_stdlib_tool_flags = ctx.attrs.swift_stdlib_tool_flags, swift_ide_test_tool = ctx.attrs.swift_ide_test_tool[RunInfo] if ctx.attrs.swift_ide_test_tool else None, + mk_swift_interface = cmd_args(ctx.attrs._swiftc_wrapper[RunInfo]).add(ctx.attrs.make_swift_interface[RunInfo]), supports_relative_resource_dir = ctx.attrs.supports_relative_resource_dir, supports_swift_cxx_interoperability_mode = ctx.attrs.supports_swift_cxx_interoperability_mode, supports_swift_importing_objc_forward_declarations = ctx.attrs.supports_swift_importing_obj_c_forward_declarations, diff --git a/prelude/apple/swift/swift_toolchain_types.bzl b/prelude/apple/swift/swift_toolchain_types.bzl index 14ab3bf47..10e6941c3 100644 --- a/prelude/apple/swift/swift_toolchain_types.bzl +++ b/prelude/apple/swift/swift_toolchain_types.bzl @@ -32,6 +32,7 @@ SwiftToolchainInfo = provider( "swift_stdlib_tool_flags": provider_field(typing.Any, default = None), "swift_stdlib_tool": provider_field(typing.Any, default = None), "swift_ide_test_tool": provider_field(typing.Any, default = None), + "mk_swift_interface": provider_field(typing.Any, default = None), "runtime_run_paths": provider_field(typing.Any, default = None), # [str] "supports_relative_resource_dir": provider_field(typing.Any, default = None), # bool "supports_swift_cxx_interoperability_mode": provider_field(typing.Any, default = None), # bool diff --git a/prelude/apple/tools/BUCK.v2 b/prelude/apple/tools/BUCK.v2 index d86e174c4..b31f120b5 100644 --- a/prelude/apple/tools/BUCK.v2 +++ b/prelude/apple/tools/BUCK.v2 @@ -41,6 +41,12 @@ 
python_bootstrap_binary( visibility = ["PUBLIC"], ) +python_bootstrap_binary( + name = "make_swift_interface", + main = "make_swift_interface.py", + visibility = ["PUBLIC"], +) + python_bootstrap_binary( name = "make_vfsoverlay", main = "make_vfsoverlay.py", diff --git a/prelude/apple/tools/make_swift_interface.py b/prelude/apple/tools/make_swift_interface.py new file mode 100755 index 000000000..13c91db7e --- /dev/null +++ b/prelude/apple/tools/make_swift_interface.py @@ -0,0 +1,282 @@ +#!/usr/bin/env fbpython +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +""" +Remaps swiftc arguments to be usable by swift-ide-test, and invokes +swift-ide-test with said arguments. +""" + +import argparse +import dataclasses +import optparse +import pathlib +import subprocess as proc +import sys + +from contextlib import contextmanager +from typing import Iterable, List, Optional + + +@dataclasses.dataclass +class SwiftIDETestArguments(object): + sdk: Optional[str] + target: Optional[str] + xcc: Iterable[str] + defines: Iterable[str] + frameworks: Iterable[str] + includes: Iterable[str] + resource_dir: str + enable_cxx_interop: bool + cxx_interoperability_mode: Optional[str] + upcoming_features: Iterable[str] + explicit_swift_module_map: Optional[str] + swift_version: Optional[str] + + def to_args(self) -> List[str]: + args = [] + if self.target: + args.append("--target") + args.append(self.target) + + if self.sdk: + args.append("--sdk") + args.append(self.sdk) + + for define in self.defines: + args.append("-D") + args.append(define) + + for include in self.includes: + args.append("-I") + args.append(include) + + for framework in self.frameworks: + args.append("-F") + args.append(framework) + + for xcc in self.xcc: + 
args.append("--Xcc") + args.append(xcc) + + args.append("--resource-dir") + args.append(self.resource_dir) + + if self.enable_cxx_interop: + args.append("-enable-experimental-cxx-interop") + + if self.cxx_interoperability_mode: + # swift-ide-test only understands -enable-experimental-cxx-interop, + # not the versioned -cxx-interoperability-mode=. + args.append("-enable-experimental-cxx-interop") + + if self.upcoming_features: + for feature in self.upcoming_features: + args.append("-enable-upcoming-feature") + args.append(feature) + + if self.explicit_swift_module_map: + args.append("--explicit-swift-module-map-file") + args.append(self.explicit_swift_module_map) + + if self.swift_version: + args.append("-swift-version") + args.append(self.swift_version) + return args + + +class LongSingleDashOpt(optparse.Option): + """ + This Option subclass allows for long arguments specified with single dashes, + e.g. -sdk (the default implementation only allows long options with two + dashes) + """ + + def _set_opt_strings(self, opts): + for opt in opts: + if len(opt) < 2: + raise optparse.OptionError( + "invalid option string %r: " + "must be at least two characters long" % opt, + self, + ) + elif len(opt) == 2: + self._short_opts.append(opt) + else: + self._long_opts.append(opt) + + +class IgnoreUnknownLongSingleDashOptParser(optparse.OptionParser): + """ + This OptionParser subclass allows for + (a) long arguments specified with single dashes (e.g. -sdk) + (b) ignoring unknown arguments + The default OptionParser doesn't have either of these behaviors. 
+ """ + + def __init__(self, *args, **kwargs): + kwargs["option_class"] = LongSingleDashOpt + super().__init__(*args, **kwargs) + + def _process_args(self, largs, rargs, values): + while rargs: + try: + arg = rargs[0] + if arg == "--": + del rargs[0] + return + elif arg[0:2] == "--": + self._process_long_opt(rargs, values) + elif arg[:1] == "-" and len(arg) > 1: + if len(arg) > 2: + self._process_long_opt(rargs, values) + else: + self._process_short_opts(rargs, values) + elif self.allow_interspersed_args: + largs.append(arg) + del rargs[0] + else: + return + except optparse.BadOptionError: + continue + + +def parse_swiftc_args(arguments: List[str]) -> SwiftIDETestArguments: # noqa: C901 + """ + We can't use argparse to do our parsing because arguments like -Xcc + need to accept arguments that are prefixed with `-`. + + optparse can handle this, and it's only soft deprecated (i.e. it should + stay around, just not actively developed), so we should be safe to use it. + + Additionally, our subclasses above are safe, since optparse is no longer + actively developed. 
+ """ + parser = IgnoreUnknownLongSingleDashOptParser() + + parser.add_option("-sdk", dest="sdk") + parser.add_option("-target", dest="target") + parser.add_option("-Xcc", action="append", default=[], dest="xcc") + parser.add_option("-D", dest="defines", action="append", default=[]) + parser.add_option("-F", dest="frameworks", action="append", default=[]) + parser.add_option("-I", dest="includes", action="append", default=[]) + parser.add_option("-resource-dir", dest="resource_dir") + parser.add_option( + "-enable-experimental-cxx-interop", + action="store_true", + default=False, + dest="enable_experimental_cxx_interop", + ) + parser.add_option("-Xfrontend", action="append", default=[], dest="xfrontend") + parser.add_option("-swift-version", dest="swift_version") + parser.add_option("-cxx-interoperability-mode", dest="cxx_interoperability_mode") + + options, leftovers = parser.parse_args(arguments) + + frontend_parser = IgnoreUnknownLongSingleDashOptParser() + frontend_parser.add_option( + "-enable-upcoming-feature", + dest="enable_upcoming_feature", + action="append", + default=[], + ) + frontend_parser.add_option( + "-explicit-swift-module-map-file", dest="explicit_swift_module_map" + ) + frontend_options = frontend_parser.parse_args(options.xfrontend)[0] + + resource_dir = options.resource_dir + if not resource_dir: + # If an explicit resource dir was not provided, we need to figure out + # which resource id would have been used, which, in the case of Xcode, + # is relative to the swiftc used. 
+ assert len(leftovers) >= 1 + compiler_path = pathlib.Path(leftovers[0]) + assert compiler_path.name == "swiftc" + resource_dir_path = compiler_path.parents[1] / "lib" / "swift" + assert resource_dir_path.exists() + resource_dir = str(resource_dir_path) + + return SwiftIDETestArguments( + options.sdk, + options.target, + options.xcc, + options.defines, + options.frameworks, + options.includes, + resource_dir, + options.enable_experimental_cxx_interop, + options.cxx_interoperability_mode, + frontend_options.enable_upcoming_feature, + frontend_options.explicit_swift_module_map, + options.swift_version, + ) + + +def parse_args() -> argparse.Namespace: + parser = argparse.ArgumentParser( + description="Uses swift-ide-test to generate a swift interface", + fromfile_prefix_chars="@", + ) + parser.add_argument( + "--swift-ide-test-tool", + required=True, + help="Path to swift-ide-test binary.", + ) + parser.add_argument( + "--module", + required=True, + help="Name of the module to generate the interface for.", + ) + parser.add_argument( + "--out", + help="Path to output file.", + default="-", + ) + parser.add_argument( + "arguments", + nargs="*", + default=[], + help="File containing compiler arguments to use to invoke" + + " swift-ide-test. 
Note these arguments should be in the format CC" + + " expects, not swift-ide-test, as this tool converts the arguments" + + " as needed", + ) + return parser.parse_args() + + +@contextmanager +def open_or_stdout(out): + if out == "-": + yield sys.stdout + else: + with open(out, "w") as f: + yield f + + +def main() -> None: + args = parse_args() + + parsed = parse_swiftc_args(args.arguments) + with open_or_stdout(args.out) as out: + proc.run( + [ + args.swift_ide_test_tool, + "--source-filename=x", + "--print-module", + "--module-to-print", + args.module, + "--module-print-submodules", + ] + + parsed.to_args(), + stdout=out, + check=True, + ) + + +if __name__ == "__main__": + main() From eb919cf8b477972e03e0058f6002fb266671ab6f Mon Sep 17 00:00:00 2001 From: Dan Zimmerman Date: Mon, 29 Jan 2024 14:35:00 -0800 Subject: [PATCH 0191/1133] Implement [swift-interface] subtarget on apple_library's Summary: We want to be able to generate `.swift` interface files for general `apple_library`s (particularly ones that don't include any `.swift` files). In order to achieve that we reuse the `swiftc` flag calculation, as if this library was a 'mixed' `apple_library` (i.e. it contain both `.swift` and non-`.swift` source files, so the current target is considered an 'underlying module') and pass those arguments (after translation) to `swift-ide-test`. 
Reviewed By: maxovtsin Differential Revision: D53098987 fbshipit-source-id: 155b52db0dcfa693a0ee07c6d61b81c899496385 --- prelude/apple/apple_library.bzl | 14 +++++- prelude/apple/swift/swift_compilation.bzl | 61 +++++++++++++++++++++++ 2 files changed, 74 insertions(+), 1 deletion(-) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index 530bfc336..ab6b6b37d 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -17,6 +17,7 @@ load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") load( "@prelude//apple/swift:swift_compilation.bzl", "compile_swift", + "create_swift_interface", "get_swift_anonymous_targets", "get_swift_debug_infos", "get_swift_dependency_info", @@ -190,12 +191,13 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte # to the CWD, so need to add . to be located correctly. resource_dir_args = ["-I."] + module_name = get_module_name(ctx) modular_pre = CPreprocessor( uses_modules = ctx.attrs.uses_modules, modular_args = [ "-fcxx-modules", "-fmodules", - "-fmodule-name=" + get_module_name(ctx), + "-fmodule-name=" + module_name, "-fmodules-cache-path=" + MODULE_CACHE_PATH, ] + resource_dir_args, ) @@ -254,6 +256,16 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte default_outputs = swift_compile.object_files if swift_compile else None, ), ], + "swift-interface": [create_swift_interface( + ctx, + deps_providers, + True, # parse_as_library + module_name, + exported_hdrs, + modulemap_pre, + framework_search_paths_flags, + params.extra_swift_compiler_flags, + )], "swift-output-file-map": [ DefaultInfo( default_output = swift_compile.output_map_artifact if swift_compile else None, diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index edfa28800..380922b02 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -858,6 
+858,67 @@ def _create_compilation_database( return SwiftCompilationDatabase(db = cdb_artifact, other_outputs = argfile.cmd_form) +def create_swift_interface( + ctx: AnalysisContext, + deps_providers: list, + parse_as_library: bool, + module_name: str, + exported_headers: list[CHeader], + objc_modulemap_pp_info: [CPreprocessor, None], + framework_search_paths_flags: cmd_args, + extra_search_paths_flags: list[ArgLike] = []) -> DefaultInfo: + swiftc_flags = _get_swiftc_flags( + ctx, + deps_providers, + parse_as_library, + module_name, + exported_headers, + objc_modulemap_pp_info, + framework_search_paths_flags, + extra_search_paths_flags, + ) + + swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info + swift_ide_test_tool = swift_toolchain.swift_ide_test_tool + if not swift_ide_test_tool: + return DefaultInfo() + mk_swift_interface = swift_toolchain.mk_swift_interface + + module_name = get_module_name(ctx) + identifier = module_name + ".interface.swift" + + argsfile, _ = ctx.actions.write( + identifier + ".argsfile", + swiftc_flags, + allow_args = True, + ) + interface_artifact = ctx.actions.declare_output(identifier) + + mk_swift_args = cmd_args( + mk_swift_interface, + "--swift-ide-test-tool", + swift_ide_test_tool, + "--module", + module_name, + "--out", + interface_artifact.as_output(), + "--", + cmd_args(cmd_args(argsfile, format = "@{}", delimiter = "")).hidden([swiftc_flags]), + ) + + ctx.actions.run( + mk_swift_args, + category = "mk_swift_interface", + identifier = identifier, + ) + + return DefaultInfo( + default_output = interface_artifact, + other_outputs = [ + argsfile, + ], + ) + def _exported_deps(ctx) -> list[Dependency]: if ctx.attrs.reexport_all_header_dependencies: return ctx.attrs.exported_deps + ctx.attrs.deps From 61f5d44641512e386f4864e64cad029134475f13 Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Mon, 29 Jan 2024 15:31:01 -0800 Subject: [PATCH 0192/1133] Simplify and fix dependency traversal between merge 
groups Summary: Makes somerge behavior when creating merged linkables more closely match linking with relinker only using `_create_link_args`. This behavior is also closer to linking without somerge and to buck1. The existing traversal can create a non-topological ordering in `ordered_group_constituents` because edges to exported deps outside the current group are not followed. This can lead to a situation like this, where the link line for A is B,X instead of the correct ordering X,B. ``` {merge_A} | \ | {merge_X } | | exported | Y | / exported |/ B (static) ``` This can cause app size regressions and other unexpected behavior. It's likely that this has a larger affect on apps that have smaller merge maps/sparse merge graphs, since static libs will be less likely to be merged into "owning" shared libraries. The previous traversal also includes transitive shared library dependencies on link lines instead of just direct ones. Removing these redundant shared deps _should_ be a no-op, but in any case it also makes this more closely match buck1 and other link strategies. 
Reviewed By: mzlee, cjhopman Differential Revision: D53103928 fbshipit-source-id: a276f7ca78e676dd5bf3b196c9fa302ab321f7a6 --- .../android_binary_native_library_rules.bzl | 250 +++++++++--------- 1 file changed, 123 insertions(+), 127 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index b4aba849a..f530cfa33 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -61,7 +61,7 @@ load( ) load("@prelude//linking:strip.bzl", "strip_object") load("@prelude//utils:expect.bzl", "expect") -load("@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", "post_order_traversal", "pre_order_traversal", "pre_order_traversal_by") +load("@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", "post_order_traversal", "pre_order_traversal") load("@prelude//utils:set.bzl", "set", "set_type") # @unused Used as a type load("@prelude//utils:utils.bzl", "dedupe_by_value") @@ -187,7 +187,7 @@ def get_android_binary_native_library_info( enable_relinker = getattr(ctx.attrs, "enable_relinker", False) if has_native_merging or enable_relinker: - native_merge_debug = ctx.actions.declare_output("native_merge.debug") + native_merge_debug = ctx.actions.declare_output("native_merge_debug", dir = True) dynamic_outputs.append(native_merge_debug) # We serialize info about the linkable graph and the apk module mapping and pass that to an @@ -757,16 +757,25 @@ LinkGroupData = record( apk_module = str, ) +# Lookup key for somerge groups, either the soname for shared libraries or the target name for unmerged statics +GroupLabel = str + +# Represents the primary constituents and deps of primary constituents used to create a LinkGroupLinkableNode for a non-prebuilt shared library. 
+LinkGroupMergeInfo = record( + label = GroupLabel, + deps = list[GroupLabel], + exported_deps = list[GroupLabel], + constituent_link_infos = list[LinkInfo], +) + # Represents a node in the final merged linkable map. Most of these will be shared libraries, either prebuilt shared libs or -# libraries that are created below for a node in the link_groups_graph. The exception is for non-merged static-only nodes, in -# that case this +# libraries that are created below for a node in the link_groups_graph. The exception is for non-merged static-only nodes. LinkGroupLinkableNode = record( # The LinkInfo to add to the link line for a node that links against this. link = LinkInfo, - deps = list[str], - exported_deps = list[str], + deps = list[GroupLabel], + exported_deps = list[GroupLabel], shared_lib = [SharedLibrary, None], - # linker flags to be exported by any node that links against this. This can only be non-None for non-merged static only nodes (as we don't # propagate exported linker flags through transitive shared lib deps). exported_linker_flags = [(list[typing.Any], list[typing.Any]), None], @@ -1067,147 +1076,75 @@ def _get_merged_linkables( ) continue - # Keys in the current group stay as a Label, deps get converted to the group key. - def convert_to_merged_graph_deps(deps: list[Label], curr_group: str) -> list[[Label, str]]: - converted = [] - for dep in deps: - dep_group = target_to_link_group[dep] - if dep_group == curr_group: - converted.append(dep) - elif dep_group: - converted.append(dep_group) - return dedupe_by_value(converted) - - # For the current group, this will traverse the original linkable graph to find the LinkableNodes for - # the constituents of the group and traverses the link_group graph for non-constituent deps. 
- def get_merged_graph_traversal(curr_group: str, exported_only: bool) -> typing.Callable: - def traversal(key: [Label, str]) -> list[[Label, str]]: - if eval_type(Label).matches(key): - expect(target_to_link_group[key] == curr_group) - node = linkable_nodes[key] - if exported_only: - return convert_to_merged_graph_deps(node.exported_deps, curr_group) - return convert_to_merged_graph_deps(node.deps + node.exported_deps, curr_group) - else: - link_group_node = link_group_linkable_nodes[key] - if exported_only: - return link_group_node.exported_deps - return dedupe_by_value(link_group_node.deps + link_group_node.exported_deps) - - # It's easy for us to accidentally get this merged traversal wrong, so this provides one guardrail - def checked_traversal(key: [Label, str]) -> list[[Label, str]]: - return expect_dedupe(traversal(key)) - - return checked_traversal - - # note that this will possibly contain shared lib dependencies which aren't really public. that's handled below. - public_node_roots = group_data.constituents - - # this is a hybrid of buck1 somerge behavior and what we do for link groups. - # like link groups, we expose link group by setting link_whole on its link infos (this matches buck1 for - # primary constituents, but not for other constituents). - # like buck1, we treat all primary constituents as public node roots (as opposed to link groups that only treats - # preferred_linkage=shared and edges with an outbound dep as public roots), and then traverse exported deps from - # those roots to find all public nodes. 
- # the main thing to note from this is that for non-primary constituents that are identified as public, we will - # use link_whole whereas buck1 will make dependents link against them directly - exported_public_nodes = { - d: True - for d in breadth_first_traversal_by( - None, - public_node_roots, - get_merged_graph_traversal(group, True), - ) - } - exported_linker_flags = [] exported_linker_post_flags = [] links = [] - shared_lib_deps = [] - real_constituents = [] if is_actually_merged and merge_data.glue_linkable: - real_constituents.append(merge_data.glue_linkable[0]) links.append(set_link_info_link_whole(merge_data.glue_linkable[1])) solib_constituents = [] - link_group_deps = [] - ordered_group_constituents = pre_order_traversal_by(group_data.constituents, get_merged_graph_traversal(group, False)) - representative_label = ordered_group_constituents[0] - for key in ordered_group_constituents: - real_constituents.append(key) - if eval_type(Label).matches(key): - # This is handling targets within this link group - expect(target_to_link_group[key] == group) - node = linkable_nodes[key] - - default_solibs = list(node.shared_libs.keys()) - if not default_solibs and node.preferred_linkage == Linkage("static"): - default_solibs = [node.default_soname] - - for soname in default_solibs: - included_default_solibs[soname] = True - if node.include_in_android_mergemap: - solib_constituents.append(soname) - - node = linkable_nodes[key] - link_info = node.link_infos[archive_output_style].default - - # the propagated link info should already be wrapped with exported flags. - link_info = wrap_link_info( - link_info, - pre_flags = node.linker_flags.flags, - post_flags = node.linker_flags.post_flags, - ) - exported_linker_flags.extend(node.linker_flags.exported_flags) - exported_linker_post_flags.extend(node.linker_flags.exported_post_flags) - if key in exported_public_nodes: - link_info = set_link_info_link_whole(link_info) - else: - # This is cross-link-group deps. 
We add information to the link line from the LinkGroupLinkableNode of the dep. - link_group_node = link_group_linkable_nodes[key] - link_info = link_group_node.link - if link_group_node.shared_lib: - shared_lib_deps.append(link_group_node.shared_lib.soname) - link_group_deps.append(key) - elif key in exported_public_nodes: - link_info = set_link_info_link_whole(link_info) + group_deps = [] + group_exported_deps = [] + for key in group_data.constituents: + expect(target_to_link_group[key] == group) + node = linkable_nodes[key] + + default_solibs = list(node.shared_libs.keys()) + if not default_solibs and node.preferred_linkage == Linkage("static"): + default_solibs = [node.default_soname] + + for soname in default_solibs: + included_default_solibs[soname] = True + if node.include_in_android_mergemap: + solib_constituents.append(soname) + + node = linkable_nodes[key] + link_info = node.link_infos[archive_output_style].default + + # the propagated link info should already be wrapped with exported flags. 
+ link_info = wrap_link_info( + link_info, + pre_flags = node.linker_flags.flags, + post_flags = node.linker_flags.post_flags, + ) + exported_linker_flags.extend(node.linker_flags.exported_flags) + exported_linker_post_flags.extend(node.linker_flags.exported_post_flags) + links.append(set_link_info_link_whole(link_info)) - if link_group_node.exported_linker_flags: - exported_linker_flags.extend(link_group_node.exported_linker_flags[0]) - exported_linker_post_flags.extend(link_group_node.exported_linker_flags[1]) + dep_groups = [target_to_link_group[dep] for dep in node.deps] + group_deps.extend([dep_group for dep_group in dep_groups if dep_group != group]) - links.append(link_info) + exported_dep_groups = [target_to_link_group[dep] for dep in node.exported_deps] + group_exported_deps.extend([dep_group for dep_group in exported_dep_groups if dep_group != group]) soname = group if not is_actually_merged: soname = linkable_nodes[group_data.constituents[0]].default_soname debug_info.with_default_soname.append((soname, group_data.constituents[0])) - debug_info.group_debug.setdefault( - group, - struct( - soname = soname, - merged = is_actually_merged, - constituents = real_constituents, - shlib_deps = shared_lib_deps, - exported_public_nodes = exported_public_nodes, - exported_linker_flags = exported_linker_flags, - exported_linker_post_flags = exported_linker_post_flags, - ), - ) - output_path = _platform_output_path(soname, platform if len(merged_data_by_platform) > 1 else None) - link_args = [LinkArgs(infos = links)] + + link_merge_info = LinkGroupMergeInfo( + label = group, + deps = dedupe_by_value(group_deps), + exported_deps = dedupe_by_value(group_exported_deps), + constituent_link_infos = links, + ) + link_args, shlib_deps, link_deps_graph = _create_merged_link_args( + root_target = link_merge_info, + linkable_nodes = link_group_linkable_nodes, + cxx_toolchain = cxx_toolchain, + ) shared_lib = create_shared_lib( ctx, output_path = output_path, soname = soname, - 
link_args = link_args, + link_args = [link_args], cxx_toolchain = cxx_toolchain, - shared_lib_deps = shared_lib_deps, - label = representative_label, + shared_lib_deps = [link_group_linkable_nodes[label].shared_lib.soname for label in shlib_deps], + label = group_data.constituents[0], can_be_asset = can_be_asset, ) @@ -1220,8 +1157,8 @@ def _get_merged_linkables( )], post_flags = exported_linker_post_flags, ), - deps = link_group_deps, - exported_deps = [], + deps = link_merge_info.deps, + exported_deps = link_merge_info.exported_deps, shared_lib = shared_lib, # exported linker flags for shared libs are in their linkinfo itself and are not exported from dependents exported_linker_flags = None, @@ -1234,6 +1171,19 @@ def _get_merged_linkables( is_actually_merged = is_actually_merged, ) + debug_info.group_debug.setdefault( + group, + struct( + soname = soname, + merged = is_actually_merged, + primary_constituents = group_data.constituents, + real_constituents = link_deps_graph.keys(), + shlib_deps = shlib_deps, + exported_linker_flags = exported_linker_flags, + exported_linker_post_flags = exported_linker_post_flags, + ), + ) + shared_libs_by_platform[platform] = group_shared_libs debug_info.missing_default_solibs.extend([d for d in merge_data.default_shared_libs if d not in included_default_solibs]) @@ -1446,6 +1396,52 @@ def _create_link_args( shlib_deps.append(target) else: links.append(node.link_infos[preferred_linkable_type].default) + + extra_runtime_flags = cxx_toolchain.linker_info.shared_dep_runtime_ld_flags or [] + if extra_runtime_flags: + links.append(LinkInfo(pre_flags = extra_runtime_flags)) + return LinkArgs(infos = links), shlib_deps, link_traversal_cache + +# Equivalent to _create_link_args but for somerge +def _create_merged_link_args( + *, + cxx_toolchain: CxxToolchainInfo, + root_target: LinkGroupMergeInfo, + linkable_nodes: dict[GroupLabel, LinkGroupLinkableNode]) -> (LinkArgs, list[GroupLabel], dict[GroupLabel, list[GroupLabel]]): + if 
LinkOrdering(cxx_toolchain.linker_info.link_ordering) != LinkOrdering("topological"): + fail("don't yet support link ordering {}".format(cxx_toolchain.linker_info.link_ordering)) + + link_traversal_cache = {} + + def link_traversal(label: GroupLabel) -> list[GroupLabel]: + def link_traversal_deps(label: GroupLabel): + if label == root_target.label: + return root_target.deps + root_target.exported_deps + + linkable_node = linkable_nodes[label] + if linkable_node.shared_lib: + return linkable_node.exported_deps + else: + return linkable_node.deps + linkable_node.exported_deps + + res = link_traversal_cache.get(label, None) + if res: + return res + res = link_traversal_deps(label) + link_traversal_cache[label] = res + return res + + links = [] + shlib_deps = [] + for label in _rust_matching_topological_traversal([root_target.label], link_traversal): + if label == root_target.label: + links.extend(root_target.constituent_link_infos) + else: + linkable_node = linkable_nodes[label] + links.append(linkable_node.link) + if linkable_node.shared_lib: + shlib_deps.append(label) + extra_runtime_flags = cxx_toolchain.linker_info.shared_dep_runtime_ld_flags or [] if extra_runtime_flags: links.append(LinkInfo(pre_flags = extra_runtime_flags)) From e00f868c7e0cbb00dcc71c125425285d419abc44 Mon Sep 17 00:00:00 2001 From: Lynn Xu Date: Mon, 29 Jan 2024 15:33:58 -0800 Subject: [PATCH 0193/1133] Revert D53098987: Implement [swift-interface] subtarget on apple_library's Differential Revision: D53098987 Original commit changeset: 155b52db0dcf Original Phabricator Diff: D53098987 fbshipit-source-id: e74363c3e43b1665d83849b06a98ae70e3a8c20f --- prelude/apple/apple_library.bzl | 14 +----- prelude/apple/swift/swift_compilation.bzl | 61 ----------------------- 2 files changed, 1 insertion(+), 74 deletions(-) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index ab6b6b37d..530bfc336 100644 --- a/prelude/apple/apple_library.bzl +++ 
b/prelude/apple/apple_library.bzl @@ -17,7 +17,6 @@ load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") load( "@prelude//apple/swift:swift_compilation.bzl", "compile_swift", - "create_swift_interface", "get_swift_anonymous_targets", "get_swift_debug_infos", "get_swift_dependency_info", @@ -191,13 +190,12 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte # to the CWD, so need to add . to be located correctly. resource_dir_args = ["-I."] - module_name = get_module_name(ctx) modular_pre = CPreprocessor( uses_modules = ctx.attrs.uses_modules, modular_args = [ "-fcxx-modules", "-fmodules", - "-fmodule-name=" + module_name, + "-fmodule-name=" + get_module_name(ctx), "-fmodules-cache-path=" + MODULE_CACHE_PATH, ] + resource_dir_args, ) @@ -256,16 +254,6 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte default_outputs = swift_compile.object_files if swift_compile else None, ), ], - "swift-interface": [create_swift_interface( - ctx, - deps_providers, - True, # parse_as_library - module_name, - exported_hdrs, - modulemap_pre, - framework_search_paths_flags, - params.extra_swift_compiler_flags, - )], "swift-output-file-map": [ DefaultInfo( default_output = swift_compile.output_map_artifact if swift_compile else None, diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 380922b02..edfa28800 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -858,67 +858,6 @@ def _create_compilation_database( return SwiftCompilationDatabase(db = cdb_artifact, other_outputs = argfile.cmd_form) -def create_swift_interface( - ctx: AnalysisContext, - deps_providers: list, - parse_as_library: bool, - module_name: str, - exported_headers: list[CHeader], - objc_modulemap_pp_info: [CPreprocessor, None], - framework_search_paths_flags: cmd_args, - extra_search_paths_flags: list[ArgLike] = []) -> DefaultInfo: - 
swiftc_flags = _get_swiftc_flags( - ctx, - deps_providers, - parse_as_library, - module_name, - exported_headers, - objc_modulemap_pp_info, - framework_search_paths_flags, - extra_search_paths_flags, - ) - - swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info - swift_ide_test_tool = swift_toolchain.swift_ide_test_tool - if not swift_ide_test_tool: - return DefaultInfo() - mk_swift_interface = swift_toolchain.mk_swift_interface - - module_name = get_module_name(ctx) - identifier = module_name + ".interface.swift" - - argsfile, _ = ctx.actions.write( - identifier + ".argsfile", - swiftc_flags, - allow_args = True, - ) - interface_artifact = ctx.actions.declare_output(identifier) - - mk_swift_args = cmd_args( - mk_swift_interface, - "--swift-ide-test-tool", - swift_ide_test_tool, - "--module", - module_name, - "--out", - interface_artifact.as_output(), - "--", - cmd_args(cmd_args(argsfile, format = "@{}", delimiter = "")).hidden([swiftc_flags]), - ) - - ctx.actions.run( - mk_swift_args, - category = "mk_swift_interface", - identifier = identifier, - ) - - return DefaultInfo( - default_output = interface_artifact, - other_outputs = [ - argsfile, - ], - ) - def _exported_deps(ctx) -> list[Dependency]: if ctx.attrs.reexport_all_header_dependencies: return ctx.attrs.exported_deps + ctx.attrs.deps From fd9d63858f9710da3cc9a1861bf8feecdf55384d Mon Sep 17 00:00:00 2001 From: Dan Zimmerman Date: Mon, 29 Jan 2024 17:46:55 -0800 Subject: [PATCH 0194/1133] Back out "Move serialize_debugging_options warnings into compile_swift" Summary: Original commit changeset: 842f47e55ea3 Original Phabricator Diff: D53098986 Backing out this commit, as the newer impl of `swift-interface` doesn't require this refactor, and it's easier to see what the new impl actual changes by first backing out this change. 
Reviewed By: drodriguez Differential Revision: D53203806 fbshipit-source-id: 3affd882ada76edd4100e5b2ebc5a3aecfd80d87 --- prelude/apple/swift/swift_compilation.bzl | 28 +++++++++++------------ 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index edfa28800..2a94001ad 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -201,19 +201,6 @@ def compile_swift( toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info module_name = get_module_name(ctx) - - # See _get_base_flags for where this flag is actually set. We move the - # warnings in here because _get_swiftc_flags is called even if srcs is None - # (for the [swift-interface] subtarget), and we only want to warn if we're - # actually compiling swift files. - if ctx.attrs.serialize_debugging_options: - if exported_headers: - # TODO(T99100029): We cannot use VFS overlays with Buck2, so we have to disable - # serializing debugging options for mixed libraries to debug successfully - warning("Mixed libraries cannot serialize debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) - elif not toolchain.prefix_serialized_debugging_options: - warning("The current toolchain does not support prefixing serialized debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) - output_header = ctx.actions.declare_output(module_name + "-Swift.h") output_swiftmodule = ctx.actions.declare_output(module_name + SWIFTMODULE_EXTENSION) @@ -540,8 +527,19 @@ def _get_base_flags( else: cmd.add(["-enable-experimental-cxx-interop"]) - serialize_debugging_options = ctx.attrs.serialize_debugging_options and \ - not objc_headers and toolchain.prefix_serialized_debugging_options + serialize_debugging_options = False + if ctx.attrs.serialize_debugging_options: + if objc_headers: + # TODO(T99100029): We cannot 
use VFS overlays with Buck2, so we have to disable + # serializing debugging options for mixed libraries to debug successfully + warning("Mixed libraries cannot serialize debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) + elif not toolchain.prefix_serialized_debugging_options: + warning("The current toolchain does not support prefixing serialized debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) + else: + # Apply the debug prefix map to Swift serialized debugging info. + # This will allow for debugging remotely built swiftmodule files. + serialize_debugging_options = True + if serialize_debugging_options: cmd.add([ "-Xfrontend", From 6f38dca5822b7d2678d70b56a73707f6a4833a8f Mon Sep 17 00:00:00 2001 From: Balaji S Date: Tue, 30 Jan 2024 05:39:04 -0800 Subject: [PATCH 0195/1133] Add type specs to functions of test.erl Summary: * We add type specs to most functions, fix it where possible to remove all typing error * Some are not possible as we don't want to use dynamic_cast here and it is too much effort to write parsing for them imo Reviewed By: jcpetruzza Differential Revision: D53182782 fbshipit-source-id: c7b887e3b9d159dd0cba894bf1fe58596292dcaa --- .../common_test/test_cli_lib/src/test.erl | 46 ++++++++++++------- .../common_test/test_exec/src/ct_daemon.erl | 2 +- 2 files changed, 31 insertions(+), 17 deletions(-) diff --git a/prelude/erlang/common_test/test_cli_lib/src/test.erl b/prelude/erlang/common_test/test_cli_lib/src/test.erl index 0b272bf14..52420ece2 100644 --- a/prelude/erlang/common_test/test_cli_lib/src/test.erl +++ b/prelude/erlang/common_test/test_cli_lib/src/test.erl @@ -34,7 +34,9 @@ start_shell/0 ]). --type run_spec() :: string() | non_neg_integer() | [#{name := string(), suite := string()}]. +-type test_id() :: string() | non_neg_integer(). +-type test_info() :: #{name := string(), suite := atom()}. +-type run_spec() :: test_id() | [test_info()]. 
-type run_result() :: {non_neg_integer(), non_neg_integer()}. -spec start() -> ok. @@ -75,7 +77,7 @@ help() -> io:format("For more information, use the built in help, e.g. h(test, help)~n"), ok. --spec print_help(function(), arity()) -> ok. +-spec print_help(Fun :: atom(), arity()) -> ok. print_help(Fun, Arity) -> #{args := Args, desc := [DescFirst | DescRest]} = command_description(Fun, Arity), FunSig = string:pad( @@ -83,9 +85,10 @@ print_help(Fun, Arity) -> ), io:format("~s -- ~s~n", [FunSig, DescFirst]), Padding = string:pad("", 34), - [io:format("~s~s~n", [Padding, DescLine]) || DescLine <- DescRest]. + [io:format("~s~s~n", [Padding, DescLine]) || DescLine <- DescRest], + ok. --spec command_description(module(), arity()) -> #{args := [string()], desc := string()}. +-spec command_description(Fun :: atom(), arity()) -> #{args := [string()], desc := [string()]}. command_description(help, 0) -> #{args => [], desc => ["print help"]}; command_description(info, 0) -> @@ -130,7 +133,7 @@ command_description(F, A) -> %% @doc List all available tests %% @equiv test:list("") --spec list() -> non_neg_integer(). +-spec list() -> ok | {error, term()}. list() -> list(""). @@ -138,26 +141,27 @@ list() -> %% [https://www.erlang.org/doc/man/re.html#regexp_syntax] for the supported %% regular expression syntax. If a module is given as argument, list all %% tests from that module instead --spec list(RegExOrModule :: module() | string()) -> non_neg_integer(). +-spec list(RegExOrModule :: module() | string()) -> ok | {error, term()}. list(RegEx) when is_list(RegEx) -> ensure_initialized(), - Tests = ct_daemon:list(RegEx), - print_tests(Tests). + case ct_daemon:list(RegEx) of + {invalid_regex, _} = Err -> {error, Err}; + Tests -> print_tests(Tests) + end. %% @doc Run a test given by either the test id from the last list() command, or %% a regex that matches exactly one test. Tests are run with the shortest possible %% setup. 
This call does not recompile the test suite and its dependencies, but %% runs them as is. You can manually recompile code with c(Module). %% To reset the test state use reset(). --spec rerun(string() | non_neg_integer() | [#{name := string(), suite := string()}]) -> - run_result(). +-spec rerun(run_spec()) -> run_result(). rerun(Spec) -> ensure_initialized(), do_plain_test_run(Spec). %% @doc update code and run all tests %% @equiv run("") --spec run() -> ok | error. +-spec run() -> run_result() | error. run() -> run(""). @@ -223,6 +227,7 @@ ensure_initialized() -> ok end. +-spec init_utility_apps() -> boolean(). init_utility_apps() -> RunningApps = proplists:get_value(running, application:info()), case proplists:is_defined(test_cli_lib, RunningApps) of @@ -240,6 +245,7 @@ init_utility_apps() -> end end. +-spec init_node() -> boolean(). init_node() -> case ct_daemon:alive() of true -> @@ -266,6 +272,7 @@ init_node() -> true end. +-spec watchdog() -> no_return(). watchdog() -> Node = ct_daemon_node:get_node(), true = erlang:monitor_node(Node, true), @@ -279,6 +286,7 @@ watchdog() -> erlang:halt() end. +-spec init_group_leader() -> boolean(). init_group_leader() -> %% set the group leader unconditionally, we need to do this since %% during init, the group leader is different then the one from the @@ -286,11 +294,13 @@ init_group_leader() -> ct_daemon:set_gl(), false. +-spec print_tests([{module(), [{non_neg_integer(), string()}]}]) -> ok. print_tests([]) -> io:format("no tests found~n"); print_tests(Tests) -> print_tests_impl(lists:reverse(Tests)). +-spec print_tests_impl([{module(), [{non_neg_integer(), string()}]}]) -> ok. print_tests_impl([]) -> ok; print_tests_impl([{Suite, SuiteTests} | Rest]) -> @@ -300,9 +310,12 @@ print_tests_impl([{Suite, SuiteTests} | Rest]) -> -spec is_debug_session() -> boolean(). is_debug_session() -> - application:get_env(test_cli_lib, debugger_mode, false). 
+ case application:get_env(test_cli_lib, debugger_mode, false) of + Value when is_boolean(Value) -> + Value + end. --spec collect_results(#{module => [string()]}) -> #{string => ct_daemon_core:run_result()}. +-spec collect_results(#{module => [string()]}) -> #{string() => ct_daemon_core:run_result()}. collect_results(PerSuite) -> maps:fold( fun(Suite, Tests, Acc) -> @@ -337,7 +350,7 @@ ensure_per_suite_encapsulation(Suite) -> end end. --spec discover(string() | non_neg_integer()) -> [#{name := string(), suite := string()}]. +-spec discover(string() | non_neg_integer()) -> [test_info()]. discover(RegExOrId) -> case ct_daemon:discover(RegExOrId) of {error, not_listed_yet} -> @@ -382,11 +395,12 @@ do_plain_test_run(RegExOrId) -> ToRun -> do_plain_test_run(ToRun) end. --spec start_shell() -> no_return(). +-spec start_shell() -> ok | {error, term()}. start_shell() -> case string:to_integer(erlang:system_info(otp_release)) of {Version, _} when Version >= 26 -> shell:start_interactive(); _ -> - user_drv:start() + user_drv:start(), + ok end. diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon.erl index b5a4fb7fe..c2a1aa6d1 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon.erl @@ -89,7 +89,7 @@ list(RegEx) -> end. -spec discover(pos_integer() | string()) -> - #{suite := module(), name := string()} + [#{suite := module(), name := string()}] | ct_daemon_runner:discover_error(). discover(RegExOrId) -> do_call({discover, RegExOrId}). 
From 9f7716e6fe11366acfb6bac2ab661ccf3f4bfc41 Mon Sep 17 00:00:00 2001 From: Dan Zimmerman Date: Tue, 30 Jan 2024 06:02:23 -0800 Subject: [PATCH 0196/1133] Back out "Move all flag calculation out of compile_swift" Summary: Original commit changeset: c6d5470ee24a Original Phabricator Diff: D53098989 Backing out this commit, as the newer impl of `swift-interface` doesn't require this refactor, and it's easier to see what the new impl actual changes by first backing out this change. Reviewed By: drodriguez Differential Revision: D53203808 fbshipit-source-id: c5181c9c61a65d87cff0e7f3c62f50ee29dbc430 --- prelude/apple/swift/swift_compilation.bzl | 90 +++++++++-------------- 1 file changed, 34 insertions(+), 56 deletions(-) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 2a94001ad..5dd03ffc5 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -198,6 +198,36 @@ def compile_swift( if not srcs: return None + # If this target imports XCTest we need to pass the search path to its swiftmodule. + framework_search_paths = cmd_args() + framework_search_paths.add(_get_xctest_swiftmodule_search_path(ctx)) + + # Pass the framework search paths to the driver and clang importer. This is required + # for pcm compilation, which does not pass through driver search paths. + framework_search_paths.add(framework_search_paths_flags) + framework_search_paths.add(cmd_args(framework_search_paths_flags, prepend = "-Xcc")) + + # If a target exports ObjC headers and Swift explicit modules are enabled, + # we need to precompile a PCM of the underlying module and supply it to the Swift compilation. 
+ if objc_modulemap_pp_info and ctx.attrs.uses_explicit_modules: + underlying_swift_pcm_uncompiled_info = get_swift_pcm_uncompile_info( + ctx, + None, + objc_modulemap_pp_info, + ) + if underlying_swift_pcm_uncompiled_info: + compiled_underlying_pcm = compile_underlying_pcm( + ctx, + underlying_swift_pcm_uncompiled_info, + deps_providers, + get_swift_cxx_flags(ctx), + framework_search_paths, + ) + else: + compiled_underlying_pcm = None + else: + compiled_underlying_pcm = None + toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info module_name = get_module_name(ctx) @@ -205,16 +235,17 @@ def compile_swift( output_swiftmodule = ctx.actions.declare_output(module_name + SWIFTMODULE_EXTENSION) - shared_flags = _get_swiftc_flags( + shared_flags = _get_shared_flags( ctx, deps_providers, parse_as_library, + compiled_underlying_pcm, module_name, exported_headers, objc_modulemap_pp_info, - framework_search_paths_flags, extra_search_paths_flags, ) + shared_flags.add(framework_search_paths) if toolchain.can_toolchain_emit_obj_c_header_textually: _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, output_header) @@ -411,60 +442,7 @@ def _compile_with_argsfile( # Swift correctly handles relative paths and we can utilize the relative argsfile for absolute paths. return CompileArgsfiles(relative = {extension: relative_argsfile}, absolute = {extension: relative_argsfile}) -def _get_swiftc_flags( - ctx: AnalysisContext, - deps_providers: list, - parse_as_library: bool, - module_name: str, - exported_headers: list[CHeader], - objc_modulemap_pp_info: [CPreprocessor, None], - framework_search_paths_flags: cmd_args, - extra_search_paths_flags: list[ArgLike] = []) -> cmd_args: - # If this target imports XCTest we need to pass the search path to its swiftmodule. 
- framework_search_paths = cmd_args() - framework_search_paths.add(_get_xctest_swiftmodule_search_path(ctx)) - - # Pass the framework search paths to the driver and clang importer. This is required - # for pcm compilation, which does not pass through driver search paths. - framework_search_paths.add(framework_search_paths_flags) - framework_search_paths.add(cmd_args(framework_search_paths_flags, prepend = "-Xcc")) - - # If a target exports ObjC headers and Swift explicit modules are enabled, - # we need to precompile a PCM of the underlying module and supply it to the Swift compilation. - if objc_modulemap_pp_info and ctx.attrs.uses_explicit_modules: - underlying_swift_pcm_uncompiled_info = get_swift_pcm_uncompile_info( - ctx, - None, - objc_modulemap_pp_info, - ) - if underlying_swift_pcm_uncompiled_info: - compiled_underlying_pcm = compile_underlying_pcm( - ctx, - underlying_swift_pcm_uncompiled_info, - deps_providers, - get_swift_cxx_flags(ctx), - framework_search_paths, - ) - else: - compiled_underlying_pcm = None - else: - compiled_underlying_pcm = None - - shared_flags = _get_base_flags( - ctx, - deps_providers, - parse_as_library, - compiled_underlying_pcm, - module_name, - exported_headers, - objc_modulemap_pp_info, - extra_search_paths_flags, - ) - shared_flags.add(framework_search_paths) - - return shared_flags - -def _get_base_flags( +def _get_shared_flags( ctx: AnalysisContext, deps_providers: list, parse_as_library: bool, From 1ed28297bc299e9894ebc56f58a3e2ea423ec1b2 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Tue, 30 Jan 2024 06:46:44 -0800 Subject: [PATCH 0197/1133] Materialize external debug info for C++ tests Summary: D51189971 only fixed this for `cxx_binary` targets. This diff applies the same fix for `cxx_test` targets. 
Reviewed By: Imxset21 Differential Revision: D53215207 fbshipit-source-id: ced16bc98a9b60d3af6abd32f45ce96468328de9 --- prelude/cxx/cxx.bzl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index 54f13960c..97a153f8d 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -675,7 +675,11 @@ def cxx_test_impl(ctx: AnalysisContext) -> list[Provider]: use_project_relative_paths = re_executor != None, ), ) + [ - DefaultInfo(default_output = output.binary, other_outputs = output.runtime_files, sub_targets = output.sub_targets), + DefaultInfo( + default_output = output.binary, + other_outputs = output.runtime_files + output.external_debug_info_artifacts, + sub_targets = output.sub_targets, + ), output.compilation_db, output.xcode_data, ] From 9bc46838a7059b5edc318f7b553e2e19c1a6b98b Mon Sep 17 00:00:00 2001 From: Tianyu Li Date: Tue, 30 Jan 2024 07:49:49 -0800 Subject: [PATCH 0198/1133] Pass through allow_cache_upload for cxx header Summary: "generate_hmap" is the top rules that did not hit RE cache for wa-android builds. 
Similar to the diff before, this allows this locally executed rules to upload to RE cache Differential Revision: D53034059 fbshipit-source-id: d888ddd24092a2599a3dc02885ac9fae5cd6b24a --- prelude/cxx/headers.bzl | 2 +- prelude/decls/cxx_rules.bzl | 6 ++++-- prelude/decls/ios_rules.bzl | 3 ++- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/prelude/cxx/headers.bzl b/prelude/cxx/headers.bzl index 150920d2d..d41dc1f29 100644 --- a/prelude/cxx/headers.bzl +++ b/prelude/cxx/headers.bzl @@ -358,5 +358,5 @@ def _mk_hmap(ctx: AnalysisContext, name: str, headers: dict[str, (Artifact, str) cmd.add(["--mappings-file", hmap_args_file]).hidden(header_args) if project_root_file: cmd.add(["--project-root-file", project_root_file]) - ctx.actions.run(cmd, category = "generate_hmap", identifier = name) + ctx.actions.run(cmd, category = "generate_hmap", identifier = name, allow_cache_upload = ctx.attrs.allow_cache_upload) return output diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index ebc326fce..8234ff2d6 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -850,7 +850,8 @@ cxx_test = prelude_rule( "use_default_test_main": attrs.option(attrs.bool(), default = None), "version_universe": attrs.option(attrs.string(), default = None), "weak_framework_names": attrs.list(attrs.string(), default = []), - } + } | + buck.allow_cache_upload_arg() ), ) @@ -1094,7 +1095,8 @@ prebuilt_cxx_library = prelude_rule( "versioned_soname": attrs.option(attrs.versioned(attrs.string()), default = None), "versioned_static_lib": attrs.option(attrs.versioned(attrs.source()), default = None), "versioned_static_pic_lib": attrs.option(attrs.versioned(attrs.source()), default = None), - } + } | + buck.allow_cache_upload_arg() ), ) diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index 213bb5558..69a7019e7 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -225,7 +225,8 @@ apple_binary = prelude_rule( 
"uses_modules": attrs.bool(default = False), "xcode_private_headers_symlinks": attrs.option(attrs.bool(), default = None), "xcode_public_headers_symlinks": attrs.option(attrs.bool(), default = None), - } + } | + buck.allow_cache_upload_arg() ), ) From 1a4638730a72f2219fc908ddd50dc905f88f15a5 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Tue, 30 Jan 2024 07:58:09 -0800 Subject: [PATCH 0199/1133] Remove bundle checks gating Summary: Bundling checks already rolled out due to the `select()` branch ``` "DEFAULT": True, ``` Remove code to be able to control checking behaviour. Reviewed By: d16r Differential Revision: D53182291 fbshipit-source-id: b6a5fc52e8bd7cf25ce983cde389ef60fb5c0c84 --- prelude/apple/apple_bundle_config.bzl | 12 ------------ prelude/apple/apple_bundle_part.bzl | 4 +--- prelude/apple/apple_rules_impl_utility.bzl | 1 - 3 files changed, 1 insertion(+), 16 deletions(-) diff --git a/prelude/apple/apple_bundle_config.bzl b/prelude/apple/apple_bundle_config.bzl index 507d9f9de..7dd9d79bd 100644 --- a/prelude/apple/apple_bundle_config.bzl +++ b/prelude/apple/apple_bundle_config.bzl @@ -11,23 +11,11 @@ def _maybe_get_bool(config: str, default: [None, bool]) -> [None, bool]: return default return result.lower() == "true" -def _get_bundling_path_conflicts_check_enabled(): - check_enabled = _maybe_get_bool("bundling_path_conflicts_check_enabled", None) - if check_enabled != None: - return check_enabled - - return select({ - "DEFAULT": True, - "ovr_config//features/apple/constraints:bundling_path_conflicts_check_disabled": False, - "ovr_config//features/apple/constraints:bundling_path_conflicts_check_enabled": True, - }) - def apple_bundle_config() -> dict[str, typing.Any]: return { "_bundling_cache_buster": read_root_config("apple", "bundling_cache_buster", None), "_bundling_log_file_enabled": _maybe_get_bool("bundling_log_file_enabled", True), "_bundling_log_file_level": read_root_config("apple", "bundling_log_file_level", None), - 
"_bundling_path_conflicts_check_enabled": _get_bundling_path_conflicts_check_enabled(), "_codesign_type": read_root_config("apple", "codesign_type_override", None), "_compile_resources_locally_override": _maybe_get_bool("compile_resources_locally_override", None), "_dry_run_code_signing": _maybe_get_bool("dry_run_code_signing", False), diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index b51c73ab5..8f53e9da5 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -177,9 +177,7 @@ def assemble_bundle( command.add("--log-level-file", ctx.attrs._bundling_log_file_level) subtargets["bundling-log"] = [DefaultInfo(default_output = bundling_log_output)] - if ctx.attrs._bundling_path_conflicts_check_enabled: - command.add("--check-conflicts") - + command.add("--check-conflicts") command.add(codesign_configuration_args) # Ensures any genrule deps get built, such targets are used for validation diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index fcdc77269..428d8541d 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -58,7 +58,6 @@ def _apple_bundle_like_common_attrs(): "_bundling_cache_buster": attrs.option(attrs.string(), default = None), "_bundling_log_file_enabled": attrs.bool(default = False), "_bundling_log_file_level": attrs.option(attrs.string(), default = None), - "_bundling_path_conflicts_check_enabled": attrs.bool(default = False), "_codesign_type": attrs.option(attrs.enum(CodeSignType.values()), default = None), "_compile_resources_locally_override": attrs.option(attrs.bool(), default = None), "_dry_run_code_signing": attrs.bool(default = False), From ad4627504eb00aac5b43414423b5286c50f3e206 Mon Sep 17 00:00:00 2001 From: Jacob Rodal Date: Tue, 30 Jan 2024 12:22:04 -0800 Subject: [PATCH 0200/1133] aapt2_link shorten filepaths Summary: # What - Shorten the identifiers from 
`[use|not]_proto_format` -> `[use|not]_proto`. - Shorten `initial-rdotjava` -> `init-rjava` # Why I recently landed D53102805 and D53108313 to fix some long filepath issues on Windows. As I continue migrating targets to buck2, I keep running into long path issues related to this action. These changes shorten the filepaths by 13 characters, which gives a bit more breathing room and should reduce the number of target name changes that need to be made. Differential Revision: D53201812 fbshipit-source-id: fa15b414312c2f64147f371e36fcc28be1621bfc --- prelude/android/aapt2_link.bzl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prelude/android/aapt2_link.bzl b/prelude/android/aapt2_link.bzl index 6a0ddea23..4044e2d3f 100644 --- a/prelude/android/aapt2_link.bzl +++ b/prelude/android/aapt2_link.bzl @@ -33,9 +33,9 @@ def get_aapt2_link( link_infos = [] for use_proto_format in [False, True]: if use_proto_format: - identifier = "use_proto_format" + identifier = "use_proto" else: - identifier = "not_proto_format" + identifier = "not_proto" aapt2_command = cmd_args(android_toolchain.aapt2) aapt2_command.add("link") @@ -48,7 +48,7 @@ def get_aapt2_link( aapt2_command.add(["--proguard", proguard_config.as_output()]) # We don't need the R.java output, but aapt2 won't output R.txt unless we also request R.java. 
- r_dot_java = ctx.actions.declare_output("{}/initial-rdotjava".format(identifier), dir = True) + r_dot_java = ctx.actions.declare_output("{}/init-rjava".format(identifier), dir = True) aapt2_command.add(["--java", r_dot_java.as_output()]) r_dot_txt = ctx.actions.declare_output("{}/R.txt".format(identifier)) aapt2_command.add(["--output-text-symbols", r_dot_txt.as_output()]) From 45f1bf05e8ffcddf02cf9f298d87ec4f99d0db4d Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 31 Jan 2024 07:53:14 -0800 Subject: [PATCH 0201/1133] Merge concat operations into rustc action Summary: Today's remote execution SEV (S391893) made me aware what an enormous number of RE actions we are spawning for Rust. This diff collapses all those tiny `concat.py` actions into just having the main `rustc_action.py` action handle the string concatenations we need for rustc args. Reviewed By: zertosh, shayne-fletcher Differential Revision: D53251928 fbshipit-source-id: 267ada422c7e1075f1e346ebd4ce70442e9a8c82 --- prelude/rust/build.bzl | 8 ++- prelude/rust/context.bzl | 11 ---- prelude/rust/extern.bzl | 81 ++++++------------------------ prelude/rust/rust_toolchain.bzl | 1 - prelude/rust/tools/BUCK.v2 | 6 --- prelude/rust/tools/attrs.bzl | 1 - prelude/rust/tools/concat.py | 54 -------------------- prelude/rust/tools/rustc_action.py | 22 +++++++- prelude/toolchains/rust.bzl | 1 - 9 files changed, 38 insertions(+), 147 deletions(-) delete mode 100755 prelude/rust/tools/concat.py diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 4dfb5bba3..44fef4ad7 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -143,8 +143,6 @@ def compile_context(ctx: AnalysisContext) -> CompileContext: linker_args = linker, clippy_wrapper = clippy_wrapper, common_args = {}, - flagfiles_for_extern = {}, - flagfiles_for_crate_map = {}, transitive_dependency_dirs = {}, ) @@ -335,7 +333,7 @@ def generate_rustdoc_test( toolchain_info.rustdoc_flags, ctx.attrs.rustdoc_flags, common_args.args, - 
extern_arg(ctx, compile_ctx, [], attr_crate(ctx), rlib), + extern_arg([], attr_crate(ctx), rlib), "--extern=proc_macro" if ctx.attrs.proc_macro else [], compile_ctx.linker_args, cmd_args(linker_argsfile, format = "-Clink-arg=@{}"), @@ -706,7 +704,7 @@ def dependency_args( strategy = strategy_info(info, dep_link_strategy) transitive_deps[strategy.rmeta if use_rmeta else strategy.rlib] = info.crate - args.add(extern_arg(ctx, compile_ctx, dep.flags, crate, artifact)) + args.add(extern_arg(dep.flags, crate, artifact)) crate_targets.append((crate, dep.label)) # Because deps of this *target* can also be transitive deps of this compiler @@ -1199,7 +1197,7 @@ def _rustc_invoke( ) for k, v in crate_map: - compile_cmd.add(crate_map_arg(ctx, compile_ctx, k, v)) + compile_cmd.add(crate_map_arg(k, v)) for k, v in plain_env.items(): compile_cmd.add(cmd_args("--env=", k, "=", v, delimiter = "")) for k, v in path_env.items(): diff --git a/prelude/rust/context.bzl b/prelude/rust/context.bzl index 22e494d35..6f0ced6bf 100644 --- a/prelude/rust/context.bzl +++ b/prelude/rust/context.bzl @@ -26,15 +26,6 @@ CommonArgsInfo = record( crate_map = field(list[(CrateName, Label)]), ) -ExternArg = record( - flags = str, - lib = field(Artifact), -) - -CrateMapArg = record( - label = field(Label), -) - # Information that determines how dependencies should be collected DepCollectionContext = record( advanced_unstable_linking = field(bool), @@ -63,7 +54,5 @@ CompileContext = record( clippy_wrapper = field(cmd_args), # Memoized common args for reuse. 
common_args = field(dict[(CrateType, Emit, LinkStrategy, bool), CommonArgsInfo]), - flagfiles_for_extern = field(dict[ExternArg, Artifact]), - flagfiles_for_crate_map = field(dict[CrateMapArg, Artifact]), transitive_dependency_dirs = field(dict[Artifact, None]), ) diff --git a/prelude/rust/extern.bzl b/prelude/rust/extern.bzl index 443e4db85..d2702ded1 100644 --- a/prelude/rust/extern.bzl +++ b/prelude/rust/extern.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load(":context.bzl", "CompileContext", "CrateMapArg", "CrateName", "ExternArg") +load(":context.bzl", "CrateName") # Create `--extern` flag. For crates with a name computed during analysis: # @@ -13,48 +13,22 @@ load(":context.bzl", "CompileContext", "CrateMapArg", "CrateName", "ExternArg") # # For crates with a name computed during build: # -# --extern @extern/libPROVISIONAL +# --extern=$(cat path/to/REALNAME)=path/to/libPROVISIONAL.rlib # -# where extern/libPROVISIONAL holds a flag containing the real crate name: -# -# REALNAME=path/to/libPROVISIONAL.rlib -# -# The `compile_ctx` may be omitted for non-dynamic crate names -def extern_arg( - ctx: AnalysisContext, - compile_ctx: CompileContext | None, - flags: list[str], - crate: CrateName, - lib: Artifact) -> cmd_args: +def extern_arg(flags: list[str], crate: CrateName, lib: Artifact) -> cmd_args: if flags == []: flags = "" else: flags = ",".join(flags) + ":" if crate.dynamic: - args = ExternArg(flags = flags, lib = lib) - flagfile = compile_ctx.flagfiles_for_extern.get(args, None) - if not flagfile: - flagfile = ctx.actions.declare_output("extern/{}".format(lib.short_path)) - concat_cmd = [ - compile_ctx.toolchain_info.concat_tool, - "--output", - flagfile.as_output(), - "--", - flags, - cmd_args("@", crate.dynamic, delimiter = ""), - "=", - cmd_args(lib).ignore_artifacts(), - ] - ctx.actions.run( - concat_cmd, - category = "concat", - identifier = 
str(len(compile_ctx.flagfiles_for_extern)), - ) - compile_ctx.flagfiles_for_extern[args] = flagfile - return cmd_args("--extern", cmd_args("@", flagfile, delimiter = "")).hidden(lib) + # TODO: consider using `cmd_args(crate.dynamic, quote = "json")` so it + # doesn't fall apart on paths containing ')' + crate_name = cmd_args(crate.dynamic, format = "$(cat {})") else: - return cmd_args("--extern=", flags, crate.simple, "=", lib, delimiter = "") + crate_name = crate.simple + + return cmd_args("--extern=", flags, crate_name, "=", lib, delimiter = "") # Create `--crate-map` flag. For crates with a name computed during analysis: # @@ -62,37 +36,12 @@ def extern_arg( # # For crates with a name computed during build: # -# --crate-map @cratemap/path/to/target +# --crate-map=$(cat path/to/REALNAME)=//path/to:target # -# where cratemap/path/to/target holds a flag containing the real crate name: -# -# REALNAME=//path/to:target -# -def crate_map_arg( - ctx: AnalysisContext, - compile_ctx: CompileContext, - crate: CrateName, - label: Label) -> cmd_args: +def crate_map_arg(crate: CrateName, label: Label) -> cmd_args: if crate.dynamic: - args = CrateMapArg(label = label) - flagfile = compile_ctx.flagfiles_for_crate_map.get(args, None) - if not flagfile: - flagfile = ctx.actions.declare_output("cratemap/{}/{}/{}".format(label.cell, label.package, label.name)) - concat_cmd = [ - compile_ctx.toolchain_info.concat_tool, - "--output", - flagfile.as_output(), - "--", - cmd_args("@", crate.dynamic, delimiter = ""), - "=", - str(label.raw_target()), - ] - ctx.actions.run( - concat_cmd, - category = "cratemap", - identifier = str(len(compile_ctx.flagfiles_for_crate_map)), - ) - compile_ctx.flagfiles_for_crate_map[args] = flagfile - return cmd_args("--crate-map", cmd_args("@", flagfile, delimiter = "")) + crate_name = cmd_args(crate.dynamic, format = "$(cat {})") else: - return cmd_args("--crate-map=", crate.simple, "=", str(label.raw_target()), delimiter = "") + crate_name = crate.simple 
+ + return cmd_args("--crate-map=", crate_name, "=", str(label.raw_target()), delimiter = "") diff --git a/prelude/rust/rust_toolchain.bzl b/prelude/rust/rust_toolchain.bzl index b0a66a978..facf9a768 100644 --- a/prelude/rust/rust_toolchain.bzl +++ b/prelude/rust/rust_toolchain.bzl @@ -91,7 +91,6 @@ rust_toolchain_attrs = { # linking types in signatures to their definition in another crate. "extern_html_root_url_prefix": provider_field(str | None, default = None), # Utilities used for building flagfiles containing dynamic crate names - "concat_tool": provider_field(RunInfo | None, default = None), "transitive_dependency_symlinks_tool": provider_field(RunInfo | None, default = None), # Setting this enables additional behaviors that improves linking at the # cost of using unstable implementation details of rustc. At the moment, diff --git a/prelude/rust/tools/BUCK.v2 b/prelude/rust/tools/BUCK.v2 index c40ac525c..f3f1bada5 100644 --- a/prelude/rust/tools/BUCK.v2 +++ b/prelude/rust/tools/BUCK.v2 @@ -25,12 +25,6 @@ prelude.python_bootstrap_binary( visibility = ["PUBLIC"], ) -prelude.python_bootstrap_binary( - name = "concat", - main = "concat.py", - visibility = ["PUBLIC"], -) - prelude.python_bootstrap_binary( name = "transitive_dependency_symlinks", main = "transitive_dependency_symlinks.py", diff --git a/prelude/rust/tools/attrs.bzl b/prelude/rust/tools/attrs.bzl index 7d4231e8f..f5fb89307 100644 --- a/prelude/rust/tools/attrs.bzl +++ b/prelude/rust/tools/attrs.bzl @@ -12,7 +12,6 @@ def _internal_tool(default: str) -> Attr: # configurable attributes there. This list of internal tools is distracting and # expected to grow. 
internal_tool_attrs = { - "concat_tool": _internal_tool("prelude//rust/tools:concat"), "failure_filter_action": _internal_tool("prelude//rust/tools:failure_filter_action"), "rustc_action": _internal_tool("prelude//rust/tools:rustc_action"), "rustdoc_test_with_resources": _internal_tool("prelude//rust/tools:rustdoc_test_with_resources"), diff --git a/prelude/rust/tools/concat.py b/prelude/rust/tools/concat.py deleted file mode 100755 index 6dfb8723f..000000000 --- a/prelude/rust/tools/concat.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -# A tool to concatenate strings, some of which may be from @files. ¯\_(ツ)_/¯ -# -# Rustc's command line requires dependencies to be provided as: -# -# --extern cratename=path/to/libcratename.rlib -# -# In Buck, sometimes the cratename is computed at build time, for example -# extracted from a Thrift file. Rustc's "@" support isn't sufficient for this -# because the following doesn't make sense: -# -# --extern @filecontainingcrate=path/to/libcratename.rlib -# -# and the cratename isn't able to be its own argument: -# -# --extern @filecontainingcrate =path/to/libcratename.rlib -# -# Instead we use Python to make a single file containing the dynamic cratename -# and the rlib filepath concatenated together. 
-# -# concat.py --output $TMP -- @filecontainingcrate = path/to/libcratename.rlib -# -# then: -# -# --extern @$TMP -# - -import argparse -from typing import IO, List, NamedTuple - - -class Args(NamedTuple): - output: IO[str] - strings: List[str] - - -def main(): - parser = argparse.ArgumentParser(fromfile_prefix_chars="@") - parser.add_argument("--output", type=argparse.FileType("w")) - parser.add_argument("strings", nargs="*", type=str) - args = Args(**vars(parser.parse_args())) - - args.output.write("".join(args.strings)) - - -if __name__ == "__main__": - main() diff --git a/prelude/rust/tools/rustc_action.py b/prelude/rust/tools/rustc_action.py index 4a9641016..b6f998441 100755 --- a/prelude/rust/tools/rustc_action.py +++ b/prelude/rust/tools/rustc_action.py @@ -128,6 +128,24 @@ def arg_parse() -> Args: return Args(**vars(parser.parse_args())) +def arg_eval(arg: str) -> str: + """ + Expand an argument such as --extern=$(cat buck-out/v2/gen/foo.txt)=buck-out/dev/gen/libfoo.rlib + """ + expanded = "" + + while True: + begin = arg.find("$(cat ") + if begin == -1: + return expanded + arg + expanded += arg[:begin] + begin += len("$(cat ") + path, rest = arg[begin:].split(")", maxsplit=1) + with open(path, encoding="utf-8") as f: + expanded += f.read().strip() + arg = rest + + async def handle_output( # noqa: C901 proc: asyncio.subprocess.Process, args: Args, @@ -261,7 +279,7 @@ async def main() -> int: print(f"args {repr(args)} env {env} crate_map {crate_map}", end="\n") rustc_cmd = args.rustc[:1] - rustc_args = args.rustc[1:] + rustc_args = [arg_eval(arg) for arg in args.rustc[1:]] if args.remap_cwd_prefix is not None: rustc_args.append( @@ -308,7 +326,7 @@ async def main() -> int: # Check for death by signal - this is always considered a failure if res < 0: - cmdline = " ".join(shlex.quote(arg) for arg in args.rustc) + cmdline = " ".join(shlex.quote(arg) for arg in rustc_cmd + rustc_args) eprint(f"Command exited with signal {-res}: command line: {cmdline}") elif 
args.failure_filter: # If failure filtering is enabled, then getting an error diagnostic is also diff --git a/prelude/toolchains/rust.bzl b/prelude/toolchains/rust.bzl index 9f5a85251..d018edded 100644 --- a/prelude/toolchains/rust.bzl +++ b/prelude/toolchains/rust.bzl @@ -42,7 +42,6 @@ def _system_rust_toolchain_impl(ctx): clippy_driver = RunInfo(args = ["clippy-driver"]), clippy_toml = ctx.attrs.clippy_toml[DefaultInfo].default_outputs[0] if ctx.attrs.clippy_toml else None, compiler = RunInfo(args = ["rustc"]), - concat_tool = ctx.attrs.concat_tool[RunInfo], default_edition = ctx.attrs.default_edition, panic_runtime = PanicRuntime("unwind"), deny_lints = ctx.attrs.deny_lints, From 8f45377a825e9609075c8954415dc495c4edbbb9 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Wed, 31 Jan 2024 09:44:11 -0800 Subject: [PATCH 0202/1133] Sanitizers: add sanitizer fields to `cxx_toolchain()` Summary: Adds two fields to `cxx_toolchain()`, so that the correct sanitizer runtime libs can be referenced and bundled as required. - `sanitizer_runtime_dir`: - `sanitizer_runtime_enabled`: controls whether the sanitizer runtime directory would be referenced during linking using `rpath` linker flags and whether the runtime would be embedded in any standalone app bundles (e.g., macOS applications). The directory and flag to enable are separated out, for easier debugging and visibility (vs having an empty `sanitizer_runtime_dir` control both). `sanitizer_runtime_dir` is a dep and not an exec-dep because that allows the runtime to be customised on a per-target platform basis (e.g., macOS vs iOS get different runtime dylibs). 
Reviewed By: blackm00n Differential Revision: D53264639 fbshipit-source-id: ea987ff0188b3c9e7d7f3287bb928ed95e63f2d5 --- prelude/cxx/cxx_toolchain.bzl | 4 ++++ prelude/cxx/cxx_toolchain_types.bzl | 2 ++ prelude/cxx/user/cxx_toolchain_override.bzl | 5 +++++ 3 files changed, 11 insertions(+) diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl index 3c9f3fa01..09d4c8335 100644 --- a/prelude/cxx/cxx_toolchain.bzl +++ b/prelude/cxx/cxx_toolchain.bzl @@ -95,6 +95,8 @@ def cxx_toolchain_impl(ctx): independent_shlib_interface_linker_flags = ctx.attrs.shared_library_interface_flags, requires_archives = value_or(ctx.attrs.requires_archives, True), requires_objects = value_or(ctx.attrs.requires_objects, False), + sanitizer_runtime_dir = ctx.attrs.sanitizer_runtime_dir[DefaultInfo].default_outputs[0] if ctx.attrs.sanitizer_runtime_dir else None, + sanitizer_runtime_enabled = ctx.attrs.sanitizer_runtime_enabled, supports_distributed_thinlto = ctx.attrs.supports_distributed_thinlto, shared_dep_runtime_ld_flags = ctx.attrs.shared_dep_runtime_ld_flags, shared_library_name_default_prefix = _get_shared_library_name_default_prefix(ctx), @@ -194,6 +196,8 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): "public_headers_symlinks_enabled": attrs.bool(default = True), "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), "requires_objects": attrs.bool(default = False), + "sanitizer_runtime_dir": attrs.option(attrs.dep(), default = None), # Use `attrs.dep()` as it's not a tool, always propagate target platform + "sanitizer_runtime_enabled": attrs.bool(default = False), "shared_library_interface_mode": attrs.enum(ShlibInterfacesMode.values(), default = "disabled"), "shared_library_interface_producer": attrs.option(dep_type(providers = [RunInfo]), default = None), "split_debug_mode": attrs.enum(SplitDebugMode.values(), default = "none"), diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index 
1456cad11..89a2bef6e 100644 --- a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -43,6 +43,8 @@ LinkerInfo = provider( "mk_shlib_intf": provider_field(typing.Any, default = None), # "o" on Unix, "obj" on Windows "object_file_extension": provider_field(typing.Any, default = None), # str + "sanitizer_runtime_enabled": provider_field(bool, default = False), + "sanitizer_runtime_dir": provider_field([Artifact, None], default = None), "shlib_interfaces": provider_field(ShlibInterfacesMode), "shared_dep_runtime_ld_flags": provider_field(typing.Any, default = None), # "lib" on Linux/Mac/Android, "" on Windows. diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl index 1cc4cd726..eb8cbe5ce 100644 --- a/prelude/cxx/user/cxx_toolchain_override.bzl +++ b/prelude/cxx/user/cxx_toolchain_override.bzl @@ -75,6 +75,7 @@ def _cxx_toolchain_override(ctx): # linker flags should be changed as well. pdb_expected = linker_type == "windows" and pdb_expected shlib_interfaces = ShlibInterfacesMode(ctx.attrs.shared_library_interface_mode) if ctx.attrs.shared_library_interface_mode else None + sanitizer_runtime_dir = ctx.attrs.sanitizer_runtime_dir[DefaultInfo].default_outputs[0] if ctx.attrs.sanitizer_runtime_dir else None linker_info = LinkerInfo( archiver = _pick_bin(ctx.attrs.archiver, base_linker_info.archiver), archiver_type = base_linker_info.archiver_type, @@ -98,6 +99,8 @@ def _cxx_toolchain_override(ctx): requires_objects = base_linker_info.requires_objects, supports_distributed_thinlto = base_linker_info.supports_distributed_thinlto, independent_shlib_interface_linker_flags = base_linker_info.independent_shlib_interface_linker_flags, + sanitizer_runtime_dir = value_or(sanitizer_runtime_dir, base_linker_info.sanitizer_runtime_dir), + sanitizer_runtime_enabled = value_or(ctx.attrs.sanitizer_runtime_enabled, base_linker_info.sanitizer_runtime_enabled), shared_dep_runtime_ld_flags = [], 
shared_library_name_default_prefix = ctx.attrs.shared_library_name_default_prefix if ctx.attrs.shared_library_name_default_prefix != None else base_linker_info.shared_library_name_default_prefix, shared_library_name_format = ctx.attrs.shared_library_name_format if ctx.attrs.shared_library_name_format != None else base_linker_info.shared_library_name_format, @@ -206,6 +209,8 @@ def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): "platform_name": attrs.option(attrs.string(), default = None), "produce_interface_from_stub_shared_library": attrs.option(attrs.bool(), default = None), "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), + "sanitizer_runtime_dir": attrs.option(attrs.dep(), default = None), # Use `attrs.dep()` as it's not a tool, always propagate target platform + "sanitizer_runtime_enabled": attrs.bool(default = False), "shared_library_interface_mode": attrs.option(attrs.enum(ShlibInterfacesMode.values()), default = None), "shared_library_name_default_prefix": attrs.option(attrs.string(), default = None), "shared_library_name_format": attrs.option(attrs.string(), default = None), From 79cdf3fa0f9f0ba372b7e3f32d9e1e9eb975f1b2 Mon Sep 17 00:00:00 2001 From: Dan Zimmerman Date: Wed, 31 Jan 2024 10:10:53 -0800 Subject: [PATCH 0203/1133] Implement [swift-interface] subtarget on apple_library's Summary: We want to be able to generate `.swift` interface files for general `apple_library`s (particularly ones that don't include any `.swift` files). In order to achieve that we reuse the `swiftc` flag calculation, as if this library was a 'mixed' `apple_library` (i.e. it contain both `.swift` and non-`.swift` source files, so the current target is considered an 'underlying module') and pass those arguments (after translation) to `swift-ide-test`. The previous attempt at this ended up in double declaring the swift modulemap artifact. 
This approach avoids that shortcoming by reusing the flag calculation for both swift compilation artifacts & the `[swift-interface]` artifact. Reviewed By: maxovtsin Differential Revision: D53203807 fbshipit-source-id: d1dd1eb13abc72126a3affaecd4850b1fb7a61ed --- prelude/apple/apple_binary.bzl | 2 +- prelude/apple/apple_library.bzl | 3 +- prelude/apple/swift/swift_compilation.bzl | 63 +++++++++++++++++++---- 3 files changed, 55 insertions(+), 13 deletions(-) diff --git a/prelude/apple/apple_binary.bzl b/prelude/apple/apple_binary.bzl index f07121842..6daaf9087 100644 --- a/prelude/apple/apple_binary.bzl +++ b/prelude/apple/apple_binary.bzl @@ -82,7 +82,7 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: cxx_srcs, swift_srcs = _filter_swift_srcs(ctx) framework_search_path_flags = get_framework_search_path_flags(ctx) - swift_compile = compile_swift( + swift_compile, _ = compile_swift( ctx, swift_srcs, False, # parse_as_library diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index 530bfc336..c38f5a4cb 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -145,7 +145,7 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte modulemap_pre = None framework_search_paths_flags = get_framework_search_path_flags(ctx) - swift_compile = compile_swift( + swift_compile, swift_interface = compile_swift( ctx, swift_srcs, True, # parse_as_library @@ -254,6 +254,7 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte default_outputs = swift_compile.object_files if swift_compile else None, ), ], + "swift-interface": [swift_interface], "swift-output-file-map": [ DefaultInfo( default_output = swift_compile.output_map_artifact if swift_compile else None, diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 5dd03ffc5..7feaf6e2e 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ 
b/prelude/apple/swift/swift_compilation.bzl @@ -194,10 +194,7 @@ def compile_swift( exported_headers: list[CHeader], objc_modulemap_pp_info: [CPreprocessor, None], framework_search_paths_flags: cmd_args, - extra_search_paths_flags: list[ArgLike] = []) -> [SwiftCompilationOutput, None]: - if not srcs: - return None - + extra_search_paths_flags: list[ArgLike] = []) -> ([SwiftCompilationOutput, None], DefaultInfo): # If this target imports XCTest we need to pass the search path to its swiftmodule. framework_search_paths = cmd_args() framework_search_paths.add(_get_xctest_swiftmodule_search_path(ctx)) @@ -228,12 +225,7 @@ def compile_swift( else: compiled_underlying_pcm = None - toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info - module_name = get_module_name(ctx) - output_header = ctx.actions.declare_output(module_name + "-Swift.h") - - output_swiftmodule = ctx.actions.declare_output(module_name + SWIFTMODULE_EXTENSION) shared_flags = _get_shared_flags( ctx, @@ -246,6 +238,14 @@ def compile_swift( extra_search_paths_flags, ) shared_flags.add(framework_search_paths) + swift_interface_info = _create_swift_interface(ctx, shared_flags, module_name) + + if not srcs: + return (None, swift_interface_info) + + toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info + output_header = ctx.actions.declare_output(module_name + "-Swift.h") + output_swiftmodule = ctx.actions.declare_output(module_name + SWIFTMODULE_EXTENSION) if toolchain.can_toolchain_emit_obj_c_header_textually: _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, output_header) @@ -281,7 +281,7 @@ def compile_swift( pre = CPreprocessor(headers = [swift_header]) # Pass up the swiftmodule paths for this module and its exported_deps - return SwiftCompilationOutput( + return (SwiftCompilationOutput( output_map_artifact = object_output.output_map_artifact, object_files = object_output.object_files, object_format = toolchain.object_format, 
@@ -293,7 +293,7 @@ def compile_swift( swift_debug_info = extract_and_merge_swift_debug_infos(ctx, deps_providers, [output_swiftmodule]), clang_debug_info = extract_and_merge_clang_debug_infos(ctx, deps_providers), compilation_database = _create_compilation_database(ctx, srcs, object_output.argsfiles.absolute[SWIFT_EXTENSION]), - ) + ), swift_interface_info) # Swift headers are postprocessed to make them compatible with Objective-C # compilation that does not use -fmodules. This is a workaround for the bad @@ -834,6 +834,47 @@ def _create_compilation_database( return SwiftCompilationDatabase(db = cdb_artifact, other_outputs = argfile.cmd_form) +def _create_swift_interface(ctx: AnalysisContext, shared_flags: cmd_args, module_name: str) -> DefaultInfo: + swift_toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info + swift_ide_test_tool = swift_toolchain.swift_ide_test_tool + if not swift_ide_test_tool: + return DefaultInfo() + mk_swift_interface = swift_toolchain.mk_swift_interface + + identifier = module_name + ".interface.swift" + + argsfile, _ = ctx.actions.write( + identifier + ".argsfile", + shared_flags, + allow_args = True, + ) + interface_artifact = ctx.actions.declare_output(identifier) + + mk_swift_args = cmd_args( + mk_swift_interface, + "--swift-ide-test-tool", + swift_ide_test_tool, + "--module", + module_name, + "--out", + interface_artifact.as_output(), + "--", + cmd_args(cmd_args(argsfile, format = "@{}", delimiter = "")).hidden([shared_flags]), + ) + + ctx.actions.run( + mk_swift_args, + category = "mk_swift_interface", + identifier = identifier, + ) + + return DefaultInfo( + default_output = interface_artifact, + other_outputs = [ + argsfile, + ], + ) + def _exported_deps(ctx) -> list[Dependency]: if ctx.attrs.reexport_all_header_dependencies: return ctx.attrs.exported_deps + ctx.attrs.deps From 985c1669b03f6ea8f4981f8a5b5b671a43274392 Mon Sep 17 00:00:00 2001 From: Dan Zimmerman Date: Wed, 31 Jan 2024 10:10:53 -0800 
Subject: [PATCH 0204/1133] Move serialize_debugging_options warnings into compile_swift Summary: I had to revert the original implementation of the `swift-interface` subtarget, and put up a diff reimplementing it, while fixing the issues the original impl had. This is the same as D53098986, but now on top of the newer impl Reviewed By: maxovtsin Differential Revision: D53203809 fbshipit-source-id: 106fdc83c835acd344dbb648cc4fddb5baac4e45 --- prelude/apple/swift/swift_compilation.bzl | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 7feaf6e2e..f9570a5ce 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -244,6 +244,15 @@ def compile_swift( return (None, swift_interface_info) toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo].swift_toolchain_info + + if ctx.attrs.serialize_debugging_options: + if exported_headers: + # TODO(T99100029): We cannot use VFS overlays with Buck2, so we have to disable + # serializing debugging options for mixed libraries to debug successfully + warning("Mixed libraries cannot serialize debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) + elif not toolchain.prefix_serialized_debugging_options: + warning("The current toolchain does not support prefixing serialized debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) + output_header = ctx.actions.declare_output(module_name + "-Swift.h") output_swiftmodule = ctx.actions.declare_output(module_name + SWIFTMODULE_EXTENSION) @@ -505,19 +514,7 @@ def _get_shared_flags( else: cmd.add(["-enable-experimental-cxx-interop"]) - serialize_debugging_options = False - if ctx.attrs.serialize_debugging_options: - if objc_headers: - # TODO(T99100029): We cannot use VFS overlays with Buck2, so we have to disable - # serializing debugging 
options for mixed libraries to debug successfully - warning("Mixed libraries cannot serialize debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) - elif not toolchain.prefix_serialized_debugging_options: - warning("The current toolchain does not support prefixing serialized debugging options, disabling for module `{}` in rule `{}`".format(module_name, ctx.label)) - else: - # Apply the debug prefix map to Swift serialized debugging info. - # This will allow for debugging remotely built swiftmodule files. - serialize_debugging_options = True - + serialize_debugging_options = ctx.attrs.serialize_debugging_options and not objc_headers and toolchain.prefix_serialized_debugging_options if serialize_debugging_options: cmd.add([ "-Xfrontend", From e47091c1a22068596a893c904886baa42038b57a Mon Sep 17 00:00:00 2001 From: Jia Chen Date: Wed, 31 Jan 2024 18:35:59 -0800 Subject: [PATCH 0205/1133] Introducing new Python attributes for typing Reviewed By: zsol Differential Revision: D52986679 fbshipit-source-id: f1a2c2b1d4971b14ac58c75c04fc69e61f865de3 --- prelude/decls/python_rules.bzl | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/prelude/decls/python_rules.bzl b/prelude/decls/python_rules.bzl index e84a135a7..424f4f8c2 100644 --- a/prelude/decls/python_rules.bzl +++ b/prelude/decls/python_rules.bzl @@ -12,6 +12,16 @@ load(":python_common.bzl", "python_common") NativeLinkStrategy = ["separate", "merged"] +def _typing_arg(): + return { + "py_version_for_type_checking": attrs.option(attrs.string(), default = None, doc = """ + This option will force the type checker to perform checking under a specific version of Python interpreter. +"""), + "typing": attrs.bool(default = True, doc = """ + Determines whether to perform type checking on the given target. Default is True. 
+"""), + } + cxx_python_extension = prelude_rule( name = "cxx_python_extension", docs = """ @@ -270,7 +280,8 @@ python_binary = prelude_rule( "version_universe": attrs.option(attrs.string(), default = None), "zip_safe": attrs.option(attrs.bool(), default = None), } | - buck.allow_cache_upload_arg() + buck.allow_cache_upload_arg() | + _typing_arg() ), ) @@ -339,7 +350,8 @@ python_library = prelude_rule( "versioned_resources": attrs.option(attrs.versioned(attrs.named_set(attrs.source(), sorted = True)), default = None), "versioned_srcs": attrs.option(attrs.versioned(attrs.named_set(attrs.source(), sorted = True)), default = None), "zip_safe": attrs.option(attrs.bool(), default = None), - } + } | + _typing_arg() ), ) @@ -449,7 +461,8 @@ python_test = prelude_rule( "versioned_resources": attrs.option(attrs.versioned(attrs.named_set(attrs.source(), sorted = True)), default = None), "versioned_srcs": attrs.option(attrs.versioned(attrs.named_set(attrs.source(), sorted = True)), default = None), "zip_safe": attrs.option(attrs.bool(), default = None), - } + } | + _typing_arg() ), ) From 95d21591df39e76c052cf855acda7ef36db62d1d Mon Sep 17 00:00:00 2001 From: Jia Chen Date: Wed, 31 Jan 2024 21:29:27 -0800 Subject: [PATCH 0206/1133] Adding Pyre executable to Python toolchain and default it to a dummy binary Reviewed By: zsol Differential Revision: D53113187 fbshipit-source-id: a25f31a29ca8dc7c6aae8ae10067ce2db329e719 --- prelude/python/toolchain.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/python/toolchain.bzl b/prelude/python/toolchain.bzl index 075c8d835..039e717c4 100644 --- a/prelude/python/toolchain.bzl +++ b/prelude/python/toolchain.bzl @@ -68,6 +68,7 @@ PythonToolchainInfo = provider( "make_py_package_modules": provider_field(typing.Any, default = None), "pex_executor": provider_field(typing.Any, default = None), "pex_extension": provider_field(typing.Any, default = None), + "type_checker": provider_field(typing.Any, default = None), 
"emit_omnibus_metadata": provider_field(typing.Any, default = None), "fail_with_message": provider_field(typing.Any, default = None), "emit_dependency_metadata": provider_field(typing.Any, default = None), From 29a22637014f3d414f8054d2dcaeb5cc49d3e15d Mon Sep 17 00:00:00 2001 From: Jia Chen Date: Wed, 31 Jan 2024 21:29:27 -0800 Subject: [PATCH 0207/1133] Add a [pyre] subtarget to all python libraries and binaries Reviewed By: zsol Differential Revision: D53113186 fbshipit-source-id: 6b5c76ae5cb94a446e9ea36a887fb8945eeaf936 --- prelude/python/python_binary.bzl | 17 ++++++++++ prelude/python/python_library.bzl | 16 +++++++++ prelude/python/typing.bzl | 55 +++++++++++++++++++++++++++++++ 3 files changed, 88 insertions(+) create mode 100644 prelude/python/typing.bzl diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 129c7c245..daf87c34c 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -98,6 +98,7 @@ load( ) load(":source_db.bzl", "create_dbg_source_db", "create_python_source_db_info", "create_source_db", "create_source_db_no_deps") load(":toolchain.bzl", "NativeLinkStrategy", "PackageStyle", "PythonPlatformInfo", "PythonToolchainInfo", "get_package_style", "get_platform_attr") +load(":typing.bzl", "create_per_target_type_check") OmnibusMetadataInfo = provider( # @unsorted-dict-items @@ -399,6 +400,22 @@ def python_executable( "source-db-no-deps": [source_db_no_deps, create_python_source_db_info(library_info.manifests)], }) + # Type check + type_checker = python_toolchain.type_checker + if type_checker != None: + exe.sub_targets.update({ + "typecheck": [ + create_per_target_type_check( + ctx.actions, + type_checker, + src_manifest, + python_deps, + py_version = ctx.attrs.py_version_for_type_checking, + typing_enabled = ctx.attrs.typing, + ), + ], + }) + return exe def create_dep_report( diff --git a/prelude/python/python_library.bzl b/prelude/python/python_library.bzl index e4238f8ae..e95cdc277 
100644 --- a/prelude/python/python_library.bzl +++ b/prelude/python/python_library.bzl @@ -53,6 +53,7 @@ load(":needed_coverage.bzl", "PythonNeededCoverageInfo") load(":python.bzl", "PythonLibraryInfo", "PythonLibraryManifests", "PythonLibraryManifestsTSet") load(":source_db.bzl", "create_python_source_db_info", "create_source_db", "create_source_db_no_deps") load(":toolchain.bzl", "PythonToolchainInfo") +load(":typing.bzl", "create_per_target_type_check") def dest_prefix(label: Label, base_module: [None, str]) -> str: """ @@ -310,6 +311,21 @@ def python_library_impl(ctx: AnalysisContext) -> list[Provider]: # Source DBs. sub_targets["source-db"] = [create_source_db(ctx, src_type_manifest, deps)] sub_targets["source-db-no-deps"] = [create_source_db_no_deps(ctx, src_types), create_python_source_db_info(library_info.manifests)] + + # Type check + type_checker = python_toolchain.type_checker + if type_checker != None: + sub_targets["typecheck"] = [ + create_per_target_type_check( + ctx.actions, + type_checker, + src_type_manifest, + deps, + py_version = ctx.attrs.py_version_for_type_checking, + typing_enabled = ctx.attrs.typing, + ), + ] + providers.append(DefaultInfo(sub_targets = sub_targets)) # Create, augment and provide the linkable graph. diff --git a/prelude/python/typing.bzl b/prelude/python/typing.bzl new file mode 100644 index 000000000..2abca4c30 --- /dev/null +++ b/prelude/python/typing.bzl @@ -0,0 +1,55 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//python:python.bzl", "PythonLibraryInfo") +load( + ":manifest.bzl", + "ManifestInfo", # @unused Used as a type +) +load(":python.bzl", "PythonLibraryManifestsTSet") + +def create_per_target_type_check( + actions: AnalysisActions, + executable: RunInfo, + srcs: ManifestInfo | None, + deps: list[PythonLibraryInfo], + py_version: str | None, + typing_enabled: bool) -> DefaultInfo: + output_file_name = "type_check_result.json" + if not typing_enabled: + # Use empty dict to signal that no type checking was performed. + output_file = actions.write_json(output_file_name, {}) + else: + cmd = cmd_args(executable) + cmd.add(cmd_args("check")) + + # Source artifacts + source_manifests = [] + if srcs != None: + source_manifests = [srcs.manifest] + cmd.hidden([a for a, _ in srcs.artifacts]) + + # Dep artifacts + dep_manifest_tset = actions.tset(PythonLibraryManifestsTSet, children = [d.manifests for d in deps]) + dep_manifests = dep_manifest_tset.project_as_args("source_type_manifests") + cmd.hidden(dep_manifest_tset.project_as_args("source_type_artifacts")) + + # Create input configs + input_config = { + "dependencies": dep_manifests, + "py_version": py_version, + "sources": source_manifests, + } + + input_file = actions.write_json("type_check_config.json", input_config, with_inputs = True) + output_file = actions.declare_output(output_file_name) + cmd.add(cmd_args(input_file)) + cmd.add(cmd_args(output_file.as_output(), format = "--output={}")) + + actions.run(cmd, category = "type_check") + + return DefaultInfo(default_output = output_file) From 5b28ea2ed74ac1c7433aea0415b248a018838e84 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Thu, 1 Feb 2024 04:58:47 -0800 Subject: [PATCH 0208/1133] .so merge should respect can_be_asset of its constituents Summary: When building `fbsource//fbandroid/java/com/facebook/cameracore/litecamera/factory/whatsapp:ar`, one of the `.so` is deliberately specified as not being an asset: 
https://www.internalfb.com/code/fbsource/[eaa324a02142]/fbandroid/java/com/facebook/cameracore/litecamera/factory/whatsapp/build_defs.bzl?lines=107 In `buck1`, this is packaged into the `.aar` as `jni/arm64-v8a/libar-bundle1.so`, but in `buck2` it was put into the `.aar` as `assets/lib/arm64-v8a/libar-bundle1.so`, because the merged `.so` didn't respect `can_be_asset` on its constituents. This fixes that. Reviewed By: mzlee Differential Revision: D53227646 fbshipit-source-id: 4a1104f59ab0fc86dfb511d4366976abe30ff496 --- prelude/android/android_binary_native_library_rules.bzl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index f530cfa33..5dbaf94c3 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -1031,12 +1031,16 @@ def _get_merged_linkables( for group in post_order_traversal(link_groups_graph): group_data = link_groups[group] is_actually_merged = len(group_data.constituents) > 1 + can_be_asset = True + for target in group_data.constituents: + if not linkable_nodes[target].can_be_asset: + can_be_asset = False + break if not is_actually_merged: target = group_data.constituents[0] node_data = linkable_nodes[target] - can_be_asset = node_data.can_be_asset if node_data.preferred_linkage == Linkage("static") or not _has_linkable(node_data): debug_info.unmerged_statics.append(target) From 322d9ec95a2a4032cf6339521b1bf4f21bf66f1d Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Thu, 1 Feb 2024 06:05:43 -0800 Subject: [PATCH 0209/1133] Add compile_shared_transition MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: Add compile-shared transition to propagate -shared flag to go_library depending on top-level target - go_binary -> shared=False - go_test -> shared=False - go_exported_library -> 
shared=True Currently we produce actions for both shared/non-shared use cases https://fburl.com/code/243xqijs This approach isn’t scalable when we have many flags For example if we have 3 binary flags, we have 8 combinations of them - -shared - -cgo_enabled - -race In this diff we're just adding new attributes, but not using them Reviewed By: awalterschulze Differential Revision: D52881364 fbshipit-source-id: e03bdab377f648b321c178d5ef873c09b5bf3df0 --- prelude/decls/go_rules.bzl | 6 -- prelude/go/constraints/BUCK.v2 | 17 +++++ prelude/go/transitions/cgo_enabled.bzl | 39 ----------- prelude/go/transitions/defs.bzl | 97 ++++++++++++++++++++++++++ prelude/rules_impl.bzl | 11 +-- 5 files changed, 121 insertions(+), 49 deletions(-) delete mode 100644 prelude/go/transitions/cgo_enabled.bzl create mode 100644 prelude/go/transitions/defs.bzl diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index 525f0b9b7..973c35bf3 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -325,12 +325,6 @@ go_library = prelude_rule( "exported_deps": attrs.list(attrs.dep(), default = []), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), - "_cgo_enabled": attrs.default_only(attrs.option(attrs.bool(), default = select({ - "DEFAULT": None, - "prelude//go/constraints:cgo_enabled_auto": None, - "prelude//go/constraints:cgo_enabled_false": False, - "prelude//go/constraints:cgo_enabled_true": True, - }))), } ), ) diff --git a/prelude/go/constraints/BUCK.v2 b/prelude/go/constraints/BUCK.v2 index 0fbffe4c8..a4b034fe7 100644 --- a/prelude/go/constraints/BUCK.v2 +++ b/prelude/go/constraints/BUCK.v2 @@ -20,3 +20,20 @@ constraint_value( constraint_setting = ":cgo_enabled", visibility = ["PUBLIC"], ) + +constraint_setting( + name = "compile_shared", + visibility = ["PUBLIC"], +) + +constraint_value( + name = "compile_shared_false", + constraint_setting = ":compile_shared", + visibility = ["PUBLIC"], +) + 
+constraint_value( + name = "compile_shared_true", + constraint_setting = ":compile_shared", + visibility = ["PUBLIC"], +) diff --git a/prelude/go/transitions/cgo_enabled.bzl b/prelude/go/transitions/cgo_enabled.bzl deleted file mode 100644 index c1adf3b10..000000000 --- a/prelude/go/transitions/cgo_enabled.bzl +++ /dev/null @@ -1,39 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -def _cgo_enabled_transition(platform, refs, attrs): - if attrs.cgo_enabled == None: - cgo_enabled_ref = refs.cgo_enabled_auto - elif attrs.cgo_enabled == True: - cgo_enabled_ref = refs.cgo_enabled_true - else: - cgo_enabled_ref = refs.cgo_enabled_false - - cgo_enabled_value = cgo_enabled_ref[ConstraintValueInfo] - constraints = platform.configuration.constraints - constraints[cgo_enabled_value.setting.label] = cgo_enabled_value - - new_cfg = ConfigurationInfo( - constraints = constraints, - values = platform.configuration.values, - ) - - return PlatformInfo( - label = platform.label, - configuration = new_cfg, - ) - -cgo_enabled_transition = transition( - impl = _cgo_enabled_transition, - refs = { - "cgo_enabled": "prelude//go/constraints:cgo_enabled", - "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", - "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", - "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", - }, - attrs = ["cgo_enabled"], -) diff --git a/prelude/go/transitions/defs.bzl b/prelude/go/transitions/defs.bzl new file mode 100644 index 000000000..fb6082f76 --- /dev/null +++ b/prelude/go/transitions/defs.bzl @@ -0,0 +1,97 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +def _cgo_enabled_transition(platform, refs, attrs): + if attrs.cgo_enabled == None: + cgo_enabled_ref = refs.cgo_enabled_auto + elif attrs.cgo_enabled == True: + cgo_enabled_ref = refs.cgo_enabled_true + else: + cgo_enabled_ref = refs.cgo_enabled_false + + cgo_enabled_value = cgo_enabled_ref[ConstraintValueInfo] + constraints = platform.configuration.constraints + constraints[cgo_enabled_value.setting.label] = cgo_enabled_value + + new_cfg = ConfigurationInfo( + constraints = constraints, + values = platform.configuration.values, + ) + + return PlatformInfo( + label = platform.label, + configuration = new_cfg, + ) + +def _compile_shared_transition(platform, refs, _): + compile_shared_value = refs.compile_shared_value[ConstraintValueInfo] + constraints = platform.configuration.constraints + constraints[compile_shared_value.setting.label] = compile_shared_value + + new_cfg = ConfigurationInfo( + constraints = constraints, + values = platform.configuration.values, + ) + + return PlatformInfo( + label = platform.label, + configuration = new_cfg, + ) + +def _chain_transitions(transitions): + def tr(platform, refs, attrs): + for t in transitions: + platform = t(platform, refs, attrs) + return platform + + return tr + +go_binary_transition = transition( + impl = _chain_transitions([_cgo_enabled_transition, _compile_shared_transition]), + refs = { + "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", + "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", + "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", + "compile_shared_value": "prelude//go/constraints:compile_shared_false", + }, + attrs = ["cgo_enabled"], +) + +go_test_transition = transition( + impl = 
_chain_transitions([_cgo_enabled_transition, _compile_shared_transition]), + refs = { + "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", + "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", + "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", + "compile_shared_value": "prelude//go/constraints:compile_shared_false", + }, + attrs = ["cgo_enabled"], +) + +go_exported_library_transition = transition( + impl = _chain_transitions([_cgo_enabled_transition, _compile_shared_transition]), + refs = { + "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", + "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", + "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", + "compile_shared_value": "prelude//go/constraints:compile_shared_true", + }, + attrs = ["cgo_enabled"], +) + +cgo_enabled_attr = attrs.default_only(attrs.option(attrs.bool(), default = select({ + "DEFAULT": None, + "prelude//go/constraints:cgo_enabled_auto": None, + "prelude//go/constraints:cgo_enabled_false": False, + "prelude//go/constraints:cgo_enabled_true": True, +}))) + +compile_shared_attr = attrs.default_only(attrs.bool(default = select({ + "DEFAULT": False, + "prelude//go/constraints:compile_shared_false": False, + "prelude//go/constraints:compile_shared_true": True, +}))) diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index d7b97b47d..584d5c2eb 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -27,7 +27,7 @@ load("@prelude//go:go_library.bzl", "go_library_impl") load("@prelude//go:go_test.bzl", "go_test_impl") load("@prelude//haskell:compile.bzl", "HaskellLibraryProvider") load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl", "haskell_toolchain_library_impl") -load("@prelude//go/transitions:cgo_enabled.bzl", "cgo_enabled_transition") +load("@prelude//go/transitions:defs.bzl", "cgo_enabled_attr", "compile_shared_attr", 
"go_binary_transition", "go_exported_library_transition", "go_test_transition") load("@prelude//haskell:haskell_ghci.bzl", "haskell_ghci_impl") load("@prelude//haskell:haskell_haddock.bzl", "haskell_haddock_impl") load("@prelude//haskell:haskell_ide.bzl", "haskell_ide_impl") @@ -371,6 +371,7 @@ inlined_extra_attributes = { # go "cgo_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), + "_compile_shared": compile_shared_attr, "_cxx_toolchain": toolchains_common.cxx(), "_exec_os_type": buck.exec_os_type_arg(), "_go_toolchain": toolchains_common.go(), @@ -427,6 +428,8 @@ inlined_extra_attributes = { }, "go_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), + "_cgo_enabled": cgo_enabled_attr, + "_compile_shared": compile_shared_attr, "_go_toolchain": toolchains_common.go(), }, "go_test": { @@ -589,9 +592,9 @@ extra_attributes = struct(**all_extra_attributes) transitions = { "android_binary": constraint_overrides_transition, "apple_resource": apple_resource_transition, - "go_binary": cgo_enabled_transition, - "go_exported_library": cgo_enabled_transition, - "go_test": cgo_enabled_transition, + "go_binary": go_binary_transition, + "go_exported_library": go_exported_library_transition, + "go_test": go_test_transition, "python_binary": constraint_overrides_transition, "python_test": constraint_overrides_transition, } From 2acb22b814fdc9e52d8568a8da9584b880490f05 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Thu, 1 Feb 2024 06:05:43 -0800 Subject: [PATCH 0210/1133] Use derived flag _compile_shared instead of extra actions Summary: Use new _compile_shared attribute instead of pre-defined static/shared_pkg actions Reviewed By: leoleovich Differential Revision: D52884774 fbshipit-source-id: 3cbe63b3480bf20af8df0f3e2029a2fee80c19fe --- prelude/go/cgo_library.bzl | 23 ++++++----------------- prelude/go/compile.bzl | 5 +---- prelude/go/go_library.bzl | 27 +++++++-------------------- 
prelude/go/go_test.bzl | 4 ++-- prelude/go/link.bzl | 6 +----- prelude/go/packages.bzl | 22 +++++++++------------- prelude/go/toolchain.bzl | 4 ---- prelude/go/transitions/defs.bzl | 1 - 8 files changed, 26 insertions(+), 66 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 3a146e622..ba0721c53 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -242,31 +242,20 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: all_srcs.add(get_filtered_srcs(ctx, ctx.attrs.go_srcs)) # Build Go library. - static_pkg = compile( + compiled_pkg = compile( ctx, pkg_name, all_srcs, cgo_enabled = True, deps = ctx.attrs.deps + ctx.attrs.exported_deps, - shared = False, + shared = ctx.attrs._compile_shared, ) - shared_pkg = compile( - ctx, - pkg_name, - all_srcs, - cgo_enabled = True, - deps = ctx.attrs.deps + ctx.attrs.exported_deps, - shared = True, - ) - coverage_shared = {mode: _compile_with_coverage(ctx, pkg_name, all_srcs, mode, True) for mode in GoCoverageMode} - coverage_static = {mode: _compile_with_coverage(ctx, pkg_name, all_srcs, mode, False) for mode in GoCoverageMode} + pkg_with_coverage = {mode: _compile_with_coverage(ctx, pkg_name, all_srcs, mode) for mode in GoCoverageMode} pkgs = { pkg_name: GoPkg( - shared = shared_pkg, - static = static_pkg, cgo = True, - coverage_shared = coverage_shared, - coverage_static = coverage_static, + pkg = compiled_pkg, + pkg_with_coverage = pkg_with_coverage, ), } @@ -275,7 +264,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: # to work with cgo. And when nearly every FB service client is cgo, # we need to support it well. 
return [ - DefaultInfo(default_output = static_pkg, other_outputs = go_srcs), + DefaultInfo(default_output = compiled_pkg, other_outputs = go_srcs), GoPkgCompileInfo(pkgs = merge_pkgs([ pkgs, get_inherited_compile_pkgs(ctx.attrs.exported_deps), diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index 3c9c46cf6..89af4d8ca 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -111,14 +111,11 @@ def _compile_cmd( # Add shared/static flags. if shared: cmd.add("-shared") - cmd.add(go_toolchain.compiler_flags_shared) - else: - cmd.add(go_toolchain.compiler_flags_static) # Add Go pkgs inherited from deps to compiler search path. all_pkgs = merge_pkgs([ pkgs, - pkg_artifacts(get_inherited_compile_pkgs(deps), shared = shared, coverage_mode = coverage_mode), + pkg_artifacts(get_inherited_compile_pkgs(deps), coverage_mode = coverage_mode), stdlib_pkg_artifacts(go_toolchain, shared = shared, non_cgo = not cgo_enabled), ]) diff --git a/prelude/go/go_library.bzl b/prelude/go/go_library.bzl index 04515caad..f417e4dfd 100644 --- a/prelude/go/go_library.bzl +++ b/prelude/go/go_library.bzl @@ -57,8 +57,9 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. 
srcs = get_filtered_srcs(ctx, ctx.attrs.srcs, force_disable_cgo = True) cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs._cgo_enabled) + shared = ctx.attrs._compile_shared - static_pkg = compile( + compiled_pkg = compile( ctx, pkg_name, srcs = srcs, @@ -66,29 +67,15 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: deps = ctx.attrs.deps + ctx.attrs.exported_deps, compile_flags = ctx.attrs.compiler_flags, assemble_flags = ctx.attrs.assembler_flags, - shared = False, + shared = shared, ) - shared_pkg = compile( - ctx, - pkg_name, - srcs = srcs, - cgo_enabled = cgo_enabled, - deps = ctx.attrs.deps + ctx.attrs.exported_deps, - compile_flags = ctx.attrs.compiler_flags, - assemble_flags = ctx.attrs.assembler_flags, - shared = True, - ) - - coverage_shared = {mode: _compile_with_coverage(ctx, pkg_name, srcs, mode, cgo_enabled, True) for mode in GoCoverageMode} - coverage_static = {mode: _compile_with_coverage(ctx, pkg_name, srcs, mode, cgo_enabled, False) for mode in GoCoverageMode} + pkg_with_coverage = {mode: _compile_with_coverage(ctx, pkg_name, srcs, mode, cgo_enabled, shared) for mode in GoCoverageMode} - default_output = static_pkg + default_output = compiled_pkg pkgs[pkg_name] = GoPkg( - shared = shared_pkg, - static = static_pkg, - coverage_shared = coverage_shared, - coverage_static = coverage_static, + pkg = compiled_pkg, + pkg_with_coverage = pkg_with_coverage, ) return [ diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index cd15e0458..5f0ece710 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -83,8 +83,8 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: # Get all packages that are linked to the test (i.e. 
the entire dependency tree) for name, pkg in get_inherited_compile_pkgs(deps).items(): if ctx.label != None and is_subpackage_of(name, ctx.label.package): - artifact = pkg_artifact(pkg, False, coverage_mode) - vars = pkg_coverage_vars("", pkg, False, coverage_mode) + artifact = pkg_artifact(pkg, coverage_mode) + vars = pkg_coverage_vars("", pkg, coverage_mode) coverage_vars[name] = vars pkgs[name] = artifact diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 9427a73e1..b9985eaaf 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -129,10 +129,6 @@ def link( cmd = get_toolchain_cmd_args(go_toolchain) cmd.add(go_toolchain.linker) - if shared: - cmd.add(go_toolchain.linker_flags_shared) - else: - cmd.add(go_toolchain.linker_flags_static) cmd.add("-o", output.as_output()) cmd.add("-buildmode=" + _build_mode_param(build_mode)) @@ -141,7 +137,7 @@ def link( # Add inherited Go pkgs to library search path. all_pkgs = merge_pkgs([ pkgs, - pkg_artifacts(get_inherited_link_pkgs(deps), shared = shared, coverage_mode = coverage_mode), + pkg_artifacts(get_inherited_link_pkgs(deps), coverage_mode = coverage_mode), stdlib_pkg_artifacts(go_toolchain, shared = shared, non_cgo = not cgo_enabled), ]) diff --git a/prelude/go/packages.bzl b/prelude/go/packages.bzl index b8d40a1b6..51878c08d 100644 --- a/prelude/go/packages.bzl +++ b/prelude/go/packages.bzl @@ -11,13 +11,9 @@ load("@prelude//utils:utils.bzl", "value_or") load(":coverage.bzl", "GoCoverageMode") GoPkg = record( - # Built w/ `-shared`. - shared = field(Artifact), - # Built w/o `-shared`. 
- static = field(Artifact), cgo = field(bool, default = False), - coverage_static = field(dict[GoCoverageMode, (Artifact, cmd_args)]), - coverage_shared = field(dict[GoCoverageMode, (Artifact, cmd_args)]), + pkg = field(Artifact), + pkg_with_coverage = field(dict[GoCoverageMode, (Artifact, cmd_args)]), ) def go_attr_pkg_name(ctx: AnalysisContext) -> str: @@ -42,26 +38,26 @@ def merge_pkgs(pkgss: list[dict[str, typing.Any]]) -> dict[str, typing.Any]: return all_pkgs -def pkg_artifact(pkg: GoPkg, shared: bool, coverage_mode: [GoCoverageMode, None]) -> Artifact: +def pkg_artifact(pkg: GoPkg, coverage_mode: [GoCoverageMode, None]) -> Artifact: if coverage_mode: - artifact = pkg.coverage_shared if shared else pkg.coverage_static + artifact = pkg.pkg_with_coverage return artifact[coverage_mode][0] - return pkg.shared if shared else pkg.static + return pkg.pkg -def pkg_coverage_vars(name: str, pkg: GoPkg, shared: bool, coverage_mode: [GoCoverageMode, None]) -> [cmd_args, None]: +def pkg_coverage_vars(name: str, pkg: GoPkg, coverage_mode: [GoCoverageMode, None]) -> [cmd_args, None]: if coverage_mode: - artifact = pkg.coverage_shared if shared else pkg.coverage_static + artifact = pkg.pkg_with_coverage if coverage_mode not in artifact: fail("coverage variables don't exist for {}".format(name)) return artifact[coverage_mode][1] fail("coverage variables were requested but coverage_mode is None") -def pkg_artifacts(pkgs: dict[str, GoPkg], shared: bool, coverage_mode: [GoCoverageMode, None] = None) -> dict[str, Artifact]: +def pkg_artifacts(pkgs: dict[str, GoPkg], coverage_mode: [GoCoverageMode, None] = None) -> dict[str, Artifact]: """ Return a map package name to a `shared` or `static` package artifact. 
""" return { - name: pkg_artifact(pkg, shared, coverage_mode) + name: pkg_artifact(pkg, coverage_mode) for name, pkg in pkgs.items() } diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index dd888bcff..04570f619 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -13,8 +13,6 @@ GoToolchainInfo = provider( "cgo_wrapper": provider_field(typing.Any, default = None), "compile_wrapper": provider_field(typing.Any, default = None), "compiler": provider_field(typing.Any, default = None), - "compiler_flags_shared": provider_field(typing.Any, default = None), - "compiler_flags_static": provider_field(typing.Any, default = None), "cover": provider_field(typing.Any, default = None), "cover_srcs": provider_field(typing.Any, default = None), "cxx_toolchain_for_linking": provider_field(typing.Any, default = None), @@ -26,8 +24,6 @@ GoToolchainInfo = provider( "filter_srcs": provider_field(typing.Any, default = None), "go": provider_field(typing.Any, default = None), "linker": provider_field(typing.Any, default = None), - "linker_flags_shared": provider_field(typing.Any, default = None), - "linker_flags_static": provider_field(typing.Any, default = None), "packer": provider_field(typing.Any, default = None), "prebuilt_stdlib": provider_field(typing.Any, default = None), "prebuilt_stdlib_shared": provider_field(typing.Any, default = None), diff --git a/prelude/go/transitions/defs.bzl b/prelude/go/transitions/defs.bzl index fb6082f76..9ab848911 100644 --- a/prelude/go/transitions/defs.bzl +++ b/prelude/go/transitions/defs.bzl @@ -31,7 +31,6 @@ def _compile_shared_transition(platform, refs, _): compile_shared_value = refs.compile_shared_value[ConstraintValueInfo] constraints = platform.configuration.constraints constraints[compile_shared_value.setting.label] = compile_shared_value - new_cfg = ConfigurationInfo( constraints = constraints, values = platform.configuration.values, From e1e0ce454d57d9f97d6cbe202ede7dc565e0a4b1 Mon Sep 17 00:00:00 2001 
From: Michael Podtserkovskii Date: Thu, 1 Feb 2024 06:05:43 -0800 Subject: [PATCH 0211/1133] Split command flags from commands Summary: We need to store these flags separately to pass it into `go build` when building standard library Reviewed By: leoleovich Differential Revision: D52907326 fbshipit-source-id: dc6eb68e3158f28a8e28f7406700bd0be55f2653 --- prelude/go/compile.bzl | 2 ++ prelude/go/link.bzl | 1 + prelude/go/toolchain.bzl | 3 +++ 3 files changed, 6 insertions(+) diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index 89af4d8ca..44ffffd61 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -82,6 +82,7 @@ def _assemble_cmd( go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] cmd = cmd_args() cmd.add(go_toolchain.assembler) + cmd.add(go_toolchain.assembler_flags) cmd.add(flags) cmd.add("-p", pkg_name) if shared: @@ -102,6 +103,7 @@ def _compile_cmd( cmd = cmd_args() cmd.add(go_toolchain.compiler) + cmd.add(go_toolchain.compiler_flags) cmd.add("-p", pkg_name) cmd.add("-pack") cmd.add("-nolocalimports") diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index b9985eaaf..f52d3cdab 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -129,6 +129,7 @@ def link( cmd = get_toolchain_cmd_args(go_toolchain) cmd.add(go_toolchain.linker) + cmd.add(go_toolchain.linker_flags) cmd.add("-o", output.as_output()) cmd.add("-buildmode=" + _build_mode_param(build_mode)) diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index 04570f619..14c45242a 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -9,10 +9,12 @@ GoToolchainInfo = provider( # @unsorted-dict-items fields = { "assembler": provider_field(typing.Any, default = None), + "assembler_flags": provider_field(typing.Any, default = None), "cgo": provider_field(typing.Any, default = None), "cgo_wrapper": provider_field(typing.Any, default = None), "compile_wrapper": provider_field(typing.Any, default = None), "compiler": provider_field(typing.Any, 
default = None), + "compiler_flags": provider_field(typing.Any, default = None), "cover": provider_field(typing.Any, default = None), "cover_srcs": provider_field(typing.Any, default = None), "cxx_toolchain_for_linking": provider_field(typing.Any, default = None), @@ -24,6 +26,7 @@ GoToolchainInfo = provider( "filter_srcs": provider_field(typing.Any, default = None), "go": provider_field(typing.Any, default = None), "linker": provider_field(typing.Any, default = None), + "linker_flags": provider_field(typing.Any, default = None), "packer": provider_field(typing.Any, default = None), "prebuilt_stdlib": provider_field(typing.Any, default = None), "prebuilt_stdlib_shared": provider_field(typing.Any, default = None), From 88aff245da2f6102f749e507c2bc32152d5d8418 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Thu, 1 Feb 2024 06:05:43 -0800 Subject: [PATCH 0212/1133] Extract hardcoded cflags outside of prelude Summary: Extract hardcoded fbcode-specific flag from OSS buck2 toolchain Reviewed By: leoleovich Differential Revision: D52914562 fbshipit-source-id: 208cdfc420be7ad872745764eea87c092ad62813 --- prelude/go/cgo_library.bzl | 5 +---- prelude/go/toolchain.bzl | 1 + 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index ba0721c53..114247101 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -118,10 +118,7 @@ def _cgo( cxx_cmd.add(c_compiler.compiler_flags) cxx_cmd.add(pre_args) cxx_cmd.add(pre_include_dirs) - - # Passing the same value as go-build, because our -g flags break cgo - # in some buck modes - cxx_cmd.add("-g") + cxx_cmd.add(go_toolchain.c_compiler_flags) # Wrap the C/C++ command in a wrapper script to avoid arg length limits. 
is_win = ctx.attrs._exec_os_type[OsLookup].platform == "windows" diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index 14c45242a..a7f119e09 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -10,6 +10,7 @@ GoToolchainInfo = provider( fields = { "assembler": provider_field(typing.Any, default = None), "assembler_flags": provider_field(typing.Any, default = None), + "c_compiler_flags": provider_field(typing.Any, default = None), "cgo": provider_field(typing.Any, default = None), "cgo_wrapper": provider_field(typing.Any, default = None), "compile_wrapper": provider_field(typing.Any, default = None), From e42c87450261662e9c817525c0c1e5f0dc819523 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Thu, 1 Feb 2024 06:05:43 -0800 Subject: [PATCH 0213/1133] Move buck's toolchain ldflags to -extldflags to override any flags baked into object files Summary: Using -extldflags adds the flags to the end of command line and overrides previous. This fixes the issue when we build stdlib with go-build using buck and go-build bakes -isysroot paths inside object file.
Reviewed By: milend Differential Revision: D53229340 fbshipit-source-id: 29b49ecd302fc01813c6894971f0f55c1f33266f --- prelude/go/link.bzl | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index f52d3cdab..6bd81ebc5 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -190,8 +190,6 @@ def link( cxx_link_cmd = cmd_args( [ cxx_toolchain.linker_info.linker, - cxx_toolchain.linker_info.linker_flags, - go_toolchain.external_linker_flags, ext_link_args, "%*" if is_win else "\"$@\"", ], @@ -204,6 +202,11 @@ def link( is_executable = True, ) cmd.add("-extld", linker_wrapper).hidden(cxx_link_cmd) + cmd.add("-extldflags", cmd_args( + cxx_toolchain.linker_info.linker_flags, + go_toolchain.external_linker_flags, + delimiter = " ", + )) cmd.add(linker_flags) From cfc65e4cbccd7df76921898c05a7f2935307f31f Mon Sep 17 00:00:00 2001 From: Balaji S Date: Thu, 1 Feb 2024 07:50:12 -0800 Subject: [PATCH 0214/1133] Format timeout messages correctly + have a proper fallback for other error tuples Summary: * Here are the possible result tuples for a test run - https://www.internalfb.com/code/whatsapp-server/buck2/prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl?lines=354 * Note that we don't handle timetraps and {error, ...} from this when printing to test shell * So we add function clauses to handle both these cases and print a proper error message. 
Reviewed By: jcpetruzza Differential Revision: D53184258 fbshipit-source-id: 5c99ca0062aaeb21045e71b20f28ffceb2be5413 --- .../erlang/common_test/test_exec/src/ct_daemon_printer.erl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl index dd81f8cbd..dc8f945d9 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl @@ -55,6 +55,10 @@ print_result(Name, {error, {_TestId, {'ct_daemon_core$sentinel_crash', Info}}}) io:format("~ts ~ts~n", [?CROSS_MARK, Name]), io:format("Test process received EXIT signal with reason: ~p~n", [Info]), fail; +print_result(Name, {error, {_TestId, {timetrap, TimeoutValue}}}) -> + io:format("~ts ~ts~n", [?CROSS_MARK, Name]), + io:format("Test timed out after ~p ms~n", [TimeoutValue]), + fail; print_result(Name, Unstructured) -> io:format("~ts ~ts~n", [?CROSS_MARK, Name]), io:format("unable to format failure reason, please report.~n"), From 4a4f7ede16b455e95fcac4a2ba23b3430ff6eff1 Mon Sep 17 00:00:00 2001 From: Shayne Fletcher Date: Thu, 1 Feb 2024 12:08:13 -0800 Subject: [PATCH 0215/1133] manifold_client_wrapper: add preferred_linkage="static" Summary: ~~excluding dependencies of omnibus roots because they don't have preferred linkage "static" results in rust cxx artifacts not making it into 'libomnibus.so' in the next diff up the stack.~~ ~~it's not clear why that restriction has existed before now. maybe andrewjcg or ndmitchell know or maybe it was just copied over from buck1 (and the rationale not known)?~~ `manifold_client_wrapper` is a C++ dependency of `manifold_client` which is ultimately a dependency of `container_manifest_lib_py`. 
since `manifold_client_wrapper` is not marked static, it ends up in the 'libomnibus.so' produced for the test `fbcode//fblearner/flow/projects/modelstore/tests:pyper_publish_integration_test` of which `container_manifest_lib_py` is an omnibus root. `manifold_client` and `manifold_client-bridge` are preferred linkage static and are therefore linked directly into `container_manifest_lib_py` and excluded from this 'libomnibus.so'. the result of all this is link errors in 'libomnibus.so'. marking `manifold_client_wrapper` as preferred linkage static ensures now that it is linked directly into `container_manifest_py` (as `manifold_client` and `manifold_client-bridge` are) and no longer considered for linking into 'libomnibus.so'. Reviewed By: dtolnay Differential Revision: D53275848 fbshipit-source-id: f68f916b30e07ebbbfc6eb8ed80d22a6ae6cd825 --- prelude/cxx/omnibus.bzl | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/prelude/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl index 9e4cd06fb..b26c06f28 100644 --- a/prelude/cxx/omnibus.bzl +++ b/prelude/cxx/omnibus.bzl @@ -587,11 +587,18 @@ def _build_omnibus_spec( if label not in excluded } - # Find the deps of the root nodes. These form the roots of the nodes - # included in the omnibus link. + # Find the deps of the root nodes that should be linked into + # 'libomnibus.so'. + # + # If a dep indicates preferred linkage static, it is linked directly into + # this omnimbus root and therefore not added to `first_order_root_deps` and + # thereby will not be linked into 'libomnibus.so'. If the dep does not + # indicate preferred linkage static, then it is added to + # `first_order_root_deps` and thereby will be linked into 'libomnibus.so'. first_order_root_deps = [] for label in _link_deps(graph.nodes, flatten([r.deps for r in roots.values()]), get_cxx_toolchain_info(ctx).pic_behavior): - # We only consider deps which aren't *only* statically linked. 
+ # Per the comment above, only consider deps which aren't *only* + # statically linked. if _is_static_only(graph.nodes[label]): continue From 0868a4addb010f3cddf496be1843a31f0e11f8ee Mon Sep 17 00:00:00 2001 From: Lauri Heino Date: Thu, 1 Feb 2024 13:48:30 -0800 Subject: [PATCH 0216/1133] Add option to pass test class names as a param Summary: This change allows you to build test sources in a separate rule (e.g. `java_library`) and pass it `robolectric_test`, `java_test` and `kotlin_test`. More specifically: - You can leave `srcs` of the test target empty. - Since test rule uses `srcs` also for listing test classes, a list of classes needs to be passed separately (`test_class_names_file`). Reviewed By: IanChilds Differential Revision: D53310138 fbshipit-source-id: b5a263fd1f565b957e7ce8589505a695c37e9c6d --- prelude/android/android.bzl | 1 + prelude/java/java.bzl | 1 + prelude/java/java_test.bzl | 28 +++++++++++++++------------- prelude/kotlin/kotlin.bzl | 1 + 4 files changed, 18 insertions(+), 13 deletions(-) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index ab1eb4259..184a93470 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -202,6 +202,7 @@ extra_attributes = { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), "resources_root": attrs.option(attrs.string(), default = None), "robolectric_runtime_dependencies": attrs.list(attrs.source(), default = []), + "test_class_names_file": attrs.option(attrs.source(), default = None), "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None), "_android_toolchain": toolchains_common.android(), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), diff --git a/prelude/java/java.bzl b/prelude/java/java.bzl index 9a35285df..92137a352 100644 --- a/prelude/java/java.bzl +++ b/prelude/java/java.bzl @@ -80,6 +80,7 @@ extra_attributes = { "abi_generation_mode": 
attrs.option(attrs.enum(AbiGenerationMode), default = None), "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + "test_class_names_file": attrs.option(attrs.source(), default = None), "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_exec_os_type": buck.exec_os_type_arg(), diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index 1077416f7..254bc6c4b 100644 --- a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -112,19 +112,21 @@ def build_junit_test( if ctx.attrs.test_case_timeout_ms: cmd.extend(["--default_test_timeout", str(ctx.attrs.test_case_timeout_ms)]) - expect(tests_java_library_info.library_output != None, "Built test library has no output, likely due to missing srcs") - - class_names = ctx.actions.declare_output("class_names") - list_class_names_cmd = cmd_args([ - java_test_toolchain.list_class_names[RunInfo], - "--jar", - tests_java_library_info.library_output.full_library, - "--sources", - ctx.actions.write("sources.txt", ctx.attrs.srcs), - "--output", - class_names.as_output(), - ]).hidden(ctx.attrs.srcs) - ctx.actions.run(list_class_names_cmd, category = "list_class_names") + if ctx.attrs.test_class_names_file: + class_names = ctx.attrs.test_class_names_file + else: + expect(tests_java_library_info.library_output != None, "Built test library has no output, likely due to missing srcs") + class_names = ctx.actions.declare_output("class_names") + list_class_names_cmd = cmd_args([ + java_test_toolchain.list_class_names[RunInfo], + "--jar", + tests_java_library_info.library_output.full_library, + "--sources", + ctx.actions.write("sources.txt", ctx.attrs.srcs), + "--output", + class_names.as_output(), + ]).hidden(ctx.attrs.srcs) + ctx.actions.run(list_class_names_cmd, category = 
"list_class_names") cmd.extend(["--test-class-names-file", class_names]) diff --git a/prelude/kotlin/kotlin.bzl b/prelude/kotlin/kotlin.bzl index 4c9dd097e..6f60384e9 100644 --- a/prelude/kotlin/kotlin.bzl +++ b/prelude/kotlin/kotlin.bzl @@ -35,6 +35,7 @@ extra_attributes = { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + "test_class_names_file": attrs.option(attrs.source(), default = None), "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_exec_os_type": buck.exec_os_type_arg(), From d559af2ebcfe732ab877164994b45d95303be899 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Fri, 2 Feb 2024 11:56:02 -0800 Subject: [PATCH 0217/1133] Introduce prepackaged_validators Summary: Context + RFC: https://fb.workplace.com/groups/buck2apple/posts/713185827267246/ This adds the replacement of `validator` and `validator_args` to `apple_package`: `prepackaged_validators`. There are a few "upgrades" here: - We can pass a list of validators. This will come in handy since I plan to inject one for every production app to check privacy manifests. - Similar to `SourceWithFlags` or [cxx sources]() this colocates the additional args with the script that needs to be run. - This no longer runs as part of packaging - we can eventually migrate the single `validator` in the codebase and remove all the validation logic in all of our custom packagers. 
Reviewed By: blackm00n Differential Revision: D53315946 fbshipit-source-id: 5157a4d953202155dd877cdb90ff71c0f410c3d5 --- prelude/apple/apple_package.bzl | 53 +++++++++++++++++++++++++++--- prelude/apple/apple_rules_impl.bzl | 7 ++++ 2 files changed, 56 insertions(+), 4 deletions(-) diff --git a/prelude/apple/apple_package.bzl b/prelude/apple/apple_package.bzl index fc0227933..59a5235fe 100644 --- a/prelude/apple/apple_package.bzl +++ b/prelude/apple/apple_package.bzl @@ -17,25 +17,37 @@ load(":apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") def apple_package_impl(ctx: AnalysisContext) -> list[Provider]: package = ctx.actions.declare_output("{}.{}".format(ctx.attrs.bundle.label.name, ctx.attrs.ext)) + contents = ( + ctx.attrs.bundle[DefaultInfo].default_outputs[0] if ctx.attrs.packager else _get_ipa_contents(ctx) + ) if ctx.attrs.packager: process_ipa_cmd = cmd_args([ ctx.attrs.packager[RunInfo], "--app-bundle-path", - ctx.attrs.bundle[DefaultInfo].default_outputs[0], + contents, "--output-path", package.as_output(), ctx.attrs.packager_args, ]) category = "apple_package_make_custom" else: - unprocessed_ipa_contents = _get_ipa_contents(ctx) process_ipa_cmd = _get_default_package_cmd( ctx, - unprocessed_ipa_contents, + contents, package.as_output(), ) category = "apple_package_make" + sub_targets = {} + + prepackaged_validators_artifacts = _get_prepackaged_validators_outputs(ctx, contents) + if prepackaged_validators_artifacts: + # Add the artifacts to packaging cmd so that they are run. 
+ process_ipa_cmd.hidden(prepackaged_validators_artifacts) + sub_targets["prepackaged_validators"] = [ + DefaultInfo(default_outputs = prepackaged_validators_artifacts), + ] + if ctx.attrs.validator != None: process_ipa_cmd.add([ "--validator", @@ -44,7 +56,10 @@ def apple_package_impl(ctx: AnalysisContext) -> list[Provider]: ]) ctx.actions.run(process_ipa_cmd, category = category) - return [DefaultInfo(default_output = package)] + return [DefaultInfo( + default_output = package, + sub_targets = sub_targets, + )] def _get_default_package_cmd(ctx: AnalysisContext, unprocessed_ipa_contents: Artifact, output: OutputArtifact) -> cmd_args: apple_tools = ctx.attrs._apple_tools[AppleToolsInfo] @@ -161,3 +176,33 @@ def _compression_level_arg(compression_level: IpaCompressionLevel) -> str: return "9" else: fail("Unknown .ipa compression level: " + str(compression_level)) + +def _get_prepackaged_validators_outputs(ctx: AnalysisContext, prepackaged_contents: Artifact) -> list[Artifact]: + if not ctx.attrs.prepackaged_validators: + return [] + + outputs = [] + for idx, validator in enumerate(ctx.attrs.prepackaged_validators): + if type(validator) == "tuple": + validator, validator_args = validator + else: + validator = validator + validator_args = [] + + output = ctx.actions.declare_output(validator.label.name + "_{}".format(idx)) + outputs.append(output) + + ctx.actions.run( + cmd_args([ + validator[RunInfo], + "--contents-dir", + prepackaged_contents, + "--output-path", + output.as_output(), + validator_args, + ]), + category = "prepackaged_validator", + identifier = str(idx), + ) + + return outputs diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 5372b0da8..5fd3ee9fd 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -152,6 +152,13 @@ extra_attributes = { "ext": attrs.enum(ApplePackageExtension.values(), default = "ipa"), "packager": attrs.option(attrs.exec_dep(providers = [RunInfo]), default 
= None), "packager_args": attrs.list(attrs.arg(), default = []), + "prepackaged_validators": attrs.list( + attrs.one_of( + attrs.exec_dep(providers = [RunInfo]), + attrs.tuple(attrs.exec_dep(providers = [RunInfo]), attrs.list(attrs.arg())), + ), + default = [], + ), "validator": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "validator_args": attrs.list(attrs.arg(), default = []), "_apple_toolchain": get_apple_bundle_toolchain_attr(), From f00575d642655f87b895b18a666afac700f8f941 Mon Sep 17 00:00:00 2001 From: Joshua Selbo Date: Fri, 2 Feb 2024 12:56:51 -0800 Subject: [PATCH 0218/1133] Replace usage of ctx.attrs.name MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: From wendy728: > it’s technically a bug from our side that we allow calling ctx.attrs.name. You could replace all uses of ctx.attrs.name with ctx.label.name in all rule impls and it should still work correctly. Reviewed By: stepancheg Differential Revision: D53356930 fbshipit-source-id: a0dfa271d190475e17f30ebaaaf335936be1fc9e --- prelude/java/java_test.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index 254bc6c4b..75a0d2d33 100644 --- a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -141,7 +141,7 @@ def build_junit_test( if tests_class_to_source_info != None: transitive_class_to_src_map = merge_class_to_source_map_from_jar( actions = ctx.actions, - name = ctx.attrs.name + ".transitive_class_to_src.json", + name = ctx.label.name + ".transitive_class_to_src.json", java_test_toolchain = java_test_toolchain, relative_to = ctx.label.cell_root if run_from_cell_root else None, deps = [tests_class_to_source_info], From d9504ac680584a371c1d2193c046c88d4d1ba7e3 Mon Sep 17 00:00:00 2001 From: Rainer Dreyer Date: Sun, 4 Feb 2024 06:37:12 -0800 Subject: [PATCH 0219/1133] Improve indentation in link_execution_preference docs Summary: The doc generator 
doesn't handle a list item in the second line correctly, this works around the issue by splitting the first line. Before: ![image](https://github.com/facebook/buck2/assets/347977/8507e943-284b-42ae-a149-8e8a8ab72d38) After: ![image](https://github.com/facebook/buck2/assets/347977/b0893b97-f68c-455e-a55c-9f12ed6e237f) Tested by running `docs.py`, building the website and verifying `docs/api/rules.generated.md` X-link: https://github.com/facebook/buck2/pull/557 Reviewed By: rdrey Differential Revision: D53389803 Pulled By: ndmitchell fbshipit-source-id: f72303de771d790b7df0da6a2ad921a0d4510e41 --- prelude/linking/execution_preference.bzl | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/prelude/linking/execution_preference.bzl b/prelude/linking/execution_preference.bzl index 92d45adee..e864b72a0 100644 --- a/prelude/linking/execution_preference.bzl +++ b/prelude/linking/execution_preference.bzl @@ -36,15 +36,17 @@ _ActionExecutionAttributes = record( def link_execution_preference_attr(): # The attribute is optional, allowing for None to represent that no preference has been set and we should fallback on the toolchain. return attrs.option(attrs.one_of(attrs.enum(LinkExecutionPreferenceTypes), attrs.dep(providers = [LinkExecutionPreferenceDeterminatorInfo])), default = None, doc = """ - The execution preference for linking. Options are:\n + The execution preference for linking. + + Options are: - any : No preference is set, and the link action will be performed based on buck2's executor configuration.\n - full_hybrid : The link action will execute both locally and remotely, regardless of buck2's executor configuration (if\n - the executor is capable of hybrid execution). The use_limited_hybrid setting of the hybrid executor is ignored.\n + the executor is capable of hybrid execution). 
The use_limited_hybrid setting of the hybrid executor is ignored.\n - local : The link action will execute locally if compatible on current host platform.\n - local_only : The link action will execute locally, and error if the current platform is not compatible.\n - remote : The link action will execute remotely if a compatible remote platform exists, otherwise locally.\n - The default is None, expressing that no preference has been set on the target itself. + The default is None, expressing that no preference has been set on the target itself. """) def get_link_execution_preference(ctx, links: list[Label]) -> LinkExecutionPreference: From ee41bf189794c028b57475d772c6260e99f37d0a Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Sun, 4 Feb 2024 11:35:24 -0800 Subject: [PATCH 0220/1133] Fix Haskell linker error on MacOS Summary: Closes https://github.com/facebook/buck2/issues/535 - Haskell toolchain: automatic host arch name - Configure platform specific linker flags X-link: https://github.com/facebook/buck2/pull/536 Reviewed By: shayne-fletcher Differential Revision: D53390492 Pulled By: ndmitchell fbshipit-source-id: b6fb0cac9df772caca0d36fc144ad39a06a15249 --- prelude/haskell/haskell.bzl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 7f4e50abc..ceb40af50 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -37,6 +37,11 @@ load( "get_transitive_deps_matching_labels", "is_link_group_shlib", ) +load( + "@prelude//cxx:linker.bzl", + "LINKERS", + "get_shared_library_flags", +) load( "@prelude//cxx:preprocessor.bzl", "CPreprocessor", From e295b4bf542a1c27f801650f890a4a68ec8bb9f2 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Mon, 5 Feb 2024 06:19:26 -0800 Subject: [PATCH 0221/1133] Sanitizers: support for sanitizer runtime for Darwin platforms Summary: If `cxx_toolchain()` has `sanitizer_runtime_enabled = True`, then for any binaries (e.g., `cxx_binary()`, 
`apple_binary()`, `rust_binary()`), ensure that: - The executable contains an `executable_path` rpath to the sanitizer runtime. This is required to ensure such binaries can find the sanitizer runtime at the correct path. - The `RunInfo` of such binaries contains the sanitizer runtime. This makes it possible to `$exe` such binaries and ensure it works correctly with buck2, locally and on RE. Reviewed By: blackm00n Differential Revision: D53268268 fbshipit-source-id: a50ca6ae113d621f95275e735446d516a19cb63c --- prelude/cxx/cxx_link_utility.bzl | 45 ++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index 77ab0424d..23fb51b5e 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -157,6 +157,47 @@ ExecutableSharedLibArguments = record( shared_libs_symlink_tree = field(list[Artifact] | Artifact | None, None), ) +CxxSanitizerRuntimeArguments = record( + extra_link_args = field(list[ArgLike], []), + sanitizer_runtime = field(list[Artifact], []), +) + +# @executable_path/Frameworks + +def _sanitizer_runtime_arguments( + cxx_toolchain: CxxToolchainInfo, + output: Artifact) -> CxxSanitizerRuntimeArguments: + linker_info = cxx_toolchain.linker_info + if not linker_info.sanitizer_runtime_enabled: + return CxxSanitizerRuntimeArguments() + + if linker_info.sanitizer_runtime_dir == None: + fail("C++ sanitizer runtime enabled but there's no runtime directory") + + if linker_info.type == "darwin": + runtime_rpath = cmd_args(linker_info.sanitizer_runtime_dir, format = "-Wl,-rpath,@executable_path/{}").relative_to(output, parent = 1) + + # Ignore_artifacts() as the runtime directory is not required at _link_ time + runtime_rpath = runtime_rpath.ignore_artifacts() + return CxxSanitizerRuntimeArguments( + extra_link_args = [ + runtime_rpath, + # Add rpaths in case the binary gets bundled and the app bundle is expected to be standalone. 
+ # Not all transitive callers have `CxxPlatformInfo`, so just add both iOS and macOS rpaths. + # There's no downsides to having both, except dyld would check in both locations (and it won't + # find anything for the non-current platform). + "-Wl,-rpath,@loader_path/Frameworks", # iOS + "-Wl,-rpath,@executable_path/Frameworks", # iOS + "-Wl,-rpath,@loader_path/../Frameworks", # macOS + "-Wl,-rpath,@executable_path/../Frameworks", # macOS + ], + sanitizer_runtime = [ + linker_info.sanitizer_runtime_dir, + ], + ) + + return CxxSanitizerRuntimeArguments() + def executable_shared_lib_arguments( actions: AnalysisActions, cxx_toolchain: CxxToolchainInfo, @@ -196,6 +237,10 @@ def executable_shared_lib_arguments( rpath_arg = cmd_args(shared_libs_symlink_tree, format = "-Wl,-rpath,{}/{{}}".format(rpath_reference)).relative_to(output, parent = 1).ignore_artifacts() extra_link_args.append(rpath_arg) + sanitizer_runtime_args = _sanitizer_runtime_arguments(cxx_toolchain, output) + extra_link_args += sanitizer_runtime_args.extra_link_args + runtime_files += sanitizer_runtime_args.sanitizer_runtime + return ExecutableSharedLibArguments( extra_link_args = extra_link_args, runtime_files = runtime_files, From 285b40a4cb12b6b30d2bb5a1f226a50b2d42810f Mon Sep 17 00:00:00 2001 From: Wendy Yu Date: Mon, 5 Feb 2024 09:23:07 -0800 Subject: [PATCH 0222/1133] Add kotlinc error handler to kotlin toolchain provider Summary: Suggested by IanChilds Reviewed By: IanChilds Differential Revision: D53351773 fbshipit-source-id: 1de50647df864b8b724df03a3039ec1194a75502 --- prelude/kotlin/kotlin_toolchain.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/kotlin/kotlin_toolchain.bzl b/prelude/kotlin/kotlin_toolchain.bzl index 248ea5a19..0c863de11 100644 --- a/prelude/kotlin/kotlin_toolchain.bzl +++ b/prelude/kotlin/kotlin_toolchain.bzl @@ -22,6 +22,7 @@ KotlinToolchainInfo = provider( "kosabi_jvm_abi_gen_plugin": provider_field(typing.Any, default = None), "kosabi_stubs_gen_plugin": 
provider_field(typing.Any, default = None), "kosabi_supported_ksp_providers": provider_field(typing.Any, default = None), + "kotlin_error_handler": provider_field(typing.Any, default = None), "kotlin_home_libraries": provider_field(typing.Any, default = None), "kotlin_stdlib": provider_field(typing.Any, default = None), "kotlinc": provider_field(typing.Any, default = None), From dadf41c1748976be4b38c517836b144444fadf4c Mon Sep 17 00:00:00 2001 From: Wendy Yu Date: Mon, 5 Feb 2024 09:23:07 -0800 Subject: [PATCH 0223/1133] use action error handler in kotlinc Summary: Hook up a general kotlinc error handler, which currently will only generate DI-specific sub categories. There's a performance risk here since it would be the first use case in production, so I have split the diffs up to add an error handler per action category (kotlinc, ksp_kotlinc, and javac). Reviewed By: IanChilds Differential Revision: D53241843 fbshipit-source-id: dfc64760a8a25ed81a075c5f9ccbb75eed9d1456 --- prelude/kotlin/kotlincd_jar_creator.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/kotlin/kotlincd_jar_creator.bzl b/prelude/kotlin/kotlincd_jar_creator.bzl index 30a792a79..24bfa1531 100644 --- a/prelude/kotlin/kotlincd_jar_creator.bzl +++ b/prelude/kotlin/kotlincd_jar_creator.bzl @@ -331,6 +331,7 @@ def create_jar_artifact_kotlincd( local_only = local_only, low_pass_filter = False, weight = 2, + error_handler = kotlin_toolchain.kotlin_error_handler, ) library_classpath_jars_tag = actions.artifact_tag() From 2a9502d311dc7f2cee4b7c186d2aae920ce0ab23 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Mon, 5 Feb 2024 12:30:14 -0800 Subject: [PATCH 0224/1133] add objdump to AppleToolchainInfo Summary: As part of package validation we want objdump to be available, so add it to AppleToolchainInfo. 
Reviewed By: narissiam Differential Revision: D53354696 fbshipit-source-id: 22d9f237266940525f9d2c64d98a2b5d1a249f9d --- prelude/apple/apple_rules_impl.bzl | 1 + prelude/apple/apple_toolchain.bzl | 1 + prelude/apple/apple_toolchain_types.bzl | 1 + prelude/apple/user/apple_toolchain_override.bzl | 1 + 4 files changed, 4 insertions(+) diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 5fd3ee9fd..847f20448 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -194,6 +194,7 @@ extra_attributes = { "lipo": attrs.exec_dep(providers = [RunInfo]), "min_version": attrs.option(attrs.string(), default = None), "momc": attrs.exec_dep(providers = [RunInfo]), + "objdump": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "odrcov": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), # A placeholder tool that can be used to set up toolchain constraints. # Useful when fat and thin toolchahins share the same underlying tools via `command_alias()`, diff --git a/prelude/apple/apple_toolchain.bzl b/prelude/apple/apple_toolchain.bzl index 2f84d2506..e13b18df3 100644 --- a/prelude/apple/apple_toolchain.bzl +++ b/prelude/apple/apple_toolchain.bzl @@ -32,6 +32,7 @@ def apple_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: lipo = ctx.attrs.lipo[RunInfo], min_version = ctx.attrs.min_version, momc = ctx.attrs.momc[RunInfo], + objdump = ctx.attrs.objdump[RunInfo] if ctx.attrs.objdump else None, odrcov = ctx.attrs.odrcov[RunInfo] if ctx.attrs.odrcov else None, platform_path = platform_path, sdk_build_version = ctx.attrs.build_version, diff --git a/prelude/apple/apple_toolchain_types.bzl b/prelude/apple/apple_toolchain_types.bzl index 0f7435a91..96adbaa9a 100644 --- a/prelude/apple/apple_toolchain_types.bzl +++ b/prelude/apple/apple_toolchain_types.bzl @@ -25,6 +25,7 @@ AppleToolchainInfo = provider( "lipo": provider_field(typing.Any, default = None), # "RunInfo" 
"min_version": provider_field(typing.Any, default = None), # [None, str] "momc": provider_field(typing.Any, default = None), # "RunInfo" + "objdump": provider_field(RunInfo | None, default = None), "odrcov": provider_field(typing.Any, default = None), # ["RunInfo", None] "platform_path": provider_field(typing.Any, default = None), # [str, artifact] "sdk_build_version": provider_field(typing.Any, default = None), # "[None, str]" diff --git a/prelude/apple/user/apple_toolchain_override.bzl b/prelude/apple/user/apple_toolchain_override.bzl index 0a3886e49..8cce54845 100644 --- a/prelude/apple/user/apple_toolchain_override.bzl +++ b/prelude/apple/user/apple_toolchain_override.bzl @@ -31,6 +31,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: lipo = base.lipo, min_version = base.min_version, momc = base.momc, + objdump = base.objdump, odrcov = base.odrcov, platform_path = base.platform_path, sdk_build_version = base.sdk_build_version, From 0744ec598a3a4c7057531fa079126c852a63a237 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Mon, 5 Feb 2024 13:11:27 -0800 Subject: [PATCH 0225/1133] Compile stdlib on-demand Summary: Build Go stdlib on-demand with buck, instead of using pre-built archives This is the second attempt to do this change, last one ended with S365277. Some homework has been done since then, so this time it should go more smoothly. See [post](https://fb.workplace.com/groups/codegophers/permalink/25768351322786806/) the details how this diff changes behaviour. 
The stdlib is added as a separate buck rule and attached to go_library|bunary|test|exported_ibrary via magic attribute `"_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib"))` Reviewed By: awalterschulze Differential Revision: D52914546 fbshipit-source-id: fdd8a877c5455a2b21ecbed1d8c48b62ac174978 --- prelude/go/cgo_library.bzl | 2 - prelude/go/compile.bzl | 23 ++----- prelude/go/go_binary.bzl | 6 -- prelude/go/go_exported_library.bzl | 6 -- prelude/go/go_library.bzl | 10 +--- prelude/go/go_stdlib.bzl | 76 ++++++++++++++++++++++++ prelude/go/go_test.bzl | 8 +-- prelude/go/link.bzl | 13 +--- prelude/go/packages.bzl | 76 ++++++++++++++---------- prelude/go/toolchain.bzl | 3 + prelude/go/tools/BUCK.v2 | 23 +++++++ prelude/go/tools/concat_files.py | 33 ++++++++++ prelude/go/tools/gen_stdlib_importcfg.py | 32 ++++++++++ prelude/go/tools/go_wrapper.py | 60 +++++++++++++++++++ prelude/rules_impl.bzl | 13 ++++ 15 files changed, 292 insertions(+), 92 deletions(-) create mode 100644 prelude/go/go_stdlib.bzl create mode 100644 prelude/go/tools/concat_files.py create mode 100644 prelude/go/tools/gen_stdlib_importcfg.py create mode 100644 prelude/go/tools/go_wrapper.py diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 114247101..217d203fe 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -165,7 +165,6 @@ def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, ctx, pkg_name, srcs = srcs, - cgo_enabled = True, deps = ctx.attrs.deps + ctx.attrs.exported_deps, coverage_mode = coverage_mode, shared = shared, @@ -243,7 +242,6 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx, pkg_name, all_srcs, - cgo_enabled = True, deps = ctx.attrs.deps + ctx.attrs.exported_deps, shared = ctx.attrs._compile_shared, ) diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index 44ffffd61..f33f1e761 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -14,9 
+14,9 @@ load(":coverage.bzl", "GoCoverageMode") load( ":packages.bzl", "GoPkg", # @Unused used as type + "make_importcfg", "merge_pkgs", "pkg_artifacts", - "stdlib_pkg_artifacts", ) load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") @@ -92,8 +92,8 @@ def _assemble_cmd( def _compile_cmd( ctx: AnalysisContext, + root: str, pkg_name: str, - cgo_enabled: bool, pkgs: dict[str, Artifact] = {}, deps: list[Dependency] = [], flags: list[str] = [], @@ -118,25 +118,11 @@ def _compile_cmd( all_pkgs = merge_pkgs([ pkgs, pkg_artifacts(get_inherited_compile_pkgs(deps), coverage_mode = coverage_mode), - stdlib_pkg_artifacts(go_toolchain, shared = shared, non_cgo = not cgo_enabled), ]) - importcfg_content = [] - for name_, pkg_ in all_pkgs.items(): - # Hack: we use cmd_args get "artifact" valid path and write it to a file. - importcfg_content.append(cmd_args("packagefile ", name_, "=", pkg_, delimiter = "")) - - # Future work: support importmap in buck rules insted of hacking here. - if name_.startswith("third-party-source/go/"): - real_name_ = name_.removeprefix("third-party-source/go/") - importcfg_content.append(cmd_args("importmap ", real_name_, "=", name_, delimiter = "")) - - root = _out_root(shared, coverage_mode) - importcfg = ctx.actions.declare_output(root, paths.basename(pkg_name) + "-importcfg") - ctx.actions.write(importcfg.as_output(), importcfg_content) + importcfg = make_importcfg(ctx, root, pkg_name, all_pkgs, with_importmap = True) cmd.add("-importcfg", importcfg) - cmd.hidden(all_pkgs.values()) return cmd @@ -144,7 +130,6 @@ def compile( ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, - cgo_enabled: bool, pkgs: dict[str, Artifact] = {}, deps: list[Dependency] = [], compile_flags: list[str] = [], @@ -158,7 +143,7 @@ def compile( cmd = get_toolchain_cmd_args(go_toolchain) cmd.add(go_toolchain.compile_wrapper[RunInfo]) cmd.add(cmd_args(output.as_output(), format = "--output={}")) - cmd.add(cmd_args(_compile_cmd(ctx, pkg_name, cgo_enabled, 
pkgs, deps, compile_flags, shared = shared, coverage_mode = coverage_mode), format = "--compiler={}")) + cmd.add(cmd_args(_compile_cmd(ctx, root, pkg_name, pkgs, deps, compile_flags, shared = shared, coverage_mode = coverage_mode), format = "--compiler={}")) cmd.add(cmd_args(_assemble_cmd(ctx, pkg_name, assemble_flags, shared = shared), format = "--assembler={}")) cmd.add(cmd_args(go_toolchain.packer, format = "--packer={}")) if ctx.attrs.embedcfg: diff --git a/prelude/go/go_binary.bzl b/prelude/go/go_binary.bzl index a6ac99e33..2ce58d917 100644 --- a/prelude/go/go_binary.bzl +++ b/prelude/go/go_binary.bzl @@ -18,24 +18,18 @@ load( ) load(":compile.bzl", "compile", "get_filtered_srcs") load(":link.bzl", "link") -load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled") def go_binary_impl(ctx: AnalysisContext) -> list[Provider]: - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs.cgo_enabled) - lib = compile( ctx, "main", get_filtered_srcs(ctx, ctx.attrs.srcs), - cgo_enabled = cgo_enabled, deps = ctx.attrs.deps, compile_flags = ctx.attrs.compiler_flags, ) (bin, runtime_files, external_debug_info) = link( ctx, lib, - cgo_enabled = cgo_enabled, deps = ctx.attrs.deps, link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static")), linker_flags = ctx.attrs.linker_flags, diff --git a/prelude/go/go_exported_library.bzl b/prelude/go/go_exported_library.bzl index f8daa8708..28101cf52 100644 --- a/prelude/go/go_exported_library.bzl +++ b/prelude/go/go_exported_library.bzl @@ -16,17 +16,12 @@ load( ) load(":compile.bzl", "compile", "get_filtered_srcs") load(":link.bzl", "GoBuildMode", "link") -load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled") def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs.cgo_enabled) - lib = compile( ctx, 
"main", get_filtered_srcs(ctx, ctx.attrs.srcs), - cgo_enabled = cgo_enabled, deps = ctx.attrs.deps, compile_flags = ctx.attrs.compiler_flags, shared = True, @@ -34,7 +29,6 @@ def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: (bin, runtime_files, _external_debug_info) = link( ctx, lib, - cgo_enabled = cgo_enabled, deps = ctx.attrs.deps, build_mode = GoBuildMode(ctx.attrs.build_mode), link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static_pic")), diff --git a/prelude/go/go_library.bzl b/prelude/go/go_library.bzl index f417e4dfd..07bc8f463 100644 --- a/prelude/go/go_library.bzl +++ b/prelude/go/go_library.bzl @@ -27,9 +27,8 @@ load(":compile.bzl", "GoPkgCompileInfo", "GoTestInfo", "compile", "get_filtered_ load(":coverage.bzl", "GoCoverageMode", "cover_srcs") load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs") load(":packages.bzl", "GoPkg", "go_attr_pkg_name", "merge_pkgs") -load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled") -def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, coverage_mode: GoCoverageMode, cgo_enabled: bool, shared: bool = False) -> (Artifact, cmd_args): +def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, coverage_mode: GoCoverageMode, shared: bool = False) -> (Artifact, cmd_args): cov_res = cover_srcs(ctx, pkg_name, coverage_mode, srcs, shared) srcs = cov_res.srcs coverage_vars = cov_res.variables @@ -37,7 +36,6 @@ def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, ctx, pkg_name, srcs = srcs, - cgo_enabled = cgo_enabled, deps = ctx.attrs.deps + ctx.attrs.exported_deps, compile_flags = ctx.attrs.compiler_flags, coverage_mode = coverage_mode, @@ -46,8 +44,6 @@ def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, return (coverage_pkg, coverage_vars) def go_library_impl(ctx: AnalysisContext) -> list[Provider]: - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - 
pkgs = {} default_output = None pkg_name = None @@ -56,21 +52,19 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. srcs = get_filtered_srcs(ctx, ctx.attrs.srcs, force_disable_cgo = True) - cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs._cgo_enabled) shared = ctx.attrs._compile_shared compiled_pkg = compile( ctx, pkg_name, srcs = srcs, - cgo_enabled = cgo_enabled, deps = ctx.attrs.deps + ctx.attrs.exported_deps, compile_flags = ctx.attrs.compiler_flags, assemble_flags = ctx.attrs.assembler_flags, shared = shared, ) - pkg_with_coverage = {mode: _compile_with_coverage(ctx, pkg_name, srcs, mode, cgo_enabled, shared) for mode in GoCoverageMode} + pkg_with_coverage = {mode: _compile_with_coverage(ctx, pkg_name, srcs, mode, shared) for mode in GoCoverageMode} default_output = compiled_pkg pkgs[pkg_name] = GoPkg( diff --git a/prelude/go/go_stdlib.bzl b/prelude/go/go_stdlib.bzl new file mode 100644 index 000000000..f09ee8f2d --- /dev/null +++ b/prelude/go/go_stdlib.bzl @@ -0,0 +1,76 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load(":packages.bzl", "GoStdlib") +load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled", "get_toolchain_cmd_args") + +def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + stdlib_pkgdir = ctx.actions.declare_output("stdlib_pkgdir", dir = True) + cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs._cgo_enabled) + tags = go_toolchain.tags + linker_flags = [] + go_toolchain.linker_flags + assembler_flags = [] + go_toolchain.assembler_flags + compiler_flags = [] + go_toolchain.compiler_flags + compiler_flags += ["-buildid="] # Make builds reproducible. + if ctx.attrs._compile_shared: + assembler_flags += ["-shared"] + compiler_flags += ["-shared"] + + go_wrapper_args = [] + cxx_toolchain = go_toolchain.cxx_toolchain_for_linking + if cxx_toolchain != None: + c_compiler = cxx_toolchain.c_compiler_info + + cgo_ldflags = cmd_args( + cxx_toolchain.linker_info.linker_flags, + go_toolchain.external_linker_flags, + ) + + go_wrapper_args += [ + cmd_args(c_compiler.compiler, format = "--cc={}").absolute_prefix("%cwd%/"), + cmd_args([c_compiler.compiler_flags, go_toolchain.c_compiler_flags], format = "--cgo_cflags={}").absolute_prefix("%cwd%/"), + cmd_args(c_compiler.preprocessor_flags, format = "--cgo_cppflags={}").absolute_prefix("%cwd%/"), + cmd_args(cgo_ldflags, format = "--cgo_ldflags={}").absolute_prefix("%cwd%/"), + ] + + cmd = get_toolchain_cmd_args(go_toolchain, go_root = True) + cmd.add([ + "GODEBUG={}".format("installgoroot=all"), + "CGO_ENABLED={}".format("1" if cgo_enabled else "0"), + go_toolchain.go_wrapper, + go_toolchain.go, + go_wrapper_args, + "install", + "-pkgdir", + stdlib_pkgdir.as_output(), + cmd_args(["-asmflags=", cmd_args(assembler_flags, delimiter = " ")], delimiter = "") if assembler_flags else [], + cmd_args(["-gcflags=", cmd_args(compiler_flags, delimiter = " ")], delimiter = "") if compiler_flags else [], + cmd_args(["-ldflags=", cmd_args(linker_flags, delimiter 
= " ")], delimiter = "") if linker_flags else [], + ["-tags", ",".join(tags)] if tags else [], + "std", + ]) + + ctx.actions.run(cmd, category = "go_build_stdlib", identifier = "go_build_stdlib") + + importcfg = ctx.actions.declare_output("stdlib.importcfg") + ctx.actions.run( + [ + go_toolchain.gen_stdlib_importcfg, + "--stdlib", + stdlib_pkgdir, + "--output", + importcfg.as_output(), + ], + category = "go_gen_stdlib_importcfg", + identifier = "go_gen_stdlib_importcfg", + ) + + return [ + DefaultInfo(default_output = stdlib_pkgdir), + GoStdlib(pkgdir = stdlib_pkgdir, importcfg = importcfg), + ] diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index 5f0ece710..f32d17182 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -23,7 +23,6 @@ load(":compile.bzl", "GoTestInfo", "compile", "get_filtered_srcs", "get_inherite load(":coverage.bzl", "GoCoverageMode", "cover_srcs") load(":link.bzl", "link") load(":packages.bzl", "go_attr_pkg_name", "pkg_artifact", "pkg_coverage_vars") -load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled") def _gen_test_main( ctx: AnalysisContext, @@ -88,15 +87,11 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: coverage_vars[name] = vars pkgs[name] = artifact - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - cgo_enabled = evaluate_cgo_enabled(go_toolchain, ctx.attrs.cgo_enabled) - # Compile all tests into a package. tests = compile( ctx, pkg_name, srcs, - cgo_enabled = cgo_enabled, deps = deps, pkgs = pkgs, compile_flags = ctx.attrs.compiler_flags, @@ -107,14 +102,13 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: # package. gen_main = _gen_test_main(ctx, pkg_name, coverage_mode, coverage_vars, srcs) pkgs[pkg_name] = tests - main = compile(ctx, "main", cmd_args(gen_main), cgo_enabled = cgo_enabled, pkgs = pkgs, coverage_mode = coverage_mode) + main = compile(ctx, "main", cmd_args(gen_main), pkgs = pkgs, coverage_mode = coverage_mode) # Link the above into a Go binary. 
(bin, runtime_files, external_debug_info) = link( ctx = ctx, main = main, pkgs = pkgs, - cgo_enabled = cgo_enabled, deps = deps, link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static")), linker_flags = ctx.attrs.linker_flags, diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 6bd81ebc5..35fb0557a 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -37,9 +37,9 @@ load(":coverage.bzl", "GoCoverageMode") load( ":packages.bzl", "GoPkg", # @Unused used as type + "make_importcfg", "merge_pkgs", "pkg_artifacts", - "stdlib_pkg_artifacts", ) load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") @@ -106,7 +106,6 @@ def _process_shared_dependencies( def link( ctx: AnalysisContext, main: Artifact, - cgo_enabled: bool, pkgs: dict[str, Artifact] = {}, deps: list[Dependency] = [], build_mode: GoBuildMode = GoBuildMode("executable"), @@ -139,19 +138,11 @@ def link( all_pkgs = merge_pkgs([ pkgs, pkg_artifacts(get_inherited_link_pkgs(deps), coverage_mode = coverage_mode), - stdlib_pkg_artifacts(go_toolchain, shared = shared, non_cgo = not cgo_enabled), ]) - importcfg_content = [] - for name_, pkg_ in all_pkgs.items(): - # Hack: we use cmd_args get "artifact" valid path and write it to a file. - importcfg_content.append(cmd_args("packagefile ", name_, "=", pkg_, delimiter = "")) - - importcfg = ctx.actions.declare_output("importcfg") - ctx.actions.write(importcfg.as_output(), importcfg_content) + importcfg = make_importcfg(ctx, "", "", all_pkgs, with_importmap = False) cmd.add("-importcfg", importcfg) - cmd.hidden(all_pkgs.values()) executable_args = _process_shared_dependencies(ctx, output, deps, link_style) diff --git a/prelude/go/packages.bzl b/prelude/go/packages.bzl index 51878c08d..6ca43871f 100644 --- a/prelude/go/packages.bzl +++ b/prelude/go/packages.bzl @@ -5,7 +5,6 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//:artifacts.bzl", "ArtifactGroupInfo") load("@prelude//go:toolchain.bzl", "GoToolchainInfo") load("@prelude//utils:utils.bzl", "value_or") load(":coverage.bzl", "GoCoverageMode") @@ -16,6 +15,13 @@ GoPkg = record( pkg_with_coverage = field(dict[GoCoverageMode, (Artifact, cmd_args)]), ) +GoStdlib = provider( + fields = { + "importcfg": provider_field(Artifact), + "pkgdir": provider_field(Artifact), + }, +) + def go_attr_pkg_name(ctx: AnalysisContext) -> str: """ Return the Go package name for the given context corresponding to a rule. @@ -61,35 +67,39 @@ def pkg_artifacts(pkgs: dict[str, GoPkg], coverage_mode: [GoCoverageMode, None] for name, pkg in pkgs.items() } -def stdlib_pkg_artifacts(toolchain: GoToolchainInfo, shared: bool = False, non_cgo: bool = False) -> dict[str, Artifact]: - """ - Return a map package name to a `shared` or `static` package artifact of stdlib. - """ - - # shared == True && non_cgo == True is not supported yet, - # we'll temporarily use non_cgo if both flags are true, this will be wixed with on-demand building of stdlib. 
- - if non_cgo: - prebuilt_stdlib = toolchain.prebuilt_stdlib_noncgo - elif shared: - prebuilt_stdlib = toolchain.prebuilt_stdlib_shared - else: - prebuilt_stdlib = toolchain.prebuilt_stdlib - - stdlib_pkgs = prebuilt_stdlib[ArtifactGroupInfo].artifacts - - if len(stdlib_pkgs) == 0: - fail("Stdlib for current platfrom is missing from toolchain.") - - pkgs = {} - for pkg in stdlib_pkgs: - # remove first directory like `pgk` - _, _, temp_path = pkg.short_path.partition("/") - - # remove second directory like `darwin_amd64` - # now we have name like `net/http.a` - _, _, pkg_relpath = temp_path.partition("/") - name = pkg_relpath.removesuffix(".a") # like `net/http` - pkgs[name] = pkg - - return pkgs +def make_importcfg( + ctx: AnalysisContext, + root: str, + pkg_name: str, + own_pkgs: dict[str, typing.Any], + with_importmap: bool) -> cmd_args: + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + stdlib = ctx.attrs._go_stdlib[GoStdlib] + + content = [] + for name_, pkg_ in own_pkgs.items(): + # Hack: we use cmd_args get "artifact" valid path and write it to a file. + content.append(cmd_args("packagefile ", name_, "=", pkg_, delimiter = "")) + + # Future work: support importmap in buck rules insted of hacking here. 
+ if with_importmap and name_.startswith("third-party-source/go/"): + real_name_ = name_.removeprefix("third-party-source/go/") + content.append(cmd_args("importmap ", real_name_, "=", name_, delimiter = "")) + + own_importcfg = ctx.actions.declare_output(root, "{}.importcfg".format(pkg_name)) + ctx.actions.write(own_importcfg, content) + + final_importcfg = ctx.actions.declare_output(root, "{}.final.importcfg".format(pkg_name)) + ctx.actions.run( + [ + go_toolchain.concat_files, + "--output", + final_importcfg.as_output(), + stdlib.importcfg, + own_importcfg, + ], + category = "concat_importcfgs", + identifier = "{}/{}".format(root, pkg_name), + ) + + return cmd_args(final_importcfg).hidden(stdlib.pkgdir).hidden(own_pkgs.values()) diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index a7f119e09..2311a2fb0 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -13,9 +13,12 @@ GoToolchainInfo = provider( "c_compiler_flags": provider_field(typing.Any, default = None), "cgo": provider_field(typing.Any, default = None), "cgo_wrapper": provider_field(typing.Any, default = None), + "gen_stdlib_importcfg": provider_field(typing.Any, default = None), + "go_wrapper": provider_field(typing.Any, default = None), "compile_wrapper": provider_field(typing.Any, default = None), "compiler": provider_field(typing.Any, default = None), "compiler_flags": provider_field(typing.Any, default = None), + "concat_files": provider_field(typing.Any, default = None), "cover": provider_field(typing.Any, default = None), "cover_srcs": provider_field(typing.Any, default = None), "cxx_toolchain_for_linking": provider_field(typing.Any, default = None), diff --git a/prelude/go/tools/BUCK.v2 b/prelude/go/tools/BUCK.v2 index b7499e98f..92b006f5b 100644 --- a/prelude/go/tools/BUCK.v2 +++ b/prelude/go/tools/BUCK.v2 @@ -6,6 +6,12 @@ prelude.python_bootstrap_binary( visibility = ["PUBLIC"], ) +prelude.python_bootstrap_binary( + name = "concat_files", + main = 
"concat_files.py", + visibility = ["PUBLIC"], +) + prelude.python_bootstrap_binary( name = "cover_srcs", main = "cover_srcs.py", @@ -24,6 +30,18 @@ prelude.python_bootstrap_binary( visibility = ["PUBLIC"], ) +prelude.python_bootstrap_binary( + name = "gen_stdlib_importcfg", + main = "gen_stdlib_importcfg.py", + visibility = ["PUBLIC"], +) + +prelude.python_bootstrap_binary( + name = "go_wrapper", + main = "go_wrapper.py", + visibility = ["PUBLIC"], +) + prelude.go_binary( name = "testmaingen", srcs = [ @@ -33,3 +51,8 @@ prelude.go_binary( "PUBLIC", ], ) + +prelude.go_stdlib( + name = "stdlib", + visibility = ["PUBLIC"], +) diff --git a/prelude/go/tools/concat_files.py b/prelude/go/tools/concat_files.py new file mode 100644 index 000000000..145335a28 --- /dev/null +++ b/prelude/go/tools/concat_files.py @@ -0,0 +1,33 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import argparse +import sys +from pathlib import Path + + +def main(argv): + parser = argparse.ArgumentParser(fromfile_prefix_chars="@") + parser.add_argument("--output", required=True, type=Path) + parser.add_argument("files", type=Path, nargs="*") + args = parser.parse_args(argv[1:]) + + if len(args.files) == 0: + print( + "usage: concat_files.py --output out.txt in1.txt in2.txt", file=sys.stderr + ) + return 1 + + with open(args.output, "wb") as outfile: + for f in args.files: + with open(f, "rb") as infile: + outfile.write(infile.read()) + + return 0 + + +sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/gen_stdlib_importcfg.py b/prelude/go/tools/gen_stdlib_importcfg.py new file mode 100644 index 000000000..ce973c0ab --- /dev/null +++ b/prelude/go/tools/gen_stdlib_importcfg.py @@ -0,0 +1,32 @@ +# Copyright (c) Meta Platforms, Inc. 
and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import argparse +import os +import sys +from pathlib import Path + + +def main(argv): + parser = argparse.ArgumentParser() + parser.add_argument("--stdlib", type=Path, default=None) + parser.add_argument("--output", type=Path, default=None) + + args = parser.parse_args() + + with open(args.output, "w") as f: + for root, _dirs, files in os.walk(args.stdlib): + for file in files: + pkg_path = Path(root, file) + pkg_name, _ = os.path.splitext(pkg_path.relative_to(args.stdlib)) + # package names always use unix slashes + pkg_name = pkg_name.replace("\\", "/") + f.write(f"packagefile {pkg_name}={pkg_path}\n") + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/go_wrapper.py b/prelude/go/tools/go_wrapper.py new file mode 100644 index 000000000..bb830da97 --- /dev/null +++ b/prelude/go/tools/go_wrapper.py @@ -0,0 +1,60 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import argparse +import os +import subprocess +import sys +from pathlib import Path + + +def main(argv): + """ + This is a wrapper script around the `go` binary. 
+ - It fixes GOROOT and GOCACHE + """ + if len(argv) < 2: + print("usage: go_wrapper.py ", file=sys.stderr) + return 1 + + wrapped_binary = Path(argv[1]) + + parser = argparse.ArgumentParser() + parser.add_argument("--cc", action="append", default=[]) + parser.add_argument("--cgo_cflags", action="append", default=[]) + parser.add_argument("--cgo_cppflags", action="append", default=[]) + parser.add_argument("--cgo_ldflags", action="append", default=[]) + parsed, unknown = parser.parse_known_args(argv[2:]) + + env = os.environ.copy() + # Make paths absolute, otherwise go build will fail. + env["GOROOT"] = os.path.realpath(env["GOROOT"]) + env["GOCACHE"] = os.path.realpath(env["BUCK_SCRATCH_PATH"]) + + cwd = os.getcwd() + if len(parsed.cc) > 0: + env["CC"] = " ".join([arg.replace("%cwd%", cwd) for arg in parsed.cc]) + + if len(parsed.cgo_cflags) > 0: + env["CGO_CFLAGS"] = " ".join( + [arg.replace("%cwd%", cwd) for arg in parsed.cgo_cflags] + ) + + if len(parsed.cgo_cppflags) > 0: + env["CGO_CPPFLAGS"] = " ".join( + [arg.replace("%cwd%", cwd) for arg in parsed.cgo_cppflags] + ) + + if len(parsed.cgo_ldflags) > 0: + env["CGO_LDFLAGS"] = " ".join( + [arg.replace("%cwd%", cwd) for arg in parsed.cgo_ldflags] + ) + + return subprocess.call([wrapped_binary] + unknown, env=env) + + +sys.exit(main(sys.argv)) diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 584d5c2eb..a33152b1e 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -24,6 +24,7 @@ load("@prelude//go:coverage.bzl", "GoCoverageMode") load("@prelude//go:go_binary.bzl", "go_binary_impl") load("@prelude//go:go_exported_library.bzl", "go_exported_library_impl") load("@prelude//go:go_library.bzl", "go_library_impl") +load("@prelude//go:go_stdlib.bzl", "go_stdlib_impl") load("@prelude//go:go_test.bzl", "go_test_impl") load("@prelude//haskell:compile.bzl", "HaskellLibraryProvider") load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", 
"haskell_prebuilt_library_impl", "haskell_toolchain_library_impl") @@ -170,6 +171,7 @@ extra_implemented_rules = struct( go_exported_library = go_exported_library_impl, go_library = go_library_impl, go_test = go_test_impl, + go_stdlib = go_stdlib_impl, #haskell haskell_library = haskell_library_impl, @@ -374,6 +376,7 @@ inlined_extra_attributes = { "_compile_shared": compile_shared_attr, "_cxx_toolchain": toolchains_common.cxx(), "_exec_os_type": buck.exec_os_type_arg(), + "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), }, # csharp @@ -419,17 +422,26 @@ inlined_extra_attributes = { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), "resources": attrs.list(attrs.one_of(attrs.dep(), attrs.source(allow_directory = True)), default = []), "_exec_os_type": buck.exec_os_type_arg(), + "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), }, "go_exported_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), "_exec_os_type": buck.exec_os_type_arg(), + "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), }, "go_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), "_cgo_enabled": cgo_enabled_attr, "_compile_shared": compile_shared_attr, + "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), + "_go_toolchain": toolchains_common.go(), + }, + "go_stdlib": { + "_cgo_enabled": cgo_enabled_attr, + "_compile_shared": compile_shared_attr, + "_exec_os_type": buck.exec_os_type_arg(), "_go_toolchain": toolchains_common.go(), }, "go_test": { @@ -437,6 +449,7 @@ inlined_extra_attributes = { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), "resources": attrs.list(attrs.source(allow_directory = True), default = 
[]), "_exec_os_type": buck.exec_os_type_arg(), + "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), "_testmaingen": attrs.default_only(attrs.exec_dep(default = "prelude//go/tools:testmaingen")), }, From 9d461e5d94668c78181d7a535bf87e227a4eea30 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 6 Feb 2024 02:27:01 -0800 Subject: [PATCH 0226/1133] disable provisioning profiles verification Reviewed By: chatura-atapattu Differential Revision: D53349463 fbshipit-source-id: 9d5217f1bdd6dde69d8c3f56161fb9fb40a6ff1d --- .../code_signing/read_provisioning_profile_command_factory.py | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py b/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py index 0d4d75362..ed5b01a4d 100644 --- a/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py +++ b/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py @@ -27,6 +27,7 @@ class DefaultReadProvisioningProfileCommandFactory( "der", "-verify", "-noverify", + "-nosigs", "-in", ] From 7e54ed388a1a20b439acce680ee4906016e18b33 Mon Sep 17 00:00:00 2001 From: Conner Nilsen Date: Tue, 6 Feb 2024 09:45:31 -0800 Subject: [PATCH 0227/1133] Update query.bxl to allow user to filter deps from sources Summary: Allows us to query for included targets for Pyre evaluation (and eventually source files) from a given target expression *without* including dependencies. This is an alternate approach to running ``` buck2 query "kind(\"python_binary|python_library|python_test\", %s) - attrfilter(labels, generated, %s) - attrfilter(labels, no_pyre, %s)" ``` Both work (as far as I can tell), but at this point it's a matter of preference on which one we use. I think this approach might be better, since we'll be able to easily reuse this functionality in future diffs if we ever need it again. 
Reviewed By: grievejia Differential Revision: D53439983 fbshipit-source-id: a0796f180d234d8cd554c967ea6e6f2c4fc5da14 --- prelude/python/sourcedb/filter.bxl | 64 ++++++++++++++++++++++++++++++ prelude/python/sourcedb/query.bxl | 55 +++++++------------------ 2 files changed, 79 insertions(+), 40 deletions(-) create mode 100644 prelude/python/sourcedb/filter.bxl diff --git a/prelude/python/sourcedb/filter.bxl b/prelude/python/sourcedb/filter.bxl new file mode 100644 index 000000000..9cbbbe214 --- /dev/null +++ b/prelude/python/sourcedb/filter.bxl @@ -0,0 +1,64 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +BUCK_PYTHON_RULE_KINDS = [ + "python_binary", + "python_library", + "python_test", +] +BUCK_PYTHON_RULE_KIND_QUERY = "|".join(BUCK_PYTHON_RULE_KINDS) + +def filter_root_targets( + query: bxl.CqueryContext, + target_patterns: typing.Any) -> bxl.ConfiguredTargetSet: + # Find all Pure-Python targets + candidate_targets = ctarget_set() + for pattern in target_patterns: + candidate_targets += query.kind( + BUCK_PYTHON_RULE_KIND_QUERY, + pattern, + ) + + # Don't check generated rules + filtered_targets = candidate_targets - query.attrfilter( + "labels", + "generated", + candidate_targets, + ) + + # Provide an opt-out label + filtered_targets = filtered_targets - query.attrfilter( + "labels", + "no_pyre", + candidate_targets, + ) + return filtered_targets + +def do_filter( + query: bxl.CqueryContext, + target_patterns: typing.Any) -> list[ConfiguredTargetLabel]: + root_targets = filter_root_targets(query, target_patterns) + return [root_target.label for root_target in root_targets] + +def _do_filter_entry_point(ctx: bxl.Context) -> None: + query = ctx.cquery() + targets = do_filter( + query, + 
[query.eval(target) for target in ctx.cli_args.target], + ) + ctx.output.print_json([target.raw_target() for target in targets]) + +filter = bxl_main( + doc = ( + "Expand target patterns and look for all targets in immediate sources " + + "that will be built by Pyre." + ), + impl = _do_filter_entry_point, + cli_args = { + "target": cli_args.list(cli_args.string()), + }, +) diff --git a/prelude/python/sourcedb/query.bxl b/prelude/python/sourcedb/query.bxl index 8152d15db..3b79a3b69 100644 --- a/prelude/python/sourcedb/query.bxl +++ b/prelude/python/sourcedb/query.bxl @@ -7,39 +7,7 @@ load("@prelude//python:python.bzl", "PythonLibraryManifestsTSet") load("@prelude//python:source_db.bzl", "PythonSourceDBInfo") - -BUCK_PYTHON_RULE_KINDS = [ - "python_binary", - "python_library", - "python_test", -] -BUCK_PYTHON_RULE_KIND_QUERY = "|".join(BUCK_PYTHON_RULE_KINDS) - -def _filter_root_targets( - query: bxl.CqueryContext, - target_patterns: typing.Any) -> bxl.ConfiguredTargetSet: - # Find all Pure-Python targets - candidate_targets = ctarget_set() - for pattern in target_patterns: - candidate_targets += query.kind( - BUCK_PYTHON_RULE_KIND_QUERY, - pattern, - ) - - # Don't check generated rules - filtered_targets = candidate_targets - query.attrfilter( - "labels", - "generated", - candidate_targets, - ) - - # Provide an opt-out label - filtered_targets = filtered_targets - query.attrfilter( - "labels", - "no_pyre", - candidate_targets, - ) - return filtered_targets +load("@prelude//python/sourcedb/filter.bxl", "filter_root_targets") def _get_python_library_manifests_from_analysis_result( analysis_result: bxl.AnalysisResult) -> [PythonLibraryManifestsTSet, None]: @@ -73,7 +41,7 @@ def get_python_library_manifests_tset_from_target_patterns( query: bxl.CqueryContext, actions: AnalysisActions, target_patterns: typing.Any) -> PythonLibraryManifestsTSet: - root_targets = _filter_root_targets(query, target_patterns) + root_targets = filter_root_targets(query, target_patterns) 
return get_python_library_manifests_tset_from_targets(ctx, actions, root_targets) def do_query( @@ -81,11 +49,13 @@ def do_query( query: bxl.CqueryContext, actions: AnalysisActions, target_patterns: typing.Any) -> list[ConfiguredTargetLabel]: - manifests_of_transitive_dependencies = get_python_library_manifests_tset_from_target_patterns( - ctx, - query, - actions, - target_patterns, + manifests_of_transitive_dependencies = ( + get_python_library_manifests_tset_from_target_patterns( + ctx, + query, + actions, + target_patterns, + ) ) return [ manifest.label.configured_target() @@ -96,7 +66,12 @@ def do_query( def _do_query_entry_point(ctx: bxl.Context) -> None: query = ctx.cquery() actions = ctx.bxl_actions().actions - targets = do_query(ctx, query, actions, [query.eval(target) for target in ctx.cli_args.target]) + targets = do_query( + ctx, + query, + actions, + [query.eval(target) for target in ctx.cli_args.target], + ) ctx.output.print_json([target.raw_target() for target in targets]) query = bxl_main( From 85523367413fb63b6212773016a1bdfb98104d2d Mon Sep 17 00:00:00 2001 From: Ruslan Sayfutdinov Date: Tue, 6 Feb 2024 09:51:18 -0800 Subject: [PATCH 0228/1133] buck2/docs: update genrule docs Summary: Unlike buck1, buck2 evaluates these macros to relative paths. Fixes: https://github.com/facebook/buck2/issues/560 Reviewed By: wendy728 Differential Revision: D53473247 fbshipit-source-id: 9d8c541ee213a516b033b607447cf68abe13e68a --- prelude/decls/genrule_common.bzl | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/prelude/decls/genrule_common.bzl b/prelude/decls/genrule_common.bzl index 0538c1ec4..1b84e2808 100644 --- a/prelude/decls/genrule_common.bzl +++ b/prelude/decls/genrule_common.bzl @@ -49,13 +49,13 @@ def _cmd_arg(): A string expansion of the `srcs` argument delimited by the `environment_expansion_separator` argument where each element of `srcs` will be translated - into an absolute path. + into a relative path. 
`${SRCDIR}` - The absolute path to a directory to which sources are copied + The relative path to a directory to which sources are copied prior to running the command. @@ -97,8 +97,7 @@ def _cmd_arg(): to be dependencies of the `genrule()`. - Note that the paths returned by these macros are *absolute* paths. You should convert these paths to be relative paths before - embedding them in, for example, a shell script or batch file. Using + Note that the paths returned by these macros are *relative* paths. Using relative paths ensures that your builds are *hermetic*, that is, they are reproducible across different machine environments. From 21fdb73d99bc1aaa1600c437fdadcb7c6f43b9cb Mon Sep 17 00:00:00 2001 From: Wendy Yu Date: Tue, 6 Feb 2024 11:20:58 -0800 Subject: [PATCH 0229/1133] Add java error handler to java toolchain provider Summary: Do the same for java - add this error handler function to the toolchain provider Reviewed By: IanChilds Differential Revision: D53356225 fbshipit-source-id: 1072f9e6dde11e6f2b0a101f3600f50e739af41f --- prelude/java/java_toolchain.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/java/java_toolchain.bzl b/prelude/java/java_toolchain.bzl index 9435148bf..51bbe0637 100644 --- a/prelude/java/java_toolchain.bzl +++ b/prelude/java/java_toolchain.bzl @@ -38,6 +38,7 @@ JavaToolchainInfo = provider( "jar": provider_field(typing.Any, default = None), "jar_builder": provider_field(typing.Any, default = None), "java": provider_field(typing.Any, default = None), + "java_error_handler": provider_field(typing.Any, default = None), "java_for_tests": provider_field(typing.Any, default = None), "javac": provider_field(typing.Any, default = None), "javac_protocol": provider_field(typing.Any, default = None), From ba344f29d43d61fa6825bad7f48484c52250f7f6 Mon Sep 17 00:00:00 2001 From: Wendy Yu Date: Tue, 6 Feb 2024 11:20:58 -0800 Subject: [PATCH 0230/1133] use action error handler in javacd_jar Summary: As the title says Reviewed By: 
IanChilds Differential Revision: D53241846 fbshipit-source-id: 00b98676e4d90f32446490c49ec82dcff7a408d7 --- prelude/java/javacd_jar_creator.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/java/javacd_jar_creator.bzl b/prelude/java/javacd_jar_creator.bzl index a01b7b5ac..1806facc6 100644 --- a/prelude/java/javacd_jar_creator.bzl +++ b/prelude/java/javacd_jar_creator.bzl @@ -288,6 +288,7 @@ def create_jar_artifact_javacd( local_only = local_only, low_pass_filter = False, weight = 2, + error_handler = java_toolchain.java_error_handler, ) library_classpath_jars_tag = actions.artifact_tag() From 8af8575045f5bf1f41b0f67f680b07399c72d445 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Tue, 6 Feb 2024 15:06:26 -0800 Subject: [PATCH 0231/1133] Add DistInfo provider for cxx_test rule Summary: Without this provider, when a `python_binary` depends on a `cxx_test` as a resource, the inplace pex builder winds up trying to package the C++ target's external debuginfo, which is not allowed: ```lang=text,counterexample Cannot package hidden srcs/resources in a standalone python_binary. Eliminate resources in non-Python dependencies of this python binary, use `package_style = "inplace"`, use `strip_mode="full"` or turn off Split DWARF `-c fbcode.split-dwarf=false` on C++ binary resources. 
``` Reviewed By: zertosh Differential Revision: D53493364 fbshipit-source-id: 9c07b73a9a05bfb46e6ba67ce575e909f9945e91 --- prelude/cxx/cxx.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index 97a153f8d..76cc32df1 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -682,6 +682,7 @@ def cxx_test_impl(ctx: AnalysisContext) -> list[Provider]: ), output.compilation_db, output.xcode_data, + output.dist_info, ] def _get_params_for_android_binary_cxx_library() -> (CxxRuleSubTargetParams, CxxRuleProviderParams): From dd63c6d68f11e86998fd5227baea92209460ee09 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Tue, 6 Feb 2024 16:09:27 -0800 Subject: [PATCH 0232/1133] Sanitizers: add ability to enable sanitizer runtime on a per-target basis Summary: Adds `sanitizer_runtime_enabled` field to `apple_binary()` rule, so that a particular target can override the sanitizer runtime logic for testing and explicit control purposes. Reviewed By: rmaz Differential Revision: D53470622 fbshipit-source-id: 82482da52950883951acd2d9d956eb6cc2182f07 --- prelude/apple/apple_rules_impl.bzl | 1 + prelude/cxx/cxx_executable.bzl | 2 +- prelude/cxx/cxx_link_utility.bzl | 15 +++++++++------ prelude/go/link.bzl | 2 +- prelude/rust/build.bzl | 2 +- prelude/rust/rust_binary.bzl | 2 +- 6 files changed, 14 insertions(+), 10 deletions(-) diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 847f20448..12bd7aa69 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -87,6 +87,7 @@ def _apple_binary_extra_attrs(): "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), "prefer_stripped_objects": attrs.bool(default = False), "preferred_linkage": attrs.enum(Linkage, default = "any"), + "sanitizer_runtime_enabled": attrs.option(attrs.bool(), default = None), "stripped": attrs.option(attrs.bool(), default = None), 
"swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index eb3577123..57598c3c6 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -710,7 +710,7 @@ def _link_into_executable( output_name = "{}{}".format(executable_name if executable_name else get_cxx_executable_product_name(ctx), "." + binary_extension if binary_extension else "") output = ctx.actions.declare_output(output_name) executable_args = executable_shared_lib_arguments( - ctx.actions, + ctx, get_cxx_toolchain_info(ctx), output, shared_libs, diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index 23fb51b5e..161a2e7b3 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -165,10 +165,13 @@ CxxSanitizerRuntimeArguments = record( # @executable_path/Frameworks def _sanitizer_runtime_arguments( + ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, output: Artifact) -> CxxSanitizerRuntimeArguments: linker_info = cxx_toolchain.linker_info - if not linker_info.sanitizer_runtime_enabled: + target_sanitizer_runtime_enabled = ctx.attrs.sanitizer_runtime_enabled if hasattr(ctx.attrs, "sanitizer_runtime_enabled") else None + sanitizer_runtime_enabled = target_sanitizer_runtime_enabled if target_sanitizer_runtime_enabled != None else linker_info.sanitizer_runtime_enabled + if not sanitizer_runtime_enabled: return CxxSanitizerRuntimeArguments() if linker_info.sanitizer_runtime_dir == None: @@ -199,7 +202,7 @@ def _sanitizer_runtime_arguments( return CxxSanitizerRuntimeArguments() def executable_shared_lib_arguments( - actions: AnalysisActions, + ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, output: Artifact, shared_libs: dict[str, LinkedObject]) -> ExecutableSharedLibArguments: @@ -210,7 +213,7 @@ def executable_shared_lib_arguments( # External debug info is 
materialized only when the executable is the output # of a build. Do not add to runtime_files. external_debug_info = project_artifacts( - actions = actions, + actions = ctx.actions, tsets = [shlib.external_debug_info for shlib in shared_libs.values()], ) @@ -218,7 +221,7 @@ def executable_shared_lib_arguments( if len(shared_libs) > 0: if linker_type == "windows": - shared_libs_symlink_tree = [actions.symlink_file( + shared_libs_symlink_tree = [ctx.actions.symlink_file( shlib.output.basename, shlib.output, ) for _, shlib in shared_libs.items()] @@ -226,7 +229,7 @@ def executable_shared_lib_arguments( # Windows doesn't support rpath. else: - shared_libs_symlink_tree = actions.symlinked_dir( + shared_libs_symlink_tree = ctx.actions.symlinked_dir( shared_libs_symlink_tree_name(output), {name: shlib.output for name, shlib in shared_libs.items()}, ) @@ -237,7 +240,7 @@ def executable_shared_lib_arguments( rpath_arg = cmd_args(shared_libs_symlink_tree, format = "-Wl,-rpath,{}/{{}}".format(rpath_reference)).relative_to(output, parent = 1).ignore_artifacts() extra_link_args.append(rpath_arg) - sanitizer_runtime_args = _sanitizer_runtime_arguments(cxx_toolchain, output) + sanitizer_runtime_args = _sanitizer_runtime_arguments(ctx, cxx_toolchain, output) extra_link_args += sanitizer_runtime_args.extra_link_args runtime_files += sanitizer_runtime_args.sanitizer_runtime diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 35fb0557a..09d637edf 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -97,7 +97,7 @@ def _process_shared_dependencies( shared_libs[name] = shared_lib.lib return executable_shared_lib_arguments( - ctx.actions, + ctx, ctx.attrs._go_toolchain[GoToolchainInfo].cxx_toolchain_for_linking, artifact, shared_libs, diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 44fef4ad7..7f4a2a5c0 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -267,7 +267,7 @@ def generate_rustdoc_test( for soname, shared_lib in 
traverse_shared_library_info(shlib_info).items(): shared_libs[soname] = shared_lib.lib executable_args = executable_shared_lib_arguments( - ctx.actions, + ctx, compile_ctx.cxx_toolchain_info, resources, shared_libs, diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 987a92e17..046d43d04 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -183,7 +183,7 @@ def _rust_binary_common( # link groups shared libraries link args are directly added to the link command, # we don't have to add them here executable_args = executable_shared_lib_arguments( - ctx.actions, + ctx, compile_ctx.cxx_toolchain_info, output, shared_libs, From 4db8e6292b063f7aececb96ab8fb681cd5f45af9 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Tue, 6 Feb 2024 16:09:27 -0800 Subject: [PATCH 0233/1133] Sanitizers: explicitly expose a sanitizer dir Summary: Explicitly expose the sanitizer runtime directory as an explicit field rather than a list of arbitrary artifacts. Required to be able to bundle the runtime dylibs into `.app` bundles. 
Reviewed By: rmaz Differential Revision: D53470758 fbshipit-source-id: 3dd18bcfb9e7cd58f72fb99ae2a351a5c91ed3ab --- prelude/cxx/cxx_link_utility.bzl | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index 161a2e7b3..0848332b7 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -159,7 +159,7 @@ ExecutableSharedLibArguments = record( CxxSanitizerRuntimeArguments = record( extra_link_args = field(list[ArgLike], []), - sanitizer_runtime = field(list[Artifact], []), + sanitizer_runtime_dir = field(Artifact | None, None), ) # @executable_path/Frameworks @@ -194,9 +194,7 @@ def _sanitizer_runtime_arguments( "-Wl,-rpath,@loader_path/../Frameworks", # macOS "-Wl,-rpath,@executable_path/../Frameworks", # macOS ], - sanitizer_runtime = [ - linker_info.sanitizer_runtime_dir, - ], + sanitizer_runtime_dir = linker_info.sanitizer_runtime_dir, ) return CxxSanitizerRuntimeArguments() @@ -242,7 +240,8 @@ def executable_shared_lib_arguments( sanitizer_runtime_args = _sanitizer_runtime_arguments(ctx, cxx_toolchain, output) extra_link_args += sanitizer_runtime_args.extra_link_args - runtime_files += sanitizer_runtime_args.sanitizer_runtime + if sanitizer_runtime_args.sanitizer_runtime_dir != None: + runtime_files.append(sanitizer_runtime_args.sanitizer_runtime_dir) return ExecutableSharedLibArguments( extra_link_args = extra_link_args, From b29394bbe34b7f639226eef0d2d114b830c6db4c Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Tue, 6 Feb 2024 16:09:27 -0800 Subject: [PATCH 0234/1133] Sanitizers: expose sanitizer runtime provider Summary: Exposes a `CxxSanitizerRuntimeInfo` provider from `(cxx|apple)_binary()`. The provider can be used to bundle the sanitizer runtime into `.app` bundles, so that they can be distributed and ran standalone. 
Reviewed By: rmaz Differential Revision: D53471077 fbshipit-source-id: c9e354cd43d6b6fa9a3587e54753aecf21858227 --- prelude/apple/apple_binary.bzl | 7 ++++++- prelude/cxx/cxx.bzl | 3 +++ prelude/cxx/cxx_executable.bzl | 4 ++++ prelude/cxx/cxx_link_utility.bzl | 3 +++ prelude/linking/link_info.bzl | 4 ++++ 5 files changed, 20 insertions(+), 1 deletion(-) diff --git a/prelude/apple/apple_binary.bzl b/prelude/apple/apple_binary.bzl index 6daaf9087..2e4b4ea72 100644 --- a/prelude/apple/apple_binary.bzl +++ b/prelude/apple/apple_binary.bzl @@ -52,6 +52,7 @@ load( ) load( "@prelude//linking:link_info.bzl", + "CxxSanitizerRuntimeInfo", "LinkCommandDebugOutputInfo", "UnstrippedLinkOutputInfo", ) @@ -187,6 +188,10 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: if cxx_output.link_command_debug_output: link_command_providers.append(LinkCommandDebugOutputInfo(debug_outputs = [cxx_output.link_command_debug_output])) + sanitizer_runtime_providers = [] + if cxx_output.sanitizer_runtime_dir: + sanitizer_runtime_providers.append(CxxSanitizerRuntimeInfo(runtime_dir = cxx_output.sanitizer_runtime_dir)) + return [ DefaultInfo(default_output = cxx_output.binary, sub_targets = cxx_output.sub_targets), RunInfo(args = cmd_args(cxx_output.binary).hidden(cxx_output.runtime_files)), @@ -196,7 +201,7 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: cxx_output.compilation_db, merge_bundle_linker_maps_info(bundle_infos), UnstrippedLinkOutputInfo(artifact = unstripped_binary), - ] + [resource_graph] + min_version_providers + link_command_providers + ] + [resource_graph] + min_version_providers + link_command_providers + sanitizer_runtime_providers if uses_explicit_modules(ctx): return get_swift_anonymous_targets(ctx, get_apple_binary_providers) diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index 76cc32df1..535fa544c 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -25,6 +25,7 @@ load( 
"@prelude//linking:link_info.bzl", "Archive", "ArchiveLinkable", + "CxxSanitizerRuntimeInfo", "LibOutputStyle", "LinkArgs", "LinkCommandDebugOutputInfo", @@ -243,6 +244,8 @@ def cxx_binary_impl(ctx: AnalysisContext) -> list[Provider]: extra_providers = [] if output.link_command_debug_output: extra_providers.append(LinkCommandDebugOutputInfo(debug_outputs = [output.link_command_debug_output])) + if output.sanitizer_runtime_dir: + extra_providers.append(CxxSanitizerRuntimeInfo(runtime_dir = output.sanitizer_runtime_dir)) # When an executable is the output of a build, also materialize all the # unpacked external debuginfo that goes with it. This makes `buck2 build diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 57598c3c6..27393f32b 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -184,6 +184,7 @@ CxxExecutableOutput = record( linker_map_data = [CxxLinkerMapData, None], link_command_debug_output = field([LinkCommandDebugOutput, None], None), dist_info = DistInfo, + sanitizer_runtime_dir = field([Artifact, None], None), ) def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, is_cxx_test: bool = False) -> CxxExecutableOutput: @@ -679,6 +680,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, shared_libs = shlib_info.set, nondebug_runtime_files = runtime_files, ), + sanitizer_runtime_dir = link_result.sanitizer_runtime_dir, ) _CxxLinkExecutableResult = record( @@ -695,6 +697,7 @@ _CxxLinkExecutableResult = record( # Optional shared libs symlink tree symlinked_dir action shared_libs_symlink_tree = [list[Artifact], Artifact, None], linker_map_data = [CxxLinkerMapData, None], + sanitizer_runtime_dir = [Artifact, None], ) def _link_into_executable( @@ -732,6 +735,7 @@ def _link_into_executable( external_debug_info = executable_args.external_debug_info, shared_libs_symlink_tree = executable_args.shared_libs_symlink_tree, linker_map_data = 
link_result.linker_map_data, + sanitizer_runtime_dir = executable_args.sanitizer_runtime_dir, ) def get_cxx_executable_product_name(ctx: AnalysisContext) -> str: diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index 0848332b7..e6e446a67 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -155,6 +155,8 @@ ExecutableSharedLibArguments = record( external_debug_info = field(list[TransitiveSetArgsProjection], []), # Optional shared libs symlink tree symlinked_dir action. shared_libs_symlink_tree = field(list[Artifact] | Artifact | None, None), + # A directory containing sanitizer runtime shared libraries + sanitizer_runtime_dir = field(Artifact | None, None), ) CxxSanitizerRuntimeArguments = record( @@ -248,6 +250,7 @@ def executable_shared_lib_arguments( runtime_files = runtime_files, external_debug_info = external_debug_info, shared_libs_symlink_tree = shared_libs_symlink_tree, + sanitizer_runtime_dir = sanitizer_runtime_args.sanitizer_runtime_dir, ) def cxx_link_cmd_parts(toolchain: CxxToolchainInfo) -> ((RunInfo | cmd_args), cmd_args): diff --git a/prelude/linking/link_info.bzl b/prelude/linking/link_info.bzl index cb938a17a..d9b8cc1b2 100644 --- a/prelude/linking/link_info.bzl +++ b/prelude/linking/link_info.bzl @@ -177,6 +177,10 @@ LinkOrdering = enum( "topological", ) +CxxSanitizerRuntimeInfo = provider(fields = { + "runtime_dir": provider_field(Artifact), +}) + def set_link_info_link_whole(info: LinkInfo) -> LinkInfo: linkables = [set_linkable_link_whole(linkable) for linkable in info.linkables] return LinkInfo( From 0f2197bec84979f8fb597633f53e40e1bc6bd205 Mon Sep 17 00:00:00 2001 From: Rainer Dreyer Date: Wed, 7 Feb 2024 03:57:14 -0800 Subject: [PATCH 0235/1133] Fix multi-line doc generation Summary: When generating a parameter list item the doc generator now indents the rest of the docstring to match the level of the list item. 
examples: ``` * `resources_from_deps`: Set of build targets whose transitive `apple_resource`s should be considered as part of the current resource when collecting resources for bundles. Usually, an `apple_bundle` collects all `apple_resource` rules transitively reachable through apple\_library rules. This field allows for resources which are not reachable using the above traversal strategy to be considered for inclusion in the bundle. ``` ``` * `asset_catalogs_compilation_options`: A dict holding parameters for asset catalogs compiler (actool). Its options include: * `notices` (defaults to `True`) * `warnings` (defaults to `True`) * `errors` (defaults to `True`) * `compress_pngs` (defaults to `True`) * `optimization` (defaults to `'space'`) * `output_format` (defaults to `'human-readable-text'`) * `extra_flags` (defaults to `[]`) * `deps`: A list of dependencies of this bundle as build targets. You can embed application extensions by specifying the extension's bundle target. To include a WatchKit app, append the flavor `#watch` to the target specification. Buck will automatically substitute the appropriate platform flavor (either `watchsimulator` or `watchos`) based on the parent. ``` ```` * `header_path_prefix`: A path prefix when including headers of this target. For example, headers from a library defined using ``` apple_library( name = "Library", headers = glob(["**/*.h"]), header_path_prefix = "Lib", ) ``` can be imported using following mapping ``` Library/SubDir/Header1.h -> Lib/Header1.h Library/Header2.h -> Lib/Header2.h ``` Defaults to the short name of the target. Can contain forward slashes (`/`), but cannot start with one. See `headers` for more information. 
```` X-link: https://github.com/facebook/buck2/pull/559 Reviewed By: JakobDegen Differential Revision: D53470929 Pulled By: rdrey fbshipit-source-id: 88947e9b95f42a50021819eef22c618e6d4a2d4e --- prelude/decls/apple_common.bzl | 6 ++---- prelude/decls/ios_rules.bzl | 3 ++- prelude/linking/execution_preference.bzl | 17 ++++++++--------- 3 files changed, 12 insertions(+), 14 deletions(-) diff --git a/prelude/decls/apple_common.bzl b/prelude/decls/apple_common.bzl index ff6820adb..92ff68036 100644 --- a/prelude/decls/apple_common.bzl +++ b/prelude/decls/apple_common.bzl @@ -53,22 +53,20 @@ def _header_path_prefix_arg(): using ``` - apple_library( name = "Library", headers = glob(["**/*.h"]), header_path_prefix = "Lib", ) - ``` + can be imported using following mapping ``` - Library/SubDir/Header1.h -> Lib/Header1.h Library/Header2.h -> Lib/Header2.h - ``` + Defaults to the short name of the target. Can contain forward slashes (`/`), but cannot start with one. See `headers` for more information. """), diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index 69a7019e7..4a4608d7a 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -347,7 +347,8 @@ apple_bundle = prelude_rule( { "asset_catalogs_compilation_options": attrs.dict(key = attrs.string(), value = attrs.any(), default = {}, doc = """ A dict holding parameters for asset catalogs compiler (actool). 
Its options include: - * `notices` (defaults to `True`) + + * `notices` (defaults to `True`) * `warnings` (defaults to `True`) * `errors` (defaults to `True`) * `compress_pngs` (defaults to `True`) diff --git a/prelude/linking/execution_preference.bzl b/prelude/linking/execution_preference.bzl index e864b72a0..041ceb7dc 100644 --- a/prelude/linking/execution_preference.bzl +++ b/prelude/linking/execution_preference.bzl @@ -36,17 +36,16 @@ _ActionExecutionAttributes = record( def link_execution_preference_attr(): # The attribute is optional, allowing for None to represent that no preference has been set and we should fallback on the toolchain. return attrs.option(attrs.one_of(attrs.enum(LinkExecutionPreferenceTypes), attrs.dep(providers = [LinkExecutionPreferenceDeterminatorInfo])), default = None, doc = """ - The execution preference for linking. + The execution preference for linking. Options are: - Options are: - - any : No preference is set, and the link action will be performed based on buck2's executor configuration.\n - - full_hybrid : The link action will execute both locally and remotely, regardless of buck2's executor configuration (if\n - the executor is capable of hybrid execution). The use_limited_hybrid setting of the hybrid executor is ignored.\n - - local : The link action will execute locally if compatible on current host platform.\n - - local_only : The link action will execute locally, and error if the current platform is not compatible.\n - - remote : The link action will execute remotely if a compatible remote platform exists, otherwise locally.\n + - any : No preference is set, and the link action will be performed based on buck2's executor configuration. + - full_hybrid : The link action will execute both locally and remotely, regardless of buck2's executor configuration (if + the executor is capable of hybrid execution). The use_limited_hybrid setting of the hybrid executor is ignored. 
+ - local : The link action will execute locally if compatible on current host platform. + - local_only : The link action will execute locally, and error if the current platform is not compatible. + - remote : The link action will execute remotely if a compatible remote platform exists, otherwise locally. - The default is None, expressing that no preference has been set on the target itself. + The default is None, expressing that no preference has been set on the target itself. """) def get_link_execution_preference(ctx, links: list[Label]) -> LinkExecutionPreference: From 306fbb3f7e4f6b5959822d198e4f704f224b680e Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Wed, 7 Feb 2024 04:16:50 -0800 Subject: [PATCH 0236/1133] Sanitizers: bundle sanitizer runtime in apple_bundle() output Summary: If a C++ sanitizer runtime is required by the main binary of an `apple_bundle()`, bundle the runtime, so that the application can run standalone without dependence on `buck-out` location or Xcode installation paths. 
Reviewed By: d16r Differential Revision: D53471331 fbshipit-source-id: a2d2c01ca6dbfe43a58440c8ade1d8020bdbd33c --- prelude/apple/apple_bundle_resources.bzl | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/prelude/apple/apple_bundle_resources.bzl b/prelude/apple/apple_bundle_resources.bzl index baf448d54..a8d07747e 100644 --- a/prelude/apple/apple_bundle_resources.bzl +++ b/prelude/apple/apple_bundle_resources.bzl @@ -8,6 +8,10 @@ load("@prelude//:artifacts.bzl", "single_artifact") load("@prelude//:paths.bzl", "paths") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") +load( + "@prelude//linking:link_info.bzl", + "CxxSanitizerRuntimeInfo", +) load("@prelude//utils:utils.bzl", "flatten_dict") load( ":apple_asset_catalog.bzl", @@ -80,6 +84,15 @@ def get_apple_bundle_resource_part_list(ctx: AnalysisContext) -> AppleBundleReso ), ) + cxx_sanitizer_runtime_info = ctx.attrs.binary.get(CxxSanitizerRuntimeInfo) if ctx.attrs.binary else None + if cxx_sanitizer_runtime_info: + runtime_resource_spec = AppleResourceSpec( + content_dirs = [cxx_sanitizer_runtime_info.runtime_dir], + destination = AppleResourceDestination("frameworks"), + codesign_files_on_copy = True, + ) + resource_specs.append(runtime_resource_spec) + asset_catalog_result = compile_apple_asset_catalog(ctx, asset_catalog_specs) if asset_catalog_result != None: asset_catalog_part = AppleBundlePart( From 7c743f5e98b2b19d0b37b61ea6c711fc858ef82a Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Wed, 7 Feb 2024 08:31:47 -0800 Subject: [PATCH 0237/1133] Sanitizers: add sanitizer fields to `cxx_toolchain()` Summary: Add `sanitizer_runtime_files` which is a list of shared libs. `sanitizer_runtime_files` is a dep and not an exec-dep because that allows the runtime to be customised on a per-target platform basis (e.g., macOS vs iOS get different runtime dylibs). 
Reviewed By: d16r Differential Revision: D53519156 fbshipit-source-id: 872f0fe1491d2030560577722cae06e0a7db179f --- prelude/cxx/cxx_toolchain.bzl | 4 +++- prelude/cxx/cxx_toolchain_types.bzl | 1 + prelude/cxx/user/cxx_toolchain_override.bzl | 5 ++++- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl index 09d4c8335..f71205f56 100644 --- a/prelude/cxx/cxx_toolchain.bzl +++ b/prelude/cxx/cxx_toolchain.bzl @@ -12,7 +12,7 @@ load("@prelude//cxx:headers.bzl", "HeaderMode", "HeadersAsRawHeadersMode") load("@prelude//cxx:linker.bzl", "LINKERS", "is_pdb_generated") load("@prelude//linking:link_info.bzl", "LinkOrdering", "LinkStyle") load("@prelude//linking:lto.bzl", "LtoMode", "lto_compiler_flags") -load("@prelude//utils:utils.bzl", "value_or") +load("@prelude//utils:utils.bzl", "flatten", "value_or") load("@prelude//decls/cxx_rules.bzl", "cxx_rules") def cxx_toolchain_impl(ctx): @@ -97,6 +97,7 @@ def cxx_toolchain_impl(ctx): requires_objects = value_or(ctx.attrs.requires_objects, False), sanitizer_runtime_dir = ctx.attrs.sanitizer_runtime_dir[DefaultInfo].default_outputs[0] if ctx.attrs.sanitizer_runtime_dir else None, sanitizer_runtime_enabled = ctx.attrs.sanitizer_runtime_enabled, + sanitizer_runtime_files = flatten([runtime_file[DefaultInfo].default_outputs for runtime_file in ctx.attrs.sanitizer_runtime_files]), supports_distributed_thinlto = ctx.attrs.supports_distributed_thinlto, shared_dep_runtime_ld_flags = ctx.attrs.shared_dep_runtime_ld_flags, shared_library_name_default_prefix = _get_shared_library_name_default_prefix(ctx), @@ -198,6 +199,7 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): "requires_objects": attrs.bool(default = False), "sanitizer_runtime_dir": attrs.option(attrs.dep(), default = None), # Use `attrs.dep()` as it's not a tool, always propagate target platform "sanitizer_runtime_enabled": attrs.bool(default = False), + "sanitizer_runtime_files": 
attrs.set(attrs.dep(), sorted = True, default = []), # Use `attrs.dep()` as it's not a tool, always propagate target platform "shared_library_interface_mode": attrs.enum(ShlibInterfacesMode.values(), default = "disabled"), "shared_library_interface_producer": attrs.option(dep_type(providers = [RunInfo]), default = None), "split_debug_mode": attrs.enum(SplitDebugMode.values(), default = "none"), diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index 89a2bef6e..2a8a9b6d3 100644 --- a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -45,6 +45,7 @@ LinkerInfo = provider( "object_file_extension": provider_field(typing.Any, default = None), # str "sanitizer_runtime_enabled": provider_field(bool, default = False), "sanitizer_runtime_dir": provider_field([Artifact, None], default = None), + "sanitizer_runtime_files": provider_field(list[Artifact], default = []), "shlib_interfaces": provider_field(ShlibInterfacesMode), "shared_dep_runtime_ld_flags": provider_field(typing.Any, default = None), # "lib" on Linux/Mac/Android, "" on Windows. 
diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl index eb8cbe5ce..6d4437f4b 100644 --- a/prelude/cxx/user/cxx_toolchain_override.bzl +++ b/prelude/cxx/user/cxx_toolchain_override.bzl @@ -16,7 +16,7 @@ load( load("@prelude//linking:lto.bzl", "LtoMode") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") load("@prelude//utils:pick.bzl", _pick = "pick", _pick_and_add = "pick_and_add", _pick_bin = "pick_bin", _pick_dep = "pick_dep") -load("@prelude//utils:utils.bzl", "map_val", "value_or") +load("@prelude//utils:utils.bzl", "flatten", "map_val", "value_or") def _cxx_toolchain_override(ctx): base_toolchain = ctx.attrs.base[CxxToolchainInfo] @@ -76,6 +76,7 @@ def _cxx_toolchain_override(ctx): pdb_expected = linker_type == "windows" and pdb_expected shlib_interfaces = ShlibInterfacesMode(ctx.attrs.shared_library_interface_mode) if ctx.attrs.shared_library_interface_mode else None sanitizer_runtime_dir = ctx.attrs.sanitizer_runtime_dir[DefaultInfo].default_outputs[0] if ctx.attrs.sanitizer_runtime_dir else None + sanitizer_runtime_files = flatten([runtime_file[DefaultInfo].default_outputs for runtime_file in ctx.attrs.sanitizer_runtime_files]) if ctx.attrs.sanitizer_runtime_files != None else None linker_info = LinkerInfo( archiver = _pick_bin(ctx.attrs.archiver, base_linker_info.archiver), archiver_type = base_linker_info.archiver_type, @@ -101,6 +102,7 @@ def _cxx_toolchain_override(ctx): independent_shlib_interface_linker_flags = base_linker_info.independent_shlib_interface_linker_flags, sanitizer_runtime_dir = value_or(sanitizer_runtime_dir, base_linker_info.sanitizer_runtime_dir), sanitizer_runtime_enabled = value_or(ctx.attrs.sanitizer_runtime_enabled, base_linker_info.sanitizer_runtime_enabled), + sanitizer_runtime_files = value_or(sanitizer_runtime_files, base_linker_info.sanitizer_runtime_files), shared_dep_runtime_ld_flags = [], shared_library_name_default_prefix = 
ctx.attrs.shared_library_name_default_prefix if ctx.attrs.shared_library_name_default_prefix != None else base_linker_info.shared_library_name_default_prefix, shared_library_name_format = ctx.attrs.shared_library_name_format if ctx.attrs.shared_library_name_format != None else base_linker_info.shared_library_name_format, @@ -211,6 +213,7 @@ def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), "sanitizer_runtime_dir": attrs.option(attrs.dep(), default = None), # Use `attrs.dep()` as it's not a tool, always propagate target platform "sanitizer_runtime_enabled": attrs.bool(default = False), + "sanitizer_runtime_files": attrs.option(attrs.set(attrs.dep(), sorted = True, default = []), default = None), # Use `attrs.dep()` as it's not a tool, always propagate target platform "shared_library_interface_mode": attrs.option(attrs.enum(ShlibInterfacesMode.values()), default = None), "shared_library_name_default_prefix": attrs.option(attrs.string(), default = None), "shared_library_name_format": attrs.option(attrs.string(), default = None), From 2408da1b690ae888a3ac58e0b95a27e8ed6361e1 Mon Sep 17 00:00:00 2001 From: Balaji S Date: Wed, 7 Feb 2024 09:39:10 -0800 Subject: [PATCH 0238/1133] Eqwalize ct_executor.erl Summary: * Added some missing specs * Removed unused macro Reviewed By: jcpetruzza Differential Revision: D53478800 fbshipit-source-id: 3dfdfda4fa9d6b3523bb6e6f38d8f8c2c32ebfab --- .../common_test/test_exec/src/ct_executor.erl | 38 +++++++++++++++---- 1 file changed, 30 insertions(+), 8 deletions(-) diff --git a/prelude/erlang/common_test/test_exec/src/ct_executor.erl b/prelude/erlang/common_test/test_exec/src/ct_executor.erl index f9caec0d8..2478882a9 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_executor.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_executor.erl @@ -11,15 +11,19 @@ %% Notably allows us to call post/pre method on the node if needed, 
e.g for coverage. -module(ct_executor). - -include_lib("kernel/include/logger.hrl"). -include_lib("common/include/buck_ct_records.hrl"). +-compile(warn_missing_spec_all). -export([run/1]). -% Time we give the beam to close off, in ms. --define(INIT_STOP_TIMEOUT, 5000). +%% `ct_run_arg()` represents an option accepted by ct:run_test/1, such as +%% `multiply_timetraps` or `ct_hooks`. +%% For all the options, see https://www.erlang.org/doc/man/ct#run_test-1 +-type ct_run_arg() :: {atom(), term()}. +-type ct_exec_arg() :: {output_dir | suite | providers, term()}. +-spec run([string()]) -> no_return(). run(Args) when is_list(Args) -> ExitCode = try @@ -90,7 +94,7 @@ run(Args) when is_list(Args) -> end, erlang:halt(ExitCode). --spec parse_arguments([string()]) -> {proplists:proplist(), [term()]}. +-spec parse_arguments([string()]) -> {[ct_exec_arg()], [ct_run_arg()]}. parse_arguments(Args) -> % The logger is not set up yet. % This will be sent to the program executing it (ct_runner), @@ -109,14 +113,32 @@ parse_arguments(Args) -> split_args(ParsedArgs). % @doc Splits the argument before those that happens -% before ct_args (the executor args) amd those after +% before ct_args (the executor args) and those after % (the args for ct_run). +-spec split_args([term()]) -> {[ct_exec_arg()], [ct_run_arg()]}. split_args(Args) -> split_args(Args, [], []). -split_args([ct_args | Args], CtExecutorArgs, []) -> {lists:reverse(CtExecutorArgs), Args}; -split_args([Arg | Args], CtExecutorArgs, []) -> split_args(Args, [Arg | CtExecutorArgs], []); -split_args([], CtExecutorArgs, []) -> {lists:reverse(CtExecutorArgs), []}. +-spec split_args([term()], [ct_exec_arg()], [ct_run_arg()]) -> {[ct_exec_arg()], [ct_run_arg()]}. 
+split_args([ct_args | Args], CtExecutorArgs, []) -> + {parse_ct_exec_args(lists:reverse(CtExecutorArgs)), parse_ct_run_args(Args)}; +split_args([Arg | Args], CtExecutorArgs, []) -> + split_args(Args, [Arg | CtExecutorArgs], []); +split_args([], CtExecutorArgs, []) -> + {parse_ct_exec_args(lists:reverse(CtExecutorArgs)), []}. + +-spec parse_ct_run_args([term()]) -> [ct_run_arg()]. +parse_ct_run_args([]) -> + []; +parse_ct_run_args([{Key, _Value} = Arg | Args]) when is_atom(Key) -> + [Arg | parse_ct_run_args(Args)]. + +-spec parse_ct_exec_args([term()]) -> [ct_exec_arg()]. +parse_ct_exec_args([]) -> + []; +parse_ct_exec_args([{Key, _Value} = Arg | Args]) when Key =:= output_dir; Key =:= suite; Key =:= providers -> + [Arg | parse_ct_exec_args(Args)]. +-spec debug_print(string(), [term()]) -> ok. debug_print(Fmt, Args) -> case os:getenv("ERLANG_BUCK_DEBUG_PRINT") of false -> io:format(Fmt, Args); From 9d0f692252bfe767454844ecd99ed1b5f585e16a Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Wed, 7 Feb 2024 10:57:33 -0800 Subject: [PATCH 0239/1133] assert -> explicit exception Summary: Those checks should be evaluated unconditionally, not only in non-opt mode Reviewed By: milend Differential Revision: D53473184 fbshipit-source-id: 9fa4dc96a27795bba80c744ed563d3c9f83be846 --- prelude/apple/tools/bundling/main.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index 752ed2d31..a26d07c89 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -257,16 +257,28 @@ def _main() -> None: pr.enable() if args.codesign: - assert args.info_plist_source and args.info_plist_destination and args.platform + if not args.info_plist_source: + raise RuntimeError( + "Paths to Info.plist source file should be set when code signing is required." 
+ ) + if not args.info_plist_destination: + raise RuntimeError( + "Info.plist destination path should be set when code signing is required." + ) + if not args.platform: + raise RuntimeError( + "Apple platform should be set when code signing is required." + ) if args.ad_hoc: signing_context = AdhocSigningContext( codesign_identity=args.ad_hoc_codesign_identity ) selected_identity_argument = args.ad_hoc_codesign_identity else: - assert ( - args.profiles_dir - ), "Path to directory with provisioning profile files should be set when signing is not ad-hoc." + if not args.profiles_dir: + raise RuntimeError( + "Path to directory with provisioning profile files should be set when signing is not ad-hoc." + ) signing_context = non_adhoc_signing_context( info_plist_source=args.info_plist_source, info_plist_destination=args.info_plist_destination, From 9265bc31598f59ccd76937b441fbae88af5fbafd Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Wed, 7 Feb 2024 10:57:33 -0800 Subject: [PATCH 0240/1133] NonAdhocSigningContext -> SigningContextWithProfileSelection Summary: There is an option yet not supported to enable profile selection when running adhoc signing. Given that it's not the best name for that struct since it will be used in adhoc workflow. 
Reviewed By: milend Differential Revision: D53473185 fbshipit-source-id: 1df771da72aaeaa455778aa462b2b6bf5579b9ed --- prelude/apple/tools/bundling/main.py | 4 ++-- prelude/apple/tools/code_signing/codesign_bundle.py | 12 ++++++------ prelude/apple/tools/code_signing/main.py | 4 ++-- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index a26d07c89..4959b76ab 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -20,7 +20,7 @@ AdhocSigningContext, codesign_bundle, CodesignConfiguration, - non_adhoc_signing_context, + signing_context_with_profile_selection, ) from apple.tools.code_signing.list_codesign_identities_command_factory import ( ListCodesignIdentitiesCommandFactory, @@ -279,7 +279,7 @@ def _main() -> None: raise RuntimeError( "Path to directory with provisioning profile files should be set when signing is not ad-hoc." ) - signing_context = non_adhoc_signing_context( + signing_context = signing_context_with_profile_selection( info_plist_source=args.info_plist_source, info_plist_destination=args.info_plist_destination, provisioning_profiles_dir=args.profiles_dir, diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index e1ec8425a..e363c0cbf 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -108,14 +108,14 @@ def __init__(self, codesign_identity: Optional[str] = None): @dataclass -class NonAdhocSigningContext: +class SigningContextWithProfileSelection: info_plist_source: Path info_plist_destination: Path info_plist_metadata: InfoPlistMetadata selected_profile_info: SelectedProvisioningProfileInfo -def non_adhoc_signing_context( +def signing_context_with_profile_selection( info_plist_source: Path, info_plist_destination: Path, provisioning_profiles_dir: Path, @@ -125,7 +125,7 @@ def 
non_adhoc_signing_context( IListCodesignIdentitiesCommandFactory ] = None, log_file_path: Optional[Path] = None, -) -> NonAdhocSigningContext: +) -> SigningContextWithProfileSelection: with open(info_plist_source, mode="rb") as info_plist_file: info_plist_metadata = InfoPlistMetadata.from_file(info_plist_file) selected_profile_info = _select_provisioning_profile( @@ -138,7 +138,7 @@ def non_adhoc_signing_context( log_file_path=log_file_path, ) - return NonAdhocSigningContext( + return SigningContextWithProfileSelection( info_plist_source, info_plist_destination, info_plist_metadata, @@ -154,7 +154,7 @@ class CodesignConfiguration(str, Enum): def codesign_bundle( bundle_path: Path, - signing_context: Union[AdhocSigningContext, NonAdhocSigningContext], + signing_context: Union[AdhocSigningContext, SigningContextWithProfileSelection], entitlements_path: Optional[Path], platform: ApplePlatform, codesign_on_copy_paths: List[Path], @@ -163,7 +163,7 @@ def codesign_bundle( codesign_configuration: Optional[CodesignConfiguration] = None, ) -> None: with tempfile.TemporaryDirectory() as tmp_dir: - if isinstance(signing_context, NonAdhocSigningContext): + if isinstance(signing_context, SigningContextWithProfileSelection): info_plist_metadata = signing_context.info_plist_metadata selected_profile_info = signing_context.selected_profile_info prepared_entitlements_path = prepare_code_signing_entitlements( diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index 549e32499..206b21050 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -13,7 +13,7 @@ from .codesign_bundle import ( AdhocSigningContext, codesign_bundle, - non_adhoc_signing_context, + signing_context_with_profile_selection, ) from .provisioning_profile_selection import CodeSignProvisioningError @@ -97,7 +97,7 @@ def _main(): assert ( args.profiles_dir ), "Path to directory with provisioning profile files should be set 
when signing is not ad-hoc." - signing_context = non_adhoc_signing_context( + signing_context = signing_context_with_profile_selection( info_plist_source=args.bundle_path / args.info_plist, info_plist_destination=args.info_plist, provisioning_profiles_dir=args.profiles_dir, From 53735281221ce25f43a5c2035b33e022dc466217 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Wed, 7 Feb 2024 10:57:33 -0800 Subject: [PATCH 0241/1133] produce identities instead of just command in factory Summary: Preparation for adding support for provisioning profile selection during adhoc code signing Reviewed By: milend Differential Revision: D53473187 fbshipit-source-id: ea8d77c7eefc278a1a96358988a37f98c979e8a6 --- prelude/apple/tools/bundling/main.py | 6 ++-- .../tools/code_signing/codesign_bundle.py | 28 ++++--------------- ...factory.py => list_codesign_identities.py} | 28 ++++++++++++++----- 3 files changed, 29 insertions(+), 33 deletions(-) rename prelude/apple/tools/code_signing/{list_codesign_identities_command_factory.py => list_codesign_identities.py} (50%) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index 4959b76ab..68c8c4e91 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -22,9 +22,7 @@ CodesignConfiguration, signing_context_with_profile_selection, ) -from apple.tools.code_signing.list_codesign_identities_command_factory import ( - ListCodesignIdentitiesCommandFactory, -) +from apple.tools.code_signing.list_codesign_identities import ListCodesignIdentities from apple.tools.re_compatibility_utils.writable import make_dir_recursively_writable @@ -285,7 +283,7 @@ def _main() -> None: provisioning_profiles_dir=args.profiles_dir, entitlements_path=args.entitlements, platform=args.platform, - list_codesign_identities_command_factory=ListCodesignIdentitiesCommandFactory.override( + list_codesign_identities=ListCodesignIdentities.override( shlex.split(args.codesign_identities_command) ) 
if args.codesign_identities_command diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index e363c0cbf..16fb2eda6 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -26,12 +26,8 @@ ICodesignCommandFactory, ) from .fast_adhoc import is_fast_adhoc_codesign_allowed, should_skip_adhoc_signing_path -from .identity import CodeSigningIdentity from .info_plist_metadata import InfoPlistMetadata -from .list_codesign_identities_command_factory import ( - IListCodesignIdentitiesCommandFactory, - ListCodesignIdentitiesCommandFactory, -) +from .list_codesign_identities import IListCodesignIdentities, ListCodesignIdentities from .prepare_code_signing_entitlements import prepare_code_signing_entitlements from .prepare_info_plist import prepare_info_plist from .provisioning_profile_diagnostics import ( @@ -62,11 +58,11 @@ def _select_provisioning_profile( provisioning_profiles_dir: Path, entitlements_path: Optional[Path], platform: ApplePlatform, - list_codesign_identities_command_factory: IListCodesignIdentitiesCommandFactory, + list_codesign_identities: IListCodesignIdentities, read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory = _default_read_provisioning_profile_command_factory, log_file_path: Optional[Path] = None, ) -> SelectedProvisioningProfileInfo: - identities = _list_identities(list_codesign_identities_command_factory) + identities = list_codesign_identities.list_codesign_identities() provisioning_profiles = _read_provisioning_profiles( provisioning_profiles_dir, read_provisioning_profile_command_factory ) @@ -121,9 +117,7 @@ def signing_context_with_profile_selection( provisioning_profiles_dir: Path, entitlements_path: Optional[Path], platform: ApplePlatform, - list_codesign_identities_command_factory: Optional[ - IListCodesignIdentitiesCommandFactory - ] = None, + list_codesign_identities: 
Optional[IListCodesignIdentities] = None, log_file_path: Optional[Path] = None, ) -> SigningContextWithProfileSelection: with open(info_plist_source, mode="rb") as info_plist_file: @@ -133,8 +127,8 @@ def signing_context_with_profile_selection( provisioning_profiles_dir=provisioning_profiles_dir, entitlements_path=entitlements_path, platform=platform, - list_codesign_identities_command_factory=list_codesign_identities_command_factory - or ListCodesignIdentitiesCommandFactory.default(), + list_codesign_identities=list_codesign_identities + or ListCodesignIdentities.default(), log_file_path=log_file_path, ) @@ -225,16 +219,6 @@ def codesign_bundle( ) -def _list_identities( - list_codesign_identities_command_factory: IListCodesignIdentitiesCommandFactory, -) -> List[CodeSigningIdentity]: - output = subprocess.check_output( - list_codesign_identities_command_factory.list_codesign_identities_command(), - encoding="utf-8", - ) - return CodeSigningIdentity.parse_security_stdout(output) - - def _read_provisioning_profiles( dirpath: Path, read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, diff --git a/prelude/apple/tools/code_signing/list_codesign_identities_command_factory.py b/prelude/apple/tools/code_signing/list_codesign_identities.py similarity index 50% rename from prelude/apple/tools/code_signing/list_codesign_identities_command_factory.py rename to prelude/apple/tools/code_signing/list_codesign_identities.py index ad92e239b..e622a663b 100644 --- a/prelude/apple/tools/code_signing/list_codesign_identities_command_factory.py +++ b/prelude/apple/tools/code_signing/list_codesign_identities.py @@ -7,29 +7,43 @@ from __future__ import annotations +import subprocess + from abc import ABCMeta, abstractmethod from typing import List +from .identity import CodeSigningIdentity + -class IListCodesignIdentitiesCommandFactory(metaclass=ABCMeta): +class IListCodesignIdentities(metaclass=ABCMeta): @abstractmethod - def 
list_codesign_identities_command(self) -> List[str]: + def list_codesign_identities(self) -> List[CodeSigningIdentity]: raise NotImplementedError -class ListCodesignIdentitiesCommandFactory(IListCodesignIdentitiesCommandFactory): +class ListCodesignIdentities(IListCodesignIdentities): _default_command = ["security", "find-identity", "-v", "-p", "codesigning"] def __init__(self, command: List[str]): self.command = command @classmethod - def default(cls) -> ListCodesignIdentitiesCommandFactory: + def default(cls) -> IListCodesignIdentities: return cls(cls._default_command) @classmethod - def override(cls, command: List[str]) -> ListCodesignIdentitiesCommandFactory: + def override(cls, command: List[str]) -> IListCodesignIdentities: return cls(command) - def list_codesign_identities_command(self) -> List[str]: - return self.command + def list_codesign_identities(self) -> List[CodeSigningIdentity]: + return _list_identities(self.command) + + +def _list_identities( + command: List[str], +) -> List[CodeSigningIdentity]: + output = subprocess.check_output( + command, + encoding="utf-8", + ) + return CodeSigningIdentity.parse_security_stdout(output) From fd4e9b83310a642506bec5490fec5655debee0aa Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Wed, 7 Feb 2024 10:57:33 -0800 Subject: [PATCH 0242/1133] always pass list_codesign_identities explicitly Summary: no behavior changes Reviewed By: milend Differential Revision: D53473186 fbshipit-source-id: e77ec0a655cd362439488e6e1e1ca9a31780d8f8 --- prelude/apple/tools/bundling/main.py | 2 +- prelude/apple/tools/code_signing/codesign_bundle.py | 7 +++---- prelude/apple/tools/code_signing/main.py | 2 ++ 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index 68c8c4e91..c4fd7d724 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -287,7 +287,7 @@ def _main() -> None: 
shlex.split(args.codesign_identities_command) ) if args.codesign_identities_command - else None, + else ListCodesignIdentities.default(), log_file_path=args.log_file, ) selected_identity_argument = ( diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 16fb2eda6..94650f682 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -27,7 +27,7 @@ ) from .fast_adhoc import is_fast_adhoc_codesign_allowed, should_skip_adhoc_signing_path from .info_plist_metadata import InfoPlistMetadata -from .list_codesign_identities import IListCodesignIdentities, ListCodesignIdentities +from .list_codesign_identities import IListCodesignIdentities from .prepare_code_signing_entitlements import prepare_code_signing_entitlements from .prepare_info_plist import prepare_info_plist from .provisioning_profile_diagnostics import ( @@ -117,7 +117,7 @@ def signing_context_with_profile_selection( provisioning_profiles_dir: Path, entitlements_path: Optional[Path], platform: ApplePlatform, - list_codesign_identities: Optional[IListCodesignIdentities] = None, + list_codesign_identities: IListCodesignIdentities, log_file_path: Optional[Path] = None, ) -> SigningContextWithProfileSelection: with open(info_plist_source, mode="rb") as info_plist_file: @@ -127,8 +127,7 @@ def signing_context_with_profile_selection( provisioning_profiles_dir=provisioning_profiles_dir, entitlements_path=entitlements_path, platform=platform, - list_codesign_identities=list_codesign_identities - or ListCodesignIdentities.default(), + list_codesign_identities=list_codesign_identities, log_file_path=log_file_path, ) diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index 206b21050..b1189b37b 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -15,6 +15,7 @@ codesign_bundle, 
signing_context_with_profile_selection, ) +from .list_codesign_identities import ListCodesignIdentities from .provisioning_profile_selection import CodeSignProvisioningError @@ -102,6 +103,7 @@ def _main(): info_plist_destination=args.info_plist, provisioning_profiles_dir=args.profiles_dir, entitlements_path=args.entitlements, + list_codesign_identities=ListCodesignIdentities.default(), platform=args.platform, ) codesign_bundle( From 8752384f9b7b5cb1df8e636a7eefce8089dc3b53 Mon Sep 17 00:00:00 2001 From: Navid Qaragozlou Date: Wed, 7 Feb 2024 12:38:57 -0800 Subject: [PATCH 0243/1133] Refactor buck2_compatibility Summary: This is just a refactor and I will re-use the code for Android in the next diff. Reviewed By: mzlee, chatura-atapattu Differential Revision: D53457503 fbshipit-source-id: 3b5883fde691ed70d87986861af6cb68c18960ca --- prelude/apple/apple_binary.bzl | 3 --- prelude/apple/apple_bundle.bzl | 2 -- prelude/apple/apple_library.bzl | 3 --- prelude/apple/apple_rules_impl.bzl | 2 +- prelude/apple/apple_rules_impl_utility.bzl | 2 +- prelude/apple/apple_test.bzl | 3 --- ...buck2_compatibility.bzl => buck2_compatibility.bzl} | 2 +- prelude/rules.bzl | 10 +++++++++- 8 files changed, 12 insertions(+), 15 deletions(-) rename prelude/{apple/apple_buck2_compatibility.bzl => buck2_compatibility.bzl} (94%) diff --git a/prelude/apple/apple_binary.bzl b/prelude/apple/apple_binary.bzl index 2e4b4ea72..ded300620 100644 --- a/prelude/apple/apple_binary.bzl +++ b/prelude/apple/apple_binary.bzl @@ -6,7 +6,6 @@ # of this source tree. 
load("@prelude//:paths.bzl", "paths") -load("@prelude//apple:apple_buck2_compatibility.bzl", "apple_check_buck2_compatibility") load("@prelude//apple:apple_stripping.bzl", "apple_strip_args") # @oss-disable: load("@prelude//apple/meta_only:linker_outputs.bzl", "add_extra_linker_outputs") load( @@ -72,8 +71,6 @@ load(":resource_groups.bzl", "create_resource_graph") load(":xcode.bzl", "apple_populate_xcode_attributes") def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: - apple_check_buck2_compatibility(ctx) - def get_apple_binary_providers(deps_providers) -> list[Provider]: # FIXME: Ideally we'd like to remove the support of "bridging header", # cause it affects build time and in general considered a bad practise. diff --git a/prelude/apple/apple_bundle.bzl b/prelude/apple/apple_bundle.bzl index c7ecb179e..1af42af84 100644 --- a/prelude/apple/apple_bundle.bzl +++ b/prelude/apple/apple_bundle.bzl @@ -12,7 +12,6 @@ load( "project_artifacts", ) load("@prelude//:paths.bzl", "paths") -load("@prelude//apple:apple_buck2_compatibility.bzl", "apple_check_buck2_compatibility") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") # @oss-disable: load("@prelude//apple/meta_only:linker_outputs.bzl", "subtargets_for_apple_bundle_extra_outputs") load("@prelude//apple/user:apple_selected_debug_path_file.bzl", "SELECTED_DEBUG_PATH_FILE_NAME") @@ -306,7 +305,6 @@ def _infer_apple_bundle_type(ctx: AnalysisContext) -> AppleBundleType: def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: _apple_bundle_run_validity_checks(ctx) - apple_check_buck2_compatibility(ctx) binary_outputs = _get_binary(ctx) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index c38f5a4cb..afe9e6f34 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -9,7 +9,6 @@ load( "@prelude//:artifact_tset.bzl", "project_artifacts", ) -load("@prelude//apple:apple_buck2_compatibility.bzl", 
"apple_check_buck2_compatibility") load("@prelude//apple:apple_dsym.bzl", "DSYM_SUBTARGET", "get_apple_dsym") load("@prelude//apple:apple_stripping.bzl", "apple_strip_args") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") @@ -108,8 +107,6 @@ AppleLibraryAdditionalParams = record( ) def apple_library_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: - apple_check_buck2_compatibility(ctx) - def get_apple_library_providers(deps_providers) -> list[Provider]: constructor_params = apple_library_rule_constructor_params_and_swift_providers( ctx, diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 12bd7aa69..772472415 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//apple:apple_buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE") +load("@prelude//:buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE") load("@prelude//apple/swift:swift_incremental_support.bzl", "SwiftCompilationMode") load("@prelude//apple/swift:swift_toolchain.bzl", "swift_toolchain_impl") load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftObjectFormat") diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 428d8541d..63c3d87ad 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//apple:apple_buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE") +load("@prelude//:buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE") load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_system_identification_attrs") load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo", "AppleBundleTypeAttributeType") load("@prelude//apple:apple_code_signing_types.bzl", "CodeSignType") diff --git a/prelude/apple/apple_test.bzl b/prelude/apple/apple_test.bzl index f3da39d96..fd74d3d38 100644 --- a/prelude/apple/apple_test.bzl +++ b/prelude/apple/apple_test.bzl @@ -6,7 +6,6 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") -load("@prelude//apple:apple_buck2_compatibility.bzl", "apple_check_buck2_compatibility") load("@prelude//apple:apple_library.bzl", "AppleLibraryAdditionalParams", "apple_library_rule_constructor_params_and_swift_providers") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") # @oss-disable: load("@prelude//apple/meta_only:apple_test_re_capabilities.bzl", "ios_test_re_capabilities", "macos_test_re_capabilities") @@ -48,8 +47,6 @@ load(":xcode.bzl", "apple_populate_xcode_attributes") load(":xctest_swift_support.bzl", "XCTestSwiftSupportInfo") def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: - apple_check_buck2_compatibility(ctx) - def get_apple_test_providers(deps_providers) -> list[Provider]: xctest_bundle = bundle_output(ctx) diff --git a/prelude/apple/apple_buck2_compatibility.bzl b/prelude/buck2_compatibility.bzl similarity index 94% rename from prelude/apple/apple_buck2_compatibility.bzl rename to prelude/buck2_compatibility.bzl index 09d870c0f..439b344af 100644 --- a/prelude/apple/apple_buck2_compatibility.bzl +++ b/prelude/buck2_compatibility.bzl @@ -15,6 +15,6 @@ Buck2Compatibility = enum( BUCK2_COMPATIBILITY_ATTRIB_NAME = "buck2_compatibility" 
BUCK2_COMPATIBILITY_ATTRIB_TYPE = attrs.enum(Buck2Compatibility.values(), default = "unknown") -def apple_check_buck2_compatibility(ctx: AnalysisContext): +def check_buck2_compatibility(ctx: AnalysisContext): if hasattr(ctx.attrs, "buck2_compatibility") and ctx.attrs.buck2_compatibility == "incompatible": warning("The target '{}' is marked as incompatible with buck2, output might be incorrect".format(ctx.label)) diff --git a/prelude/rules.bzl b/prelude/rules.bzl index 5394ad635..9653302b8 100644 --- a/prelude/rules.bzl +++ b/prelude/rules.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:buck2_compatibility.bzl", "check_buck2_compatibility") load("@prelude//configurations:rules.bzl", _config_implemented_rules = "implemented_rules") load("@prelude//decls/common.bzl", "prelude_rule") load("@prelude//is_full_meta_repo.bzl", "is_full_meta_repo") @@ -86,11 +87,18 @@ def _mk_rule(rule_spec: typing.Any, extra_attrs: dict[str, typing.Any] = dict(), extra_args.setdefault("is_configuration_rule", name in _config_implemented_rules) extra_args.setdefault("is_toolchain_rule", name in toolchain_rule_names) return rule( - impl = impl, + impl = buck2_compatibility_check_wrapper(impl), attrs = attributes, **extra_args ) +def buck2_compatibility_check_wrapper(impl) -> typing.Callable: + def buck2_compatibility_shim(ctx: AnalysisContext) -> [list[Provider], Promise]: + check_buck2_compatibility(ctx) + return impl(ctx) + + return buck2_compatibility_shim + def _flatten_decls(): decls = {} for decl_set in rule_decl_records: From 44726a0195173c1d012eac8276e9112a00b0b814 Mon Sep 17 00:00:00 2001 From: Navid Qaragozlou Date: Wed, 7 Feb 2024 12:38:57 -0800 Subject: [PATCH 0244/1133] Add support for `buck2_compatibility` field Summary: TSIA. 
Reviewed By: mzlee Differential Revision: D53457502 fbshipit-source-id: a3c45977f6d07bfdb4854cf59e18d1d836ae88ed --- prelude/android/android.bzl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 184a93470..7818bfae5 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE") load("@prelude//android:cpu_filters.bzl", "ALL_CPU_FILTERS") load("@prelude//java:java.bzl", "AbiGenerationMode", "dex_min_sdk_version") load("@prelude//decls/android_rules.bzl", "AaptMode", "DuplicateResourceBehaviour") @@ -95,6 +96,7 @@ extra_attributes = { "_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)), "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)), "_java_toolchain": toolchains_common.java_for_android(), + BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, }, "android_build_config": { "_android_toolchain": toolchains_common.android(), @@ -123,6 +125,7 @@ extra_attributes = { "_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)), "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)), "_java_toolchain": toolchains_common.java_for_android(), + BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, }, "android_instrumentation_apk": { "aapt_mode": attrs.enum(AaptMode, default = "aapt1"), # Match default in V1 @@ -148,6 +151,7 @@ extra_attributes = { "_android_toolchain": toolchains_common.android(), "_exec_os_type": buck.exec_os_type_arg(), "_java_toolchain": toolchains_common.java_for_android(), + BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, }, "android_library": { 
"abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), @@ -160,6 +164,7 @@ extra_attributes = { "_is_building_android_binary": is_building_android_binary_attr(), "_java_toolchain": toolchains_common.java_for_android(), "_kotlin_toolchain": toolchains_common.kotlin(), + BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, }, "android_manifest": { "_android_toolchain": toolchains_common.android(), @@ -211,5 +216,6 @@ extra_attributes = { "_java_test_toolchain": toolchains_common.java_test(), "_java_toolchain": toolchains_common.java_for_host_test(), "_kotlin_toolchain": toolchains_common.kotlin(), + BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, }, } From cac4e4063425b5f7b30fc00acce6db07395eaac8 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Wed, 7 Feb 2024 14:55:47 -0800 Subject: [PATCH 0245/1133] Delete validator and validator_args. Summary: They are all replaced by `prepackaged_validators`. Reviewed By: passy Differential Revision: D53495146 fbshipit-source-id: 88fd437333c920db9ae582cb586e5d9d7d5c0ade --- prelude/apple/apple_package.bzl | 6 ------ prelude/apple/apple_rules_impl.bzl | 2 -- 2 files changed, 8 deletions(-) diff --git a/prelude/apple/apple_package.bzl b/prelude/apple/apple_package.bzl index 59a5235fe..bc35f7ffb 100644 --- a/prelude/apple/apple_package.bzl +++ b/prelude/apple/apple_package.bzl @@ -48,12 +48,6 @@ def apple_package_impl(ctx: AnalysisContext) -> list[Provider]: DefaultInfo(default_outputs = prepackaged_validators_artifacts), ] - if ctx.attrs.validator != None: - process_ipa_cmd.add([ - "--validator", - ctx.attrs.validator[RunInfo], - [cmd_args(["--validator-args=", arg], delimiter = "") for arg in ctx.attrs.validator_args], - ]) ctx.actions.run(process_ipa_cmd, category = category) return [DefaultInfo( diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 772472415..a62e4bc62 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ 
b/prelude/apple/apple_rules_impl.bzl @@ -160,8 +160,6 @@ extra_attributes = { ), default = [], ), - "validator": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), - "validator_args": attrs.list(attrs.arg(), default = []), "_apple_toolchain": get_apple_bundle_toolchain_attr(), "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), "_ipa_compression_level": attrs.enum(IpaCompressionLevel.values()), From 8ecde4a63da1d32afc2619467b5592973a494f5d Mon Sep 17 00:00:00 2001 From: Jia Chen Date: Wed, 7 Feb 2024 16:36:02 -0800 Subject: [PATCH 0246/1133] Adding typeshed to Python toolchain Reviewed By: zsol Differential Revision: D53335074 fbshipit-source-id: a13f805eac132cab0f0dab230f2a3a42781dfeef --- prelude/python/toolchain.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/python/toolchain.bzl b/prelude/python/toolchain.bzl index 039e717c4..62bd8be36 100644 --- a/prelude/python/toolchain.bzl +++ b/prelude/python/toolchain.bzl @@ -69,6 +69,7 @@ PythonToolchainInfo = provider( "pex_executor": provider_field(typing.Any, default = None), "pex_extension": provider_field(typing.Any, default = None), "type_checker": provider_field(typing.Any, default = None), + "typeshed_stubs": provider_field(typing.Any, default = []), "emit_omnibus_metadata": provider_field(typing.Any, default = None), "fail_with_message": provider_field(typing.Any, default = None), "emit_dependency_metadata": provider_field(typing.Any, default = None), From a6d979bfeeee2977130d719d601714b00de57346 Mon Sep 17 00:00:00 2001 From: Jia Chen Date: Wed, 7 Feb 2024 16:36:02 -0800 Subject: [PATCH 0247/1133] Create manifest file for typeshed and pass it to `[typecheck]` subtarget Reviewed By: zsol Differential Revision: D53335073 fbshipit-source-id: b25effe15537f94cffc6d12aa0397aae6d8bb082 --- prelude/python/python_binary.bzl | 3 ++- prelude/python/python_library.bzl | 3 ++- prelude/python/typing.bzl | 36 
+++++++++++++++++++++++++------ 3 files changed, 34 insertions(+), 8 deletions(-) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index daf87c34c..5140a6ade 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -406,10 +406,11 @@ def python_executable( exe.sub_targets.update({ "typecheck": [ create_per_target_type_check( - ctx.actions, + ctx, type_checker, src_manifest, python_deps, + typeshed_stubs = python_toolchain.typeshed_stubs, py_version = ctx.attrs.py_version_for_type_checking, typing_enabled = ctx.attrs.typing, ), diff --git a/prelude/python/python_library.bzl b/prelude/python/python_library.bzl index e95cdc277..295a076ca 100644 --- a/prelude/python/python_library.bzl +++ b/prelude/python/python_library.bzl @@ -317,10 +317,11 @@ def python_library_impl(ctx: AnalysisContext) -> list[Provider]: if type_checker != None: sub_targets["typecheck"] = [ create_per_target_type_check( - ctx.actions, + ctx, type_checker, src_type_manifest, deps, + typeshed_stubs = python_toolchain.typeshed_stubs, py_version = ctx.attrs.py_version_for_type_checking, typing_enabled = ctx.attrs.typing, ), diff --git a/prelude/python/typing.bzl b/prelude/python/typing.bzl index 2abca4c30..a856095b4 100644 --- a/prelude/python/typing.bzl +++ b/prelude/python/typing.bzl @@ -5,24 +5,39 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//:artifacts.bzl", "ArtifactGroupInfo") load("@prelude//python:python.bzl", "PythonLibraryInfo") load( ":manifest.bzl", "ManifestInfo", # @unused Used as a type + "create_manifest_for_source_map", ) load(":python.bzl", "PythonLibraryManifestsTSet") +def create_typeshed_manifest_info( + ctx: AnalysisContext, + typeshed_deps: list[Dependency]) -> ManifestInfo: + # NOTE(grievejia): This assumes that if multiple typeshed targets offer + # the same stub file, the target that comes later wins. 
+ srcs = { + artifact.short_path: artifact + for typeshed_dep in typeshed_deps + for artifact in typeshed_dep[ArtifactGroupInfo].artifacts + } + return create_manifest_for_source_map(ctx, "typeshed", srcs) + def create_per_target_type_check( - actions: AnalysisActions, + ctx: AnalysisContext, executable: RunInfo, srcs: ManifestInfo | None, deps: list[PythonLibraryInfo], + typeshed_stubs: list[Dependency], py_version: str | None, typing_enabled: bool) -> DefaultInfo: output_file_name = "type_check_result.json" if not typing_enabled: # Use empty dict to signal that no type checking was performed. - output_file = actions.write_json(output_file_name, {}) + output_file = ctx.actions.write_json(output_file_name, {}) else: cmd = cmd_args(executable) cmd.add(cmd_args("check")) @@ -34,22 +49,31 @@ def create_per_target_type_check( cmd.hidden([a for a, _ in srcs.artifacts]) # Dep artifacts - dep_manifest_tset = actions.tset(PythonLibraryManifestsTSet, children = [d.manifests for d in deps]) + dep_manifest_tset = ctx.actions.tset(PythonLibraryManifestsTSet, children = [d.manifests for d in deps]) dep_manifests = dep_manifest_tset.project_as_args("source_type_manifests") cmd.hidden(dep_manifest_tset.project_as_args("source_type_artifacts")) + # Typeshed artifacts + if len(typeshed_stubs) > 0: + typeshed_manifest_info = create_typeshed_manifest_info(ctx, typeshed_stubs) + cmd.hidden([a for a, _ in typeshed_manifest_info.artifacts]) + typeshed_manifest = typeshed_manifest_info.manifest + else: + typeshed_manifest = None + # Create input configs input_config = { "dependencies": dep_manifests, "py_version": py_version, "sources": source_manifests, + "typeshed": typeshed_manifest, } - input_file = actions.write_json("type_check_config.json", input_config, with_inputs = True) - output_file = actions.declare_output(output_file_name) + input_file = ctx.actions.write_json("type_check_config.json", input_config, with_inputs = True) + output_file = 
ctx.actions.declare_output(output_file_name) cmd.add(cmd_args(input_file)) cmd.add(cmd_args(output_file.as_output(), format = "--output={}")) - actions.run(cmd, category = "type_check") + ctx.actions.run(cmd, category = "type_check") return DefaultInfo(default_output = output_file) From 345775d32c350219f751d47fd06a46d33ea3244c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20L=C3=B6scher?= Date: Wed, 7 Feb 2024 17:30:15 -0800 Subject: [PATCH 0248/1133] move include_src from config to application attribute Summary: This moves controlling whether the output application directory has a src/ directory from global config to per-application attribute. It also changes the default from False to True, since this is the behaviour one expects coming from other build systems like rebar3 Differential Revision: D53545917 fbshipit-source-id: 7ac1dc97d47e9b065dd41098e9004d14b4b1e455 --- prelude/decls/erlang_rules.bzl | 3 +++ prelude/erlang/erlang_application.bzl | 3 +-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/prelude/decls/erlang_rules.bzl b/prelude/decls/erlang_rules.bzl index 86154b034..1c4b0ac5e 100644 --- a/prelude/decls/erlang_rules.bzl +++ b/prelude/decls/erlang_rules.bzl @@ -121,6 +121,9 @@ rules_attributes = { [application_opt()](https://www.erlang.org/doc/man/application.html#load-2). The key-value pair will be stored in the applications `.app` file and can be accessed by `file:consult/1`. """), + "include_src": attrs.bool(default = True, doc = """ + This field controlls if the generated application directory contains a src/ directory with the Erlang code or not. + """), "includes": attrs.list(attrs.source(), default = [], doc = """ The public header files accessible via `-include_lib("appname/include/header.hrl")` from other erlang files. 
"""), diff --git a/prelude/erlang/erlang_application.bzl b/prelude/erlang/erlang_application.bzl index e9d3a134b..22f9daf98 100644 --- a/prelude/erlang/erlang_application.bzl +++ b/prelude/erlang/erlang_application.bzl @@ -37,7 +37,6 @@ load( "multidict_projection", "multidict_projection_key", "normalise_metadata", - "str_to_bool", "to_term_args", ) @@ -366,7 +365,7 @@ def link_output( def _link_srcs_folder(ctx: AnalysisContext) -> dict[str, Artifact]: """Build mapping for the src folder if erlang.include_src is set""" - if not str_to_bool(read_root_config("erlang", "include_src", "False")): + if not ctx.attrs.include_src: return {} srcs = { paths.join("src", src_file.basename): src_file From 4551e35c4a0c2ffcb3790c9720052006cd8bae6c Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Thu, 8 Feb 2024 02:22:48 -0800 Subject: [PATCH 0249/1133] allow install apps on real device Summary: ditto Reviewed By: milend Differential Revision: D53527606 fbshipit-source-id: fdc8b6ecaeeca8486365cd35624d9ee052397f82 --- prelude/apple/apple_bundle.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/apple/apple_bundle.bzl b/prelude/apple/apple_bundle.bzl index 1af42af84..357e8c1f0 100644 --- a/prelude/apple/apple_bundle.bzl +++ b/prelude/apple/apple_bundle.bzl @@ -460,6 +460,7 @@ def generate_install_data( data = { "fullyQualifiedName": ctx.label, "info_plist": plist_path, + "platform_name": get_apple_sdk_name(ctx), "use_idb": "true", ## TODO(T110665037): read from .buckconfig # We require the user to have run `xcode-select` and `/var/db/xcode_select_link` to symlink From c48d6eb7047ae07c60b33ca3a938c1b919541083 Mon Sep 17 00:00:00 2001 From: David Reiss Date: Thu, 8 Feb 2024 04:16:09 -0800 Subject: [PATCH 0250/1133] Capture dep files for preprocessed assembly Summary: ".S" is an extension for preprocessed assembly (already recognized by buck2), but this codepath was not treating it as such. 
Reviewed By: IanChilds Differential Revision: D53329801 fbshipit-source-id: 497635790afdbe24fed23b7d683fa2b2c29ca6f8 --- prelude/cxx/compile.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/cxx/compile.bzl b/prelude/cxx/compile.bzl index 03a5f4a44..fd8ca4704 100644 --- a/prelude/cxx/compile.bzl +++ b/prelude/cxx/compile.bzl @@ -559,7 +559,7 @@ def _get_compile_base(compiler_info: typing.Any) -> cmd_args: def _dep_file_type(ext: CxxExtension) -> [DepFileType, None]: # Raw assembly doesn't make sense to capture dep files for. - if ext.value in (".s", ".S", ".asm"): + if ext.value in (".s", ".asm"): return None elif ext.value == ".hip": # TODO (T118797886): HipCompilerInfo doesn't have dep files processor. @@ -573,7 +573,7 @@ def _dep_file_type(ext: CxxExtension) -> [DepFileType, None]: return DepFileType("c") elif ext.value == ".cu": return DepFileType("cuda") - elif ext.value in (".asmpp"): + elif ext.value in (".asmpp", ".S"): return DepFileType("asm") else: # This should be unreachable as long as we handle all enum values From fce882f74aa9496f16803bb54d021b6ec29376e7 Mon Sep 17 00:00:00 2001 From: Anuar Navarro Hawach Date: Thu, 8 Feb 2024 06:36:55 -0800 Subject: [PATCH 0251/1133] Revert D53329801: Capture dep files for preprocessed assembly Differential Revision: D53329801 Original commit changeset: 497635790afd Original Phabricator Diff: D53329801 fbshipit-source-id: 6492740da94da36079c6a988baed47c7297c0043 --- prelude/cxx/compile.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/cxx/compile.bzl b/prelude/cxx/compile.bzl index fd8ca4704..03a5f4a44 100644 --- a/prelude/cxx/compile.bzl +++ b/prelude/cxx/compile.bzl @@ -559,7 +559,7 @@ def _get_compile_base(compiler_info: typing.Any) -> cmd_args: def _dep_file_type(ext: CxxExtension) -> [DepFileType, None]: # Raw assembly doesn't make sense to capture dep files for. 
- if ext.value in (".s", ".asm"): + if ext.value in (".s", ".S", ".asm"): return None elif ext.value == ".hip": # TODO (T118797886): HipCompilerInfo doesn't have dep files processor. @@ -573,7 +573,7 @@ def _dep_file_type(ext: CxxExtension) -> [DepFileType, None]: return DepFileType("c") elif ext.value == ".cu": return DepFileType("cuda") - elif ext.value in (".asmpp", ".S"): + elif ext.value in (".asmpp"): return DepFileType("asm") else: # This should be unreachable as long as we handle all enum values From 8c996da61d7af1e152be54b2be39b91a73ccd99a Mon Sep 17 00:00:00 2001 From: Przemyslaw Szczepanski Date: Thu, 8 Feb 2024 06:40:52 -0800 Subject: [PATCH 0252/1133] Fix buck2 remote execution error Summary: Fixes: ``` Action failed: fbsource//arvr/apps/aria/hub/models:moc_FileSortModel.cppModelsBundle (genrule) Remote command returned non-zero exit code -1073741515 Reproduce locally: `frecli cas download-action fa25da54dcc4cf00b9fd430e43e1db5497e65ea1299303d48464d49f324bd5c5:145` stdout: stderr: ... ``` This workaround makes Qt `moc`, `uic` & `rcc` run on the local machine instead of using remote execution. 
Reviewed By: seanwarren-meta Differential Revision: D53526557 fbshipit-source-id: b3d8cb6d06b234d94835f078f6c329cb4e74f134 --- prelude/genrule_local_labels.bzl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/prelude/genrule_local_labels.bzl b/prelude/genrule_local_labels.bzl index 13cb97fd2..428f61d76 100644 --- a/prelude/genrule_local_labels.bzl +++ b/prelude/genrule_local_labels.bzl @@ -170,10 +170,12 @@ _GENRULE_LOCAL_LABELS = {label: True for label in [ # Some Qt genrules don't support RE yet "qt_moc", - "qt_qrc_gen", + "qt_qmlcachegen", "qt_qrc_compile", + "qt_qrc_gen", "qt_qsb_gen", - "qt_qmlcachegen", + "qt_rcc", + "qt_uic", # use local jar "uses_jar", From 2bd4314315e49c1198eeebf06c9a2461845c21e7 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Thu, 8 Feb 2024 07:55:36 -0800 Subject: [PATCH 0253/1133] generalize process spawning 1/x Summary: to be reused later by provisioning profile parsing, no behavior change Reviewed By: milend Differential Revision: D53518128 fbshipit-source-id: f2e015ed380582172a570fc55dff41a6c9bef97e --- .../tools/code_signing/codesign_bundle.py | 36 +++++++++++-------- 1 file changed, 22 insertions(+), 14 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 94650f682..072c7f032 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -351,7 +351,7 @@ def _codesign_everything( @dataclass -class CodesignProcess: +class ParallelProcess: process: subprocess.Popen stdout_path: str stderr_path: str @@ -367,31 +367,39 @@ def check_result(self) -> None: ) -def _spawn_codesign_process( - path: Path, - identity_fingerprint: str, +def _spawn_process( + command: List[Union[str, Path]], tmp_dir: str, - codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], stack: ExitStack, - codesign_args: List[str], -) -> CodesignProcess: +) -> 
ParallelProcess: stdout_path = os.path.join(tmp_dir, uuid.uuid4().hex) stdout = stack.enter_context(open(stdout_path, "w")) stderr_path = os.path.join(tmp_dir, uuid.uuid4().hex) stderr = stack.enter_context(open(stderr_path, "w")) - command = codesign_command_factory.codesign_command( - path, identity_fingerprint, entitlements, codesign_args - ) - _LOGGER.info(f"Executing codesign command: {command}") + _LOGGER.info(f"Executing command: {command}") process = subprocess.Popen(command, stdout=stdout, stderr=stderr) - return CodesignProcess( + return ParallelProcess( process, stdout_path, stderr_path, ) +def _spawn_codesign_process( + path: Path, + identity_fingerprint: str, + tmp_dir: str, + codesign_command_factory: ICodesignCommandFactory, + entitlements: Optional[Path], + stack: ExitStack, + codesign_args: List[str], +) -> ParallelProcess: + command = codesign_command_factory.codesign_command( + path, identity_fingerprint, entitlements, codesign_args + ) + return _spawn_process(command, tmp_dir, stack) + + def _codesign_paths( paths: List[Path], identity_fingerprint: str, @@ -402,7 +410,7 @@ def _codesign_paths( codesign_args: List[str], ) -> None: """Codesigns several paths in parallel.""" - processes: List[CodesignProcess] = [] + processes: List[ParallelProcess] = [] with ExitStack() as stack: for path in paths: process = _spawn_codesign_process( From c27594a2b684f249567bf70325908a04db320d0c Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Thu, 8 Feb 2024 07:55:36 -0800 Subject: [PATCH 0254/1133] generalize process spawning 2/x Summary: allow stdout as pipe, not used anywhere yet so no behavior change Reviewed By: milend Differential Revision: D53518127 fbshipit-source-id: c2bc4c207993002601244964ba8c62e023016b65 --- .../tools/code_signing/codesign_bundle.py | 27 ++++++++++++------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 
072c7f032..59a531445 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -353,27 +353,36 @@ def _codesign_everything( @dataclass class ParallelProcess: process: subprocess.Popen - stdout_path: str + stdout_path: Optional[str] stderr_path: str def check_result(self) -> None: if self.process.returncode == 0: return - with open(self.stdout_path, encoding="utf8") as stdout, open( - self.stderr_path, encoding="utf8" - ) as stderr: - raise RuntimeError( - "\nstdout:\n{}\n\nstderr:\n{}\n".format(stdout.read(), stderr.read()) + with ExitStack() as stack: + stderr = stack.enter_context(open(self.stderr_path, encoding="utf8")) + stderr_string = f"\nstderr:\n{stderr.read()}\n" + stdout = ( + stack.enter_context(open(self.stdout_path, encoding="utf8")) + if self.stdout_path + else None ) + stdout_string = f"\nstdout:\n{stdout.read()}\n" if stdout else "" + raise RuntimeError(f"{stdout_string}{stderr_string}") def _spawn_process( command: List[Union[str, Path]], tmp_dir: str, stack: ExitStack, + pipe_stdout: bool = False, ) -> ParallelProcess: - stdout_path = os.path.join(tmp_dir, uuid.uuid4().hex) - stdout = stack.enter_context(open(stdout_path, "w")) + if pipe_stdout: + stdout_path = None + stdout = subprocess.PIPE + else: + stdout_path = os.path.join(tmp_dir, uuid.uuid4().hex) + stdout = stack.enter_context(open(stdout_path, "w")) stderr_path = os.path.join(tmp_dir, uuid.uuid4().hex) stderr = stack.enter_context(open(stderr_path, "w")) _LOGGER.info(f"Executing command: {command}") @@ -397,7 +406,7 @@ def _spawn_codesign_process( command = codesign_command_factory.codesign_command( path, identity_fingerprint, entitlements, codesign_args ) - return _spawn_process(command, tmp_dir, stack) + return _spawn_process(command=command, tmp_dir=tmp_dir, stack=stack) def _codesign_paths( From f2e3e2a3926ff86edf21e6791131e365673b7aa5 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Thu, 8 Feb 2024 07:55:36 
-0800 Subject: [PATCH 0255/1133] parallelize provisioning profiles decryption Summary: in this diff run `openssl smime` processes that decrypt provisioning profiles in parallel to get some performance improvements Reviewed By: chatura-atapattu Differential Revision: D53518126 fbshipit-source-id: a37d230e550816b813bd0422bd3479501b5fdc74 --- .../tools/code_signing/codesign_bundle.py | 42 ++++++++++++++++--- 1 file changed, 37 insertions(+), 5 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 59a531445..6eb5be341 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -222,14 +222,46 @@ def _read_provisioning_profiles( dirpath: Path, read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, ) -> List[ProvisioningProfileMetadata]: - return [ - _provisioning_profile_from_file_path( - dirpath / f, - read_provisioning_profile_command_factory, - ) + paths = [ + dirpath / f for f in os.listdir(dirpath) if (f.endswith(".mobileprovision") or f.endswith(".provisionprofile")) ] + with tempfile.TemporaryDirectory() as tmp_dir: + path_to_data = _decode_provisioning_profiles( + paths, tmp_dir, read_provisioning_profile_command_factory + ) + return [ + ProvisioningProfileMetadata.from_provisioning_profile_file_content(path, data) + for path, data in path_to_data.items() + ] + + +def _decode_provisioning_profiles( + paths: [Path], + tmp_dir: str, + read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, +) -> Dict[Path, bytes]: + """Reads multiple provisioning profiles in parallel.""" + processes: Dict[Path, ParallelProcess] = {} + result = {} + with ExitStack() as stack: + for path in paths: + command = read_provisioning_profile_command_factory.read_provisioning_profile_command( + path + ) + process = _spawn_process( + command=command, + tmp_dir=tmp_dir, + stack=stack, + 
pipe_stdout=True, + ) + processes[path] = process + for path, process in processes.items(): + data, _ = process.process.communicate() + process.check_result() + result[path] = data + return result def _provisioning_profile_from_file_path( From c98a1a9489f6d2dc57b00f48d33dad024a5995e5 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Thu, 8 Feb 2024 10:41:01 -0800 Subject: [PATCH 0256/1133] Override cgo_enabled with configuration modifiers Summary: cgo_enabled is a non-selectable attribute, but hopefully we can override its behaviour using the brand new shiny configuration modifiers API Reviewed By: awalterschulze Differential Revision: D53575342 fbshipit-source-id: 26b4d137487fda01d21e33f2e723fe24784c7685 --- prelude/go/transitions/defs.bzl | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/prelude/go/transitions/defs.bzl b/prelude/go/transitions/defs.bzl index 9ab848911..5686852ac 100644 --- a/prelude/go/transitions/defs.bzl +++ b/prelude/go/transitions/defs.bzl @@ -6,6 +6,14 @@ # of this source tree. 
def _cgo_enabled_transition(platform, refs, attrs): + constraints = platform.configuration.constraints + + # Cancel transition if the value already set + # to enable using configuration modifiers for overiding this option + cgo_enabled_setting = refs.cgo_enabled_auto[ConstraintValueInfo].setting + if cgo_enabled_setting.label in constraints: + return platform + if attrs.cgo_enabled == None: cgo_enabled_ref = refs.cgo_enabled_auto elif attrs.cgo_enabled == True: @@ -14,7 +22,6 @@ def _cgo_enabled_transition(platform, refs, attrs): cgo_enabled_ref = refs.cgo_enabled_false cgo_enabled_value = cgo_enabled_ref[ConstraintValueInfo] - constraints = platform.configuration.constraints constraints[cgo_enabled_value.setting.label] = cgo_enabled_value new_cfg = ConfigurationInfo( From 609e0cd397a29ac070c15d019adcf534ddb11a6f Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Thu, 8 Feb 2024 15:00:31 -0800 Subject: [PATCH 0257/1133] Format the rest of our markdown Summary: Stacked the next diff on top and ran ``` arc lint --take PRETTIERMARKDOWN --paths-cmd 'find . -type f -name "*.md" -print' -a ``` Reviewed By: zertosh Differential Revision: D53590286 fbshipit-source-id: 52cad14893ef6894e36646a1de22f1da55e6f58d --- prelude/CHANGELOG.md | 2 +- prelude/CONTRIBUTING.md | 18 +- prelude/README.md | 16 +- prelude/cxx/dist_lto/README.md | 344 +++++++++++++++++++------------ prelude/haskell/ide/README.md | 7 +- prelude/pull_request_template.md | 8 +- 6 files changed, 238 insertions(+), 157 deletions(-) diff --git a/prelude/CHANGELOG.md b/prelude/CHANGELOG.md index 0ab36850d..524da6309 100644 --- a/prelude/CHANGELOG.md +++ b/prelude/CHANGELOG.md @@ -1,3 +1,3 @@ # Buck2 Prelude -* Initial version. +- Initial version. diff --git a/prelude/CONTRIBUTING.md b/prelude/CONTRIBUTING.md index e2f05f03e..7f7c52bbb 100644 --- a/prelude/CONTRIBUTING.md +++ b/prelude/CONTRIBUTING.md @@ -1,15 +1,16 @@ # Contributing to Buck2 Prelude -This repository is a subset of . 
-You can contribute to either that repo, or this repo - changes will be mirrored to both. +This repository is a subset of . You can +contribute to either that repo, or this repo - changes will be mirrored to both. -We want to make contributing to this project as easy and transparent as possible. +We want to make contributing to this project as easy and transparent as +possible. ## Our Development Process -Buck2 Prelude is currently developed in Facebook's internal repositories and then exported -out to GitHub by a Facebook team member; however, we invite you to submit pull -requests as described below. +Buck2 Prelude is currently developed in Facebook's internal repositories and +then exported out to GitHub by a Facebook team member; however, we invite you to +submit pull requests as described below. ## Pull Requests @@ -45,5 +46,6 @@ We use several Python formatters. ## License By contributing to Buck2 Prelude, you agree that your contributions will be -licensed under both the [LICENSE-MIT](LICENSE-MIT) and [LICENSE-APACHE](LICENSE-APACHE) -files in the root directory of this source tree. +licensed under both the [LICENSE-MIT](LICENSE-MIT) and +[LICENSE-APACHE](LICENSE-APACHE) files in the root directory of this source +tree. diff --git a/prelude/README.md b/prelude/README.md index e41bdc072..6830efe26 100644 --- a/prelude/README.md +++ b/prelude/README.md @@ -1,12 +1,16 @@ # Buck2 Prelude -This repo contains a copy of the Buck2 Prelude, which is often included as a submodule with a Buck2 project. -To obtain a copy of this repo, and set up other details of a Buck2, you should usually run `buck2 init --git`. -Most information can be found on the main [Buck2 GitHub project](https://github.com/facebook/buck2). +This repo contains a copy of the Buck2 Prelude, which is often included as a +submodule with a Buck2 project. To obtain a copy of this repo, and set up other +details of a Buck2, you should usually run `buck2 init --git`. 
Most information +can be found on the main +[Buck2 GitHub project](https://github.com/facebook/buck2). -Pull requests and issues should be raised at [facebook/buck2](https://github.com/facebook/buck2) as that project -is more closely monitored and contains CI checks. +Pull requests and issues should be raised at +[facebook/buck2](https://github.com/facebook/buck2) as that project is more +closely monitored and contains CI checks. ## License -Buck2 Prelude is both MIT and Apache License, Version 2.0 licensed, as found in the [LICENSE-MIT](LICENSE-MIT) and [LICENSE-APACHE](LICENSE-APACHE) files. +Buck2 Prelude is both MIT and Apache License, Version 2.0 licensed, as found in +the [LICENSE-MIT](LICENSE-MIT) and [LICENSE-APACHE](LICENSE-APACHE) files. diff --git a/prelude/cxx/dist_lto/README.md b/prelude/cxx/dist_lto/README.md index 1102134a2..88a4b80a7 100644 --- a/prelude/cxx/dist_lto/README.md +++ b/prelude/cxx/dist_lto/README.md @@ -1,23 +1,27 @@ # Distributed ThinLTO in Buck2 + Sean Gillespie, April 2022 -This document is a technical overview into Buck2's implementation of a distributed ThinLTO. -Like all rules in Buck2, this implementation is written entirely in Starlark, contained in -`dist_lto.bzl` (in this same directory). +This document is a technical overview into Buck2's implementation of a +distributed ThinLTO. Like all rules in Buck2, this implementation is written +entirely in Starlark, contained in `dist_lto.bzl` (in this same directory). ## Motivation -First, I highly recommend watching [Teresa Johnson's CppCon2017 talk about ThinLTO](https://www.youtube.com/watch?v=p9nH2vZ2mNo), +First, I highly recommend watching +[Teresa Johnson's CppCon2017 talk about ThinLTO](https://www.youtube.com/watch?v=p9nH2vZ2mNo), which covers the topics in this section in much greater detail than I can. -C and C++ have long enjoyed significant optimizations at the hands of compilers. 
However, they have also -long suffered a fundamental limitation; a C or C++ compiler can only optimize code that it sees in a single -translation unit. For a language like C or C++, this means in practice that only code that is included via -the preprocessor or specified in the translation unit can be optimized as a single unit. C and C++ compilers -are unable to inline functions that are defined in different translation units. However, a crucial advantage -of this compilation model is that all C and C++ compiler invocations are *completely parallelizable*; despite -sacrificing some code quality, C and C++ compilation turns into a massively parallel problem with a serial -link step at the very end. +C and C++ have long enjoyed significant optimizations at the hands of compilers. +However, they have also long suffered a fundamental limitation; a C or C++ +compiler can only optimize code that it sees in a single translation unit. For a +language like C or C++, this means in practice that only code that is included +via the preprocessor or specified in the translation unit can be optimized as a +single unit. C and C++ compilers are unable to inline functions that are defined +in different translation units. However, a crucial advantage of this compilation +model is that all C and C++ compiler invocations are _completely +parallelizable_; despite sacrificing some code quality, C and C++ compilation +turns into a massively parallel problem with a serial link step at the very end. ``` flowchart LR; @@ -36,20 +40,25 @@ flowchart LR; c.o --> main; ``` -([Rendered](https://fburl.com/mermaid/rzup8o32). Compilation and optimization of a, b, and c can proceed in parallel.) - - -In cases where absolute performance is required, though, the inability to perform cross-translation-unit -(or "cross-module", in LLVM parlance) optimizations becomes more of a problem. To solve this, a new compilation -paradigm was designed, dubbed "Link-Time Optimization" (LTO). 
In this scheme, a compiler will not produce machine code -when processing a translation unit; rather, it will output the compiler's intermediate representation (e.g. LLVM bitcode). -Later on, when it is time for the linker to run, it will load all of the compiler IR into one giant module, run -optimization passes on the mega-module, and produce a final binary from that. - -This works quite well, if all that you're looking for is run-time performance. A major drawback of the LTO approach is -that all of the parallelism gained from optimizing translation units individually is now completely lost; instead, the -linker (using a plugin) will do a single-threaded pass of *all code* produced by compilation steps. This is extremely -slow, memory-intensive, and unable to be run incrementally. There are targets at Meta that simply can't be LTO-compiled +([Rendered](https://fburl.com/mermaid/rzup8o32). Compilation and optimization of +a, b, and c can proceed in parallel.) + +In cases where absolute performance is required, though, the inability to +perform cross-translation-unit (or "cross-module", in LLVM parlance) +optimizations becomes more of a problem. To solve this, a new compilation +paradigm was designed, dubbed "Link-Time Optimization" (LTO). In this scheme, a +compiler will not produce machine code when processing a translation unit; +rather, it will output the compiler's intermediate representation (e.g. LLVM +bitcode). Later on, when it is time for the linker to run, it will load all of +the compiler IR into one giant module, run optimization passes on the +mega-module, and produce a final binary from that. + +This works quite well, if all that you're looking for is run-time performance. A +major drawback of the LTO approach is that all of the parallelism gained from +optimizing translation units individually is now completely lost; instead, the +linker (using a plugin) will do a single-threaded pass of _all code_ produced by +compilation steps. 
This is extremely slow, memory-intensive, and unable to be +run incrementally. There are targets at Meta that simply can't be LTO-compiled because of their size. ``` @@ -74,15 +83,21 @@ flowchart LR; main.o --> |ld| main ``` -([Rendered](https://fburl.com/mermaid/kid35io9). `a.bc`, `b.bc`, and `c.bc` are LLVM bitcode; they are all merged -together into a single module, `a_b_c_optimized.bc`, which is then optimized and codegen'd into a final binary.) -The idea of ThinLTO comes from a desire to maintain the ability to optimize modules in parallel while still -allowing for profitable cross-module optimizations. The idea is this: +([Rendered](https://fburl.com/mermaid/kid35io9). `a.bc`, `b.bc`, and `c.bc` are +LLVM bitcode; they are all merged together into a single module, +`a_b_c_optimized.bc`, which is then optimized and codegen'd into a final +binary.) -1. Just like regular LTO, the compiler emits bitcode instead of machine code. However, it also contains some light -metadata such as a call graph of symbols within the module. -2. The monolithic LTO link is split into three steps: `index`, `opt`, and `link`. +The idea of ThinLTO comes from a desire to maintain the ability to optimize +modules in parallel while still allowing for profitable cross-module +optimizations. The idea is this: + +1. Just like regular LTO, the compiler emits bitcode instead of machine code. + However, it also contains some light metadata such as a call graph of symbols + within the module. +2. The monolithic LTO link is split into three steps: `index`, `opt`, and + `link`. ``` flowchart LR; @@ -117,137 +132,192 @@ flowchart LR; ([Rendered](https://fburl.com/mermaid/56oc99t5)) -The `index` step looks like a link step. However, it does not produce a final binary; instead, it looks at every -compiler IR input file that it receives and heuristically determines which other IR modules it should be optimized -with in order to achieve profitable optimizations. 
These modules might include functions that the index step thinks -probably will get inlined, or globals that are read in the target IR input file. The output of the index step is a -series of files on disk that indicate which sibling object files should be present when optimizing a particular object -file, for each object file in the linker command-line. - -The `opt` step runs in parallel for every object file. Each object file will be optimized using the compiler's -optimizer (e.g. `opt`, for LLVM). The optimizer will combine the objects that were referenced as part of the index -step as potentially profitable to include and optimize them all together. - -The `link` step takes the outputs of `opt` and links them together, like a normal linker. - -In practice, ThinLTO manages to recapture the inherent parallelism of C/C++ compilation by pushing the majority of work -to the parallel `opt` phase of execution. When LLVM performs ThinLTO by default, it will launch a thread pool and process -independent modules in parallel. ThinLTO does not produce as performant a binary as a monolithic LTO; however, in practice, -ThinLTO binaries [paired with AutoFDO](https://fburl.com/wiki/q480euco) perform comparably to monolithic LTO. Furthermore, -ThinLTO's greater efficiency allows for more expensive optimization passes to be run, which can further improve code quality +The `index` step looks like a link step. However, it does not produce a final +binary; instead, it looks at every compiler IR input file that it receives and +heuristically determines which other IR modules it should be optimized with in +order to achieve profitable optimizations. These modules might include functions +that the index step thinks probably will get inlined, or globals that are read +in the target IR input file. 
The output of the index step is a series of files +on disk that indicate which sibling object files should be present when +optimizing a particular object file, for each object file in the linker +command-line. + +The `opt` step runs in parallel for every object file. Each object file will be +optimized using the compiler's optimizer (e.g. `opt`, for LLVM). The optimizer +will combine the objects that were referenced as part of the index step as +potentially profitable to include and optimize them all together. + +The `link` step takes the outputs of `opt` and links them together, like a +normal linker. + +In practice, ThinLTO manages to recapture the inherent parallelism of C/C++ +compilation by pushing the majority of work to the parallel `opt` phase of +execution. When LLVM performs ThinLTO by default, it will launch a thread pool +and process independent modules in parallel. ThinLTO does not produce as +performant a binary as a monolithic LTO; however, in practice, ThinLTO binaries +[paired with AutoFDO](https://fburl.com/wiki/q480euco) perform comparably to +monolithic LTO. Furthermore, ThinLTO's greater efficiency allows for more +expensive optimization passes to be run, which can further improve code quality near that of a monolithic LTO. -This is all great, and ThinLTO has been in use at Meta for some time. However, Buck2 has the ability to take a step -further than Buck1 could ever have - Buck2 can distribute parallel `opt` actions across many machines via Remote Execution -to achieve drastic speedups in ThinLTO wall clock time, memory usage, and incrementality. +This is all great, and ThinLTO has been in use at Meta for some time. However, +Buck2 has the ability to take a step further than Buck1 could ever have - Buck2 +can distribute parallel `opt` actions across many machines via Remote Execution +to achieve drastic speedups in ThinLTO wall clock time, memory usage, and +incrementality. 
## Buck2's Implementation -Buck2's role in a distributed ThinLTO compilation is to construct a graph of actions that directly mirrors the graph -that the `index` step outputs. The graph that the `index` step outputs is entirely dynamic and, as such, the build -system is only aware of what the graph could be after the `index` step is complete. Unlike Buck1 (or even Blaze/Bazel), -Buck2 has explicit support for this paradigm [("dynamic dependencies")](https://fburl.com/gdoc/zklwhkll). Therefore, for Buck2, the basic strategy looks like: - -1. Invoke `clang` to act as `index`. `index` will output a file for every object file that indicates what other modules -need to be present when running `opt` on the object file (an "imports file"). -2. Read imports files and construct a graph of dynamic `opt` actions whose dependencies mirror the contents of the imports files. -3. Collect the outputs from the `opt` actions and invoke the linker to produce a final binary. - -Action `2` is inherently dynamic, since it must read the contents of files produced as part of action `1`. Furthermore, -Buck2's support of `1` is complicated by the fact that certain Buck2 rules can produce an archive of object files as -an output (namely, the Rust compiler). As a result, Buck2's implementation of Distributed ThinLTO is highly dynamic. +Buck2's role in a distributed ThinLTO compilation is to construct a graph of +actions that directly mirrors the graph that the `index` step outputs. The graph +that the `index` step outputs is entirely dynamic and, as such, the build system +is only aware of what the graph could be after the `index` step is complete. +Unlike Buck1 (or even Blaze/Bazel), Buck2 has explicit support for this paradigm +[("dynamic dependencies")](https://fburl.com/gdoc/zklwhkll). Therefore, for +Buck2, the basic strategy looks like: + +1. Invoke `clang` to act as `index`. 
`index` will output a file for every object + file that indicates what other modules need to be present when running `opt` + on the object file (an "imports file"). +2. Read imports files and construct a graph of dynamic `opt` actions whose + dependencies mirror the contents of the imports files. +3. Collect the outputs from the `opt` actions and invoke the linker to produce a + final binary. + +Action `2` is inherently dynamic, since it must read the contents of files +produced as part of action `1`. Furthermore, Buck2's support of `1` is +complicated by the fact that certain Buck2 rules can produce an archive of +object files as an output (namely, the Rust compiler). As a result, Buck2's +implementation of Distributed ThinLTO is highly dynamic. Buck2's implementation contains four phases of actions: -1. `thin_lto_prepare`, which specifically handles archives containing LLVM IR and prepares them to be inputs to `thin_lto_index`, -2. `thin_lto_index`, which invokes LLVM's ThinLTO indexer to produce a imports list for every object file to be optimized, -3. `thin_lto_opt`, which optimizes each object file in parallel with its imports present, +1. `thin_lto_prepare`, which specifically handles archives containing LLVM IR + and prepares them to be inputs to `thin_lto_index`, +2. `thin_lto_index`, which invokes LLVM's ThinLTO indexer to produce a imports + list for every object file to be optimized, +3. `thin_lto_opt`, which optimizes each object file in parallel with its imports + present, 4. `thin_lto_link`, which links together the optimized code into a final binary. ### thin_lto_prepare -It is a reality of Buck2 today that some rules don't produce a statically-known list of object files. 
The list of object -files is known *a priori* during C/C++ compilation, since they have a one-to-one correspondence to source files; however, -the Rust compiler emits an archive of object files; without inspecting the archive, Buck2 has no way of knowing what -the contents of the archive are, or even if they contain bitcode at all. +It is a reality of Buck2 today that some rules don't produce a statically-known +list of object files. The list of object files is known _a priori_ during C/C++ +compilation, since they have a one-to-one correspondence to source files; +however, the Rust compiler emits an archive of object files; without inspecting +the archive, Buck2 has no way of knowing what the contents of the archive are, +or even if they contain bitcode at all. -Future steps (particularly `thin_lto_index`) are defined to only operate on a list of object files - a limitation [inherited from LLVM](https://lists.llvm.org/pipermail/llvm-dev/2019-June/133145.html). Therefore, it is the job of `thin_lto_prepare` to turn an archive into a list of objects - namely, by extracting the archive into a directory. +Future steps (particularly `thin_lto_index`) are defined to only operate on a +list of object files - a limitation +[inherited from LLVM](https://lists.llvm.org/pipermail/llvm-dev/2019-June/133145.html). +Therefore, it is the job of `thin_lto_prepare` to turn an archive into a list of +objects - namely, by extracting the archive into a directory. -Buck2 dispatches a `thin_lto_prepare` action for every archive. Each prepare action has two outputs: +Buck2 dispatches a `thin_lto_prepare` action for every archive. Each prepare +action has two outputs: -1. An **output directory** (called `objects` in the code), a directory that contains the unextracted contents of the archive. -2. A **archive manifest**, a JSON document containing a list of object files that are contained in the output directory. +1. 
An **output directory** (called `objects` in the code), a directory that + contains the unextracted contents of the archive. +2. A **archive manifest**, a JSON document containing a list of object files + that are contained in the output directory. -The core logic of this action is implemented in the Python script `dist_lto_prepare.py`, contained in the `tools` directory. In addition to unpacking each archive, Buck2 -keeps track of the list of archives as a Starlark array that will be referenced by index -in later steps. +The core logic of this action is implemented in the Python script +`dist_lto_prepare.py`, contained in the `tools` directory. In addition to +unpacking each archive, Buck2 keeps track of the list of archives as a Starlark +array that will be referenced by index in later steps. ### thin_lto_index -With all archives prepared, the next step is to invoke LLVM's ThinLTO indexer. For the purposes of Buck2, the indexer -looks like a linker; because of this, Buck2 must construct a reasonable link line. Buck2 does this by iterating over the -list of linkables that it has been given and constructing a link line from them. Uniquely for distributed ThinLTO, Buck2 -must wrap all objects that were derived from `thin_lto_prepare` (i.e. were extracted from archives) with `-Wl,--start-lib` -and `-Wl,--end-lib` to ensure that they are still treated as if they were archives by the indexer. - -Invoking the indexer is relatively straightforward in that Buck2 invokes it like it would any other linker. However, -once the indexer returns, Buck2 must post-process its output into a format that Buck2's Starlark can understand and -translate into a graph of dynamic `opt` actions. The first thing that Buck2 is write a "meta file" to disk, which -communicates inputs and outputs of `thin_lto_index` to a Python script, `dist_lto_planner.py`. The meta file contains -a list of 7-tuples, whose members are: - -1. The path to the source bitcode file. 
This is used as an index into - a dictionary that records much of the metadata coming - from these lines. +With all archives prepared, the next step is to invoke LLVM's ThinLTO indexer. +For the purposes of Buck2, the indexer looks like a linker; because of this, +Buck2 must construct a reasonable link line. Buck2 does this by iterating over +the list of linkables that it has been given and constructing a link line from +them. Uniquely for distributed ThinLTO, Buck2 must wrap all objects that were +derived from `thin_lto_prepare` (i.e. were extracted from archives) with +`-Wl,--start-lib` and `-Wl,--end-lib` to ensure that they are still treated as +if they were archives by the indexer. + +Invoking the indexer is relatively straightforward in that Buck2 invokes it like +it would any other linker. However, once the indexer returns, Buck2 must +post-process its output into a format that Buck2's Starlark can understand and +translate into a graph of dynamic `opt` actions. The first thing that Buck2 is +write a "meta file" to disk, which communicates inputs and outputs of +`thin_lto_index` to a Python script, `dist_lto_planner.py`. The meta file +contains a list of 7-tuples, whose members are: + +1. The path to the source bitcode file. This is used as an index into a + dictionary that records much of the metadata coming from these lines. 2. The path to an output file. `dist_lto_planner.py`is expected to place a - ThinLTO index file at this location (suffixed `.thinlto.bc`). -3. The path to an output plan. This script is expected to place a link - plan here (a JSON document indicating which other object files this) - object file depends on, among other things. -4. If this object file came from an archive, the index of the archive in - the Starlark archives array. + ThinLTO index file at this location (suffixed `.thinlto.bc`). +3. The path to an output plan. 
This script is expected to place a link plan here + (a JSON document indicating which other object files this) object file + depends on, among other things. +4. If this object file came from an archive, the index of the archive in the + Starlark archives array. 5. If this object file came from an archive, the name of the archive. -6. If this object file came from an archive, the path to an output plan. - This script is expected to produce an archive link plan here (a JSON) - document similar to the object link plan, except containing link - information for every file in the archive from which this object - came. +6. If this object file came from an archive, the path to an output plan. This + script is expected to produce an archive link plan here (a JSON) document + similar to the object link plan, except containing link information for every + file in the archive from which this object came. 7. If this object file came from an archive, the indexes directory of that - archive. This script is expected to place all ThinLTO indexes derived - from object files originating from this archive in that directory. - -There are two indices that are derived from this meta file: the object -index (`mapping["index"]`) and the archive index (`mapping["archive_index"]`). -These indices are indices into Starlark arrays for all objects and archive -linkables, respectively. `dist_lto_planner.py` script does not inspect them; rather, -it is expected to communicate these indices back to Starlark by writing them to the + archive. This script is expected to place all ThinLTO indexes derived from + object files originating from this archive in that directory. + +There are two indices that are derived from this meta file: the object index +(`mapping["index"]`) and the archive index (`mapping["archive_index"]`). These +indices are indices into Starlark arrays for all objects and archive linkables, +respectively. 
`dist_lto_planner.py` script does not inspect them; rather, it is +expected to communicate these indices back to Starlark by writing them to the link plan. -`dist_lto_planner.py` reads the index and imports file produced by LLVM and derives -a number of artifacts: - -1. For each object file, a `thinlto.bc` file (`bitcode_file`). This file is the same as the input bitcode file, except that LLVM has inserted a number of module imports to refer to the other modules that will be present when the object file is optimized. -2. For each object file, an optimization plan (`plan`). The optimization plan is a JSON document indicating how to construct an `opt` action for this object file. This plan includes -this object file's module imports, whether or not this file contains bitcode at all, a location to place the optimized object file, and a list of archives that this object file imported. -3. For each archive, an optimization plan (`archive_plan`), which contains optimization plans for all of the object files contained within the archive. - -This action is a dynamic action because, in the case that there are archives that needed to be preprocessed by `thin_lto_prepare`, this action must read the archive manifest. +`dist_lto_planner.py` reads the index and imports file produced by LLVM and +derives a number of artifacts: + +1. For each object file, a `thinlto.bc` file (`bitcode_file`). This file is the + same as the input bitcode file, except that LLVM has inserted a number of + module imports to refer to the other modules that will be present when the + object file is optimized. +2. For each object file, an optimization plan (`plan`). The optimization plan is + a JSON document indicating how to construct an `opt` action for this object + file. This plan includes this object file's module imports, whether or not + this file contains bitcode at all, a location to place the optimized object + file, and a list of archives that this object file imported. +3. 
For each archive, an optimization plan (`archive_plan`), which contains + optimization plans for all of the object files contained within the archive. + +This action is a dynamic action because, in the case that there are archives +that needed to be preprocessed by `thin_lto_prepare`, this action must read the +archive manifest. ### thin_lto_opt -After `thin_lto_index` completes, Buck2 launches `thin_lto_opt` actions for every object file and for every archive. For each object file, Buck2 reads that object file's optimization plan. -At this phase, it is Buck2's responsibility to declare dependencies on every object file referenced by that object's compilation plan; it does so here by adding `hidden` dependencies -on every object file and archive that the archive plan says that this object depends on. - -`thin_lto_opt` uses a Python wrapper around LLVM because of a bug (T116695431) where LTO fatal errors don't prevent `clang` from returning an exit code of zero. The Python script wraps -`clang` and exits with a non-zero exit code if `clang` produced an empty object file. - -For each archive, Buck2 reads the archive's optimization plan and constructs additional `thin_lto_opt` actions for each object file contained in the archive. Buck2 creates a directory of -symlinks (`opt_objects`) that either contains symlinks to optimized object files (if the object file contained bitcode) or the original object file (if it didn't). The purpose of this symlink directory is to allow the final link to consume object files directly -from this directory without having to know whether they were optimized or not. Paths to these files are passed to the link step -via the optimization manifest (`opt_manifest`). +After `thin_lto_index` completes, Buck2 launches `thin_lto_opt` actions for +every object file and for every archive. For each object file, Buck2 reads that +object file's optimization plan. 
At this phase, it is Buck2's responsibility to +declare dependencies on every object file referenced by that object's +compilation plan; it does so here by adding `hidden` dependencies on every +object file and archive that the archive plan says that this object depends on. + +`thin_lto_opt` uses a Python wrapper around LLVM because of a bug (T116695431) +where LTO fatal errors don't prevent `clang` from returning an exit code of +zero. The Python script wraps `clang` and exits with a non-zero exit code if +`clang` produced an empty object file. + +For each archive, Buck2 reads the archive's optimization plan and constructs +additional `thin_lto_opt` actions for each object file contained in the archive. +Buck2 creates a directory of symlinks (`opt_objects`) that either contains +symlinks to optimized object files (if the object file contained bitcode) or the +original object file (if it didn't). The purpose of this symlink directory is to +allow the final link to consume object files directly from this directory +without having to know whether they were optimized or not. Paths to these files +are passed to the link step via the optimization manifest (`opt_manifest`). ### thin_lto_link -The final link step. Similar to `thin_lto_index`, this involves creating a link line to feed to the linker that uses the optimized artifacts that we just calculated. In cases where Buck2 -would put an archive on the link line, it instead inserts `-Wl,--start-lib`, `-Wl,--end-lib`, and references to the objects in `opt_objects`. +The final link step. Similar to `thin_lto_index`, this involves creating a link +line to feed to the linker that uses the optimized artifacts that we just +calculated. In cases where Buck2 would put an archive on the link line, it +instead inserts `-Wl,--start-lib`, `-Wl,--end-lib`, and references to the +objects in `opt_objects`. 
diff --git a/prelude/haskell/ide/README.md b/prelude/haskell/ide/README.md index c7867a541..4e58eed4f 100644 --- a/prelude/haskell/ide/README.md +++ b/prelude/haskell/ide/README.md @@ -1,13 +1,14 @@ # Haskell Language Server integration This integration allows loading `haskell_binary` and `haskell_library` targets -on Haskell Language Server. This is accomplished via a BXL script that is -used to drive a hie-bios "bios" cradle. +on Haskell Language Server. This is accomplished via a BXL script that is used +to drive a hie-bios "bios" cradle. # Usage To print the list of GHC flags and targets for a Haskell source file: - buck2 bxl prelude//haskell/ide/ide.bxl -- --bios true --file +buck2 bxl prelude//haskell/ide/ide.bxl -- --bios true --file + To integrate with hie_bios, copy `hie.yaml` to your repo root diff --git a/prelude/pull_request_template.md b/prelude/pull_request_template.md index 1554e0ee1..ab8b59797 100644 --- a/prelude/pull_request_template.md +++ b/prelude/pull_request_template.md @@ -1,3 +1,7 @@ -IMPORTANT: Please don't raise pull requests here, but at [facebook/buck2](https://github.com/facebook/buck2/pulls). +IMPORTANT: Please don't raise pull requests here, but at +[facebook/buck2](https://github.com/facebook/buck2/pulls). -The [`prelude`](https://github.com/facebook/buck2/tree/main/prelude) directory is a mirror of this repo, but that repo also features CI tests and is more actively monitored. Any PR's landing there will automatically show up here at the same time. +The [`prelude`](https://github.com/facebook/buck2/tree/main/prelude) directory +is a mirror of this repo, but that repo also features CI tests and is more +actively monitored. Any PR's landing there will automatically show up here at +the same time. 
From 3ff0c42030c5dee82b6f209bbef641918d6c5169 Mon Sep 17 00:00:00 2001 From: Jacob Rodal Date: Thu, 8 Feb 2024 15:05:59 -0800 Subject: [PATCH 0258/1133] fix filter_dex long input line Summary: ## What - Introduce argsfile to the filter_dex command ## Why - Windows doesn't like long command line inputs Reviewed By: navidqar Differential Revision: D53584904 fbshipit-source-id: c26098c8b2dea4f79d7925196533b55a3e26a848 --- prelude/android/dex_rules.bzl | 9 +++++++-- prelude/android/tools/filter_dex.py | 3 ++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/prelude/android/dex_rules.bzl b/prelude/android/dex_rules.bzl index af03ea65d..02184f174 100644 --- a/prelude/android/dex_rules.bzl +++ b/prelude/android/dex_rules.bzl @@ -365,8 +365,7 @@ def _filter_pre_dexed_libs( batch_number: int) -> DexInputsWithClassNamesAndWeightEstimatesFile: weight_estimate_and_filtered_class_names_file = actions.declare_output("class_names_and_weight_estimates_for_batch_{}".format(batch_number)) - filter_dex_cmd = cmd_args([ - android_toolchain.filter_dex_class_names[RunInfo], + filter_dex_cmd_args = cmd_args([ "--primary-dex-patterns", primary_dex_patterns_file, "--dex-target-identifiers", @@ -378,6 +377,12 @@ def _filter_pre_dexed_libs( "--output", weight_estimate_and_filtered_class_names_file.as_output(), ]) + filter_dex_cmd_argsfile = actions.write("filter_dex_cmd_args_{}".format(batch_number), filter_dex_cmd_args) + + filter_dex_cmd = cmd_args([ + android_toolchain.filter_dex_class_names[RunInfo], + cmd_args(filter_dex_cmd_argsfile, format = "@{}").hidden(filter_dex_cmd_args), + ]) actions.run(filter_dex_cmd, category = "filter_dex", identifier = "batch_{}".format(batch_number)) return DexInputsWithClassNamesAndWeightEstimatesFile(libs = pre_dexed_libs, weight_estimate_and_filtered_class_names_file = weight_estimate_and_filtered_class_names_file) diff --git a/prelude/android/tools/filter_dex.py b/prelude/android/tools/filter_dex.py index e26d507e0..808f586e2 100644 --- 
a/prelude/android/tools/filter_dex.py +++ b/prelude/android/tools/filter_dex.py @@ -72,7 +72,8 @@ def class_name_matches_filter(self, class_name): def _parse_args(): parser = argparse.ArgumentParser( - description="Tool to filter a dex for primary class names." + description="Tool to filter a dex for primary class names.", + fromfile_prefix_chars="@", ) parser.add_argument( From 3f065d0e19ed506d9923e2144f7a76199b4fd499 Mon Sep 17 00:00:00 2001 From: Vitalii Nikonorov Date: Fri, 9 Feb 2024 02:24:20 -0800 Subject: [PATCH 0259/1133] Add support of re_opts_use_case for AIT with buck2 Summary: This diff adds support of a `re_opts_use_case` label to android instrumentation tests being run by buck2 to specify RE use case, similar to that of Buck1 Adding the change to unblock buck2 support for WA4A since there is no strong opinion on how to proceed with it: https://fb.workplace.com/groups/1042353022615812/permalink/2457693211081779/ Reviewed By: IanChilds Differential Revision: D53519562 fbshipit-source-id: 08807f4c77607d03569f7c1e9ea6859dbf569f2d --- prelude/android/android_instrumentation_test.bzl | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/prelude/android/android_instrumentation_test.bzl b/prelude/android/android_instrumentation_test.bzl index 1fd2e61be..f14709ac3 100644 --- a/prelude/android/android_instrumentation_test.bzl +++ b/prelude/android/android_instrumentation_test.bzl @@ -15,6 +15,7 @@ load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") DEFAULT_ANDROID_SUBPLATFORM = "android-30" DEFAULT_ANDROID_PLATFORM = "android-emulator" +DEFAULT_ANDROID_INSTRUMENTATION_TESTS_USE_CASE = "instrumentation-tests" def android_instrumentation_test_impl(ctx: AnalysisContext): android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] @@ -95,7 +96,7 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): "platform": _compute_emulator_platform(ctx.attrs.labels or []), "subplatform": 
_compute_emulator_subplatform(ctx.attrs.labels or []), }, - remote_execution_use_case = "instrumentation-tests", + remote_execution_use_case = _compute_re_use_case(ctx.attrs.labels or []), ), "static-listing": CommandExecutorConfig( local_enabled = True, @@ -133,3 +134,11 @@ def _compute_emulator_platform(labels: list[str]) -> str: return DEFAULT_ANDROID_PLATFORM else: # len(emulator_platform_labels) == 1: return emulator_platform_labels[0].replace("re_platform_", "") + +def _compute_re_use_case(labels: list[str]) -> str: + re_use_case_labels = [label for label in labels if label.startswith("re_opts_use_case=")] + expect(len(re_use_case_labels) <= 1, "multiple 're_opts_use_case' labels were found:[{}], there must be only one!".format(", ".join(re_use_case_labels))) + if len(re_use_case_labels) == 0: + return DEFAULT_ANDROID_INSTRUMENTATION_TESTS_USE_CASE + else: # len(re_use_case_labels) == 1: + return re_use_case_labels[0].replace("re_opts_use_case=", "") From ea6d376d03353e66236f4e2f70a662994a3a438a Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Fri, 9 Feb 2024 03:26:13 -0800 Subject: [PATCH 0260/1133] enable pyre and add annotations to info plist tools Summary: ditto Reviewed By: milend Differential Revision: D53570450 fbshipit-source-id: 3ad5e2cfde21cd17c4a41e8eb0e897728a827a39 --- .../apple/tools/info_plist_processor/main.py | 12 ++++++++---- .../tools/info_plist_processor/preprocess.py | 18 ++++++++++++++---- .../tools/info_plist_processor/process.py | 12 ++++++------ prelude/apple/tools/plistlib_utils.py | 7 ++++--- 4 files changed, 32 insertions(+), 17 deletions(-) diff --git a/prelude/apple/tools/info_plist_processor/main.py b/prelude/apple/tools/info_plist_processor/main.py index b1d3e6b67..157652f60 100644 --- a/prelude/apple/tools/info_plist_processor/main.py +++ b/prelude/apple/tools/info_plist_processor/main.py @@ -19,7 +19,9 @@ class _SubcommandName(str, Enum): process = "process" -def _create_preprocess_subparser(subparsers): +def 
_create_preprocess_subparser( + subparsers: "argparse._SubParsersAction[argparse.ArgumentParser]", +) -> None: parser = subparsers.add_parser( _SubcommandName.preprocess.value, description="Sub-command to expand macro variables in parametrized Info.plist files. It's the Buck v2 equivalent of what `FindAndReplaceStep` and `InfoPlistSubstitution` do.", @@ -53,7 +55,9 @@ def _create_preprocess_subparser(subparsers): ) -def _create_process_subparser(subparsers): +def _create_process_subparser( + subparsers: "argparse._SubParsersAction[argparse.ArgumentParser]", +) -> None: parser = subparsers.add_parser( _SubcommandName.process.value, description="Sub-command to do the final processing of the Info.plist before it's copied to the application bundle. It's the Buck v2 equivalent of what `PlistProcessStep` does in v1.", @@ -92,7 +96,7 @@ def _create_process_subparser(subparsers): ) -def _parse_args(): +def _parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser( description="Tool to process Info.plist file before it is placed into the bundle. It's the Buck v2 equivalent of what `AppleInfoPlist` build rule from v1 does." 
) @@ -102,7 +106,7 @@ def _parse_args(): return parser.parse_args() -def main(): +def main() -> None: args = _parse_args() if args.subcommand_name == _SubcommandName.preprocess: with ExitStack() as stack: diff --git a/prelude/apple/tools/info_plist_processor/preprocess.py b/prelude/apple/tools/info_plist_processor/preprocess.py index 18b8e71a9..36cf9b231 100644 --- a/prelude/apple/tools/info_plist_processor/preprocess.py +++ b/prelude/apple/tools/info_plist_processor/preprocess.py @@ -8,6 +8,7 @@ import json import re from enum import Enum +from typing import Dict, TextIO class _ReGroupName(str, Enum): @@ -17,7 +18,7 @@ class _ReGroupName(str, Enum): closeparen = "closeparen" -_re_string = "\\$(?P<{openparen}>[\\{{\\(])(?P<{variable}>[^\\}}\\):]+)(?::(?P<{modifier}>[^\\}}\\)]+))?(?P<{closeparen}>[\\}}\\)])".format( +_re_string: str = "\\$(?P<{openparen}>[\\{{\\(])(?P<{variable}>[^\\}}\\):]+)(?::(?P<{modifier}>[^\\}}\\)]+))?(?P<{closeparen}>[\\}}\\)])".format( openparen=_ReGroupName.openparen, variable=_ReGroupName.variable, modifier=_ReGroupName.modifier, @@ -25,7 +26,9 @@ class _ReGroupName(str, Enum): ) -def _make_substitution_dict(substitutions_json_file, product_name): +def _make_substitution_dict( + substitutions_json_file: TextIO, product_name: str +) -> Dict[str, str]: result = { "EXECUTABLE_NAME": product_name, "PRODUCT_NAME": product_name, @@ -36,7 +39,9 @@ def _make_substitution_dict(substitutions_json_file, product_name): return result -def _process_line(line, pattern, substitutions): +def _process_line( + line: str, pattern: re.Pattern[str], substitutions: Dict[str, str] +) -> str: result = line pos = 0 substituted_keys = set() @@ -62,7 +67,12 @@ def _process_line(line, pattern, substitutions): return result -def preprocess(input_file, output_file, substitutions_file, product_name): +def preprocess( + input_file: TextIO, + output_file: TextIO, + substitutions_file: TextIO, + product_name: str, +) -> None: pattern = re.compile(_re_string) substitutions = 
_make_substitution_dict(substitutions_file, product_name) for line in input_file: diff --git a/prelude/apple/tools/info_plist_processor/process.py b/prelude/apple/tools/info_plist_processor/process.py index 91f1d5d38..bca05d93a 100644 --- a/prelude/apple/tools/info_plist_processor/process.py +++ b/prelude/apple/tools/info_plist_processor/process.py @@ -7,7 +7,7 @@ import json import plistlib -from typing import Any, Dict, IO, Optional +from typing import Any, Dict, IO, Optional, TextIO from apple.tools.plistlib_utils import detect_format_and_load @@ -26,12 +26,12 @@ def _merge_plist_dicts( def process( - input_file: IO, - output_file: IO, - override_input_file: Optional[IO] = None, + input_file: IO[bytes], + output_file: IO[bytes], + override_input_file: Optional[IO[bytes]] = None, additional_keys: Optional[Dict[str, Any]] = None, - additional_keys_file: Optional[IO] = None, - override_keys_file: Optional[IO] = None, + additional_keys_file: Optional[TextIO] = None, + override_keys_file: Optional[TextIO] = None, output_format: plistlib.PlistFormat = plistlib.FMT_BINARY, ) -> None: root = detect_format_and_load(input_file) diff --git a/prelude/apple/tools/plistlib_utils.py b/prelude/apple/tools/plistlib_utils.py index 2f927c38c..63ea7a356 100644 --- a/prelude/apple/tools/plistlib_utils.py +++ b/prelude/apple/tools/plistlib_utils.py @@ -7,13 +7,14 @@ import plistlib from io import BytesIO +from typing import Any, Dict, IO -def _is_fmt_binary(header): +def _is_fmt_binary(header: bytes) -> bool: return header[:8] == b"bplist00" -def detect_format_and_load(fp): +def detect_format_and_load(fp: IO[bytes]) -> Dict[str, Any]: header = fp.read(32) fp.seek(0) if _is_fmt_binary(header): @@ -23,6 +24,6 @@ def detect_format_and_load(fp): return plistlib.load(fp, fmt=fmt) -def detect_format_and_loads(value): +def detect_format_and_loads(value: bytes) -> Dict[str, Any]: fp = BytesIO(value) return detect_format_and_load(fp) From 71ebd3d14810a6aecf8c46794d3ff0e04317ff08 Mon Sep 17 
00:00:00 2001 From: Chatura Atapattu Date: Fri, 9 Feb 2024 03:59:08 -0800 Subject: [PATCH 0261/1133] Add MacCatalyst to MobileCoreServices supported SDKs Summary: This is supported from MacCatalyst 14.0 on: https://developer.apple.com/documentation/uniformtypeidentifiers When building the Internal Catalust app: ``` chatatap@chatatap-mac ~/fbsource (default) [3]> buck2 build fbsource//fbobjc/mode/workchat-catalyst -c cxx.default_platform=maccatalyst-x86_64 //fbobjc/Apps/AtWorkMessenger:AtWorkMessenger-dmg Buck UI: https://www.internalfb.com/buck2/5c8c5ea4-874d-4d33-a531-0efd002f764e Note: Using experimental modern dice Jobs completed: 3. Time elapsed: 0.0s. BUILD FAILED Error running analysis for `fbsource//fbobjc/Apps/AtWorkMessenger:AtWorkMessenger-dmg (fbsource//fbobjc/Apps/AtWorkMessenger/Platforms:AtWorkMessengerCatalyst-maccatalyst-x86_64-local#be8a9aee662efc88)` Caused by: 0: Error running analysis for `fbsource//fbobjc/Apps/AtWorkMessenger:AtWorkMessenger-dmgBinary (fbsource//fbobjc/Apps/AtWorkMessenger/Platforms:AtWorkMessengerCatalyst-maccatalyst-x86_64-local#be8a9aee662efc88)` 1: Error running analysis for `fbsource//fbobjc/Apps/AtWorkMessenger/Libraries/WPMReorderChatsWithinThreadFolderIntentHandler:WPMReorderChatsWithinThreadFolderIntentHandler (fbsource//fbobjc/Apps/AtWorkMessenger/Platforms:AtWorkMessengerCatalyst-maccatalyst-x86_64-local#be8a9aee662efc88)` 2: Error running analysis for `fbsource//fbobjc/Apps/AtWorkMessenger/Libraries/WPMDraftsKit:WPMDraftsKit (fbsource//fbobjc/Apps/AtWorkMessenger/Platforms:AtWorkMessengerCatalyst-maccatalyst-x86_64-local#be8a9aee662efc88)` 3: Error running analysis for `fbsource//fbobjc/Libraries/Lexical:Lexical (fbsource//fbobjc/Apps/AtWorkMessenger/Platforms:AtWorkMessengerCatalyst-maccatalyst-x86_64-local#be8a9aee662efc88)` 4: Traceback (most recent call last): File , in * fbcode/buck2/prelude/apple/apple_library.bzl:134, in apple_library_impl return get_apple_library_providers([]) * 
fbcode/buck2/prelude/apple/apple_library.bzl:128, in get_apple_library_providers output = cxx_library_parameterized(ctx, constructor_params) * fbcode/buck2/prelude/cxx/cxx_library.bzl:464, in cxx_library_parameterized dep_infos, link_group_map, link_execution_preference = _get_shared_library_li... * fbcode/buck2/prelude/cxx/cxx_library.bzl:1138, in _get_shared_library_links return apple_build_link_args_with_deduped_flags( * fbcode/buck2/prelude/apple/apple_frameworks.bzl:137, in apple_build_link_args_with_deduped_flags link_info = _apple_link_info_from_linkables( * fbcode/buck2/prelude/apple/apple_frameworks.bzl:192, in _apple_link_info_from_linkables framework_link_args = _get_apple_frameworks_linker_flags(ctx, merge_framework... * fbcode/buck2/prelude/apple/apple_frameworks.bzl:50, in _get_apple_frameworks_linker_flags flags.add(get_framework_linker_args(ctx, linkable.framework_names)) * fbcode/buck2/prelude/apple/apple_framework_versions.bzl:1040, in get_framework_linker_args fail("SDK framework {} is not compatible with platform {}".format(name, sdk_n... 
error: fail: SDK framework MobileCoreServices is not compatible with platform maccatalyst --> fbcode/buck2/prelude/apple/apple_framework_versions.bzl:1040:17 | 1040 | fail("SDK framework {} is not compatible with platform {}".format(name, sdk_name)) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ``` Reviewed By: rmaz Differential Revision: D53141459 fbshipit-source-id: 6405ac5e5decc17233337b38023b49408af7569a --- prelude/apple/apple_framework_versions.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/apple/apple_framework_versions.bzl b/prelude/apple/apple_framework_versions.bzl index 3f6761b8a..9b86b910f 100644 --- a/prelude/apple/apple_framework_versions.bzl +++ b/prelude/apple/apple_framework_versions.bzl @@ -660,6 +660,7 @@ _FRAMEWORK_INTRODUCED_VERSIONS = { "MobileCoreServices": { "appletvos": (9, 0, 0), "iphoneos": (2, 0, 0), + "maccatalyst": (14, 0, 0), "watchos": (1, 0, 0), }, "ModelIO": { From a9589ae08559d40e300883c3ab6789e64c4b74d6 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Fri, 9 Feb 2024 09:42:41 -0800 Subject: [PATCH 0262/1133] add type annotations to codesign tools Summary: ditto Reviewed By: milend Differential Revision: D53571225 fbshipit-source-id: d6f94d3d1fc497d12c52c3d68a105f71c2489f5f --- prelude/apple/tools/code_signing/app_id.py | 4 ++-- .../tools/code_signing/codesign_bundle.py | 8 ++++---- .../code_signing/codesign_command_factory.py | 15 ++++++++++----- prelude/apple/tools/code_signing/fast_adhoc.py | 10 ++++++---- prelude/apple/tools/code_signing/identity.py | 4 ++-- .../tools/code_signing/info_plist_metadata.py | 2 +- .../code_signing/list_codesign_identities.py | 2 +- prelude/apple/tools/code_signing/main.py | 2 +- .../provisioning_profile_diagnostics.py | 14 +++++++------- .../provisioning_profile_metadata.py | 18 ++++++++++-------- .../provisioning_profile_selection.py | 8 ++++---- 11 files changed, 48 insertions(+), 39 deletions(-) diff --git 
a/prelude/apple/tools/code_signing/app_id.py b/prelude/apple/tools/code_signing/app_id.py index fbd70e517..2e0e0b3b3 100644 --- a/prelude/apple/tools/code_signing/app_id.py +++ b/prelude/apple/tools/code_signing/app_id.py @@ -22,11 +22,11 @@ class _ReGroupName(str, Enum): team_id = "team_id" bundle_id = "bundle_id" - _re_string = "^(?P<{team_id}>[A-Z0-9]{{10}})\\.(?P<{bundle_id}>.+)$".format( + _re_string: str = "^(?P<{team_id}>[A-Z0-9]{{10}})\\.(?P<{bundle_id}>.+)$".format( team_id=_ReGroupName.team_id, bundle_id=_ReGroupName.bundle_id, ) - _re_pattern = re.compile(_re_string) + _re_pattern: re.Pattern[str] = re.compile(_re_string) # Takes a application identifier and splits it into Team ID and bundle ID. # Prefix is always a ten-character alphanumeric sequence. Bundle ID may be a fully-qualified name or a wildcard ending in *. diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 6eb5be341..8f91e32de 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -50,7 +50,7 @@ DefaultReadProvisioningProfileCommandFactory() ) -_LOGGER = logging.getLogger(__name__) +_LOGGER: logging.Logger = logging.getLogger(__name__) def _select_provisioning_profile( @@ -99,7 +99,7 @@ def _select_provisioning_profile( class AdhocSigningContext: codesign_identity: str - def __init__(self, codesign_identity: Optional[str] = None): + def __init__(self, codesign_identity: Optional[str] = None) -> None: self.codesign_identity = codesign_identity or "-" @@ -238,7 +238,7 @@ def _read_provisioning_profiles( def _decode_provisioning_profiles( - paths: [Path], + paths: List[Path], tmp_dir: str, read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, ) -> Dict[Path, bytes]: @@ -384,7 +384,7 @@ def _codesign_everything( @dataclass class ParallelProcess: - process: subprocess.Popen + process: subprocess.Popen[bytes] stdout_path: 
Optional[str] stderr_path: str diff --git a/prelude/apple/tools/code_signing/codesign_command_factory.py b/prelude/apple/tools/code_signing/codesign_command_factory.py index 894b07a14..bf77cbfb1 100644 --- a/prelude/apple/tools/code_signing/codesign_command_factory.py +++ b/prelude/apple/tools/code_signing/codesign_command_factory.py @@ -23,9 +23,10 @@ def codesign_command( class DefaultCodesignCommandFactory(ICodesignCommandFactory): - _command_args = ["--force", "--sign"] + codesign_tool: Path + _command_args: List[str] = ["--force", "--sign"] - def __init__(self, codesign_tool: Optional[Path]): + def __init__(self, codesign_tool: Optional[Path]) -> None: self.codesign_tool = codesign_tool or Path("codesign") def codesign_command( @@ -47,7 +48,10 @@ def codesign_command( class DryRunCodesignCommandFactory(ICodesignCommandFactory): - def __init__(self, codesign_tool: Path): + codesign_tool: Path + codesign_on_copy_file_paths: Optional[List[Path]] + + def __init__(self, codesign_tool: Path) -> None: self.codesign_tool = codesign_tool self.codesign_on_copy_file_paths = None @@ -64,7 +68,8 @@ def codesign_command( args = [path, "--identity", identity_fingerprint] if entitlements: args += ["--entitlements", entitlements] if entitlements else [] - if self.codesign_on_copy_file_paths: + codesign_on_copy_file_paths = self.codesign_on_copy_file_paths + if codesign_on_copy_file_paths: args += ["--extra-paths-to-sign"] - args += self.codesign_on_copy_file_paths + args += codesign_on_copy_file_paths return [self.codesign_tool] + args diff --git a/prelude/apple/tools/code_signing/fast_adhoc.py b/prelude/apple/tools/code_signing/fast_adhoc.py index 8d3fb16c6..e752232da 100644 --- a/prelude/apple/tools/code_signing/fast_adhoc.py +++ b/prelude/apple/tools/code_signing/fast_adhoc.py @@ -11,11 +11,11 @@ import sys from pathlib import Path -from typing import Optional +from typing import List, Optional, Union from .apple_platform import ApplePlatform -_LOGGER = 
logging.getLogger(__name__) +_LOGGER: logging.Logger = logging.getLogger(__name__) def _find_executable_for_signed_path(path: Path, platform: ApplePlatform) -> Path: @@ -29,7 +29,9 @@ def _find_executable_for_signed_path(path: Path, platform: ApplePlatform) -> Pat return contents_dir / path.stem -def _logged_subprocess_run(name, why, args): +def _logged_subprocess_run( + name: str, why: str, args: List[Union[str, Path]] +) -> subprocess.CompletedProcess[str]: _LOGGER.info(f" Calling {name} to {why}: `{args}`") result = subprocess.run( args, @@ -74,7 +76,7 @@ def should_skip_adhoc_signing_path( identity_fingerprint: str, entitlements_path: Optional[Path], platform: ApplePlatform, -): +) -> bool: logging.getLogger(__name__).info( f"Checking if should skip adhoc signing path `{path}` with identity `{identity_fingerprint}` and entitlements `{entitlements_path}` for platform `{platform}`" ) diff --git a/prelude/apple/tools/code_signing/identity.py b/prelude/apple/tools/code_signing/identity.py index 7893a6ff9..ed6ba5827 100644 --- a/prelude/apple/tools/code_signing/identity.py +++ b/prelude/apple/tools/code_signing/identity.py @@ -22,12 +22,12 @@ class _ReGroupName(str, Enum): fingerprint = "fingerprint" subject_common_name = "subject_common_name" - _re_string = '(?P<{fingerprint}>[A-F0-9]{{40}}) "(?P<{subject_common_name}>.+)"(?!.*CSSMERR_.+)'.format( + _re_string: str = '(?P<{fingerprint}>[A-F0-9]{{40}}) "(?P<{subject_common_name}>.+)"(?!.*CSSMERR_.+)'.format( fingerprint=_ReGroupName.fingerprint, subject_common_name=_ReGroupName.subject_common_name, ) - _pattern = re.compile(_re_string) + _pattern: re.Pattern[str] = re.compile(_re_string) @classmethod def parse_security_stdout(cls, text: str) -> List[CodeSigningIdentity]: diff --git a/prelude/apple/tools/code_signing/info_plist_metadata.py b/prelude/apple/tools/code_signing/info_plist_metadata.py index 21942eecb..75f666fba 100644 --- a/prelude/apple/tools/code_signing/info_plist_metadata.py +++ 
b/prelude/apple/tools/code_signing/info_plist_metadata.py @@ -20,7 +20,7 @@ class InfoPlistMetadata: is_watchos_app: bool @staticmethod - def from_file(info_plist_file: IO) -> InfoPlistMetadata: + def from_file(info_plist_file: IO[bytes]) -> InfoPlistMetadata: root = detect_format_and_load(info_plist_file) return InfoPlistMetadata( root["CFBundleIdentifier"], diff --git a/prelude/apple/tools/code_signing/list_codesign_identities.py b/prelude/apple/tools/code_signing/list_codesign_identities.py index e622a663b..b072ebb47 100644 --- a/prelude/apple/tools/code_signing/list_codesign_identities.py +++ b/prelude/apple/tools/code_signing/list_codesign_identities.py @@ -24,7 +24,7 @@ def list_codesign_identities(self) -> List[CodeSigningIdentity]: class ListCodesignIdentities(IListCodesignIdentities): _default_command = ["security", "find-identity", "-v", "-p", "codesigning"] - def __init__(self, command: List[str]): + def __init__(self, command: List[str]) -> None: self.command = command @classmethod diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index b1189b37b..c0faf2d74 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -87,7 +87,7 @@ def decorate_error_message(message: str) -> str: return " ".join(["❗️", message]) -def _main(): +def _main() -> None: args = _args_parser().parse_args() try: if args.ad_hoc: diff --git a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py index 4021c245f..625a850a7 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py @@ -25,7 +25,7 @@ class IProvisioningProfileDiagnostics(metaclass=ABCMeta): profile: ProvisioningProfileMetadata - def __init__(self, profile: ProvisioningProfileMetadata): + def __init__(self, profile: ProvisioningProfileMetadata) -> 
None: self.profile = profile @abstractmethod @@ -42,7 +42,7 @@ def __init__( profile: ProvisioningProfileMetadata, team_id: str, team_id_constraint: str, - ): + ) -> None: super().__init__(profile) self.team_id = team_id self.team_id_constraint = team_id_constraint @@ -60,7 +60,7 @@ def __init__( profile: ProvisioningProfileMetadata, bundle_id: str, bundle_id_constraint: str, - ): + ) -> None: super().__init__(profile) self.bundle_id = bundle_id self.bundle_id_constraint = bundle_id_constraint @@ -76,7 +76,7 @@ def __init__( self, profile: ProvisioningProfileMetadata, bundle_id_match_length: int, - ): + ) -> None: super().__init__(profile) self.bundle_id_match_length = bundle_id_match_length @@ -93,7 +93,7 @@ def __init__( profile: ProvisioningProfileMetadata, bundle_id_match_length: int, platform_constraint: ApplePlatform, - ): + ) -> None: super().__init__(profile) self.bundle_id_match_length = bundle_id_match_length self.platform_constraint = platform_constraint @@ -114,7 +114,7 @@ def __init__( bundle_id_match_length: int, mismatched_key: str, mismatched_value: str, - ): + ) -> None: super().__init__(profile) self.bundle_id_match_length = bundle_id_match_length self.mismatched_key = mismatched_key @@ -131,7 +131,7 @@ def __init__( self, profile: ProvisioningProfileMetadata, bundle_id_match_length: int, - ): + ) -> None: super().__init__(profile) self.bundle_id_match_length = bundle_id_match_length diff --git a/prelude/apple/tools/code_signing/provisioning_profile_metadata.py b/prelude/apple/tools/code_signing/provisioning_profile_metadata.py index 331ded4e7..d8b05ad73 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_metadata.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_metadata.py @@ -11,7 +11,7 @@ from dataclasses import dataclass from datetime import datetime from pathlib import Path -from typing import Any, Dict, Set +from typing import Any, Dict, FrozenSet, Set from apple.tools.plistlib_utils import 
detect_format_and_loads @@ -30,13 +30,15 @@ class ProvisioningProfileMetadata: developer_certificate_fingerprints: Set[str] entitlements: Dict[str, Any] - _mergeable_entitlements_keys = { - "application-identifier", - "beta-reports-active", - "get-task-allow", - "com.apple.developer.aps-environment", - "com.apple.developer.team-identifier", - } + _mergeable_entitlements_keys: FrozenSet[str] = frozenset( + [ + "application-identifier", + "beta-reports-active", + "get-task-allow", + "com.apple.developer.aps-environment", + "com.apple.developer.team-identifier", + ] + ) # See `ProvisioningProfileMetadataFactory::getAppIDFromEntitlements` from `ProvisioningProfileMetadataFactory.java` in Buck v1 def get_app_id(self) -> AppId: diff --git a/prelude/apple/tools/code_signing/provisioning_profile_selection.py b/prelude/apple/tools/code_signing/provisioning_profile_selection.py index 0858efb87..7358b2d53 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_selection.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_selection.py @@ -25,7 +25,7 @@ ) from .provisioning_profile_metadata import ProvisioningProfileMetadata -_LOGGER = logging.getLogger(__name__) +_LOGGER: logging.Logger = logging.getLogger(__name__) class CodeSignProvisioningError(Exception): @@ -45,8 +45,8 @@ def _parse_team_id_from_entitlements( def _matches_or_array_is_subset_of( entitlement_name: str, - expected_value: Any, - actual_value: Any, + expected_value: object, + actual_value: object, platform: ApplePlatform, ) -> bool: if expected_value is None: @@ -170,7 +170,7 @@ def select_best_provisioning_profile( result = None # Used for error messages - diagnostics = [] + diagnostics: List[IProvisioningProfileDiagnostics] = [] def log_mismatched_profile(mismatch: IProvisioningProfileDiagnostics) -> None: diagnostics.append(mismatch) From e1e1dfd280bb0472b66710206a580f939164932f Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Fri, 9 Feb 2024 09:42:41 -0800 Subject: [PATCH 
0263/1133] add type annotations to bundling tools Summary: ditto Reviewed By: milend Differential Revision: D53609687 fbshipit-source-id: e0e9bc7b9dd84a37ee5faf9f143c8fc2fdbc2658 --- prelude/apple/tools/bundling/action_metadata.py | 4 ++-- prelude/apple/tools/bundling/assemble_bundle.py | 8 ++++---- .../apple/tools/bundling/assemble_bundle_types.py | 12 +++++++----- prelude/apple/tools/bundling/incremental_state.py | 6 +++--- prelude/apple/tools/bundling/incremental_utils.py | 2 +- prelude/apple/tools/bundling/main.py | 10 +++++----- prelude/apple/tools/code_signing/codesign_bundle.py | 6 +++--- .../apple/tools/re_compatibility_utils/writable.py | 4 ++-- 8 files changed, 27 insertions(+), 25 deletions(-) diff --git a/prelude/apple/tools/bundling/action_metadata.py b/prelude/apple/tools/bundling/action_metadata.py index 8f73315f8..56569e8bf 100644 --- a/prelude/apple/tools/bundling/action_metadata.py +++ b/prelude/apple/tools/bundling/action_metadata.py @@ -10,7 +10,7 @@ from dataclasses import dataclass from io import TextIOBase from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union _METADATA_VERSION = 1 @@ -27,7 +27,7 @@ class _Metadata: digests: List[_Item] -def _object_hook(dict: Dict[str, Any]) -> Any: +def _object_hook(dict: Dict[str, Any]) -> Union[_Item, _Metadata]: if "version" in dict: return _Metadata(**dict) else: diff --git a/prelude/apple/tools/bundling/assemble_bundle.py b/prelude/apple/tools/bundling/assemble_bundle.py index 86ef7d409..484d67045 100644 --- a/prelude/apple/tools/bundling/assemble_bundle.py +++ b/prelude/apple/tools/bundling/assemble_bundle.py @@ -9,7 +9,7 @@ import os import shutil from pathlib import Path -from typing import cast, Dict, List, Optional +from typing import Any, cast, Dict, List, Optional from .assemble_bundle_types import BundleSpecItem, IncrementalContext from .incremental_state import IncrementalState, IncrementalStateItem @@ -18,7 +18,7 @@ 
should_assemble_incrementally, ) -_LOGGER = logging.getLogger(__name__) +_LOGGER: logging.Logger = logging.getLogger(__name__) def assemble_bundle( @@ -63,9 +63,9 @@ def _assemble_non_incrementally( logging.getLogger(__name__).info("Assembling bundle non-incrementally.") _cleanup_output(incremental=False, path=bundle_path) - copied_contents = {} + copied_contents: Dict[Path, str] = {} - def _copy(src, dst, **kwargs) -> None: + def _copy(src: str, dst: Path, **kwargs: Any) -> None: if check_conflicts: if dst in copied_contents: raise RuntimeError( diff --git a/prelude/apple/tools/bundling/assemble_bundle_types.py b/prelude/apple/tools/bundling/assemble_bundle_types.py index 2f0ea7597..c304c4f09 100644 --- a/prelude/apple/tools/bundling/assemble_bundle_types.py +++ b/prelude/apple/tools/bundling/assemble_bundle_types.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +from __future__ import annotations + import functools from dataclasses import dataclass from pathlib import Path @@ -23,21 +25,21 @@ class BundleSpecItem: dst: str codesign_on_copy: bool = False - def __eq__(self, other) -> bool: + def __eq__(self: BundleSpecItem, other: Optional[BundleSpecItem]) -> bool: return ( - other + other is not None and self.src == other.src and self.dst == other.dst and self.codesign_on_copy == other.codesign_on_copy ) - def __ne__(self, other) -> bool: + def __ne__(self: BundleSpecItem, other: BundleSpecItem) -> bool: return not self.__eq__(other) - def __hash__(self) -> int: + def __hash__(self: BundleSpecItem) -> int: return hash((self.src, self.dst, self.codesign_on_copy)) - def __lt__(self, other) -> bool: + def __lt__(self: BundleSpecItem, other: BundleSpecItem) -> bool: return ( self.src < other.src or self.dst < other.dst diff --git a/prelude/apple/tools/bundling/incremental_state.py b/prelude/apple/tools/bundling/incremental_state.py index d35daecf5..e2bd67fbb 100644 --- 
a/prelude/apple/tools/bundling/incremental_state.py +++ b/prelude/apple/tools/bundling/incremental_state.py @@ -9,7 +9,7 @@ from dataclasses import dataclass from io import TextIOBase from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union from apple.tools.code_signing.codesign_bundle import CodesignConfiguration @@ -49,7 +49,7 @@ class IncrementalState: class IncrementalStateJSONEncoder(json.JSONEncoder): - def default(self, o: Any) -> Any: + def default(self, o: object) -> object: if isinstance(o, IncrementalState): return { "items": [self.default(i) for i in o.items], @@ -76,7 +76,7 @@ def default(self, o: Any) -> Any: return super().default(o) -def _object_hook(dict: Dict[str, Any]) -> Any: +def _object_hook(dict: Dict[str, Any]) -> Union[IncrementalState, IncrementalStateItem]: if "version" in dict: dict["codesign_on_copy_paths"] = [ Path(p) for p in dict.pop("codesign_on_copy_paths") diff --git a/prelude/apple/tools/bundling/incremental_utils.py b/prelude/apple/tools/bundling/incremental_utils.py index a92b6463f..df5af7584 100644 --- a/prelude/apple/tools/bundling/incremental_utils.py +++ b/prelude/apple/tools/bundling/incremental_utils.py @@ -80,7 +80,7 @@ def should_assemble_incrementally( def _codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build( previously_codesigned_on_copy_paths: Set[Path], all_input_files: Set[Path], -): +) -> Set[Path]: all_input_files_and_directories = all_input_files | { i for file in all_input_files for i in file.parents } diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index c4fd7d724..f9d54ad98 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -13,7 +13,7 @@ import shlex import sys from pathlib import Path -from typing import List, Optional +from typing import Dict, List, Optional from apple.tools.code_signing.apple_platform import ApplePlatform from 
apple.tools.code_signing.codesign_bundle import ( @@ -463,7 +463,7 @@ def _write_incremental_state( codesign_configuration: CodesignConfiguration, selected_codesign_identity: Optional[str], swift_stdlib_paths: List[Path], -): +) -> None: state = IncrementalState( items, codesigned=codesigned, @@ -524,7 +524,7 @@ def _setup_logging( class ColoredLogFormatter(logging.Formatter): - _colors = { + _colors: Dict[int, str] = { logging.DEBUG: "\x1b[m", logging.INFO: "\x1b[37m", logging.WARNING: "\x1b[33m", @@ -533,10 +533,10 @@ class ColoredLogFormatter(logging.Formatter): } _reset_color = "\x1b[0m" - def __init__(self, text_format: str): + def __init__(self, text_format: str) -> None: self.text_format = text_format - def format(self, record: logging.LogRecord): + def format(self, record: logging.LogRecord) -> str: colored_format = ( self._colors[record.levelno] + self.text_format + self._reset_color ) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 8f91e32de..174440ee1 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -150,7 +150,7 @@ def codesign_bundle( signing_context: Union[AdhocSigningContext, SigningContextWithProfileSelection], entitlements_path: Optional[Path], platform: ApplePlatform, - codesign_on_copy_paths: List[Path], + codesign_on_copy_paths: List[str], codesign_args: List[str], codesign_tool: Optional[Path] = None, codesign_configuration: Optional[CodesignConfiguration] = None, @@ -288,7 +288,7 @@ def _read_entitlements_file(path: Optional[Path]) -> Optional[Dict[str, Any]]: def _dry_codesign_everything( bundle_path: Path, - codesign_on_copy_paths: List[Path], + codesign_on_copy_paths: List[str], identity_fingerprint: str, tmp_dir: str, codesign_tool: Path, @@ -337,7 +337,7 @@ def _dry_codesign_everything( def _codesign_everything( bundle_path: Path, - codesign_on_copy_paths: List[Path], + 
codesign_on_copy_paths: List[str], identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, diff --git a/prelude/apple/tools/re_compatibility_utils/writable.py b/prelude/apple/tools/re_compatibility_utils/writable.py index 668f96ebc..7fc1aec5a 100644 --- a/prelude/apple/tools/re_compatibility_utils/writable.py +++ b/prelude/apple/tools/re_compatibility_utils/writable.py @@ -10,7 +10,7 @@ import stat -def make_path_user_writable(path: str): +def make_path_user_writable(path: str) -> None: # On Linux, `os.chmod()` does not support setting the permissions on a symlink. # `chmod` manpage says: # > AT_SYMLINK_NOFOLLOW If pathname is a symbolic link, do not @@ -26,7 +26,7 @@ def make_path_user_writable(path: str): os.chmod(path, st.st_mode | stat.S_IWUSR, follow_symlinks=follow_symlinks) -def make_dir_recursively_writable(dir: str): +def make_dir_recursively_writable(dir: str) -> None: for dirpath, _, filenames in os.walk(dir): make_path_user_writable(dirpath) for filename in filenames: From 00bf6e39db12e02d94a234e60ffd17dda3b572a9 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Fri, 9 Feb 2024 09:42:41 -0800 Subject: [PATCH 0264/1133] add type annotations to resource brokers Summary: ditto Reviewed By: milend Differential Revision: D53609686 fbshipit-source-id: 185714c30e3d798e5b899045bccc8f48287ae8a5 --- prelude/apple/tools/resource_broker/main.py | 4 ++-- prelude/apple/tools/resource_broker/utils.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/prelude/apple/tools/resource_broker/main.py b/prelude/apple/tools/resource_broker/main.py index 01f1f940b..0600a351e 100644 --- a/prelude/apple/tools/resource_broker/main.py +++ b/prelude/apple/tools/resource_broker/main.py @@ -56,7 +56,7 @@ class _ResourceType(str, Enum): macosIdbCompanion = "macos_idb_companion" -def _exit_gracefully(*args): +def _exit_gracefully(*args: List[object]) -> None: for idb_companion in idb_companions: idb_companion.cleanup() 
exit(0) @@ -67,7 +67,7 @@ def _check_simulator_manager_exists(simulator_manager: Optional[str]) -> None: raise Exception("Simulator manager is not specified") -def main(): +def main() -> None: args = _args_parser().parse_args() if args.type == _ResourceType.iosBootedSimulator: _check_simulator_manager_exists(args.simulator_manager) diff --git a/prelude/apple/tools/resource_broker/utils.py b/prelude/apple/tools/resource_broker/utils.py index 4e6119bdb..5128fd19b 100644 --- a/prelude/apple/tools/resource_broker/utils.py +++ b/prelude/apple/tools/resource_broker/utils.py @@ -36,7 +36,7 @@ def cleanup(self) -> None: self.stderr.close() -async def _read_until_valid_json(stream: asyncio.StreamReader) -> Any: +async def _read_until_valid_json(stream: asyncio.StreamReader) -> object: buffer = b"" while True: data = await stream.readuntil(b"}") @@ -50,7 +50,7 @@ async def _read_until_valid_json(stream: asyncio.StreamReader) -> Any: ) -async def _read_stdout(p: IdbCompanionProcess) -> Tuple[int, TextIOWrapper, Any]: +async def _read_stdout(p: IdbCompanionProcess) -> Tuple[int, TextIOWrapper, object]: if not p.process.stdout: raise ValueError("Expected stdout to be set for idb companion launch process.") try: From d975ea420578eca0e499c4c707b44959f842e349 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Fri, 9 Feb 2024 09:42:41 -0800 Subject: [PATCH 0265/1133] add type annotations to selective debugging tools Summary: ditto Reviewed By: milend Differential Revision: D53610492 fbshipit-source-id: 70beb46d61271bb5ebf10a65c30fc92f4feeaad7 --- prelude/apple/tools/selective_debugging/macho.py | 4 ++-- prelude/apple/tools/selective_debugging/main.py | 4 ++-- prelude/apple/tools/selective_debugging/scrubber.py | 2 +- prelude/apple/tools/selective_debugging/spec.py | 6 +++--- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/prelude/apple/tools/selective_debugging/macho.py b/prelude/apple/tools/selective_debugging/macho.py index 966bee567..9b4964891 100644 --- 
a/prelude/apple/tools/selective_debugging/macho.py +++ b/prelude/apple/tools/selective_debugging/macho.py @@ -20,7 +20,7 @@ class MachO: - def __str__(self): + def __str__(self) -> str: props = {} for k, v in self.__dict__.items(): props[k] = hex(v) @@ -39,7 +39,7 @@ class MachOHeader(MachO): reserved: int @property - def is_valid(self): + def is_valid(self) -> bool: return self.magic in (MH_CIGAM_64, MH_MAGIC_64) diff --git a/prelude/apple/tools/selective_debugging/main.py b/prelude/apple/tools/selective_debugging/main.py index 20920c02a..aa5e2b4af 100644 --- a/prelude/apple/tools/selective_debugging/main.py +++ b/prelude/apple/tools/selective_debugging/main.py @@ -12,7 +12,7 @@ from .scrubber import scrub -def _parse_args(): +def _parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser( description="Tool to postprocess executables/dylibs." ) @@ -38,7 +38,7 @@ def _parse_args(): return parser.parse_args() -def main(): +def main() -> None: args = _parse_args() try: scrub( diff --git a/prelude/apple/tools/selective_debugging/scrubber.py b/prelude/apple/tools/selective_debugging/scrubber.py index f0d4f88c6..2600ba5b8 100644 --- a/prelude/apple/tools/selective_debugging/scrubber.py +++ b/prelude/apple/tools/selective_debugging/scrubber.py @@ -55,7 +55,7 @@ def load_focused_targets_output_paths(json_file_path: str) -> Set[str]: # Visible for testing def _get_target_output_path_from_debug_file_path( debug_target_path: str, -): +) -> str: # This function assumes the debug file path created by buck2 is in the following format: # buck-out/isolation_dir/gen/project_cell/{hash}/.../__name__/libFoo.a parts = debug_target_path.split("/") diff --git a/prelude/apple/tools/selective_debugging/spec.py b/prelude/apple/tools/selective_debugging/spec.py index 250a2fa43..6bba9c3b8 100644 --- a/prelude/apple/tools/selective_debugging/spec.py +++ b/prelude/apple/tools/selective_debugging/spec.py @@ -46,11 +46,11 @@ class Spec: include_build_target_patterns: 
List[BuildTargetPatternOutputPathMatcher] = field( init=False ) - include_regular_expressions: List[re.Pattern] = field(init=False) + include_regular_expressions: List[re.Pattern[str]] = field(init=False) exclude_build_target_patterns: List[BuildTargetPatternOutputPathMatcher] = field( init=False ) - exclude_regular_expressions: List[re.Pattern] = field(init=False) + exclude_regular_expressions: List[re.Pattern[str]] = field(init=False) def __post_init__(self) -> None: with open(self.spec_path, "r") as f: @@ -95,7 +95,7 @@ def scrub_debug_file_path(self, debug_file_path: str) -> bool: def _path_matches_pattern_or_expression( debug_file_path: str, patterns: List[BuildTargetPatternOutputPathMatcher], - expressions: List[re.Pattern], + expressions: List[re.Pattern[str]], ) -> bool: for pattern in patterns: if pattern.match_path(debug_file_path): From 4a8bd84eed225ed1fffca9ec941cfcda7802d600 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Fri, 9 Feb 2024 09:42:41 -0800 Subject: [PATCH 0266/1133] enable pyre for all tools Summary: ditto Reviewed By: milend Differential Revision: D53610491 fbshipit-source-id: f52adcc0f2889bf11df17a9728043466200be677 --- prelude/apple/tools/dry_codesign_tool.py | 2 +- prelude/apple/tools/ipa_package_maker.py | 8 ++++---- .../apple/tools/split_arch_combine_dsym_bundles_tool.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/prelude/apple/tools/dry_codesign_tool.py b/prelude/apple/tools/dry_codesign_tool.py index 5e445b3ac..71364ddb7 100644 --- a/prelude/apple/tools/dry_codesign_tool.py +++ b/prelude/apple/tools/dry_codesign_tool.py @@ -48,7 +48,7 @@ def _args_parser() -> argparse.ArgumentParser: return parser -def _main(): +def _main() -> None: args = _args_parser().parse_args() content = { # This is always empty string if you check `DryCodeSignStep` class usages in buck1 diff --git a/prelude/apple/tools/ipa_package_maker.py b/prelude/apple/tools/ipa_package_maker.py index 710872b94..f4848dfa4 100644 --- 
a/prelude/apple/tools/ipa_package_maker.py +++ b/prelude/apple/tools/ipa_package_maker.py @@ -19,13 +19,13 @@ from apple.tools.re_compatibility_utils.writable import make_dir_recursively_writable -def _copy_ipa_contents(ipa_contents_dir: Path, output_dir: Path): +def _copy_ipa_contents(ipa_contents_dir: Path, output_dir: Path) -> None: if os.path.exists(output_dir): shutil.rmtree(output_dir, ignore_errors=False) shutil.copytree(ipa_contents_dir, output_dir, symlinks=True, dirs_exist_ok=False) -def _delete_empty_SwiftSupport_dir(output_dir: Path): +def _delete_empty_SwiftSupport_dir(output_dir: Path) -> None: swiftSupportDir = output_dir / "SwiftSupport" if not swiftSupportDir.exists(): return @@ -46,7 +46,7 @@ def _package_ipa_contents( compression_level: int, validator: Optional[Path], validator_args: List[str], -): +) -> None: with tempfile.TemporaryDirectory() as processed_package_dir: processed_package_dir_path = Path(processed_package_dir) _copy_ipa_contents(ipa_contents_dir, processed_package_dir_path) @@ -86,7 +86,7 @@ def _package_ipa_contents( ) -def main(): +def main() -> None: parser = argparse.ArgumentParser(description="Tool to make an .ipa package file.") parser.add_argument( "--ipa-contents-dir", diff --git a/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py b/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py index b7e99d853..77ec8a7c8 100644 --- a/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py +++ b/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py @@ -31,7 +31,7 @@ def _args_parser() -> argparse.ArgumentParser: return parser -def _main(): +def _main() -> None: args = _args_parser().parse_args() output_dwarf_path = os.path.join(args.output, "Contents/Resources/DWARF") From 6d0b5baf38128df1c2c5af821a00e458b8704f30 Mon Sep 17 00:00:00 2001 From: David Reiss Date: Fri, 9 Feb 2024 10:46:28 -0800 Subject: [PATCH 0267/1133] Clarify comment about raw assembly Summary: The comment was previously incorrectly 
referring to ".S" as raw assembly, even though it is preprocessed assembly. Reviewed By: igorsugak Differential Revision: D53581185 fbshipit-source-id: c77f1dcb9d0bda3a8e6b1cd716e49fb7ec25e60e --- prelude/cxx/compile.bzl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prelude/cxx/compile.bzl b/prelude/cxx/compile.bzl index 03a5f4a44..7941d522b 100644 --- a/prelude/cxx/compile.bzl +++ b/prelude/cxx/compile.bzl @@ -559,6 +559,8 @@ def _get_compile_base(compiler_info: typing.Any) -> cmd_args: def _dep_file_type(ext: CxxExtension) -> [DepFileType, None]: # Raw assembly doesn't make sense to capture dep files for. + # .S is preprocessed assembly, but some builds use it with + # assemblers that don't support -MF, so leave depfiles off. if ext.value in (".s", ".S", ".asm"): return None elif ext.value == ".hip": From 34d8affdddafd1335c5e20f63d40a8914b450c5b Mon Sep 17 00:00:00 2001 From: Balaji S Date: Fri, 9 Feb 2024 12:06:03 -0800 Subject: [PATCH 0268/1133] Add first test - unit test for split_args in ct_executor Summary: * This is more to get started on the test setup * We add unit tests for ct_executor split_args function * We then refactor the function by ~~removing an unused arg~~ replacing the existing logic with `lists:splitwith` - the test giving us confidence that this works. 
Reviewed By: TheGeorge Differential Revision: D53479843 fbshipit-source-id: a3c7950adfd2d3901a306d3bce8643adfc79c5ac --- prelude/erlang/common_test/test_exec/BUCK.v2 | 12 ++++++ .../common_test/test_exec/src/ct_executor.erl | 18 ++++---- .../test_exec/test/ct_executor_SUITE.erl | 41 +++++++++++++++++++ 3 files changed, 61 insertions(+), 10 deletions(-) create mode 100644 prelude/erlang/common_test/test_exec/test/ct_executor_SUITE.erl diff --git a/prelude/erlang/common_test/test_exec/BUCK.v2 b/prelude/erlang/common_test/test_exec/BUCK.v2 index 0ea358626..af2feb4f4 100644 --- a/prelude/erlang/common_test/test_exec/BUCK.v2 +++ b/prelude/erlang/common_test/test_exec/BUCK.v2 @@ -23,3 +23,15 @@ erlang_application( use_global_parse_transforms = False, visibility = ["PUBLIC"], ) + +erlang_tests( + contacts = ["whatsapp_erlclient"], + labels = ["unit"], + suites = glob( + ["test/*_SUITE.erl"], + ), + deps = [ + "stdlib", + ":test_exec", + ], +) diff --git a/prelude/erlang/common_test/test_exec/src/ct_executor.erl b/prelude/erlang/common_test/test_exec/src/ct_executor.erl index 2478882a9..af35d10df 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_executor.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_executor.erl @@ -23,6 +23,9 @@ -type ct_run_arg() :: {atom(), term()}. -type ct_exec_arg() :: {output_dir | suite | providers, term()}. +% For testing +-export([split_args/1]). + -spec run([string()]) -> no_return(). run(Args) when is_list(Args) -> ExitCode = @@ -114,17 +117,12 @@ parse_arguments(Args) -> % @doc Splits the argument before those that happens % before ct_args (the executor args) and those after -% (the args for ct_run). +% (the args for ct_run). ct_args will always be +% present in the list -spec split_args([term()]) -> {[ct_exec_arg()], [ct_run_arg()]}. -split_args(Args) -> split_args(Args, [], []). - --spec split_args([term()], [ct_exec_arg()], [ct_run_arg()]) -> {[ct_exec_arg()], [ct_run_arg()]}. 
-split_args([ct_args | Args], CtExecutorArgs, []) -> - {parse_ct_exec_args(lists:reverse(CtExecutorArgs)), parse_ct_run_args(Args)}; -split_args([Arg | Args], CtExecutorArgs, []) -> - split_args(Args, [Arg | CtExecutorArgs], []); -split_args([], CtExecutorArgs, []) -> - {parse_ct_exec_args(lists:reverse(CtExecutorArgs)), []}. +split_args(Args) -> + {CtExecutorArgs, [ct_args | CtRunArgs]} = lists:splitwith(fun(Arg) -> Arg =/= ct_args end, Args), + {parse_ct_exec_args(CtExecutorArgs), parse_ct_run_args(CtRunArgs)}. -spec parse_ct_run_args([term()]) -> [ct_run_arg()]. parse_ct_run_args([]) -> diff --git a/prelude/erlang/common_test/test_exec/test/ct_executor_SUITE.erl b/prelude/erlang/common_test/test_exec/test/ct_executor_SUITE.erl new file mode 100644 index 000000000..bcf4d0b86 --- /dev/null +++ b/prelude/erlang/common_test/test_exec/test/ct_executor_SUITE.erl @@ -0,0 +1,41 @@ +%% Copyright (c) Meta Platforms, Inc. and affiliates. +%% This source code is licensed under both the MIT license found in the +%% LICENSE-MIT file in the root directory of this source tree and the Apache +%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%% of this source tree. +%%% % @format +-module(ct_executor_SUITE). + +-include_lib("stdlib/include/assert.hrl"). + +-export([all/0]). + +-export([ + test_split_args/1 +]). + +all() -> + [test_split_args]. 
+ +test_split_args(_Config) -> + ?assertEqual( + {[{output_dir, ""}, {providers, [something]}, {suite, a_suite}], [{dir, ""}, {suite, a_suite}, {group, a_group}]}, + ct_executor:split_args([ + {output_dir, ""}, + {providers, [something]}, + {suite, a_suite}, + ct_args, + {dir, ""}, + {suite, a_suite}, + {group, a_group} + ]) + ), + ?assertEqual( + {[{output_dir, ""}, {providers, [something]}, {suite, a_suite}], []}, + ct_executor:split_args([{output_dir, ""}, {providers, [something]}, {suite, a_suite}, ct_args]) + ), + ?assertEqual( + {[], [{dir, ""}, {suite, a_suite}, {group, a_group}]}, + ct_executor:split_args([ct_args, {dir, ""}, {suite, a_suite}, {group, a_group}]) + ), + ?assertEqual({[], []}, ct_executor:split_args([ct_args])). From 7ef941094acbad7b433725cabf71af1392fdb3b8 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Fri, 9 Feb 2024 12:28:38 -0800 Subject: [PATCH 0269/1133] Sub-target to write out the main module Summary: Simple sub-target to export the main module. Reviewed By: igorsugak Differential Revision: D53600412 fbshipit-source-id: 68549a6b632e1cd0b42b7f0a9dbf7a8cffc9f1ca --- prelude/python/python_binary.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 5140a6ade..30d62f1d6 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -396,6 +396,7 @@ def python_executable( exe.sub_targets.update({ "dbg-source-db": [dbg_source_db], "library-info": [library_info], + "main": [DefaultInfo(default_output = ctx.actions.write_json("main.json", main))], "source-db": [source_db], "source-db-no-deps": [source_db_no_deps, create_python_source_db_info(library_info.manifests)], }) From 887ef12c11a4818a30b1ed18f6f25092d8284ea1 Mon Sep 17 00:00:00 2001 From: Alvaro Leiva Geisse Date: Fri, 9 Feb 2024 18:57:41 -0800 Subject: [PATCH 0270/1133] move startup functions out of enviromental variables and into the manifest Summary: when we run a par with a 
environment variable that starts with STARTUP, then this gets executed as early as site packages... we use this to initialize code early in the execution of the par, even before the main function has time to be bootstrapped. but this has 3 big issues * Variables can leak from one par to the other, so when a PAR executes another par, the STARTUP_ variables from one leak into the second; we see this issue here: https://fb.workplace.com/groups/cinderusers/permalink/3333847296761297/ * people can execute arbitrary code from the environment, like if you really wanna F- someone up, add STARTUP_EXIT=sys:exit to their environment variables and they will not be able to execute python pars... now this is funny, but also really really bad because this opens up injection of code by someone with access to your env variables. (and since env variables are passed by sudo, this is potentially a privilege escalation vulnerability) * it requires and forces that, in order to execute code inside the par, the env variables must be present. the solution here is to extend `__manifest__.{py,json}` to support startup_functions, and read them from there... since this is in sitecustomize, * they will be executed for any python invocation (excluding -S executions), * they won't get leaked to external pars, because those will have their own. * pars are mounted as readonly, so they can't be tampered with by an external party.
Reviewed By: itamaro, zsol Differential Revision: D53142513 fbshipit-source-id: 9f8f96f6b22b61ede5b832a937fa5edf2d3fcac7 --- prelude/python/make_py_package.bzl | 59 +++++++++++++++++++ .../python/tools/make_par/sitecustomize.py | 29 ++------- 2 files changed, 65 insertions(+), 23 deletions(-) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 3f227b334..b19c59f7a 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -145,10 +145,12 @@ def make_py_package( srcs.append(pex_modules.extensions.manifest) preload_libraries = _preload_libraries_args(ctx, shared_libraries) + startup_function = generate_startup_function_loader(ctx) manifest_module = generate_manifest_module(ctx, python_toolchain, srcs) common_modules_args, dep_artifacts, debug_artifacts = _pex_modules_common_args( ctx, pex_modules, + [startup_function] if startup_function else [], {name: lib for name, (lib, _) in shared_libraries.items()}, ) @@ -380,6 +382,7 @@ def _pex_bootstrap_args( def _pex_modules_common_args( ctx: AnalysisContext, pex_modules: PexModules, + extra_manifests: list[ArgLike], shared_libraries: dict[str, LinkedObject]) -> (cmd_args, list[(ArgLike, str)], list[(ArgLike, str)]): srcs = [] src_artifacts = [] @@ -397,6 +400,9 @@ def _pex_modules_common_args( srcs.append(pex_modules.extra_manifests.manifest) src_artifacts.extend(pex_modules.extra_manifests.artifacts) + if extra_manifests: + srcs.extend(extra_manifests) + deps.extend(src_artifacts) resources = pex_modules.manifests.resource_manifests() deps.extend(pex_modules.manifests.resource_artifacts_with_paths()) @@ -552,6 +558,59 @@ def _hidden_resources_error_message(current_target: Label, hidden_resources: lis msg += " {}\n".format(resource) return msg +def generate_startup_function_loader(ctx: AnalysisContext) -> ArgLike: + """ + Generate `__startup_function_loader__.py` used for early bootstrap of a par. 
+ Things that go here are also enumerated in `__manifest__['startup_functions']` + Some examples include: + * static extension finder init + * eager import loader init + * cinderx init + """ + + if ctx.attrs.manifest_module_entries == None: + startup_functions_list = "" + else: + startup_functions_list = "\n".join( + [ + '"' + startup_function + '",' + for _, startup_function in sorted(ctx.attrs.manifest_module_entries["startup_functions"].items()) + ], + ) + + src_startup_functions_path = ctx.actions.write( + "manifest/__startup_function_loader__.py", + """ +import importlib +import warnings + +STARTUP_FUNCTIONS=[{startup_functions_list}] + +def load_startup_functions(): + for func in STARTUP_FUNCTIONS: + mod, sep, func = func.partition(":") + if sep: + try: + module = importlib.import_module(mod) + getattr(module, func)() + except Exception as e: + # TODO: Ignoring errors for now. + warnings.warn( + "Startup function %s (%s:%s) not executed: %s" + % (mod, name, func, e), + stacklevel=1, + ) + + """.format(startup_functions_list = startup_functions_list), + ) + return ctx.actions.write_json( + "manifest/startup_function_loader.manifest", + [ + ["__par__/__startup_function_loader__.py", src_startup_functions_path, "prelude//python:make_py_package.bzl"], + ], + with_inputs = True, + ) + def generate_manifest_module( ctx: AnalysisContext, python_toolchain: PythonToolchainInfo, diff --git a/prelude/python/tools/make_par/sitecustomize.py b/prelude/python/tools/make_par/sitecustomize.py index 310fcf0a3..2066c4675 100644 --- a/prelude/python/tools/make_par/sitecustomize.py +++ b/prelude/python/tools/make_par/sitecustomize.py @@ -8,7 +8,6 @@ from __future__ import annotations -import importlib import multiprocessing.util as mp_util import os import sys @@ -71,28 +70,12 @@ def __clear_env(patch_spawn: bool = True) -> None: def __startup__() -> None: - # ALL STARTUP_* methods will be called here in lexicographic order. 
- startup_functions = sorted( - [ - (name, var) - for name, var in os.environ.items() - if name.startswith("STARTUP_") - ], - ) - for name, var in startup_functions: - mod, sep, func = var.partition(":") - if sep: - try: - module = importlib.import_module(mod) - getattr(module, func)() - except Exception as e: - # TODO: Ignoring errors for now. The way to properly fix this should be to make - # sure we are still at the same binary that configured `STARTUP_` before importing. - warnings.warn( - "Startup function %s (%s:%s) not executed: %s" - % (mod, name, func, e), - stacklevel=1, - ) + try: + from __par__.__startup_function_loader__ import load_startup_functions + + load_startup_functions() + except Exception: + warnings.warn("could not load startup functions", stacklevel=1) def __passthrough_exec_module() -> None: From 0d927de56bae3e9c882e457e08c82c634e0037ee Mon Sep 17 00:00:00 2001 From: Shayne Fletcher Date: Mon, 12 Feb 2024 06:58:16 -0800 Subject: [PATCH 0271/1133] include runtime dep link flags in binary rule Summary: if we always put the runtime dep link flags on the link line (even if not strictly required) we avoid having to restate them on individual targets (when they are) e.g. 
ppx X-link: https://github.com/facebook/buck2/pull/562 Reviewed By: ndmitchell Differential Revision: D53590031 Pulled By: shayne-fletcher fbshipit-source-id: 1756c5acb324f452fed5444c9ce4367e5c1c87d1 --- prelude/ocaml/ocaml.bzl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/prelude/ocaml/ocaml.bzl b/prelude/ocaml/ocaml.bzl index 7afc804b0..67f0e6d4f 100644 --- a/prelude/ocaml/ocaml.bzl +++ b/prelude/ocaml/ocaml.bzl @@ -722,12 +722,13 @@ def ocaml_library_impl(ctx: AnalysisContext) -> list[Provider]: def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: ocaml_toolchain = ctx.attrs._ocaml_toolchain[OCamlToolchainInfo] + ocaml_toolchain_runtime_deps = ocaml_toolchain.runtime_dep_link_extras env = _mk_env(ctx) ocamlopt = _mk_ocaml_compiler(ctx, env, BuildMode("native")) ocamlc = _mk_ocaml_compiler(ctx, env, BuildMode("bytecode")) - dep_link_infos = _attr_deps_merged_link_infos(ctx) + filter(None, [ocaml_toolchain.libc]) + dep_link_infos = _attr_deps_merged_link_infos(ctx) + filter(None, [ocaml_toolchain.libc]) + [d.get(MergedLinkInfo) for d in ocaml_toolchain_runtime_deps] cxx_toolchain = get_cxx_toolchain_info(ctx) link_args_output = make_link_args( ctx.actions, @@ -757,6 +758,8 @@ def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: # dependencies of the link step. 
cmd_nat.hidden(cmxs, cmis_nat, cmts_nat, cmtis_nat, objs, link_args_output.hidden) binary_nat = ctx.actions.declare_output(ctx.attrs.name + ".opt") + + cmd_nat.add([cmd_args(["-cclib", f]) for f in ocaml_toolchain.runtime_dep_link_flags]) cmd_nat.add("-cclib", "-lpthread") cmd_nat.add("-o", binary_nat.as_output()) local_only = link_cxx_binary_locally(ctx) @@ -770,6 +773,7 @@ def ocaml_binary_impl(ctx: AnalysisContext) -> list[Provider]: cmd_byt.hidden(cmos, cmis_byt, cmts_byt, cmtis_byt, link_args_output.hidden) binary_byt = ctx.actions.declare_output(ctx.attrs.name) cmd_byt.add("-custom") + cmd_byt.add([cmd_args(["-cclib", f]) for f in ocaml_toolchain.runtime_dep_link_flags]) cmd_byt.add("-cclib", "-lpthread") cmd_byt.add("-o", binary_byt.as_output()) local_only = link_cxx_binary_locally(ctx) From 6bafd27d108b119521aa8ed52afced1a7c4bc9f5 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Mon, 12 Feb 2024 07:09:15 -0800 Subject: [PATCH 0272/1133] pass `--embed-provisioning-profile-when-signing-ad-hoc` from rules Summary: when needed. Flag is unused in bundling script for now. 
Reviewed By: milend Differential Revision: D53610951 fbshipit-source-id: 453372cb10afc43e056dad7533d1215c0695b757 --- prelude/apple/apple_bundle_config.bzl | 1 + prelude/apple/apple_bundle_part.bzl | 20 ++++++++++++++++++-- prelude/apple/apple_rules_impl_utility.bzl | 4 ++++ prelude/apple/tools/bundling/main.py | 5 +++++ 4 files changed, 28 insertions(+), 2 deletions(-) diff --git a/prelude/apple/apple_bundle_config.bzl b/prelude/apple/apple_bundle_config.bzl index 7dd9d79bd..522bd6e0f 100644 --- a/prelude/apple/apple_bundle_config.bzl +++ b/prelude/apple/apple_bundle_config.bzl @@ -19,6 +19,7 @@ def apple_bundle_config() -> dict[str, typing.Any]: "_codesign_type": read_root_config("apple", "codesign_type_override", None), "_compile_resources_locally_override": _maybe_get_bool("compile_resources_locally_override", None), "_dry_run_code_signing": _maybe_get_bool("dry_run_code_signing", False), + "_embed_provisioning_profile_when_adhoc_code_signing": _maybe_get_bool("embed_provisioning_profile_when_adhoc_code_signing", None), # This is a kill switch for the feature, it can also be disabled by setting # `apple.fast_adhoc_signing_enabled=false` in a global buckconfig file. 
"_fast_adhoc_signing_enabled": _maybe_get_bool("fast_adhoc_signing_enabled", True), diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index 8f53e9da5..ff6427821 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -104,7 +104,8 @@ def assemble_bundle( codesign_tool, ] - if codesign_type.value != "adhoc": + profile_selection_required = _should_embed_provisioning_profile(ctx, codesign_type) + if profile_selection_required: provisioning_profiles = ctx.attrs._provisioning_profiles[DefaultInfo] expect( len(provisioning_profiles.default_outputs) == 1, @@ -116,10 +117,13 @@ def assemble_bundle( identities_command = ctx.attrs._apple_toolchain[AppleToolchainInfo].codesign_identities_command identities_command_args = ["--codesign-identities-command", cmd_args(identities_command)] if identities_command else [] codesign_args.extend(identities_command_args) - else: + + if codesign_type.value == "adhoc": codesign_args.append("--ad-hoc") if ctx.attrs.codesign_identity: codesign_args.extend(["--ad-hoc-codesign-identity", ctx.attrs.codesign_identity]) + if profile_selection_required: + codesign_args.append("--embed-provisioning-profile-when-signing-ad-hoc") codesign_args += get_entitlements_codesign_args(ctx, codesign_type) codesign_args += _get_extra_codesign_args(ctx) @@ -259,3 +263,15 @@ def _detect_codesign_type(ctx: AnalysisContext, skip_adhoc_signing: bool) -> Cod def _get_extra_codesign_args(ctx: AnalysisContext) -> list[str]: codesign_args = ctx.attrs.codesign_flags if hasattr(ctx.attrs, "codesign_flags") else [] return ["--codesign-args={}".format(flag) for flag in codesign_args] + +def _should_embed_provisioning_profile(ctx: AnalysisContext, codesign_type: CodeSignType) -> bool: + if codesign_type.value == "distribution": + return True + + if codesign_type.value == "adhoc": + # The config-based override value takes priority over target value + if 
ctx.attrs._embed_provisioning_profile_when_adhoc_code_signing != None: + return ctx.attrs._embed_provisioning_profile_when_adhoc_code_signing + return ctx.attrs.embed_provisioning_profile_when_adhoc_code_signing + + return False diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 63c3d87ad..4633f332d 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -39,6 +39,8 @@ def get_apple_xctoolchain_bundle_id_attr(): APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME = "_archive_objects_locally_override" APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME = "_use_entitlements_when_adhoc_code_signing" APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME = "use_entitlements_when_adhoc_code_signing" +APPLE_EMBED_PROVISIONING_PROFILE_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME = "_embed_provisioning_profile_when_adhoc_code_signing" +APPLE_EMBED_PROVISIONING_PROFILE_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME = "embed_provisioning_profile_when_adhoc_code_signing" APPLE_VALIDATION_DEPS_ATTR_NAME = "validation_deps" APPLE_VALIDATION_DEPS_ATTR_TYPE = attrs.set(attrs.dep(), sorted = True, default = []) @@ -69,6 +71,8 @@ def _apple_bundle_like_common_attrs(): "_resource_bundle": attrs.option(attrs.dep(providers = [AppleBundleResourceInfo]), default = None), APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME: attrs.bool(default = False), + APPLE_EMBED_PROVISIONING_PROFILE_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), + APPLE_EMBED_PROVISIONING_PROFILE_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME: attrs.bool(default = False), BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, APPLE_VALIDATION_DEPS_ATTR_NAME: APPLE_VALIDATION_DEPS_ATTR_TYPE, } diff --git 
a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index f9d54ad98..a3b201922 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -120,6 +120,11 @@ def _args_parser() -> argparse.ArgumentParser: action="store_true", help="Perform ad-hoc signing if set.", ) + parser.add_argument( + "--embed-provisioning-profile-when-signing-ad-hoc", + action="store_true", + help="Perform selection of provisioining profile and embed it into final bundle when ad-hoc signing if set.", + ) parser.add_argument( "--ad-hoc-codesign-identity", metavar="", From b9d9cd9b3e951f9efe217990cc0cf95044466c81 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Mon, 12 Feb 2024 07:09:15 -0800 Subject: [PATCH 0273/1133] extract entitlements and Info.plist preparation Summary: into separate method. No behavior change. Reviewed By: milend Differential Revision: D53610952 fbshipit-source-id: 16728d87d5a7af73a79c359a1e60317bd0363af0 --- .../tools/code_signing/codesign_bundle.py | 61 ++++++++++++------- 1 file changed, 40 insertions(+), 21 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 174440ee1..b0fb9e222 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -157,29 +157,16 @@ def codesign_bundle( ) -> None: with tempfile.TemporaryDirectory() as tmp_dir: if isinstance(signing_context, SigningContextWithProfileSelection): - info_plist_metadata = signing_context.info_plist_metadata - selected_profile_info = signing_context.selected_profile_info - prepared_entitlements_path = prepare_code_signing_entitlements( - entitlements_path, - info_plist_metadata.bundle_id, - selected_profile_info.profile, - tmp_dir, - ) - prepared_info_plist_path = prepare_info_plist( - signing_context.info_plist_source, - info_plist_metadata, - selected_profile_info.profile, - tmp_dir, - ) - 
os.replace( - prepared_info_plist_path, - bundle_path / signing_context.info_plist_destination, + prepared_entitlements_path = _prepare_entitlements_and_info_plist( + bundle_path=bundle_path, + entitlements_path=entitlements_path, + platform=platform, + signing_context=signing_context, + tmp_dir=tmp_dir, ) - shutil.copy2( - selected_profile_info.profile.file_path, - bundle_path / platform.embedded_provisioning_profile_file_name(), + selected_identity_fingerprint = ( + signing_context.selected_profile_info.identity.fingerprint ) - selected_identity_fingerprint = selected_profile_info.identity.fingerprint else: prepared_entitlements_path = entitlements_path selected_identity_fingerprint = signing_context.codesign_identity @@ -218,6 +205,38 @@ def codesign_bundle( ) +def _prepare_entitlements_and_info_plist( + bundle_path: Path, + entitlements_path: Optional[Path], + platform: ApplePlatform, + signing_context: SigningContextWithProfileSelection, + tmp_dir: str, +) -> Path: + info_plist_metadata = signing_context.info_plist_metadata + selected_profile = signing_context.selected_profile_info.profile + prepared_entitlements_path = prepare_code_signing_entitlements( + entitlements_path, + info_plist_metadata.bundle_id, + selected_profile, + tmp_dir, + ) + prepared_info_plist_path = prepare_info_plist( + signing_context.info_plist_source, + info_plist_metadata, + selected_profile, + tmp_dir, + ) + os.replace( + prepared_info_plist_path, + bundle_path / signing_context.info_plist_destination, + ) + shutil.copy2( + selected_profile.file_path, + bundle_path / platform.embedded_provisioning_profile_file_name(), + ) + return prepared_entitlements_path + + def _read_provisioning_profiles( dirpath: Path, read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, From da13e01d957f580443942ab92caf19c083651e8b Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Mon, 12 Feb 2024 07:29:28 -0800 Subject: [PATCH 0274/1133] Remove pre-buillt stdlib attrs 
from GoToolchain Summary: They are now unused and can be safely removed Reviewed By: abulimov Differential Revision: D53614037 fbshipit-source-id: 4f2b50ea75f10b144c0d9f4f421c941bdab3fc71 --- prelude/go/toolchain.bzl | 3 --- 1 file changed, 3 deletions(-) diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index 2311a2fb0..35ae0a6e8 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -32,9 +32,6 @@ GoToolchainInfo = provider( "linker": provider_field(typing.Any, default = None), "linker_flags": provider_field(typing.Any, default = None), "packer": provider_field(typing.Any, default = None), - "prebuilt_stdlib": provider_field(typing.Any, default = None), - "prebuilt_stdlib_shared": provider_field(typing.Any, default = None), - "prebuilt_stdlib_noncgo": provider_field(typing.Any, default = None), "tags": provider_field(typing.Any, default = None), }, ) From 1e87b47aec80c1df01155c66710c915998860574 Mon Sep 17 00:00:00 2001 From: generatedunixname89002005307016 Date: Mon, 12 Feb 2024 09:21:51 -0800 Subject: [PATCH 0275/1133] suppress errors in `buck2/prelude/python/tools` Differential Revision: D53663560 fbshipit-source-id: 29c994a7fe9b984bf61a9f25ff50428fc032e2d2 --- prelude/python/tools/make_par/sitecustomize.py | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/python/tools/make_par/sitecustomize.py b/prelude/python/tools/make_par/sitecustomize.py index 2066c4675..a9cf16d7f 100644 --- a/prelude/python/tools/make_par/sitecustomize.py +++ b/prelude/python/tools/make_par/sitecustomize.py @@ -71,6 +71,7 @@ def __clear_env(patch_spawn: bool = True) -> None: def __startup__() -> None: try: + # pyre-fixme[21]: Could not find module `__par__.__startup_function_loader__`. 
from __par__.__startup_function_loader__ import load_startup_functions load_startup_functions() From 099c6d5b0ae4f47081183acb3e61ffed1e38dad9 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Mon, 12 Feb 2024 09:31:47 -0800 Subject: [PATCH 0276/1133] Sanitizers: use sanitizer files Summary: Switch to using `sanitizer_runtime_files` for the C++ sanitizer runtime, so that `apple_bundle()` bundling works correctly re: code signing and incrementality. Reviewed By: blackm00n Differential Revision: D53608517 fbshipit-source-id: 564458f198a950147cceab27663b8588e471a6b0 --- prelude/apple/apple_binary.bzl | 4 +-- prelude/apple/apple_bundle_resources.bzl | 3 ++- prelude/cxx/cxx.bzl | 4 +-- prelude/cxx/cxx_executable.bzl | 8 +++--- prelude/cxx/cxx_link_utility.bzl | 31 ++++++++++++++++-------- prelude/linking/link_info.bzl | 2 +- 6 files changed, 32 insertions(+), 20 deletions(-) diff --git a/prelude/apple/apple_binary.bzl b/prelude/apple/apple_binary.bzl index ded300620..fd38b7ff9 100644 --- a/prelude/apple/apple_binary.bzl +++ b/prelude/apple/apple_binary.bzl @@ -186,8 +186,8 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: link_command_providers.append(LinkCommandDebugOutputInfo(debug_outputs = [cxx_output.link_command_debug_output])) sanitizer_runtime_providers = [] - if cxx_output.sanitizer_runtime_dir: - sanitizer_runtime_providers.append(CxxSanitizerRuntimeInfo(runtime_dir = cxx_output.sanitizer_runtime_dir)) + if cxx_output.sanitizer_runtime_files: + sanitizer_runtime_providers.append(CxxSanitizerRuntimeInfo(runtime_files = cxx_output.sanitizer_runtime_files)) return [ DefaultInfo(default_output = cxx_output.binary, sub_targets = cxx_output.sub_targets), diff --git a/prelude/apple/apple_bundle_resources.bzl b/prelude/apple/apple_bundle_resources.bzl index a8d07747e..8f84a9159 100644 --- a/prelude/apple/apple_bundle_resources.bzl +++ b/prelude/apple/apple_bundle_resources.bzl @@ -87,8 +87,9 @@ def get_apple_bundle_resource_part_list(ctx: 
AnalysisContext) -> AppleBundleReso cxx_sanitizer_runtime_info = ctx.attrs.binary.get(CxxSanitizerRuntimeInfo) if ctx.attrs.binary else None if cxx_sanitizer_runtime_info: runtime_resource_spec = AppleResourceSpec( - content_dirs = [cxx_sanitizer_runtime_info.runtime_dir], + files = cxx_sanitizer_runtime_info.runtime_files, destination = AppleResourceDestination("frameworks"), + # Sanitizer dylibs require signing, for hardened runtime on macOS and iOS device builds codesign_files_on_copy = True, ) resource_specs.append(runtime_resource_spec) diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index 535fa544c..2fe804beb 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -244,8 +244,8 @@ def cxx_binary_impl(ctx: AnalysisContext) -> list[Provider]: extra_providers = [] if output.link_command_debug_output: extra_providers.append(LinkCommandDebugOutputInfo(debug_outputs = [output.link_command_debug_output])) - if output.sanitizer_runtime_dir: - extra_providers.append(CxxSanitizerRuntimeInfo(runtime_dir = output.sanitizer_runtime_dir)) + if output.sanitizer_runtime_files: + extra_providers.append(CxxSanitizerRuntimeInfo(runtime_files = output.sanitizer_runtime_files)) # When an executable is the output of a build, also materialize all the # unpacked external debuginfo that goes with it. 
This makes `buck2 build diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 27393f32b..2152d041d 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -184,7 +184,7 @@ CxxExecutableOutput = record( linker_map_data = [CxxLinkerMapData, None], link_command_debug_output = field([LinkCommandDebugOutput, None], None), dist_info = DistInfo, - sanitizer_runtime_dir = field([Artifact, None], None), + sanitizer_runtime_files = field(list[Artifact], []), ) def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, is_cxx_test: bool = False) -> CxxExecutableOutput: @@ -680,7 +680,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, shared_libs = shlib_info.set, nondebug_runtime_files = runtime_files, ), - sanitizer_runtime_dir = link_result.sanitizer_runtime_dir, + sanitizer_runtime_files = link_result.sanitizer_runtime_files, ) _CxxLinkExecutableResult = record( @@ -697,7 +697,7 @@ _CxxLinkExecutableResult = record( # Optional shared libs symlink tree symlinked_dir action shared_libs_symlink_tree = [list[Artifact], Artifact, None], linker_map_data = [CxxLinkerMapData, None], - sanitizer_runtime_dir = [Artifact, None], + sanitizer_runtime_files = list[Artifact], ) def _link_into_executable( @@ -735,7 +735,7 @@ def _link_into_executable( external_debug_info = executable_args.external_debug_info, shared_libs_symlink_tree = executable_args.shared_libs_symlink_tree, linker_map_data = link_result.linker_map_data, - sanitizer_runtime_dir = executable_args.sanitizer_runtime_dir, + sanitizer_runtime_files = executable_args.sanitizer_runtime_files, ) def get_cxx_executable_product_name(ctx: AnalysisContext) -> str: diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index e6e446a67..7a8416990 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -155,13 +155,13 @@ ExecutableSharedLibArguments = record( 
external_debug_info = field(list[TransitiveSetArgsProjection], []), # Optional shared libs symlink tree symlinked_dir action. shared_libs_symlink_tree = field(list[Artifact] | Artifact | None, None), - # A directory containing sanitizer runtime shared libraries - sanitizer_runtime_dir = field(Artifact | None, None), + # A list of runtime shared libraries + sanitizer_runtime_files = field(list[Artifact], []), ) CxxSanitizerRuntimeArguments = record( extra_link_args = field(list[ArgLike], []), - sanitizer_runtime_dir = field(Artifact | None, None), + sanitizer_runtime_files = field(list[Artifact], []), ) # @executable_path/Frameworks @@ -176,11 +176,22 @@ def _sanitizer_runtime_arguments( if not sanitizer_runtime_enabled: return CxxSanitizerRuntimeArguments() - if linker_info.sanitizer_runtime_dir == None: - fail("C++ sanitizer runtime enabled but there's no runtime directory") + if not linker_info.sanitizer_runtime_files: + fail("C++ sanitizer runtime enabled but there are no runtime files") if linker_info.type == "darwin": - runtime_rpath = cmd_args(linker_info.sanitizer_runtime_dir, format = "-Wl,-rpath,@executable_path/{}").relative_to(output, parent = 1) + runtime_rpath = cmd_args() + runtime_files = linker_info.sanitizer_runtime_files + for runtime_shared_lib in runtime_files: + # Rpath-relative dylibs have an install name of `@rpath/libName.dylib`, + # which means we need to add the parent dir of the dylib as an rpath. + runtime_shared_lib_dir = cmd_args(runtime_shared_lib).parent() + + # The parent dir of the runtime shared lib must appear as a path + # relative to the parent dir of the binary. `@executable_path` + # represents the parent dir of the binary, not the binary itself. 
+ runtime_shared_lib_rpath = cmd_args(runtime_shared_lib_dir, format = "-Wl,-rpath,@executable_path/{}").relative_to(output, parent = 1) + runtime_rpath.add(runtime_shared_lib_rpath) # Ignore_artifacts() as the runtime directory is not required at _link_ time runtime_rpath = runtime_rpath.ignore_artifacts() @@ -196,7 +207,7 @@ def _sanitizer_runtime_arguments( "-Wl,-rpath,@loader_path/../Frameworks", # macOS "-Wl,-rpath,@executable_path/../Frameworks", # macOS ], - sanitizer_runtime_dir = linker_info.sanitizer_runtime_dir, + sanitizer_runtime_files = runtime_files, ) return CxxSanitizerRuntimeArguments() @@ -242,15 +253,15 @@ def executable_shared_lib_arguments( sanitizer_runtime_args = _sanitizer_runtime_arguments(ctx, cxx_toolchain, output) extra_link_args += sanitizer_runtime_args.extra_link_args - if sanitizer_runtime_args.sanitizer_runtime_dir != None: - runtime_files.append(sanitizer_runtime_args.sanitizer_runtime_dir) + if sanitizer_runtime_args.sanitizer_runtime_files: + runtime_files.extend(sanitizer_runtime_args.sanitizer_runtime_files) return ExecutableSharedLibArguments( extra_link_args = extra_link_args, runtime_files = runtime_files, external_debug_info = external_debug_info, shared_libs_symlink_tree = shared_libs_symlink_tree, - sanitizer_runtime_dir = sanitizer_runtime_args.sanitizer_runtime_dir, + sanitizer_runtime_files = sanitizer_runtime_args.sanitizer_runtime_files, ) def cxx_link_cmd_parts(toolchain: CxxToolchainInfo) -> ((RunInfo | cmd_args), cmd_args): diff --git a/prelude/linking/link_info.bzl b/prelude/linking/link_info.bzl index d9b8cc1b2..856e4f811 100644 --- a/prelude/linking/link_info.bzl +++ b/prelude/linking/link_info.bzl @@ -178,7 +178,7 @@ LinkOrdering = enum( ) CxxSanitizerRuntimeInfo = provider(fields = { - "runtime_dir": provider_field(Artifact), + "runtime_files": provider_field(list[Artifact]), }) def set_link_info_link_whole(info: LinkInfo) -> LinkInfo: From 4d659ae1714027cdb61e1764256c882a1b402759 Mon Sep 17 00:00:00 2001 
From: Jacob Rodal Date: Mon, 12 Feb 2024 10:46:20 -0800 Subject: [PATCH 0277/1133] Android relinker windows support Summary: The symbol extraction logic was written with unix in mind and does not work on Windows. I implemented a powershell equivalent for Windows. Reviewed By: IanChilds Differential Revision: D53633275 fbshipit-source-id: 38ac2008251615f2e49de0813691b85f9e30bd59 --- prelude/cxx/symbols.bzl | 52 ++++++++++++++++++++++++++++++----------- 1 file changed, 38 insertions(+), 14 deletions(-) diff --git a/prelude/cxx/symbols.bzl b/prelude/cxx/symbols.bzl index 0e93f1b63..d2aa2abe1 100644 --- a/prelude/cxx/symbols.bzl +++ b/prelude/cxx/symbols.bzl @@ -7,6 +7,7 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load("@prelude//os_lookup:defs.bzl", "OsLookup") def _extract_symbol_names( ctx: AnalysisContext, @@ -45,26 +46,48 @@ def _extract_symbol_names( if dynamic and cxx_toolchain.linker_info.type != "darwin": nm_flags += "D" - script = ( - "set -euo pipefail; " + - '"$1" {} "${{@:2}}"'.format(nm_flags) + - # Grab only the symbol name field. - ' | cut -d" " -f2 ' + - # Strip off ABI Version (@...) when using llvm-nm to keep compat with buck1 - " | cut -d@ -f1 " + - # Sort and dedup symbols. Use the `C` locale and do it in-memory to - # make it significantly faster. CAUTION: if ten of these processes - # run in parallel, they'll have cumulative allocations larger than RAM. 
- " | LC_ALL=C sort -S 10% -u > {}" - ) + is_windows = hasattr(ctx.attrs, "_exec_os_type") and ctx.attrs._exec_os_type[OsLookup].platform == "windows" - ctx.actions.run( - [ + if is_windows: + script = ( + """& {{ + $result = & $args[0] {} $($args[1..($args.Length-1)] -join " ") + $lines = $result -split '`n' + $lines = $lines | ForEach-Object {{ ($_ -split ' ')[1] }} + $lines = $lines | ForEach-Object {{ ($_ -split '@')[0] }} + $lines = $lines | Sort-Object -Unique + [IO.File]::WriteAllLines('{{}}', $lines) + }}""".format(nm_flags) + ) + symbol_extraction_args = [ + "powershell", + "-Command", + cmd_args(output.as_output(), format = script), + ] + else: + script = ( + "set -euo pipefail; " + + '"$1" {} "${{@:2}}"'.format(nm_flags) + + # Grab only the symbol name field. + ' | cut -d" " -f2 ' + + # Strip off ABI Version (@...) when using llvm-nm to keep compat with buck1 + " | cut -d@ -f1 " + + # Sort and dedup symbols. Use the `C` locale and do it in-memory to + # make it significantly faster. CAUTION: if ten of these processes + # run in parallel, they'll have cumulative allocations larger than RAM. 
+ " | LC_ALL=C sort -S 10% -u > {}" + ) + symbol_extraction_args = [ "/usr/bin/env", "bash", "-c", cmd_args(output.as_output(), format = script), "", + ] + + ctx.actions.run( + symbol_extraction_args + + [ nm, ] + objects, @@ -75,6 +98,7 @@ def _extract_symbol_names( weight_percentage = 15, # 10% + a little padding allow_cache_upload = allow_cache_upload, ) + return output _SymbolsInfo = provider(fields = { From 479eb8d5f4a1a4081ba67fb135a7c5f2a8de6b36 Mon Sep 17 00:00:00 2001 From: Balaji S Date: Mon, 12 Feb 2024 11:17:13 -0800 Subject: [PATCH 0278/1133] Eqwalize ct_daemon_node Summary: Nothing much, I was looking at this module for T159235428 Reviewed By: acw224 Differential Revision: D53666295 fbshipit-source-id: 7c54e15b027360ec29ffdca8ba2423f02f3fe3f6 --- prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl index bbe7810c5..0396f818c 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl @@ -14,6 +14,8 @@ -module(ct_daemon_node). +-compile(warn_missing_spec_all). + -include_lib("kernel/include/logger.hrl"). %% Public API @@ -84,6 +86,7 @@ start( true = erlang:register(?MODULE, self()), port_loop(Port, []). +-spec port_loop(port(), list()) -> ok | {error, {crash_on_startup, integer()}}. port_loop(Port, Acc) -> receive {Port, {data, {eol, Line}}} -> @@ -106,6 +109,7 @@ stop() -> %% monitore node true = erlang:monitor_node(Node, true), %% kill node + %% elp:ignore W0014 _Pid = erlang:spawn(Node, fun() -> erlang:halt() end), %% wait for node to come down receive From 642e7ebf6e13896d202dd36a22ddaec3d3e386f8 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Mon, 12 Feb 2024 11:47:41 -0800 Subject: [PATCH 0279/1133] Rip-out all the validator and validator arg forwarding in package scripts. 
Summary: Now that `validator` and `validator_args` are dead, there's no reason to have these present on our packaging scripts. Now our packaging scripts have 1 sole purpose: packaging. We don't have to worry about things being dropped on the ground. Reviewed By: passy Differential Revision: D53495144 fbshipit-source-id: 0edf3ee8d5b6bb80f67166f5a85d1a8a79bb134e --- prelude/apple/tools/ipa_package_maker.py | 25 ------------------------ 1 file changed, 25 deletions(-) diff --git a/prelude/apple/tools/ipa_package_maker.py b/prelude/apple/tools/ipa_package_maker.py index f4848dfa4..6cbd21157 100644 --- a/prelude/apple/tools/ipa_package_maker.py +++ b/prelude/apple/tools/ipa_package_maker.py @@ -14,8 +14,6 @@ from pathlib import Path -from typing import List, Optional - from apple.tools.re_compatibility_utils.writable import make_dir_recursively_writable @@ -44,8 +42,6 @@ def _package_ipa_contents( ipa_contents_dir: Path, ipa_output_path: Path, compression_level: int, - validator: Optional[Path], - validator_args: List[str], ) -> None: with tempfile.TemporaryDirectory() as processed_package_dir: processed_package_dir_path = Path(processed_package_dir) @@ -62,18 +58,6 @@ def _package_ipa_contents( # and mirror behavior which Apple expects, so we're future-proof. 
make_dir_recursively_writable(str(processed_package_dir_path)) - if validator: - validation_command = [ - str(validator), - "--ipa-contents-dir", - str(processed_package_dir_path), - *validator_args, - ] - subprocess.run( - validation_command, - check=True, - ) - with open(ipa_output_path, "wb") as ipa_file: zip_cmd = ["zip", "-X", "-r", f"-{compression_level}", "-", "."] subprocess.run( @@ -106,21 +90,12 @@ def main() -> None: required=True, help="The compression level to use for 'zip'.", ) - parser.add_argument( - "--validator", - type=Path, - required=False, - help="A path to an executable which will be passed the path to the IPA contents dir to validate", - ) - parser.add_argument("--validator-args", required=False, default=[], action="append") args = parser.parse_args() _package_ipa_contents( args.ipa_contents_dir, args.ipa_output_path, args.compression_level, - args.validator, - args.validator_args, ) From e7593000764b9597d275af3a5238755ab1a6a325 Mon Sep 17 00:00:00 2001 From: Cameron Pickett Date: Mon, 12 Feb 2024 12:06:32 -0800 Subject: [PATCH 0280/1133] Macro updates for buckified sysroot changes Summary: Pull out the macro updates from D53548103 in order to make D53548103 easier to revert. Reviewed By: shayne-fletcher Differential Revision: D53671535 fbshipit-source-id: 439f24cdec7e45ef696b27a6eeca143cd22eef48 --- prelude/rust/cargo_buildscript.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/rust/cargo_buildscript.bzl b/prelude/rust/cargo_buildscript.bzl index 0b91c80d4..7ef47487d 100644 --- a/prelude/rust/cargo_buildscript.bzl +++ b/prelude/rust/cargo_buildscript.bzl @@ -154,7 +154,7 @@ _cargo_buildscript_rule = rule( "runner": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//rust/tools:buildscript_run")), # *IMPORTANT* rustc_cfg must be a `dep` and not an `exec_dep` because # we want the `rustc --cfg` for the target platform, not the exec platform. 
- "rustc_cfg": attrs.default_only(attrs.dep(default = "prelude//rust/tools:rustc_cfg")), + "rustc_cfg": attrs.dep(default = "prelude//rust/tools:rustc_cfg"), "version": attrs.string(), "_exec_os_type": buck.exec_os_type_arg(), "_rust_toolchain": toolchains_common.rust(), From 1a4198913f98da9d268dc36c00ec38fb677add34 Mon Sep 17 00:00:00 2001 From: Ignacio Guridi Date: Mon, 12 Feb 2024 14:12:37 -0800 Subject: [PATCH 0281/1133] buck2: docs: prelude: add comma Summary: Created from CodeHub with https://fburl.com/edit-in-codehub Reviewed By: stepancheg Differential Revision: D53616102 fbshipit-source-id: 3d07bd372e167eec8eab2251a62981a3bc7f6bf4 --- prelude/decls/core_rules.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/decls/core_rules.bzl b/prelude/decls/core_rules.bzl index 2831ac2cf..342f1e7e8 100644 --- a/prelude/decls/core_rules.bzl +++ b/prelude/decls/core_rules.bzl @@ -379,7 +379,7 @@ export_file = prelude_rule( genrule( name = 'demo', - out = 'result.html' + out = 'result.html', cmd = 'cp $(location :example) $OUT', ) From d336f954d0a431792759faaed4d9b1dc9322ff08 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Mon, 12 Feb 2024 14:48:33 -0800 Subject: [PATCH 0282/1133] Back out "parallelize provisioning profiles decryption" Summary: See https://fb.workplace.com/groups/967178219989117/posts/25012688381678098 Original commit changeset: a37d230e5508 Original Phabricator Diff: D53518126 Reviewed By: d16r Differential Revision: D53680018 fbshipit-source-id: cad11d4aa63e767822c4c5dbb2cb2fade8c6ab09 --- .../tools/code_signing/codesign_bundle.py | 42 +++---------------- 1 file changed, 5 insertions(+), 37 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index b0fb9e222..d0d0e743d 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -241,46 +241,14 @@ def _read_provisioning_profiles( 
dirpath: Path, read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, ) -> List[ProvisioningProfileMetadata]: - paths = [ - dirpath / f + return [ + _provisioning_profile_from_file_path( + dirpath / f, + read_provisioning_profile_command_factory, + ) for f in os.listdir(dirpath) if (f.endswith(".mobileprovision") or f.endswith(".provisionprofile")) ] - with tempfile.TemporaryDirectory() as tmp_dir: - path_to_data = _decode_provisioning_profiles( - paths, tmp_dir, read_provisioning_profile_command_factory - ) - return [ - ProvisioningProfileMetadata.from_provisioning_profile_file_content(path, data) - for path, data in path_to_data.items() - ] - - -def _decode_provisioning_profiles( - paths: List[Path], - tmp_dir: str, - read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, -) -> Dict[Path, bytes]: - """Reads multiple provisioning profiles in parallel.""" - processes: Dict[Path, ParallelProcess] = {} - result = {} - with ExitStack() as stack: - for path in paths: - command = read_provisioning_profile_command_factory.read_provisioning_profile_command( - path - ) - process = _spawn_process( - command=command, - tmp_dir=tmp_dir, - stack=stack, - pipe_stdout=True, - ) - processes[path] = process - for path, process in processes.items(): - data, _ = process.process.communicate() - process.check_result() - result[path] = data - return result def _provisioning_profile_from_file_path( From ce7cf525b89cb3827be907ff5fb1ea79b3d6525e Mon Sep 17 00:00:00 2001 From: Itamar Oren Date: Mon, 12 Feb 2024 21:38:52 -0800 Subject: [PATCH 0283/1133] Polish Python 3.8 EOL analysis-time warning Summary: context: https://fb.workplace.com/groups/pyupgrades/posts/3289427784694684?comment_id=3290721887898607&reply_comment_id=3291269481177181 Reviewed By: shonaganuma Differential Revision: D53454704 fbshipit-source-id: 6b0a5735d6454ded9c548bd7f4b2e370a72a3657 --- prelude/python/python_binary.bzl | 7 ++++--- 1 file changed, 4 
insertions(+), 3 deletions(-) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 30d62f1d6..f55dfb57f 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -736,10 +736,11 @@ def python_binary_impl(ctx: AnalysisContext) -> list[Provider]: if "python-version=3.8" in ctx.attrs.labels: # buildifier: disable=print print(( - "\033[1;33m \u26A0 " + + "\033[1;33m \u26A0 [Warning] " + "{0} 3.8 is EOL, and is going away by the end of H1 2024. " + - "Upgrade //{1}:{2} to {0} 3.10 now to avoid breakages. " + - "https://fburl.com/py38-sunsetting \033[0m" + "This build triggered //{1}:{2} which still uses {0} 3.8. " + + "Make sure someone (you or the approproiate maintainers) upgrades it to {0} 3.10 soon to avoid breakages. " + + "https://fburl.com/python-eol \033[0m" ).format( "Cinder" if "python-flavor=cinder" in ctx.attrs.labels else "Python", ctx.label.package, From f9b17e02e7aab96d3eb630ad0fc40d438f2e492a Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 13 Feb 2024 07:57:04 -0800 Subject: [PATCH 0284/1133] profile selection for adhoc signing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: profile is not yet copied to bundle, just selection is running, so functionality not yet finished. Selection implementation is a bit different from buck1 — while buck1 was not completely ignoring certificate identities, in buck2 we limit the list of available certificates to the one set directly in target definition though we check this certificate compatibility when selecting provisioning profile. That is more tight approach and more straight forward for implementation. 
Reviewed By: milend Differential Revision: D53611185 fbshipit-source-id: 501f745d4356c154d61d8f8d08c9b5d5262aedc5 --- prelude/apple/tools/bundling/main.py | 46 +++++++++++++++---- .../tools/code_signing/codesign_bundle.py | 22 +++++---- .../code_signing/list_codesign_identities.py | 24 ++++++++++ 3 files changed, 76 insertions(+), 16 deletions(-) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index a3b201922..5f9beec8d 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -22,7 +22,10 @@ CodesignConfiguration, signing_context_with_profile_selection, ) -from apple.tools.code_signing.list_codesign_identities import ListCodesignIdentities +from apple.tools.code_signing.list_codesign_identities import ( + AdHocListCodesignIdentities, + ListCodesignIdentities, +) from apple.tools.re_compatibility_utils.writable import make_dir_recursively_writable @@ -130,7 +133,7 @@ def _args_parser() -> argparse.ArgumentParser: metavar="", type=str, required=False, - help="Codesign identity to use when ad-hoc signing is performed.", + help="Codesign identity to use when ad-hoc signing is performed. Should be present when selection of provisioining profile is requested for ad-hoc signing.", ) parser.add_argument( "--codesign-configuration", @@ -272,9 +275,40 @@ def _main() -> None: raise RuntimeError( "Apple platform should be set when code signing is required." ) + list_codesign_identities = ( + ListCodesignIdentities.override( + shlex.split(args.codesign_identities_command) + ) + if args.codesign_identities_command + else ListCodesignIdentities.default() + ) if args.ad_hoc: + if args.embed_provisioning_profile_when_signing_ad_hoc: + if not args.profiles_dir: + raise RuntimeError( + "Path to directory with provisioning profile files should be set when selection of provisioining profile is enabled for ad-hoc code signing." 
+ ) + if not args.ad_hoc_codesign_identity: + raise RuntimeError( + "Code signing identity should be set when selection of provisioining profile is enabled for ad-hoc code signing." + ) + profile_selection_context = signing_context_with_profile_selection( + info_plist_source=args.info_plist_source, + info_plist_destination=args.info_plist_destination, + provisioning_profiles_dir=args.profiles_dir, + entitlements_path=args.entitlements, + platform=args.platform, + list_codesign_identities=AdHocListCodesignIdentities( + original=list_codesign_identities, + subject_common_name=args.ad_hoc_codesign_identity, + ), + log_file_path=args.log_file, + ) + else: + profile_selection_context = None signing_context = AdhocSigningContext( - codesign_identity=args.ad_hoc_codesign_identity + codesign_identity=args.ad_hoc_codesign_identity, + profile_selection_context=profile_selection_context, ) selected_identity_argument = args.ad_hoc_codesign_identity else: @@ -288,11 +322,7 @@ def _main() -> None: provisioning_profiles_dir=args.profiles_dir, entitlements_path=args.entitlements, platform=args.platform, - list_codesign_identities=ListCodesignIdentities.override( - shlex.split(args.codesign_identities_command) - ) - if args.codesign_identities_command - else ListCodesignIdentities.default(), + list_codesign_identities=list_codesign_identities, log_file_path=args.log_file, ) selected_identity_argument = ( diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index d0d0e743d..6cd55a64d 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -95,14 +95,6 @@ def _select_provisioning_profile( return selected_profile_info -@dataclass -class AdhocSigningContext: - codesign_identity: str - - def __init__(self, codesign_identity: Optional[str] = None) -> None: - self.codesign_identity = codesign_identity or "-" - - @dataclass class 
SigningContextWithProfileSelection: info_plist_source: Path @@ -111,6 +103,20 @@ class SigningContextWithProfileSelection: selected_profile_info: SelectedProvisioningProfileInfo +@dataclass +class AdhocSigningContext: + codesign_identity: str + profile_selection_context: Optional[SigningContextWithProfileSelection] + + def __init__( + self, + codesign_identity: Optional[str] = None, + profile_selection_context: Optional[SigningContextWithProfileSelection] = None, + ) -> None: + self.codesign_identity = codesign_identity or "-" + self.profile_selection_context = profile_selection_context + + def signing_context_with_profile_selection( info_plist_source: Path, info_plist_destination: Path, diff --git a/prelude/apple/tools/code_signing/list_codesign_identities.py b/prelude/apple/tools/code_signing/list_codesign_identities.py index b072ebb47..b75ce678c 100644 --- a/prelude/apple/tools/code_signing/list_codesign_identities.py +++ b/prelude/apple/tools/code_signing/list_codesign_identities.py @@ -47,3 +47,27 @@ def _list_identities( encoding="utf-8", ) return CodeSigningIdentity.parse_security_stdout(output) + + +class AdHocListCodesignIdentities(IListCodesignIdentities): + def __init__( + self, original: IListCodesignIdentities, subject_common_name: str + ) -> None: + self.original = original + self.subject_common_name = subject_common_name + + def list_codesign_identities(self) -> List[CodeSigningIdentity]: + unfiltered_identities = self.original.list_codesign_identities() + identity = next( + ( + i + for i in unfiltered_identities + if i.subject_common_name == self.subject_common_name + ), + None, + ) + if not identity: + raise RuntimeError( + f"No identity found with subject common name `{self.subject_common_name}`" + ) + return [identity] From 4326c0ccebd59c7a80cd74aa645e4a2eef51992c Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 13 Feb 2024 07:57:04 -0800 Subject: [PATCH 0285/1133] bundle provisioning profile when adhoc signing Summary: When profile 
is requested to be bundled during adhoc signing, route Info.plist, entitlement preparation and copying of profile the same route as for distribution signing mode. Reviewed By: milend Differential Revision: D53611184 fbshipit-source-id: c2c70ad6a436a03cc8e03ceba728acd39dbd784f --- .../tools/code_signing/apple_platform.py | 4 ++-- .../tools/code_signing/codesign_bundle.py | 23 ++++++++++++++++--- 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/prelude/apple/tools/code_signing/apple_platform.py b/prelude/apple/tools/code_signing/apple_platform.py index e45b0905c..751f652ee 100644 --- a/prelude/apple/tools/code_signing/apple_platform.py +++ b/prelude/apple/tools/code_signing/apple_platform.py @@ -36,12 +36,12 @@ def provisioning_profile_name(self) -> Optional[str]: else: return None - def embedded_provisioning_profile_file_name(self) -> str: + def embedded_provisioning_profile_path(self) -> str: """ Returns: The name of the provisioning profile in the final application bundle. 
""" if self.is_desktop(): - return "embedded.provisionprofile" + return "Contents/embedded.provisionprofile" else: return "embedded.mobileprovision" diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 6cd55a64d..ebc3f1e8b 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -163,17 +163,34 @@ def codesign_bundle( ) -> None: with tempfile.TemporaryDirectory() as tmp_dir: if isinstance(signing_context, SigningContextWithProfileSelection): + selection_profile_context = signing_context + elif isinstance(signing_context, AdhocSigningContext): + selection_profile_context = signing_context.profile_selection_context + else: + raise RuntimeError( + f"Unexpected type of signing context `{type(signing_context)}`" + ) + + if selection_profile_context: prepared_entitlements_path = _prepare_entitlements_and_info_plist( bundle_path=bundle_path, entitlements_path=entitlements_path, platform=platform, - signing_context=signing_context, + signing_context=selection_profile_context, tmp_dir=tmp_dir, ) selected_identity_fingerprint = ( - signing_context.selected_profile_info.identity.fingerprint + selection_profile_context.selected_profile_info.identity.fingerprint ) else: + if not isinstance(signing_context, AdhocSigningContext): + raise AssertionError( + f"Expected `AdhocSigningContext`, got `{type(signing_context)}` instead." + ) + if signing_context.profile_selection_context: + raise AssertionError( + "Expected no profile selection context in `AdhocSigningContext` when `selection_profile_context` is `None`." 
+ ) prepared_entitlements_path = entitlements_path selected_identity_fingerprint = signing_context.codesign_identity @@ -238,7 +255,7 @@ def _prepare_entitlements_and_info_plist( ) shutil.copy2( selected_profile.file_path, - bundle_path / platform.embedded_provisioning_profile_file_name(), + bundle_path / platform.embedded_provisioning_profile_path(), ) return prepared_entitlements_path From 1a4d83e33dd111fd233586768c850fb3b10acc31 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Tue, 13 Feb 2024 09:19:29 -0800 Subject: [PATCH 0286/1133] Sanitizers: move sanitizer runtime + flags into core link function Summary: Moves the sanitizer flags and runtime propagation to the core linking function. We need C++ sanitizer runtime information at the `cxx_library()` level as well because `apple_test()` needs to bundle the runtime and it's using the impl of `cxx_library()`. Reviewed By: rmaz Differential Revision: D53708692 fbshipit-source-id: 9e477a9b6da19cce1cad35dc89eb0aef566f6b29 --- prelude/cxx/cxx_executable.bzl | 2 +- prelude/cxx/cxx_link_utility.bzl | 10 +--------- prelude/cxx/link.bzl | 16 ++++++++++++++++ 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 2152d041d..7ceabaddc 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -735,7 +735,7 @@ def _link_into_executable( external_debug_info = executable_args.external_debug_info, shared_libs_symlink_tree = executable_args.shared_libs_symlink_tree, linker_map_data = link_result.linker_map_data, - sanitizer_runtime_files = executable_args.sanitizer_runtime_files, + sanitizer_runtime_files = link_result.sanitizer_runtime_files, ) def get_cxx_executable_product_name(ctx: AnalysisContext) -> str: diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index 7a8416990..99c6d3f6d 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -155,8 +155,6 @@ 
ExecutableSharedLibArguments = record( external_debug_info = field(list[TransitiveSetArgsProjection], []), # Optional shared libs symlink tree symlinked_dir action. shared_libs_symlink_tree = field(list[Artifact] | Artifact | None, None), - # A list of runtime shared libraries - sanitizer_runtime_files = field(list[Artifact], []), ) CxxSanitizerRuntimeArguments = record( @@ -166,7 +164,7 @@ CxxSanitizerRuntimeArguments = record( # @executable_path/Frameworks -def _sanitizer_runtime_arguments( +def cxx_sanitizer_runtime_arguments( ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, output: Artifact) -> CxxSanitizerRuntimeArguments: @@ -251,17 +249,11 @@ def executable_shared_lib_arguments( rpath_arg = cmd_args(shared_libs_symlink_tree, format = "-Wl,-rpath,{}/{{}}".format(rpath_reference)).relative_to(output, parent = 1).ignore_artifacts() extra_link_args.append(rpath_arg) - sanitizer_runtime_args = _sanitizer_runtime_arguments(ctx, cxx_toolchain, output) - extra_link_args += sanitizer_runtime_args.extra_link_args - if sanitizer_runtime_args.sanitizer_runtime_files: - runtime_files.extend(sanitizer_runtime_args.sanitizer_runtime_files) - return ExecutableSharedLibArguments( extra_link_args = extra_link_args, runtime_files = runtime_files, external_debug_info = external_debug_info, shared_libs_symlink_tree = shared_libs_symlink_tree, - sanitizer_runtime_files = sanitizer_runtime_args.sanitizer_runtime_files, ) def cxx_link_cmd_parts(toolchain: CxxToolchainInfo) -> ((RunInfo | cmd_args), cmd_args): diff --git a/prelude/cxx/link.bzl b/prelude/cxx/link.bzl index 7436bf809..3e3f0757d 100644 --- a/prelude/cxx/link.bzl +++ b/prelude/cxx/link.bzl @@ -50,6 +50,7 @@ load(":cxx_context.bzl", "get_cxx_toolchain_info") load( ":cxx_link_utility.bzl", "cxx_link_cmd_parts", + "cxx_sanitizer_runtime_arguments", "generates_split_debug", "linker_map_args", "make_link_args", @@ -75,6 +76,8 @@ CxxLinkResult = record( linked_object = LinkedObject, linker_map_data = [CxxLinkerMapData, 
None], link_execution_preference_info = LinkExecutionPreferenceInfo, + # A list of runtime shared libraries + sanitizer_runtime_files = field(list[Artifact]), ) def link_external_debug_info( @@ -132,6 +135,9 @@ def cxx_link_into( if linker_info.supports_distributed_thinlto and opts.enable_distributed_thinlto: if not linker_info.requires_objects: fail("Cannot use distributed thinlto if the cxx toolchain doesn't require_objects") + sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain_info, output) + if sanitizer_runtime_args.extra_link_args or sanitizer_runtime_args.sanitizer_runtime_files: + fail("Cannot use distributed thinlto with sanitizer runtime") exe = cxx_dist_link( ctx, opts.links, @@ -148,6 +154,7 @@ def cxx_link_into( link_execution_preference_info = LinkExecutionPreferenceInfo( preference = opts.link_execution_preference, ), + sanitizer_runtime_files = [], ) if linker_info.generate_linker_maps: @@ -159,6 +166,9 @@ def cxx_link_into( all_link_args = cmd_args(toolchain_linker_flags) all_link_args.add(get_output_flags(linker_info.type, output)) + sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain_info, output) + all_link_args.add(sanitizer_runtime_args.extra_link_args) + # Darwin LTO requires extra link outputs to preserve debug info split_debug_output = None split_debug_lto_info = get_split_debug_lto_info(ctx.actions, cxx_toolchain_info, output.short_path) @@ -308,6 +318,7 @@ def cxx_link_into( linked_object = linked_object, linker_map_data = linker_map_data, link_execution_preference_info = link_execution_preference_info, + sanitizer_runtime_files = sanitizer_runtime_args.sanitizer_runtime_files, ) _AnonLinkInfo = provider(fields = { @@ -394,6 +405,10 @@ def _anon_cxx_link( split_debug_output = split_debug_output, ) + # The anon target API doesn't allow us to return the list of artifacts for + # sanitizer runtime, so it has be computed here + sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, 
cxx_toolchain, output) + return CxxLinkResult( linked_object = LinkedObject( output = output, @@ -405,6 +420,7 @@ def _anon_cxx_link( link_execution_preference_info = LinkExecutionPreferenceInfo( preference = LinkExecutionPreference("any"), ), + sanitizer_runtime_files = sanitizer_runtime_args.sanitizer_runtime_files, ) def cxx_link( From e4beb0a9e1153f1bc3fa1d7dad3299284ab2936b Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Tue, 13 Feb 2024 09:19:29 -0800 Subject: [PATCH 0287/1133] Sanitizers: expose sanitizer runtime from `cxx_library_parameterized()` Summary: Exposes the sanitizer runtime in the output of `cxx_library_parameterized()` which can be used by `apple_test()`. Reviewed By: blackm00n, rmaz Differential Revision: D53709589 fbshipit-source-id: 23814f5580cf20c0d0066e6d447fe8ad67359c82 --- prelude/cxx/cxx_library.bzl | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 0b1f3fdf2..184d31dde 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -261,6 +261,7 @@ _CxxAllLibraryOutputs = record( providers = field(list[Provider], default = []), # Shared object name to shared library mapping if this target produces a shared library. solib = field([(str, LinkedObject), None]), + sanitizer_runtime_files = field(list[Artifact], []), ) _CxxLibraryCompileOutput = record( @@ -317,6 +318,8 @@ _CxxLibraryParameterizedOutput = record( cxx_compilationdb_info = field([CxxCompilationDbInfo, None], None), # LinkableRootInfo provider, same as above. 
linkable_root = field([LinkableRootInfo, None], None), + # List of shared libraries for the sanitizer runtime linked into the library + sanitizer_runtime_files = field(list[Artifact], []), ) def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams) -> _CxxLibraryParameterizedOutput: @@ -843,6 +846,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc xcode_data_info = xcode_data_info, cxx_compilationdb_info = comp_db_info, linkable_root = linkable_root, + sanitizer_runtime_files = library_outputs.sanitizer_runtime_files, ) def get_default_cxx_library_product_name(ctx, impl_params) -> str: @@ -934,6 +938,7 @@ def _form_library_outputs( solib = None link_infos = {} providers = [] + sanitizer_runtime_files = [] linker_flags = cxx_attr_linker_flags_all(ctx) @@ -1062,6 +1067,12 @@ def _form_library_outputs( providers.append(result.link_result.link_execution_preference_info) + link_sanitizer_runtime_files = result.link_result.sanitizer_runtime_files + if link_sanitizer_runtime_files: + if sanitizer_runtime_files: + fail("Cannot specify sanitizer runtime files multiple times") + sanitizer_runtime_files = link_sanitizer_runtime_files + # you cannot link against header only libraries so create an empty link info info = info if info != None else LinkInfo() if output: @@ -1076,6 +1087,7 @@ def _form_library_outputs( link_infos = link_infos, providers = providers, solib = solib, + sanitizer_runtime_files = sanitizer_runtime_files, ) def _strip_objects(ctx: AnalysisContext, objects: list[Artifact]) -> list[Artifact]: From e78e03643f9b620e0f10c7fef462a503d9befbcd Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Tue, 13 Feb 2024 09:19:29 -0800 Subject: [PATCH 0288/1133] Sanitizers: bundle sanitizer runtime in `apple_test()` Summary: Bundle the C++ sanitizer runtime in `apple_test()` bundles if required. 
Reviewed By: blackm00n Differential Revision: D53709817 fbshipit-source-id: 2dfc513ab27f0de5b64ddea490136f7f837f0537 --- prelude/apple/apple_rules_impl_utility.bzl | 1 + prelude/apple/apple_test.bzl | 10 ++++++++++ 2 files changed, 11 insertions(+) diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 4633f332d..a377db959 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -102,6 +102,7 @@ def apple_test_extra_attrs(): "resource_group": attrs.option(attrs.string(), default = None), # Expected by `apple_bundle`, for `apple_test` this field is always None. "resource_group_map": attrs.option(attrs.string(), default = None), + "sanitizer_runtime_enabled": attrs.option(attrs.bool(), default = None), "stripped": attrs.bool(default = False), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), "use_m1_simulator": attrs.bool(default = False), diff --git a/prelude/apple/apple_test.bzl b/prelude/apple/apple_test.bzl index fd74d3d38..251283a00 100644 --- a/prelude/apple/apple_test.bzl +++ b/prelude/apple/apple_test.bzl @@ -128,6 +128,16 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: bundle_parts = part_list_output.parts + _get_xctest_framework(ctx, xctest_swift_support_needed) + for sanitizer_runtime_dylib in cxx_library_output.sanitizer_runtime_files: + frameworks_destination = AppleBundleDestination("frameworks") + bundle_parts.append( + AppleBundlePart( + source = sanitizer_runtime_dylib, + destination = frameworks_destination, + codesign_on_copy = True, + ), + ) + primary_binary_rel_path = get_apple_bundle_part_relative_destination_path(ctx, binary_part) swift_stdlib_args = SwiftStdlibArguments(primary_binary_rel_path = primary_binary_rel_path) From df5920057512413ba5ccc7257271124fcf51f9e3 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 13 Feb 2024 09:38:18 -0800 Subject: [PATCH 0289/1133] 
temporary disable embed provisioning profile for adhoc build Reviewed By: milend Differential Revision: D53713130 fbshipit-source-id: 91c55ff2ea01ceb51ae80a8bd6b3fc163c1757c3 --- prelude/apple/apple_bundle_part.bzl | 2 -- 1 file changed, 2 deletions(-) diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index ff6427821..4f4e550a1 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -122,8 +122,6 @@ def assemble_bundle( codesign_args.append("--ad-hoc") if ctx.attrs.codesign_identity: codesign_args.extend(["--ad-hoc-codesign-identity", ctx.attrs.codesign_identity]) - if profile_selection_required: - codesign_args.append("--embed-provisioning-profile-when-signing-ad-hoc") codesign_args += get_entitlements_codesign_args(ctx, codesign_type) codesign_args += _get_extra_codesign_args(ctx) From e0489572294bca132031ef2d11f4e191f1d95824 Mon Sep 17 00:00:00 2001 From: David Richey Date: Tue, 13 Feb 2024 10:58:15 -0800 Subject: [PATCH 0290/1133] Add target for generating rustdoc coverage Summary: Makes use of Rustdoc's unstable `--show-coverage` feature. We can use this to power doc-coverage-related metrics/goals. Two important things to note: 1. This uses the default ascii table output format (and then parses out the numbers we care about) instead of the json output format because of this bug: https://github.com/rust-lang/rust/issues/117291 2. `rustdoc --show-coverage` only supports writing to stdout, so we have to write a python wrapper to capture the output. 
Reviewed By: diliop, JakobDegen, dtolnay Differential Revision: D53683830 fbshipit-source-id: 157195093afc0299a1a729022da7115faf5836f1 --- prelude/rust/build.bzl | 56 +++++++++++++++++++++ prelude/rust/rust_library.bzl | 13 ++++- prelude/rust/rust_toolchain.bzl | 2 + prelude/rust/tools/BUCK.v2 | 6 +++ prelude/rust/tools/attrs.bzl | 1 + prelude/rust/tools/rustdoc_coverage.py | 69 ++++++++++++++++++++++++++ prelude/toolchains/rust.bzl | 1 + 7 files changed, 147 insertions(+), 1 deletion(-) create mode 100755 prelude/rust/tools/rustdoc_coverage.py diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 7f4a2a5c0..1b0c06f61 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -228,6 +228,62 @@ def generate_rustdoc( return output +def generate_rustdoc_coverage( + ctx: AnalysisContext, + compile_ctx: CompileContext, + # link strategy doesn't matter, but caller should pass in build params + # with static-pic (to get best cache hits for deps) + params: BuildParams, + default_roots: list[str]) -> Artifact: + toolchain_info = compile_ctx.toolchain_info + + common_args = _compute_common_args( + ctx = ctx, + compile_ctx = compile_ctx, + dep_ctx = compile_ctx.dep_ctx, + # to make sure we get the rmeta's generated for the crate dependencies, + # rather than full .rlibs + emit = Emit("metadata"), + params = params, + default_roots = default_roots, + is_rustdoc_test = False, + ) + + file = common_args.subdir + "-rustdoc-coverage" + output = ctx.actions.declare_output(file) + + rustdoc_cmd = cmd_args( + toolchain_info.rustdoc, + toolchain_info.rustdoc_flags, + ctx.attrs.rustdoc_flags, + common_args.args, + "-Zunstable-options", + "--show-coverage", + ) + + exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" + plain_env, path_env = _process_env(compile_ctx, ctx.attrs.env, exec_is_windows) + plain_env["RUSTDOC_BUCK_TARGET"] = cmd_args(str(ctx.label.raw_target())) + + rustdoc_cmd_action = cmd_args( + [cmd_args("--env=", k, "=", v, 
delimiter = "") for k, v in plain_env.items()], + [cmd_args("--path-env=", k, "=", v, delimiter = "") for k, v in path_env.items()], + rustdoc_cmd, + ) + + rustdoc_cmd = _long_command( + ctx = ctx, + exe = toolchain_info.rustc_action, + args = rustdoc_cmd_action, + argfile_name = "{}.args".format(file), + ) + + cmd = cmd_args([toolchain_info.rustdoc_coverage, output.as_output(), rustdoc_cmd]) + + ctx.actions.run(cmd, category = "rustdoc_coverage") + + return output + def generate_rustdoc_test( ctx: AnalysisContext, compile_ctx: CompileContext, diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 003f117f3..f7dea97fa 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -72,6 +72,7 @@ load( "RustcOutput", # @unused Used as a type "compile_context", "generate_rustdoc", + "generate_rustdoc_coverage", "generate_rustdoc_test", "rust_compile", "rust_compile_multi", @@ -315,6 +316,13 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: document_private_items = False, ) + rustdoc_coverage = generate_rustdoc_coverage( + ctx = ctx, + compile_ctx = compile_ctx, + params = static_library_params, + default_roots = default_roots, + ) + expand = rust_compile( ctx = ctx, compile_ctx = compile_ctx, @@ -365,6 +373,7 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: check_artifacts = check_artifacts, expand = expand.output, sources = compile_ctx.symlinked_srcs, + rustdoc_coverage = rustdoc_coverage, ) rust_link_info = _rust_providers( ctx = ctx, @@ -522,12 +531,14 @@ def _default_providers( doctests_enabled: bool, check_artifacts: dict[str, Artifact], expand: Artifact, - sources: Artifact) -> list[Provider]: + sources: Artifact, + rustdoc_coverage: Artifact) -> list[Provider]: targets = {} targets.update(check_artifacts) targets["sources"] = sources targets["expand"] = expand targets["doc"] = rustdoc + targets["doc-coverage"] = rustdoc_coverage sub_targets = { k: [DefaultInfo(default_output = v)] for 
(k, v) in targets.items() diff --git a/prelude/rust/rust_toolchain.bzl b/prelude/rust/rust_toolchain.bzl index facf9a768..9e2a41955 100644 --- a/prelude/rust/rust_toolchain.bzl +++ b/prelude/rust/rust_toolchain.bzl @@ -77,6 +77,8 @@ rust_toolchain_attrs = { "rustc_action": provider_field(RunInfo | None, default = None), # Wrapper for rustdoc-generated test executables "rustdoc_test_with_resources": provider_field(RunInfo | None, default = None), + # Wrapper for rustdoc coverage + "rustdoc_coverage": provider_field(RunInfo | None, default = None), # Failure filter action "failure_filter_action": provider_field(RunInfo | None, default = None), # The default edition to use, if not specified. diff --git a/prelude/rust/tools/BUCK.v2 b/prelude/rust/tools/BUCK.v2 index f3f1bada5..63fb446d8 100644 --- a/prelude/rust/tools/BUCK.v2 +++ b/prelude/rust/tools/BUCK.v2 @@ -36,3 +36,9 @@ prelude.python_bootstrap_binary( main = "buildscript_run.py", visibility = ["PUBLIC"], ) + +prelude.python_bootstrap_binary( + name = "rustdoc_coverage", + main = "rustdoc_coverage.py", + visibility = ["PUBLIC"], +) diff --git a/prelude/rust/tools/attrs.bzl b/prelude/rust/tools/attrs.bzl index f5fb89307..5e181941a 100644 --- a/prelude/rust/tools/attrs.bzl +++ b/prelude/rust/tools/attrs.bzl @@ -14,6 +14,7 @@ def _internal_tool(default: str) -> Attr: internal_tool_attrs = { "failure_filter_action": _internal_tool("prelude//rust/tools:failure_filter_action"), "rustc_action": _internal_tool("prelude//rust/tools:rustc_action"), + "rustdoc_coverage": _internal_tool("prelude//rust/tools:rustdoc_coverage"), "rustdoc_test_with_resources": _internal_tool("prelude//rust/tools:rustdoc_test_with_resources"), "transitive_dependency_symlinks_tool": _internal_tool("prelude//rust/tools:transitive_dependency_symlinks"), } diff --git a/prelude/rust/tools/rustdoc_coverage.py b/prelude/rust/tools/rustdoc_coverage.py new file mode 100755 index 000000000..a662a53e0 --- /dev/null +++ 
b/prelude/rust/tools/rustdoc_coverage.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import argparse +import json +import re +import subprocess +from pathlib import Path +from typing import List, NamedTuple + + +class Args(NamedTuple): + out: Path + cmd: List[str] + + +def arg_parse() -> Args: + parser = argparse.ArgumentParser() + parser.add_argument( + "out", + type=Path, + help="path to output", + ) + parser.add_argument("cmd", nargs=argparse.REMAINDER, help="command to run") + return Args(**vars(parser.parse_args())) + + +_REGEX = re.compile(r"(\d+(?:\.\d+)?)") + + +def main(): + args = arg_parse() + stdout = subprocess.run(args.cmd, capture_output=True, text=True).stdout + + with open(args.out, "w") as f: + # stdout looks like... 
+ # +--------+------------+------------+------------+------------+ + # | File | Documented | Percentage | Examples | Percentage | + # +--------+------------+------------+------------+------------+ + # | foo.rs | 1 | 1.0% | 0 | 0.0% | + # | bar.rs | 2 | 2.1% | 0 | 0.0% | + # +--------+------------+------------+------------+------------+ + # | Total | 3 | 3.1% | 0 | 0.0% | + # +--------+------------+------------+------------+------------+ + total_line = stdout.splitlines()[-2] + nums = _REGEX.findall(total_line) + if len(nums) != 4: + raise Exception( + f"using regex `{_REGEX.pattern}`, expected to find 4 numbers, got {len(nums)} " + f"for line: '{total_line}'" + ) + json.dump( + { + "documented": nums[0], + "documented_percentage": nums[1], + "examples": nums[2], + "examples_percentage": nums[3], + }, + f, + ) + + +if __name__ == "__main__": + main() diff --git a/prelude/toolchains/rust.bzl b/prelude/toolchains/rust.bzl index d018edded..8a5135963 100644 --- a/prelude/toolchains/rust.bzl +++ b/prelude/toolchains/rust.bzl @@ -59,6 +59,7 @@ def _system_rust_toolchain_impl(ctx): rustdoc = RunInfo(args = ["rustdoc"]), rustdoc_flags = ctx.attrs.rustdoc_flags, rustdoc_test_with_resources = ctx.attrs.rustdoc_test_with_resources[RunInfo], + rustdoc_coverage = ctx.attrs.rustdoc_coverage[RunInfo], transitive_dependency_symlinks_tool = ctx.attrs.transitive_dependency_symlinks_tool[RunInfo], warn_lints = ctx.attrs.warn_lints, ), From a86b9de34779f4b41c322caaf218c4a501b94e0d Mon Sep 17 00:00:00 2001 From: Jia Chen Date: Tue, 13 Feb 2024 12:19:28 -0800 Subject: [PATCH 0291/1133] Set default py_version for Pyre to 3.10 for now Reviewed By: connernilsen Differential Revision: D53594003 fbshipit-source-id: 085849542d8f7257b83192bf55817eea7b60e07e --- prelude/python/typing.bzl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/prelude/python/typing.bzl b/prelude/python/typing.bzl index a856095b4..22b38d919 100644 --- a/prelude/python/typing.bzl +++ 
b/prelude/python/typing.bzl @@ -14,6 +14,8 @@ load( ) load(":python.bzl", "PythonLibraryManifestsTSet") +DEFAULT_PY_VERSION = "3.10" + def create_typeshed_manifest_info( ctx: AnalysisContext, typeshed_deps: list[Dependency]) -> ManifestInfo: @@ -64,7 +66,7 @@ def create_per_target_type_check( # Create input configs input_config = { "dependencies": dep_manifests, - "py_version": py_version, + "py_version": py_version or DEFAULT_PY_VERSION, "sources": source_manifests, "typeshed": typeshed_manifest, } From 66ca386b328284d52e954eba75738cbde4ed6a56 Mon Sep 17 00:00:00 2001 From: Itamar Oren Date: Tue, 13 Feb 2024 14:51:29 -0800 Subject: [PATCH 0292/1133] Fix typo in Python 3.8 EOL warning Reviewed By: cxxxs Differential Revision: D53717953 fbshipit-source-id: fcbd837b80a2d804aad7d3b1c99aef29f5ec707e --- prelude/python/python_binary.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index f55dfb57f..2fba56ddb 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -739,7 +739,7 @@ def python_binary_impl(ctx: AnalysisContext) -> list[Provider]: "\033[1;33m \u26A0 [Warning] " + "{0} 3.8 is EOL, and is going away by the end of H1 2024. " + "This build triggered //{1}:{2} which still uses {0} 3.8. " + - "Make sure someone (you or the approproiate maintainers) upgrades it to {0} 3.10 soon to avoid breakages. " + + "Make sure someone (you or the appropriate maintainers) upgrades it to {0} 3.10 soon to avoid breakages. " + "https://fburl.com/python-eol \033[0m" ).format( "Cinder" if "python-flavor=cinder" in ctx.attrs.labels else "Python", From 131a37542dd5774c0caea11585915a24ae964abb Mon Sep 17 00:00:00 2001 From: David Reiss Date: Tue, 13 Feb 2024 23:06:55 -0800 Subject: [PATCH 0293/1133] Recognize "sx" as a suffix for assembler with preprocessor Summary: gcc recognizes this as an alias for ".S", and I personally prefer it because it's not case-sensitive. 
Reviewed By: jsgf Differential Revision: D53158578 fbshipit-source-id: d8969c2114674a680926bab673efa012c29486f9 --- prelude/cxx/attr_selection.bzl | 2 +- prelude/cxx/compile.bzl | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/prelude/cxx/attr_selection.bzl b/prelude/cxx/attr_selection.bzl index 020040d8d..05e452277 100644 --- a/prelude/cxx/attr_selection.bzl +++ b/prelude/cxx/attr_selection.bzl @@ -41,7 +41,7 @@ def cxx_by_language_ext(x: dict[typing.Any, typing.Any], ext: str) -> list[typin elif ext == ".mm": key_pp = "objcxx" key_compiler = "objcxx_cpp_output" - elif ext in (".s", ".S"): + elif ext in (".s", ".sx", ".S"): key_pp = "assembler_with_cpp" key_compiler = "assembler" elif ext == ".cu": diff --git a/prelude/cxx/compile.bzl b/prelude/cxx/compile.bzl index 7941d522b..cadd02edb 100644 --- a/prelude/cxx/compile.bzl +++ b/prelude/cxx/compile.bzl @@ -48,6 +48,7 @@ CxxExtension = enum( ".c++", ".c", ".s", + ".sx", ".S", ".m", ".mm", @@ -512,7 +513,7 @@ def _get_compiler_info(toolchain: CxxToolchainInfo, ext: CxxExtension) -> typing compiler_info = toolchain.cxx_compiler_info elif ext.value in (".c", ".m"): compiler_info = toolchain.c_compiler_info - elif ext.value in (".s", ".S"): + elif ext.value in (".s", ".sx", ".S"): compiler_info = toolchain.as_compiler_info elif ext.value == ".cu": compiler_info = toolchain.cuda_compiler_info @@ -538,7 +539,7 @@ def _get_category(ext: CxxExtension) -> str: return "objc_compile" if ext.value == ".mm": return "objcxx_compile" - elif ext.value in (".s", ".S", ".asm", ".asmpp"): + elif ext.value in (".s", ".sx", ".S", ".asm", ".asmpp"): return "asm_compile" elif ext.value == ".cu": return "cuda_compile" @@ -575,7 +576,7 @@ def _dep_file_type(ext: CxxExtension) -> [DepFileType, None]: return DepFileType("c") elif ext.value == ".cu": return DepFileType("cuda") - elif ext.value in (".asmpp"): + elif ext.value in (".asmpp", ".sx"): return DepFileType("asm") else: # This should be unreachable as long as 
we handle all enum values From 402bec8fd04882133c5c5ccf23a7fe26e72894ff Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Wed, 14 Feb 2024 06:03:14 -0800 Subject: [PATCH 0294/1133] Sanitizers: ensure RunInfo contains sanitizer runtime Summary: Ensure the `RunInfo` for executables contains the sanitizer runtime files. Reviewed By: blackm00n Differential Revision: D53756550 fbshipit-source-id: a6cae751f0133565b6c670bf80895dd19f53bc09 --- prelude/cxx/cxx_executable.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 7ceabaddc..078f3fa51 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -731,7 +731,7 @@ def _link_into_executable( return _CxxLinkExecutableResult( exe = link_result.linked_object, - runtime_files = executable_args.runtime_files, + runtime_files = executable_args.runtime_files + link_result.sanitizer_runtime_files, external_debug_info = executable_args.external_debug_info, shared_libs_symlink_tree = executable_args.shared_libs_symlink_tree, linker_map_data = link_result.linker_map_data, From c08bab8c825ef840b608bcfc480af81b9310deb6 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Wed, 14 Feb 2024 07:52:14 -0800 Subject: [PATCH 0295/1133] reenable embed provisioning profile for adhoc build Reviewed By: milend Differential Revision: D53759936 fbshipit-source-id: 7f209f6f6c793725efa47318a592fc8e212a824c --- prelude/apple/apple_bundle_part.bzl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index 4f4e550a1..ff6427821 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -122,6 +122,8 @@ def assemble_bundle( codesign_args.append("--ad-hoc") if ctx.attrs.codesign_identity: codesign_args.extend(["--ad-hoc-codesign-identity", ctx.attrs.codesign_identity]) + if profile_selection_required: + 
codesign_args.append("--embed-provisioning-profile-when-signing-ad-hoc") codesign_args += get_entitlements_codesign_args(ctx, codesign_type) codesign_args += _get_extra_codesign_args(ctx) From a38c184d570730106e50734172fdd75407a6e12f Mon Sep 17 00:00:00 2001 From: Max Ovtsin Date: Wed, 14 Feb 2024 08:47:29 -0800 Subject: [PATCH 0296/1133] Fix incremental compilation Summary: swift-driver doesn't respect extension for root swiftdeps file and it always has to be `.priors`, otherwise buck2 will fail not been able to find all expected outputs. Also disabling CMO since it's not necessary for debug builds. Reviewed By: milend Differential Revision: D53755745 fbshipit-source-id: 2ad7ef9d8f490ab3755ae87f37de5a57273b1811 --- prelude/apple/swift/swift_incremental_support.bzl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/prelude/apple/swift/swift_incremental_support.bzl b/prelude/apple/swift/swift_incremental_support.bzl index 66ff202ef..e0f8cf98b 100644 --- a/prelude/apple/swift/swift_incremental_support.bzl +++ b/prelude/apple/swift/swift_incremental_support.bzl @@ -59,6 +59,7 @@ def _get_incremental_compilation_flags_and_objects( cmd = cmd_args([ "-incremental", "-enable-incremental-imports", + "-disable-cmo", # To minimize changes in generated swiftmodule file. "-enable-batch-mode", "-driver-batch-count", "1", @@ -83,7 +84,8 @@ def _write_output_file_map( srcs: list[CxxSrcWithFlags], compilation_mode: str, # Either "object" or "swiftmodule" extension: str) -> _WriteOutputFileMapOutput: # Either ".o" or ".swiftmodule" - module_swiftdeps = ctx.actions.declare_output("module-build-record." + compilation_mode + ".swiftdeps") + # swift-driver doesn't respect extension for root swiftdeps file and it always has to be `.priors`. + module_swiftdeps = ctx.actions.declare_output("module-build-record." 
+ compilation_mode + ".priors") output_file_map = { "": { From 29001abe723283c6303fb89af9e1b32b665fdae2 Mon Sep 17 00:00:00 2001 From: Han Zhu Date: Wed, 14 Feb 2024 10:45:53 -0800 Subject: [PATCH 0297/1133] Constraint override support for cpp binary and test Summary: Following the examples of D43932867 and D46742363, support constraint override transitions for cpp binaries and tests. Reviewed By: igorsugak Differential Revision: D53684611 fbshipit-source-id: fb8dc257055b72eac1d8be7d31f3785a62da0fdd --- prelude/rules_impl.bzl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index a33152b1e..7c03bf113 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -334,6 +334,7 @@ def _cxx_binary_and_test_attrs(): "binary_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "bolt_flags": attrs.list(attrs.arg(), default = []), "bolt_profile": attrs.option(attrs.source(), default = None), + "constraint_overrides": attrs.list(attrs.string(), default = []), "distributed_thinlto_partial_split_dwarf": attrs.bool(default = False), "enable_distributed_thinlto": attrs.bool(default = False), "link_execution_preference": link_execution_preference_attr(), @@ -605,6 +606,8 @@ extra_attributes = struct(**all_extra_attributes) transitions = { "android_binary": constraint_overrides_transition, "apple_resource": apple_resource_transition, + "cxx_binary": constraint_overrides_transition, + "cxx_test": constraint_overrides_transition, "go_binary": go_binary_transition, "go_exported_library": go_exported_library_transition, "go_test": go_test_transition, From afbec9f5b88a1f6535b21819d8d23f3a1d1b5e76 Mon Sep 17 00:00:00 2001 From: Adam Cmiel Date: Wed, 14 Feb 2024 15:02:48 -0800 Subject: [PATCH 0298/1133] Suppress error for Apple framework pcm Summary: > 
buck-out/v2/gen/fbsource/0d0ff24630454274/xplat/toolchains/apple/__xcode_15.3.0_15e5178i-iphonesimulator-sdk_genrule__/iPhoneSimulator17.4.sdk/System/Library/Frameworks/AssetsLibrary.framework/Headers/ALAssetsLibrary.h:19:2: error: AssetsLibrary will be removed from the iOS SDK in the next major release [-Werror,-W#warnings] #warning AssetsLibrary will be removed from the iOS SDK in the next major release This is actually a warning from the compilation of the AssetsLibrary pcm in the apple toolchain cache generation. Pass -Wno-error=#warnings to those builds to suppress this error. We should still get it when importing these headers in our own code, but those are being systematically removed already T179144253 by the FoA Deprecated API workstream. Reviewed By: rmaz Differential Revision: D53742183 fbshipit-source-id: a603183a18826786b75af063c8918a5b930d9431 --- prelude/apple/swift/swift_sdk_pcm_compilation.bzl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/prelude/apple/swift/swift_sdk_pcm_compilation.bzl b/prelude/apple/swift/swift_sdk_pcm_compilation.bzl index f4e3d0ac5..c96606c89 100644 --- a/prelude/apple/swift/swift_sdk_pcm_compilation.bzl +++ b/prelude/apple/swift/swift_sdk_pcm_compilation.bzl @@ -44,6 +44,10 @@ def get_shared_pcm_compilation_args(module_name: str) -> cmd_args: # to avoid serializing it as an absolute path. "-Xcc", "-working-directory=", + # AssetsLibrary is shipping with a #warning, which we shouldn't error on when compiling + # the SDK module. 
I don't think this is actually avoidable or removable until the next xcode major version + "-Xcc", + "-Wno-error=#warnings", ]) cmd.add(get_disable_pch_validation_flags()) From c60aafd37e9d7b713ec65cad342aa6a8f0dbdaed Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 14 Feb 2024 17:48:52 -0800 Subject: [PATCH 0299/1133] Support GODEBUG env-var Summary: Support setting [GODEBUG](https://go.dev/doc/godebug) on toolchain level Reviewed By: leoleovich Differential Revision: D53769175 fbshipit-source-id: 5a685f74ff9ad44a22f7fe2271a232697836132f --- prelude/go/toolchain.bzl | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index 35ae0a6e8..64aa4818b 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -26,6 +26,7 @@ GoToolchainInfo = provider( "env_go_os": provider_field(typing.Any, default = None), "env_go_arm": provider_field(typing.Any, default = None), "env_go_root": provider_field(typing.Any, default = None), + "env_go_debug": provider_field(dict[str, str], default = {}), "external_linker_flags": provider_field(typing.Any, default = None), "filter_srcs": provider_field(typing.Any, default = None), "go": provider_field(typing.Any, default = None), @@ -50,7 +51,9 @@ def get_toolchain_cmd_args(toolchain: GoToolchainInfo, go_root = True, force_dis cmd.add("GOARM={}".format(toolchain.env_go_arm)) if go_root and toolchain.env_go_root != None: cmd.add(cmd_args(toolchain.env_go_root, format = "GOROOT={}")) - + if toolchain.env_go_debug: + godebug = ",".join(["{}={}".format(k, v) for k, v in toolchain.env_go_debug.items()]) + cmd.add("GODEBUG={}".format(godebug)) if force_disable_cgo: cmd.add("CGO_ENABLED=0") else: From f65693c34cc76f811a6d1870c740fd2b4651ffef Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Thu, 15 Feb 2024 03:15:45 -0800 Subject: [PATCH 0300/1133] introduce argument to override package name Summary: Buck1 uses short name of package target, while 
Buck2 uses name of bundle target to form package file name. To be able to be backward compatible with Buck1 introduce override for package name in this diff. Reviewed By: milend Differential Revision: D53762741 fbshipit-source-id: b7da5583b80c902aca9371fe4ac66eee53126cfe --- prelude/apple/apple_package.bzl | 3 ++- prelude/apple/apple_rules_impl.bzl | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/prelude/apple/apple_package.bzl b/prelude/apple/apple_package.bzl index bc35f7ffb..a539e44c8 100644 --- a/prelude/apple/apple_package.bzl +++ b/prelude/apple/apple_package.bzl @@ -15,7 +15,8 @@ load(":apple_swift_stdlib.bzl", "should_copy_swift_stdlib") load(":apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") def apple_package_impl(ctx: AnalysisContext) -> list[Provider]: - package = ctx.actions.declare_output("{}.{}".format(ctx.attrs.bundle.label.name, ctx.attrs.ext)) + package_name = ctx.attrs.package_name if ctx.attrs.package_name else ctx.attrs.bundle.label.name + package = ctx.actions.declare_output("{}.{}".format(package_name, ctx.attrs.ext)) contents = ( ctx.attrs.bundle[DefaultInfo].default_outputs[0] if ctx.attrs.packager else _get_ipa_contents(ctx) diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index a62e4bc62..61a151423 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -151,6 +151,7 @@ extra_attributes = { "apple_package": { "bundle": attrs.dep(providers = [AppleBundleInfo]), "ext": attrs.enum(ApplePackageExtension.values(), default = "ipa"), + "package_name": attrs.option(attrs.string(), default = None), "packager": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "packager_args": attrs.list(attrs.arg(), default = []), "prepackaged_validators": attrs.list( From caee80f6ec1fc5400cade4e9bfd4536711afcf10 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Thu, 15 Feb 2024 07:54:33 -0800 Subject: [PATCH 0301/1133] Don't replace 
applicationId placeholders when creating android_aar Summary: We don't want the `android_aar` to set the applicationId - the applicationId should be set by whatever binary consumes the AAR. See https://fb.workplace.com/groups/2222954841208728/posts/2608955425941999/ Reviewed By: navidqar Differential Revision: D53806537 fbshipit-source-id: 9fe501b67356d7ada379e26b76f0ec67dbeacaf5 --- prelude/android/android_aar.bzl | 2 +- prelude/android/android_binary_resources_rules.bzl | 7 ++++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/prelude/android/android_aar.bzl b/prelude/android/android_aar.bzl index b0c736f2c..0ba16c451 100644 --- a/prelude/android/android_aar.bzl +++ b/prelude/android/android_aar.bzl @@ -28,7 +28,7 @@ def android_aar_impl(ctx: AnalysisContext) -> list[Provider]: java_packaging_deps = [packaging_dep for packaging_dep in get_all_java_packaging_deps(ctx, deps) if not excluded_java_packaging_deps_targets.contains(packaging_dep.label.raw_target())] android_packageable_info = merge_android_packageable_info(ctx.label, ctx.actions, deps) - android_manifest = get_manifest(ctx, android_packageable_info, manifest_entries = {}) + android_manifest = get_manifest(ctx, android_packageable_info, manifest_entries = {}, should_replace_application_id_placeholders = False) if ctx.attrs.include_build_config_class: build_config_infos = list(android_packageable_info.build_config_infos.traverse()) if android_packageable_info.build_config_infos else [] diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl index dccabc41a..765214273 100644 --- a/prelude/android/android_binary_resources_rules.bzl +++ b/prelude/android/android_binary_resources_rules.bzl @@ -56,7 +56,7 @@ def get_android_binary_resources_info( ) resource_infos = filtered_resources_output.resource_infos - android_manifest = get_manifest(ctx, android_packageable_info, manifest_entries) + android_manifest = get_manifest(ctx, 
android_packageable_info, manifest_entries, should_replace_application_id_placeholders = True) non_proto_format_aapt2_link_info, proto_format_aapt2_link_info = get_aapt2_link( ctx, @@ -428,7 +428,8 @@ def _maybe_package_strings_as_assets( def get_manifest( ctx: AnalysisContext, android_packageable_info: AndroidPackageableInfo, - manifest_entries: dict) -> Artifact: + manifest_entries: dict, + should_replace_application_id_placeholders: bool) -> Artifact: robolectric_manifest = getattr(ctx.attrs, "robolectric_manifest", None) if robolectric_manifest: return robolectric_manifest @@ -456,7 +457,7 @@ def get_manifest( manifest_entries.get("placeholders", {}), ) - if android_toolchain.set_application_id_to_specified_package: + if android_toolchain.set_application_id_to_specified_package and should_replace_application_id_placeholders: android_manifest_with_replaced_application_id = ctx.actions.declare_output("android_manifest_with_replaced_application_id/AndroidManifest.xml") replace_application_id_placeholders_cmd = cmd_args([ ctx.attrs._android_toolchain[AndroidToolchainInfo].replace_application_id_placeholders[RunInfo], From 51115ba24e73b61ecf25d98e2bfd080f4dfc6531 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20L=C3=B6scher?= Date: Thu, 15 Feb 2024 08:49:11 -0800 Subject: [PATCH 0302/1133] improve handling of private header files in tests Summary: This diff addresses a bunch of issues, that are interconnected with private header handling for tests. We discovered, that it is possible to specify an ambiguous reference to a private header file. This diff addresses this as following: - factor out the mechansim to peak at private header files in the dependencies - add a check to verify that these private header files are not ambigiuous - add attribute to applications allowing to peak into privateheaders like tests - dedupe equivalent private headers from transitive dependencies, e.g. 
test-app and test Reviewed By: ir-regular Differential Revision: D53717441 fbshipit-source-id: 8e82d6d7bcbd82992b0d105b5ae4f6e3ec767bd1 --- prelude/decls/erlang_rules.bzl | 5 +++ prelude/erlang/erlang_application.bzl | 8 ++++ prelude/erlang/erlang_build.bzl | 60 ++++++++++++++++++++++----- prelude/erlang/erlang_tests.bzl | 22 ++++------ 4 files changed, 72 insertions(+), 23 deletions(-) diff --git a/prelude/decls/erlang_rules.bzl b/prelude/decls/erlang_rules.bzl index 1c4b0ac5e..dbee8c448 100644 --- a/prelude/decls/erlang_rules.bzl +++ b/prelude/decls/erlang_rules.bzl @@ -132,6 +132,11 @@ rules_attributes = { difference, that the module name, and the individual start arguments need to be given as the string representation of the corresponding Erlang terms. """), + "peek_private_includes": attrs.bool(default = False, doc = """ + This attribute allows you to use the private includes of the applictions dependencies. This can be useful for + test applications, to create shared abstractions for tests. It's not advisable to use this attribute for prodution + code. All private inclues transitively must be non-ambiguous. + """), "resources": attrs.list(attrs.dep(), default = [], doc = """ The `resources` field specifies targets whose default output are placed in the applications `priv/` directory. For regular files this field is typically combined with `export_file`, `filegroup`, or similar targets. 
However, it diff --git a/prelude/erlang/erlang_application.bzl b/prelude/erlang/erlang_application.bzl index 22f9daf98..5c8606ed5 100644 --- a/prelude/erlang/erlang_application.bzl +++ b/prelude/erlang/erlang_application.bzl @@ -160,6 +160,14 @@ def _build_erlang_application(ctx: AnalysisContext, toolchain: Toolchain, depend is_private = True, ) + # maybe peek private includes + build_environment = erlang_build.utils.peek_private_includes( + ctx, + toolchain, + build_environment, + dependencies, + ) + # beams build_environment = erlang_build.build_steps.generate_beam_artifacts( ctx, diff --git a/prelude/erlang/erlang_build.bzl b/prelude/erlang/erlang_build.bzl index 951c8fa20..d35dec5f1 100644 --- a/prelude/erlang/erlang_build.bzl +++ b/prelude/erlang/erlang_build.bzl @@ -264,8 +264,6 @@ def _generate_beam_artifacts( for src in src_artifacts } - _check_beam_uniqueness(beam_mapping, build_environment.beams) - # dep files beam_deps = _get_deps_files(ctx, toolchain, anchor, src_artifacts, output_mapping) @@ -287,7 +285,7 @@ def _generate_beam_artifacts( input_mapping = build_environment.input_mapping, ) - dep_info_content = to_term_args({paths.basename(artifact): {"dep_file": dep_file, "path": artifact} for artifact, dep_file in updated_build_environment.deps_files.items()}) + dep_info_content = to_term_args(_build_dep_info_data(updated_build_environment)) dep_info_file = ctx.actions.write(_dep_info_name(toolchain), dep_info_content) for erl in src_artifacts: @@ -295,13 +293,17 @@ def _generate_beam_artifacts( return updated_build_environment -def _check_beam_uniqueness( - local_beams: ModuleArtifactMapping, - global_beams: ModuleArtifactMapping) -> None: - for module in local_beams: - if module in global_beams: - fail("duplicated modules found in build: {}".format([module])) - return None +def _build_dep_info_data(build_environment: BuildEnvironment) -> dict[str, dict[str, Artifact | str]]: + """build input for dependency finalizer, this implements uniqueness checks 
for headers and beams""" + seen = {} + data = {} + for artifact, dep_file in build_environment.deps_files.items(): + if paths.basename(artifact) in seen: + fail("conflicting artifacts found in build: {} and {}".format(seen[paths.basename(artifact)], artifact)) + else: + seen[paths.basename(artifact)] = artifact + data[paths.basename(artifact)] = {"dep_file": dep_file, "path": artifact} + return data def _generate_chunk_artifacts( ctx: AnalysisContext, @@ -824,6 +826,43 @@ def _run_with_env(ctx: AnalysisContext, toolchain: Toolchain, *args, **kwargs): kwargs["env"] = env ctx.actions.run(*args, **kwargs) +def _peek_private_includes( + ctx: AnalysisContext, + toolchain: Toolchain, + build_environment: BuildEnvironment, + dependencies: ErlAppDependencies, + force_peek: bool = False) -> BuildEnvironment: + # get mutable dict for private includes + new_private_includes = dict(build_environment.private_includes) + new_private_include_dir = list(build_environment.private_include_dir) + + # get private deps from dependencies + for dep in dependencies.values(): + if ErlangAppInfo in dep: + if dep[ErlangAppInfo].private_include_dir: + new_private_include_dir = new_private_include_dir + dep[ErlangAppInfo].private_include_dir[toolchain.name] + new_private_includes.update(dep[ErlangAppInfo].private_includes[toolchain.name]) + if force_peek or ctx.attrs.peek_private_includes: + return BuildEnvironment( + private_includes = new_private_includes, + private_include_dir = new_private_include_dir, + # copied fields + includes = build_environment.includes, + beams = build_environment.beams, + priv_dirs = build_environment.priv_dirs, + include_dirs = build_environment.include_dirs, + ebin_dirs = build_environment.ebin_dirs, + deps_files = build_environment.deps_files, + app_files = build_environment.app_files, + full_dependencies = build_environment.full_dependencies, + app_includes = build_environment.app_includes, + app_beams = build_environment.app_beams, + app_chunks = 
build_environment.app_chunks, + input_mapping = build_environment.input_mapping, + ) + else: + return build_environment + # export erlang_build = struct( @@ -844,5 +883,6 @@ erlang_build = struct( make_dir_anchor = _make_dir_anchor, build_dir = _build_dir, run_with_env = _run_with_env, + peek_private_includes = _peek_private_includes, ), ) diff --git a/prelude/erlang/erlang_tests.bzl b/prelude/erlang/erlang_tests.bzl index 2fda91993..1924e1d12 100644 --- a/prelude/erlang/erlang_tests.bzl +++ b/prelude/erlang/erlang_tests.bzl @@ -118,26 +118,22 @@ def erlang_test_impl(ctx: AnalysisContext) -> list[Provider]: # prepare build environment pre_build_environment = erlang_build.prepare_build_environment(ctx, primary_toolchain, dependencies) - new_private_include_dir = pre_build_environment.private_include_dir - - # pre_build_environment.private_includes is immutable, that's how we change that. - new_private_includes = {a: b for (a, b) in pre_build_environment.private_includes.items()} - - #Pull private deps from dependencies - for dep in dependencies.values(): - if ErlangAppInfo in dep: - if dep[ErlangAppInfo].private_include_dir: - new_private_include_dir = new_private_include_dir + dep[ErlangAppInfo].private_include_dir[primary_toolchain_name] - new_private_includes.update(dep[ErlangAppInfo].private_includes[primary_toolchain_name]) + pre_build_environment = erlang_build.utils.peek_private_includes( + ctx, + primary_toolchain, + pre_build_environment, + dependencies, + force_peek = True, + ) # Records are immutable, hence we need to create a new record from the previous one. 
build_environment = BuildEnvironment( includes = pre_build_environment.includes, - private_includes = new_private_includes, + private_includes = pre_build_environment.private_includes, beams = pre_build_environment.beams, priv_dirs = pre_build_environment.priv_dirs, include_dirs = pre_build_environment.include_dirs, - private_include_dir = new_private_include_dir, + private_include_dir = pre_build_environment.private_include_dir, ebin_dirs = pre_build_environment.ebin_dirs, deps_files = pre_build_environment.deps_files, app_files = pre_build_environment.app_files, From da5dfaf2d879bdefd041df8671fd5e0ed8243d27 Mon Sep 17 00:00:00 2001 From: Junli Qin Date: Thu, 15 Feb 2024 09:57:50 -0800 Subject: [PATCH 0303/1133] use reversed topological sort in resource merging Summary: Following up the resource merging issue I posted here for WA: https://fb.workplace.com/groups/2222954841208728/posts/2607352042769004. It seems we need to use reversed topological sort to sort the resources while merging. Reviewed By: IanChilds Differential Revision: D53716156 fbshipit-source-id: 382bc44771a85b9f571032b5e56e0894ba0a9783 --- prelude/android/android_binary_resources_rules.bzl | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl index 765214273..0ba6d6e75 100644 --- a/prelude/android/android_binary_resources_rules.bzl +++ b/prelude/android/android_binary_resources_rules.bzl @@ -44,11 +44,13 @@ def get_android_binary_resources_info( aapt2_min_sdk: [str, None] = None, aapt2_preferred_density: [str, None] = None) -> AndroidBinaryResourcesInfo: android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] - unfiltered_resource_infos = [ + + # Use reverse topological sort in resource merging to make sure a resource target will overwrite its dependencies. 
+ unfiltered_resource_infos = reversed([ resource_info - for resource_info in list(android_packageable_info.resource_infos.traverse() if android_packageable_info.resource_infos else []) + for resource_info in list(android_packageable_info.resource_infos.traverse(ordering = "topological") if android_packageable_info.resource_infos else []) if not (resource_infos_to_exclude and resource_infos_to_exclude.contains(resource_info.raw_target)) - ] + ]) filtered_resources_output = _maybe_filter_resources( ctx, unfiltered_resource_infos, From 865a600a3e988d776853e1fdae0bf157a4ce2b70 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 15 Feb 2024 10:54:19 -0800 Subject: [PATCH 0304/1133] buck2 python: add docs for manifest_module_entries Reviewed By: itamaro, aleivag Differential Revision: D53757479 fbshipit-source-id: 59d1a725e87aecf87bcc173af6761b05eef392a7 --- prelude/rules_impl.bzl | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 7c03bf113..527b8359f 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -291,14 +291,21 @@ def _python_executable_attrs(): """, ), "make_py_package": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), - # entries for the generated __manifest__ python module - "manifest_module_entries": attrs.option(attrs.dict( - key = attrs.string(), - value = attrs.one_of( - attrs.dict(key = attrs.string(), value = attrs.option(attrs.any())), - attrs.list(attrs.string()), + "manifest_module_entries": attrs.option( + attrs.dict( + key = attrs.string(), + value = attrs.one_of( + attrs.dict(key = attrs.string(), value = attrs.option(attrs.any())), + attrs.list(attrs.string()), + ), ), - ), default = None), + default = None, + doc = """If present, it should be a `string` -> `entry` mapping that + gets generated into a `__manifest__` module in the executable. 
Top + level string keys will be the names of variables in this module (so + they must be valid Python identifiers). An `entry` can be a list of + `string`s, or a further `string`-keyed dictionary.""", + ), "native_link_strategy": attrs.option(attrs.enum(NativeLinkStrategy), default = None), "package_split_dwarf_dwp": attrs.bool(default = False), "par_style": attrs.option(attrs.string(), default = None), From a537b24f3adfa3893dd6397004018d59c372b7f3 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 15 Feb 2024 10:54:19 -0800 Subject: [PATCH 0305/1133] startup_functions: don't blow up if it's empty Reviewed By: aleivag Differential Revision: D53766498 fbshipit-source-id: 993ddb9407906d98e90e36cab61ad58529b777b8 --- prelude/python/make_py_package.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index b19c59f7a..659929f10 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -574,7 +574,7 @@ def generate_startup_function_loader(ctx: AnalysisContext) -> ArgLike: startup_functions_list = "\n".join( [ '"' + startup_function + '",' - for _, startup_function in sorted(ctx.attrs.manifest_module_entries["startup_functions"].items()) + for _, startup_function in sorted(ctx.attrs.manifest_module_entries.get("startup_functions", {}).items()) ], ) From 4cde26719a13621f494f3e2de29b354574f46765 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Thu, 15 Feb 2024 16:42:33 -0800 Subject: [PATCH 0306/1133] Set non-propagating env var to audit invoked PAR path Summary: This diff adds a new `PAR_INVOKED_NAME_TAG` env var to expose the original PAR path for auditing. This env var is immediately unset at startup (unlike e.g. `FB_XAR_INVOKED_NAME`) (to avoid propagating to unrelated subprocesses), but can be read via `/proc//environ` via tools. 
Unlike using the original command-line, this env var is designed to work with `multiprocessing` workers, which often times lose this information when spawning. Reviewed By: mzlee Differential Revision: D53717691 fbshipit-source-id: a506956842ac08149cbbdff0b4485134c573b007 --- .../make_par/_lpar_bootstrap.sh.template | 4 +++ .../python/tools/make_par/sitecustomize.py | 28 +++++++++++++++---- prelude/python/tools/run_inplace.py.in | 5 ++++ 3 files changed, 31 insertions(+), 6 deletions(-) diff --git a/prelude/python/tools/make_par/_lpar_bootstrap.sh.template b/prelude/python/tools/make_par/_lpar_bootstrap.sh.template index 0a87fdc9e..fc26d7426 100644 --- a/prelude/python/tools/make_par/_lpar_bootstrap.sh.template +++ b/prelude/python/tools/make_par/_lpar_bootstrap.sh.template @@ -34,6 +34,10 @@ export FB_PAR_MAIN_RUNNER_FUNCTION="{main_runner_function}" export FB_PAR_RUNTIME_FILES=$BASE_DIR : ${{FB_LPAR_INVOKED_NAME:="$0"}} export FB_LPAR_INVOKED_NAME +# This environment variable is immediately unset on startup but will also appear +# in e.g. `multiprocessing` workers, and so serves as an audit trail back to +# the originating PAR (and can be read via e.g. `/proc//environ`). +export PAR_INVOKED_NAME_TAG="$FB_LPAR_INVOKED_NAME" {ld_preload} {env} exec {cmd} "$@" diff --git a/prelude/python/tools/make_par/sitecustomize.py b/prelude/python/tools/make_par/sitecustomize.py index a9cf16d7f..f36c8cbfe 100644 --- a/prelude/python/tools/make_par/sitecustomize.py +++ b/prelude/python/tools/make_par/sitecustomize.py @@ -19,7 +19,7 @@ lock = threading.Lock() -def __patch_spawn(var_names: tuple[str, ...], saved_env: dict[str, str]) -> None: +def __patch_spawn(var_names: list[str], saved_env: dict[str, str]) -> None: std_spawn = mp_util.spawnv_passfds # pyre-fixme[53]: Captured variable `std_spawn` is not annotated. 
@@ -45,14 +45,30 @@ def spawnv_passfds(path, args, passfds) -> None | int: def __clear_env(patch_spawn: bool = True) -> None: saved_env = {} - darwin_vars = ("DYLD_LIBRARY_PATH", "DYLD_INSERT_LIBRARIES") - linux_vars = ("LD_LIBRARY_PATH", "LD_PRELOAD") - python_vars = ("PYTHONPATH",) + + var_names = [ + "PYTHONPATH", + # We use this env var to tag the process and it's `multiprocessing` + # workers. It's important that we clear it out (so that unrelated sub- + # processes don't inherit it), but it can be read via + # `/proc//environ`. + "PAR_INVOKED_NAME_TAG", + ] if sys.platform == "darwin": - var_names = darwin_vars + python_vars + var_names.extend( + [ + "DYLD_LIBRARY_PATH", + "DYLD_INSERT_LIBRARIES", + ] + ) else: - var_names = linux_vars + python_vars + var_names.extend( + [ + "LD_LIBRARY_PATH", + "LD_PRELOAD", + ] + ) # Restore the original value of environment variables that we altered # as part of the startup process. diff --git a/prelude/python/tools/run_inplace.py.in b/prelude/python/tools/run_inplace.py.in index 9bd849527..1d8dadeae 100644 --- a/prelude/python/tools/run_inplace.py.in +++ b/prelude/python/tools/run_inplace.py.in @@ -154,6 +154,11 @@ for env in ("PYTHONPATH", "LD_LIBRARY_PATH", "LD_PRELOAD", path = os.path.join(dirpath, modules_dir) os.environ["PYTHONPATH"] = path +# This environment variable is immediately unset on startup but will also appear +# in e.g. `multiprocessing` workers, and so serves as an audit trail back to +# the originating PAR (and can be read via e.g. `/proc//environ`). +os.environ["PAR_INVOKED_NAME_TAG"] = sys.argv[0] + if platform.system() == "Windows": # exec on Windows is not true exec - there is only 'spawn' ('CreateProcess'). 
# However, creating processes unnecessarily is painful, so we only do the spawn From b051447da876534da3ac8440996d4b948509a17f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20L=C3=B6scher?= Date: Fri, 16 Feb 2024 06:20:01 -0800 Subject: [PATCH 0307/1133] add name modifier to erlang_tests macro Summary: This provides support for modifying the names for the targets generated by the `erlang_tests` macro. In practic, this allows to generate targets for the same suite with different configurations, e.g. using different config files or similar. Reviewed By: acw224, jcpetruzza Differential Revision: D53816804 fbshipit-source-id: c33b572901d5a0ef990a886a1712296649cbd457 --- prelude/erlang/erlang_tests.bzl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/prelude/erlang/erlang_tests.bzl b/prelude/erlang/erlang_tests.bzl index 1924e1d12..c59a6b1ac 100644 --- a/prelude/erlang/erlang_tests.bzl +++ b/prelude/erlang/erlang_tests.bzl @@ -42,6 +42,7 @@ def erlang_tests_macro( resources: list[str] = [], property_tests: list[str] = [], srcs: list[str] = [], + postfix: str | None = None, **common_attributes: dict) -> None: """ Generate multiple erlang_test targets based on the `suites` field. 
@@ -94,6 +95,9 @@ def erlang_tests_macro( suite_resource = [target for target in target_resources] suite_resource.append(data_target) + if postfix != None: + suite_name = "{}_{}".format(suite_name, postfix) + # forward resources and deps fields and generate erlang_test target erlang_test_rule( name = suite_name, From ad3c204efc3ab8d15c2a005d933ded9618561ec6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20L=C3=B6scher?= Date: Fri, 16 Feb 2024 08:46:49 -0800 Subject: [PATCH 0308/1133] postfix -> prefix in erlang_tests macro Summary: Using a prefix is more userfriendly and auto-complete friendly Reviewed By: jcpetruzza Differential Revision: D53855460 fbshipit-source-id: dd0410615a9dd7fd3174c52848b333f56878ebb7 --- prelude/erlang/erlang_tests.bzl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prelude/erlang/erlang_tests.bzl b/prelude/erlang/erlang_tests.bzl index c59a6b1ac..15a4bf797 100644 --- a/prelude/erlang/erlang_tests.bzl +++ b/prelude/erlang/erlang_tests.bzl @@ -42,7 +42,7 @@ def erlang_tests_macro( resources: list[str] = [], property_tests: list[str] = [], srcs: list[str] = [], - postfix: str | None = None, + prefix: str | None = None, **common_attributes: dict) -> None: """ Generate multiple erlang_test targets based on the `suites` field. @@ -95,8 +95,8 @@ def erlang_tests_macro( suite_resource = [target for target in target_resources] suite_resource.append(data_target) - if postfix != None: - suite_name = "{}_{}".format(suite_name, postfix) + if prefix != None: + suite_name = "{}_{}".format(prefix, suite_name) # forward resources and deps fields and generate erlang_test target erlang_test_rule( From 48051767fd3f0af19f34d616a799a3b0c92677a1 Mon Sep 17 00:00:00 2001 From: Ian Levesque Date: Fri, 16 Feb 2024 11:50:08 -0800 Subject: [PATCH 0309/1133] Allow specifying multiple targets for jvm_args_target Summary: We want to be able to profile multiple targets in one build, but not the entire app, which is prohibitively slow. 
Targeting a single target with javacd/kotlincd.jvm_args_target works, but not for several. This extends the property slightly to accept multiple comma-separated targets. Reviewed By: IanChilds Differential Revision: D53828143 fbshipit-source-id: 7d1da7139729719127c62fe51d4de0a223393d68 --- prelude/jvm/cd_jar_creator_util.bzl | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl index e89fe1435..b5f7eafae 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl +++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -402,14 +402,16 @@ def prepare_cd_exe( debug_port: [int, None], debug_target: [Label, None], extra_jvm_args: list[str], - extra_jvm_args_target: [Label, None]) -> tuple: + extra_jvm_args_target: list[Label]) -> tuple: local_only = False jvm_args = ["-XX:-MaxFDLimit"] if extra_jvm_args_target: - if qualified_name == qualified_name_with_subtarget(extra_jvm_args_target): - jvm_args = jvm_args + extra_jvm_args - local_only = True + for target in extra_jvm_args_target: + if qualified_name == qualified_name_with_subtarget(target): + jvm_args = jvm_args + extra_jvm_args + local_only = True + break else: jvm_args = jvm_args + extra_jvm_args From b9ed049593f26a3687621974216987fae2edf139 Mon Sep 17 00:00:00 2001 From: Thomas David Cuvillier Date: Fri, 16 Feb 2024 12:23:37 -0800 Subject: [PATCH 0310/1133] Better error reporting for failing to parse arguements Summary: The error for failing to parse did give little indication about what was happening (error, badmatch, ...) With this it helped me much playing around with provider options. 
Reviewed By: jcpetruzza Differential Revision: D53859544 fbshipit-source-id: f6acefd568254e9400a2f1f7a6c180a18f3a6dab --- .../common_test/common/src/buck_ct_parser.erl | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/prelude/erlang/common_test/common/src/buck_ct_parser.erl b/prelude/erlang/common_test/common/src/buck_ct_parser.erl index ddf6690b7..879ec1fff 100644 --- a/prelude/erlang/common_test/common/src/buck_ct_parser.erl +++ b/prelude/erlang/common_test/common/src/buck_ct_parser.erl @@ -22,6 +22,19 @@ parse_str("") -> []; parse_str(StrArgs) -> - {ok, Tokens, _} = erl_scan:string(StrArgs ++ "."), - {ok, Term} = erl_parse:parse_term(Tokens), - Term. + try + {ok, Tokens, _} = erl_scan:string(StrArgs ++ "."), + erl_parse:parse_term(Tokens) + of + {ok, Term} -> + Term; + {error, Reason} -> + error(lists:flatten(io_lib:format("Error parsing StrArgs ~p, Reason: ~p", [StrArgs, Reason]))) + catch + E:R:S -> + error( + lists:flatten( + io_lib:format("Error parsing StrArgs ~p, error ~p", [StrArgs, erl_error:format_exception(E, R, S)]) + ) + ) + end. 
From 81958e900f04fdcc3507a7317ad0295c5386999f Mon Sep 17 00:00:00 2001 From: Joshua Selbo Date: Fri, 16 Feb 2024 22:07:01 -0800 Subject: [PATCH 0311/1133] Allow custom javac dep on Android rule types Summary: Apply the same attribute override for `javac` as the Java rules in `java.bzl` Reviewed By: hick209 Differential Revision: D53876785 fbshipit-source-id: 71885e698022fa3da597a9be2a6d38c0f5bbec41 --- prelude/android/android.bzl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 7818bfae5..a4d5f66d0 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -59,6 +59,7 @@ extra_attributes = { "compress_asset_libraries": attrs.default_only(attrs.bool(default = False)), "cpu_filters": attrs.list(attrs.enum(TargetCpuType), default = ALL_CPU_FILTERS), "deps": attrs.list(attrs.split_transition_dep(cfg = cpu_split_transition), default = []), + "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), "min_sdk_version": attrs.option(attrs.int(), default = None), "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), "package_asset_libraries": attrs.default_only(attrs.bool(default = True)), @@ -155,6 +156,7 @@ extra_attributes = { }, "android_library": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), + "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), "_android_toolchain": toolchains_common.android(), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), @@ -205,6 +207,7 @@ extra_attributes = { }, "robolectric_test": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), + "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), 
default = None), "robolectric_runtime_dependencies": attrs.list(attrs.source(), default = []), "test_class_names_file": attrs.option(attrs.source(), default = None), From 68912d7ebc8e9de4bae35459cc4fae9ab7782315 Mon Sep 17 00:00:00 2001 From: Ernald Nicolas Date: Sat, 17 Feb 2024 09:19:01 -0800 Subject: [PATCH 0312/1133] Add support to buck2 for running MacroBenchmark tests Summary: Jetpack's [Macrobenchmark library](https://developer.android.com/topic/performance/benchmarking/macrobenchmark-overview) relies on instrumentation test infra to provide the ability to: - Run a test apk and an *apk under test* in 2 different processes - Have a test apk be self instrumented instead of instrumenting the *apk under test* Gradle already supports this functionality so the goal of this diff is to achieve feature parity with gradle for successfully running MacroBenchmark tests. Similar to [gradle](https://cs.android.com/android-studio/platform/tools/base/+/mirror-goog-studio-main:build-system/gradle-core/src/main/java/com/android/build/gradle/internal/dependency/VariantDependencies.kt;l=109), for this use case, arifact exclusions must be disabled. Without this change in place, among the errors that will be yielded will be: java.lang.AssertionError: ERRORS (not suppressed): NOT-SELF-INSTRUMENTING I've also added the READ/WRITE External Storage permissions to BaseTestingApkAndroidManifest.xml as the test apk requires these permissions when running Macrobenchmark tests. 
Reviewed By: fbvxp, IanChilds Differential Revision: D53666655 fbshipit-source-id: 298a3c4394242c58ec3d0c3b43c58fa24087fc2d --- prelude/android/android.bzl | 1 + prelude/android/android_instrumentation_apk.bzl | 12 +++++++----- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index a4d5f66d0..cc332236d 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -134,6 +134,7 @@ extra_attributes = { "cpu_filters": attrs.list(attrs.enum(TargetCpuType), default = []), "deps": attrs.list(attrs.split_transition_dep(cfg = cpu_split_transition), default = []), "dex_tool": attrs.string(default = "d8"), # Match default in V1 + "is_self_instrumenting": attrs.bool(default = False), "manifest": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None), "manifest_skeleton": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None), "min_sdk_version": attrs.option(attrs.int(), default = None), diff --git a/prelude/android/android_instrumentation_apk.bzl b/prelude/android/android_instrumentation_apk.bzl index f8c9b315f..d534f5e33 100644 --- a/prelude/android/android_instrumentation_apk.bzl +++ b/prelude/android/android_instrumentation_apk.bzl @@ -34,10 +34,12 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): # We use the deps that don't have _build_only_native_code = True deps = unfiltered_deps_by_platform.values()[0] + is_self_instrumenting = ctx.attrs.is_self_instrumenting + java_packaging_deps = [ packaging_dep for packaging_dep in get_all_java_packaging_deps(ctx, deps) - if packaging_dep.dex and not apk_under_test_info.java_packaging_deps.contains(packaging_dep.label.raw_target()) + if packaging_dep.dex and (is_self_instrumenting or not apk_under_test_info.java_packaging_deps.contains(packaging_dep.label.raw_target())) ] android_packageable_info = 
merge_android_packageable_info(ctx.label, ctx.actions, deps) @@ -50,8 +52,8 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): use_proto_format = False, referenced_resources_lists = [], manifest_entries = apk_under_test_info.manifest_entries, - resource_infos_to_exclude = apk_under_test_info.resource_infos, - r_dot_java_packages_to_exclude = apk_under_test_info.r_dot_java_packages.list(), + resource_infos_to_exclude = apk_under_test_info.resource_infos if not is_self_instrumenting else None, + r_dot_java_packages_to_exclude = apk_under_test_info.r_dot_java_packages.list() if not is_self_instrumenting else [], ) android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] java_packaging_deps += [ @@ -96,8 +98,8 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): enhance_ctx, android_packageable_info, filtered_deps_by_platform, - prebuilt_native_library_dirs_to_exclude = apk_under_test_info.prebuilt_native_library_dirs, - shared_libraries_to_exclude = apk_under_test_info.shared_libraries, + prebuilt_native_library_dirs_to_exclude = apk_under_test_info.prebuilt_native_library_dirs if not is_self_instrumenting else None, + shared_libraries_to_exclude = apk_under_test_info.shared_libraries if not is_self_instrumenting else None, ) output_apk = build_apk( From 83c7e043b701abd014afc55d3ba718434f61292a Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Tue, 20 Feb 2024 02:00:44 -0800 Subject: [PATCH 0313/1133] Sanitizers: remove `sanitizer_runtime_dir` field Summary: `sanitizer_runtime_dir` is unused, remove it. 
Reviewed By: narissiam Differential Revision: D53858185 fbshipit-source-id: ac4d22aac898a120e15a22b1b7b2b4001e66fdc5 --- prelude/cxx/cxx_toolchain.bzl | 2 -- prelude/cxx/cxx_toolchain_types.bzl | 1 - prelude/cxx/user/cxx_toolchain_override.bzl | 3 --- 3 files changed, 6 deletions(-) diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl index f71205f56..55fb4576c 100644 --- a/prelude/cxx/cxx_toolchain.bzl +++ b/prelude/cxx/cxx_toolchain.bzl @@ -95,7 +95,6 @@ def cxx_toolchain_impl(ctx): independent_shlib_interface_linker_flags = ctx.attrs.shared_library_interface_flags, requires_archives = value_or(ctx.attrs.requires_archives, True), requires_objects = value_or(ctx.attrs.requires_objects, False), - sanitizer_runtime_dir = ctx.attrs.sanitizer_runtime_dir[DefaultInfo].default_outputs[0] if ctx.attrs.sanitizer_runtime_dir else None, sanitizer_runtime_enabled = ctx.attrs.sanitizer_runtime_enabled, sanitizer_runtime_files = flatten([runtime_file[DefaultInfo].default_outputs for runtime_file in ctx.attrs.sanitizer_runtime_files]), supports_distributed_thinlto = ctx.attrs.supports_distributed_thinlto, @@ -197,7 +196,6 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): "public_headers_symlinks_enabled": attrs.bool(default = True), "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), "requires_objects": attrs.bool(default = False), - "sanitizer_runtime_dir": attrs.option(attrs.dep(), default = None), # Use `attrs.dep()` as it's not a tool, always propagate target platform "sanitizer_runtime_enabled": attrs.bool(default = False), "sanitizer_runtime_files": attrs.set(attrs.dep(), sorted = True, default = []), # Use `attrs.dep()` as it's not a tool, always propagate target platform "shared_library_interface_mode": attrs.enum(ShlibInterfacesMode.values(), default = "disabled"), diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index 2a8a9b6d3..acebc51b0 100644 --- 
a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -44,7 +44,6 @@ LinkerInfo = provider( # "o" on Unix, "obj" on Windows "object_file_extension": provider_field(typing.Any, default = None), # str "sanitizer_runtime_enabled": provider_field(bool, default = False), - "sanitizer_runtime_dir": provider_field([Artifact, None], default = None), "sanitizer_runtime_files": provider_field(list[Artifact], default = []), "shlib_interfaces": provider_field(ShlibInterfacesMode), "shared_dep_runtime_ld_flags": provider_field(typing.Any, default = None), diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl index 6d4437f4b..680a24589 100644 --- a/prelude/cxx/user/cxx_toolchain_override.bzl +++ b/prelude/cxx/user/cxx_toolchain_override.bzl @@ -75,7 +75,6 @@ def _cxx_toolchain_override(ctx): # linker flags should be changed as well. pdb_expected = linker_type == "windows" and pdb_expected shlib_interfaces = ShlibInterfacesMode(ctx.attrs.shared_library_interface_mode) if ctx.attrs.shared_library_interface_mode else None - sanitizer_runtime_dir = ctx.attrs.sanitizer_runtime_dir[DefaultInfo].default_outputs[0] if ctx.attrs.sanitizer_runtime_dir else None sanitizer_runtime_files = flatten([runtime_file[DefaultInfo].default_outputs for runtime_file in ctx.attrs.sanitizer_runtime_files]) if ctx.attrs.sanitizer_runtime_files != None else None linker_info = LinkerInfo( archiver = _pick_bin(ctx.attrs.archiver, base_linker_info.archiver), @@ -100,7 +99,6 @@ def _cxx_toolchain_override(ctx): requires_objects = base_linker_info.requires_objects, supports_distributed_thinlto = base_linker_info.supports_distributed_thinlto, independent_shlib_interface_linker_flags = base_linker_info.independent_shlib_interface_linker_flags, - sanitizer_runtime_dir = value_or(sanitizer_runtime_dir, base_linker_info.sanitizer_runtime_dir), sanitizer_runtime_enabled = value_or(ctx.attrs.sanitizer_runtime_enabled, 
base_linker_info.sanitizer_runtime_enabled), sanitizer_runtime_files = value_or(sanitizer_runtime_files, base_linker_info.sanitizer_runtime_files), shared_dep_runtime_ld_flags = [], @@ -211,7 +209,6 @@ def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): "platform_name": attrs.option(attrs.string(), default = None), "produce_interface_from_stub_shared_library": attrs.option(attrs.bool(), default = None), "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), - "sanitizer_runtime_dir": attrs.option(attrs.dep(), default = None), # Use `attrs.dep()` as it's not a tool, always propagate target platform "sanitizer_runtime_enabled": attrs.bool(default = False), "sanitizer_runtime_files": attrs.option(attrs.set(attrs.dep(), sorted = True, default = []), default = None), # Use `attrs.dep()` as it's not a tool, always propagate target platform "shared_library_interface_mode": attrs.option(attrs.enum(ShlibInterfacesMode.values()), default = None), From e62b691a946efaad0bb35c61cd1a34554fd4e592 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Tue, 20 Feb 2024 09:30:29 -0800 Subject: [PATCH 0314/1133] Support -race flag Summary: Enabling [Go race detector](https://go.dev/doc/articles/race_detector) Reviewed By: abulimov Differential Revision: D53866218 fbshipit-source-id: 634df3c5aef13c5710a2bab18c962dcca337314f --- prelude/decls/go_common.bzl | 8 +++++ prelude/decls/go_rules.bzl | 3 ++ prelude/go/cgo_library.bzl | 11 +++++-- prelude/go/compile.bzl | 7 +++- prelude/go/constraints/BUCK.v2 | 17 ++++++++++ prelude/go/go_binary.bzl | 2 ++ prelude/go/go_exported_library.bzl | 2 ++ prelude/go/go_library.bzl | 7 ++-- prelude/go/go_stdlib.bzl | 1 + prelude/go/go_test.bzl | 4 ++- prelude/go/link.bzl | 4 +++ prelude/go/transitions/defs.bzl | 51 ++++++++++++++++++++++++++---- prelude/rules_impl.bzl | 5 ++- 13 files changed, 108 insertions(+), 14 deletions(-) diff --git a/prelude/decls/go_common.bzl b/prelude/decls/go_common.bzl 
index 46e5305d4..00c0391f8 100644 --- a/prelude/decls/go_common.bzl +++ b/prelude/decls/go_common.bzl @@ -132,6 +132,13 @@ def _cgo_enabled_arg(): """), } +def _race_arg(): + return { + "race": attrs.bool(default = False, doc = """ + If true, enable data race detection. +"""), + } + go_common = struct( deps_arg = _deps_arg, srcs_arg = _srcs_arg, @@ -145,4 +152,5 @@ go_common = struct( external_linker_flags_arg = _external_linker_flags_arg, embedcfg_arg = _embedcfg_arg, cgo_enabled_arg = _cgo_enabled_arg, + race_arg = _race_arg, ) diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index 973c35bf3..9cb4653f2 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -185,6 +185,7 @@ go_binary = prelude_rule( go_common.external_linker_flags_arg() | go_common.embedcfg_arg() | go_common.cgo_enabled_arg() | + go_common.race_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files to be symlinked into the working directory of the test. You can access these in your @@ -270,6 +271,7 @@ go_exported_library = prelude_rule( go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | go_common.cgo_enabled_arg() | + go_common.race_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files to be symlinked into the working directory of the test. 
You can access these in your @@ -420,6 +422,7 @@ go_test = prelude_rule( go_common.external_linker_flags_arg() | go_common.embedcfg_arg() | go_common.cgo_enabled_arg() | + go_common.race_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files that are symlinked into the working directory of the diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 217d203fe..046d90ba0 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -157,7 +157,7 @@ def _cgo( return go_srcs, c_headers, c_srcs -def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, coverage_mode: GoCoverageMode, shared: bool = False) -> (Artifact, cmd_args): +def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, coverage_mode: GoCoverageMode, shared: bool, race: bool) -> (Artifact, cmd_args): cov_res = cover_srcs(ctx, pkg_name, coverage_mode, srcs, shared) srcs = cov_res.srcs coverage_vars = cov_res.variables @@ -168,6 +168,7 @@ def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, deps = ctx.attrs.deps + ctx.attrs.exported_deps, coverage_mode = coverage_mode, shared = shared, + race = race, ) return (coverage_pkg, coverage_vars) @@ -237,15 +238,19 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: if ctx.attrs.go_srcs: all_srcs.add(get_filtered_srcs(ctx, ctx.attrs.go_srcs)) + shared = ctx.attrs._compile_shared + race = ctx.attrs._race + # Build Go library. 
compiled_pkg = compile( ctx, pkg_name, all_srcs, deps = ctx.attrs.deps + ctx.attrs.exported_deps, - shared = ctx.attrs._compile_shared, + shared = shared, + race = race, ) - pkg_with_coverage = {mode: _compile_with_coverage(ctx, pkg_name, all_srcs, mode) for mode in GoCoverageMode} + pkg_with_coverage = {mode: _compile_with_coverage(ctx, pkg_name, all_srcs, mode, shared, race = race) for mode in GoCoverageMode} pkgs = { pkg_name: GoPkg( cgo = True, diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index f33f1e761..a5646aa13 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -98,6 +98,7 @@ def _compile_cmd( deps: list[Dependency] = [], flags: list[str] = [], shared: bool = False, + race: bool = False, coverage_mode: [GoCoverageMode, None] = None) -> cmd_args: go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] @@ -114,6 +115,9 @@ def _compile_cmd( if shared: cmd.add("-shared") + if race: + cmd.add("-race") + # Add Go pkgs inherited from deps to compiler search path. 
all_pkgs = merge_pkgs([ pkgs, @@ -135,6 +139,7 @@ def compile( compile_flags: list[str] = [], assemble_flags: list[str] = [], shared: bool = False, + race: bool = False, coverage_mode: [GoCoverageMode, None] = None) -> Artifact: go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] root = _out_root(shared, coverage_mode) @@ -143,7 +148,7 @@ def compile( cmd = get_toolchain_cmd_args(go_toolchain) cmd.add(go_toolchain.compile_wrapper[RunInfo]) cmd.add(cmd_args(output.as_output(), format = "--output={}")) - cmd.add(cmd_args(_compile_cmd(ctx, root, pkg_name, pkgs, deps, compile_flags, shared = shared, coverage_mode = coverage_mode), format = "--compiler={}")) + cmd.add(cmd_args(_compile_cmd(ctx, root, pkg_name, pkgs, deps, compile_flags, shared = shared, race = race, coverage_mode = coverage_mode), format = "--compiler={}")) cmd.add(cmd_args(_assemble_cmd(ctx, pkg_name, assemble_flags, shared = shared), format = "--assembler={}")) cmd.add(cmd_args(go_toolchain.packer, format = "--packer={}")) if ctx.attrs.embedcfg: diff --git a/prelude/go/constraints/BUCK.v2 b/prelude/go/constraints/BUCK.v2 index a4b034fe7..0e0c0fb76 100644 --- a/prelude/go/constraints/BUCK.v2 +++ b/prelude/go/constraints/BUCK.v2 @@ -37,3 +37,20 @@ constraint_value( constraint_setting = ":compile_shared", visibility = ["PUBLIC"], ) + +constraint_setting( + name = "race", + visibility = ["PUBLIC"], +) + +constraint_value( + name = "race_false", + constraint_setting = ":race", + visibility = ["PUBLIC"], +) + +constraint_value( + name = "race_true", + constraint_setting = ":race", + visibility = ["PUBLIC"], +) diff --git a/prelude/go/go_binary.bzl b/prelude/go/go_binary.bzl index 2ce58d917..e33d445fc 100644 --- a/prelude/go/go_binary.bzl +++ b/prelude/go/go_binary.bzl @@ -26,6 +26,7 @@ def go_binary_impl(ctx: AnalysisContext) -> list[Provider]: get_filtered_srcs(ctx, ctx.attrs.srcs), deps = ctx.attrs.deps, compile_flags = ctx.attrs.compiler_flags, + race = ctx.attrs.race, ) (bin, runtime_files, 
external_debug_info) = link( ctx, @@ -34,6 +35,7 @@ def go_binary_impl(ctx: AnalysisContext) -> list[Provider]: link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static")), linker_flags = ctx.attrs.linker_flags, link_mode = ctx.attrs.link_mode, + race = ctx.attrs.race, ) # runtime_files are all the artifacts that must be present in order for this diff --git a/prelude/go/go_exported_library.bzl b/prelude/go/go_exported_library.bzl index 28101cf52..3616b4b75 100644 --- a/prelude/go/go_exported_library.bzl +++ b/prelude/go/go_exported_library.bzl @@ -25,6 +25,7 @@ def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: deps = ctx.attrs.deps, compile_flags = ctx.attrs.compiler_flags, shared = True, + race = ctx.attrs.race, ) (bin, runtime_files, _external_debug_info) = link( ctx, @@ -35,6 +36,7 @@ def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: linker_flags = ctx.attrs.linker_flags, external_linker_flags = ctx.attrs.external_linker_flags, shared = True, + race = ctx.attrs.race, ) return [ DefaultInfo( diff --git a/prelude/go/go_library.bzl b/prelude/go/go_library.bzl index 07bc8f463..3c5eb2fdd 100644 --- a/prelude/go/go_library.bzl +++ b/prelude/go/go_library.bzl @@ -28,7 +28,7 @@ load(":coverage.bzl", "GoCoverageMode", "cover_srcs") load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs") load(":packages.bzl", "GoPkg", "go_attr_pkg_name", "merge_pkgs") -def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, coverage_mode: GoCoverageMode, shared: bool = False) -> (Artifact, cmd_args): +def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, coverage_mode: GoCoverageMode, shared: bool, race: bool) -> (Artifact, cmd_args): cov_res = cover_srcs(ctx, pkg_name, coverage_mode, srcs, shared) srcs = cov_res.srcs coverage_vars = cov_res.variables @@ -40,6 +40,7 @@ def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, compile_flags = 
ctx.attrs.compiler_flags, coverage_mode = coverage_mode, shared = shared, + race = race, ) return (coverage_pkg, coverage_vars) @@ -53,6 +54,7 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. srcs = get_filtered_srcs(ctx, ctx.attrs.srcs, force_disable_cgo = True) shared = ctx.attrs._compile_shared + race = ctx.attrs._race compiled_pkg = compile( ctx, @@ -62,9 +64,10 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: compile_flags = ctx.attrs.compiler_flags, assemble_flags = ctx.attrs.assembler_flags, shared = shared, + race = race, ) - pkg_with_coverage = {mode: _compile_with_coverage(ctx, pkg_name, srcs, mode, shared) for mode in GoCoverageMode} + pkg_with_coverage = {mode: _compile_with_coverage(ctx, pkg_name, srcs, mode, shared, race = race) for mode in GoCoverageMode} default_output = compiled_pkg pkgs[pkg_name] = GoPkg( diff --git a/prelude/go/go_stdlib.bzl b/prelude/go/go_stdlib.bzl index f09ee8f2d..49414dc7c 100644 --- a/prelude/go/go_stdlib.bzl +++ b/prelude/go/go_stdlib.bzl @@ -52,6 +52,7 @@ def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: cmd_args(["-gcflags=", cmd_args(compiler_flags, delimiter = " ")], delimiter = "") if compiler_flags else [], cmd_args(["-ldflags=", cmd_args(linker_flags, delimiter = " ")], delimiter = "") if linker_flags else [], ["-tags", ",".join(tags)] if tags else [], + ["-race"] if ctx.attrs._race else [], "std", ]) diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index f32d17182..80bfc675f 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -96,13 +96,14 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: pkgs = pkgs, compile_flags = ctx.attrs.compiler_flags, coverage_mode = coverage_mode, + race = ctx.attrs.race, ) # Generate a main function which runs the tests and build that into another # package. 
gen_main = _gen_test_main(ctx, pkg_name, coverage_mode, coverage_vars, srcs) pkgs[pkg_name] = tests - main = compile(ctx, "main", cmd_args(gen_main), pkgs = pkgs, coverage_mode = coverage_mode) + main = compile(ctx, "main", cmd_args(gen_main), pkgs = pkgs, coverage_mode = coverage_mode, race = ctx.attrs.race) # Link the above into a Go binary. (bin, runtime_files, external_debug_info) = link( @@ -114,6 +115,7 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: linker_flags = ctx.attrs.linker_flags, shared = False, coverage_mode = coverage_mode, + race = ctx.attrs.race, ) run_cmd = cmd_args(bin).hidden(runtime_files, external_debug_info) diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 09d637edf..6bf2ca797 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -114,6 +114,7 @@ def link( linker_flags: list[typing.Any] = [], external_linker_flags: list[typing.Any] = [], shared: bool = False, + race: bool = False, coverage_mode: [GoCoverageMode, None] = None): go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] if go_toolchain.env_go_os == "windows": @@ -134,6 +135,9 @@ def link( cmd.add("-buildmode=" + _build_mode_param(build_mode)) cmd.add("-buildid=") # Setting to a static buildid helps make the binary reproducible. + if race: + cmd.add("-race") + # Add inherited Go pkgs to library search path. 
all_pkgs = merge_pkgs([ pkgs, diff --git a/prelude/go/transitions/defs.bzl b/prelude/go/transitions/defs.bzl index 5686852ac..128d894e1 100644 --- a/prelude/go/transitions/defs.bzl +++ b/prelude/go/transitions/defs.bzl @@ -48,6 +48,33 @@ def _compile_shared_transition(platform, refs, _): configuration = new_cfg, ) +def _race_transition(platform, refs, attrs): + constraints = platform.configuration.constraints + + # Cancel transition if the value already set + # to enable using configuration modifiers for overiding this option + race_setting = refs.race_false[ConstraintValueInfo].setting + if race_setting.label in constraints: + return platform + + if attrs.race == True: + race_ref = refs.race_true + else: + race_ref = refs.race_false + + race_value = race_ref[ConstraintValueInfo] + constraints[race_value.setting.label] = race_value + + new_cfg = ConfigurationInfo( + constraints = constraints, + values = platform.configuration.values, + ) + + return PlatformInfo( + label = platform.label, + configuration = new_cfg, + ) + def _chain_transitions(transitions): def tr(platform, refs, attrs): for t in transitions: @@ -57,36 +84,42 @@ def _chain_transitions(transitions): return tr go_binary_transition = transition( - impl = _chain_transitions([_cgo_enabled_transition, _compile_shared_transition]), + impl = _chain_transitions([_cgo_enabled_transition, _compile_shared_transition, _race_transition]), refs = { "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", "compile_shared_value": "prelude//go/constraints:compile_shared_false", + "race_false": "prelude//go/constraints:race_false", + "race_true": "prelude//go/constraints:race_true", }, - attrs = ["cgo_enabled"], + attrs = ["cgo_enabled", "race"], ) go_test_transition = transition( - impl = _chain_transitions([_cgo_enabled_transition, _compile_shared_transition]), + impl = 
_chain_transitions([_cgo_enabled_transition, _compile_shared_transition, _race_transition]), refs = { "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", "compile_shared_value": "prelude//go/constraints:compile_shared_false", + "race_false": "prelude//go/constraints:race_false", + "race_true": "prelude//go/constraints:race_true", }, - attrs = ["cgo_enabled"], + attrs = ["cgo_enabled", "race"], ) go_exported_library_transition = transition( - impl = _chain_transitions([_cgo_enabled_transition, _compile_shared_transition]), + impl = _chain_transitions([_cgo_enabled_transition, _compile_shared_transition, _race_transition]), refs = { "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", "compile_shared_value": "prelude//go/constraints:compile_shared_true", + "race_false": "prelude//go/constraints:race_false", + "race_true": "prelude//go/constraints:race_true", }, - attrs = ["cgo_enabled"], + attrs = ["cgo_enabled", "race"], ) cgo_enabled_attr = attrs.default_only(attrs.option(attrs.bool(), default = select({ @@ -101,3 +134,9 @@ compile_shared_attr = attrs.default_only(attrs.bool(default = select({ "prelude//go/constraints:compile_shared_false": False, "prelude//go/constraints:compile_shared_true": True, }))) + +race_attr = attrs.default_only(attrs.bool(default = select({ + "DEFAULT": False, + "prelude//go/constraints:race_false": False, + "prelude//go/constraints:race_true": True, +}))) diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 527b8359f..f430318d0 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -28,7 +28,7 @@ load("@prelude//go:go_stdlib.bzl", "go_stdlib_impl") load("@prelude//go:go_test.bzl", "go_test_impl") load("@prelude//haskell:compile.bzl", 
"HaskellLibraryProvider") load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl", "haskell_toolchain_library_impl") -load("@prelude//go/transitions:defs.bzl", "cgo_enabled_attr", "compile_shared_attr", "go_binary_transition", "go_exported_library_transition", "go_test_transition") +load("@prelude//go/transitions:defs.bzl", "cgo_enabled_attr", "compile_shared_attr", "go_binary_transition", "go_exported_library_transition", "go_test_transition", "race_attr") load("@prelude//haskell:haskell_ghci.bzl", "haskell_ghci_impl") load("@prelude//haskell:haskell_haddock.bzl", "haskell_haddock_impl") load("@prelude//haskell:haskell_ide.bzl", "haskell_ide_impl") @@ -386,6 +386,7 @@ inlined_extra_attributes = { "_exec_os_type": buck.exec_os_type_arg(), "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), + "_race": race_attr, }, # csharp "csharp_library": { @@ -445,12 +446,14 @@ inlined_extra_attributes = { "_compile_shared": compile_shared_attr, "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), + "_race": race_attr, }, "go_stdlib": { "_cgo_enabled": cgo_enabled_attr, "_compile_shared": compile_shared_attr, "_exec_os_type": buck.exec_os_type_arg(), "_go_toolchain": toolchains_common.go(), + "_race": race_attr, }, "go_test": { "coverage_mode": attrs.option(attrs.enum(GoCoverageMode.values()), default = None), From 4aab754129749a87be1f76f1b126d6bf462fd1ba Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Tue, 20 Feb 2024 09:30:29 -0800 Subject: [PATCH 0315/1133] Override -race via configuration modifiers MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: This solution looks a bit hacky, let me know if I can do better 😀 We need to override `race` option of go_binary/etc with a CLI flag (configuration modifier) - Option 
`race` controls a configuration transition - We add a "fake" option `_race` which controlled by a configuration constraint Reviewed By: awalterschulze Differential Revision: D53921064 fbshipit-source-id: 22f927b0ac4925a4bdbfe1ba4c3e4144c2e02667 --- prelude/go/go_binary.bzl | 4 ++-- prelude/go/go_exported_library.bzl | 4 ++-- prelude/go/go_test.bzl | 6 +++--- prelude/rules_impl.bzl | 3 +++ 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/prelude/go/go_binary.bzl b/prelude/go/go_binary.bzl index e33d445fc..6d8628651 100644 --- a/prelude/go/go_binary.bzl +++ b/prelude/go/go_binary.bzl @@ -26,7 +26,7 @@ def go_binary_impl(ctx: AnalysisContext) -> list[Provider]: get_filtered_srcs(ctx, ctx.attrs.srcs), deps = ctx.attrs.deps, compile_flags = ctx.attrs.compiler_flags, - race = ctx.attrs.race, + race = ctx.attrs._race, ) (bin, runtime_files, external_debug_info) = link( ctx, @@ -35,7 +35,7 @@ def go_binary_impl(ctx: AnalysisContext) -> list[Provider]: link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static")), linker_flags = ctx.attrs.linker_flags, link_mode = ctx.attrs.link_mode, - race = ctx.attrs.race, + race = ctx.attrs._race, ) # runtime_files are all the artifacts that must be present in order for this diff --git a/prelude/go/go_exported_library.bzl b/prelude/go/go_exported_library.bzl index 3616b4b75..b069f85f4 100644 --- a/prelude/go/go_exported_library.bzl +++ b/prelude/go/go_exported_library.bzl @@ -25,7 +25,7 @@ def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: deps = ctx.attrs.deps, compile_flags = ctx.attrs.compiler_flags, shared = True, - race = ctx.attrs.race, + race = ctx.attrs._race, ) (bin, runtime_files, _external_debug_info) = link( ctx, @@ -36,7 +36,7 @@ def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: linker_flags = ctx.attrs.linker_flags, external_linker_flags = ctx.attrs.external_linker_flags, shared = True, - race = ctx.attrs.race, + race = ctx.attrs._race, ) return [ 
DefaultInfo( diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index 80bfc675f..ecfaedd35 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -96,14 +96,14 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: pkgs = pkgs, compile_flags = ctx.attrs.compiler_flags, coverage_mode = coverage_mode, - race = ctx.attrs.race, + race = ctx.attrs._race, ) # Generate a main function which runs the tests and build that into another # package. gen_main = _gen_test_main(ctx, pkg_name, coverage_mode, coverage_vars, srcs) pkgs[pkg_name] = tests - main = compile(ctx, "main", cmd_args(gen_main), pkgs = pkgs, coverage_mode = coverage_mode, race = ctx.attrs.race) + main = compile(ctx, "main", cmd_args(gen_main), pkgs = pkgs, coverage_mode = coverage_mode, race = ctx.attrs._race) # Link the above into a Go binary. (bin, runtime_files, external_debug_info) = link( @@ -115,7 +115,7 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: linker_flags = ctx.attrs.linker_flags, shared = False, coverage_mode = coverage_mode, - race = ctx.attrs.race, + race = ctx.attrs._race, ) run_cmd = cmd_args(bin).hidden(runtime_files, external_debug_info) diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index f430318d0..c94c1fa28 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -433,12 +433,14 @@ inlined_extra_attributes = { "_exec_os_type": buck.exec_os_type_arg(), "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), + "_race": race_attr, }, "go_exported_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), "_exec_os_type": buck.exec_os_type_arg(), "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), + "_race": race_attr, }, "go_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), @@ -462,6 +464,7 @@ 
inlined_extra_attributes = { "_exec_os_type": buck.exec_os_type_arg(), "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), + "_race": race_attr, "_testmaingen": attrs.default_only(attrs.exec_dep(default = "prelude//go/tools:testmaingen")), }, From 66c848bd6255e388412b63725c18f06521204cb1 Mon Sep 17 00:00:00 2001 From: Navid Qaragozlou Date: Tue, 20 Feb 2024 11:47:45 -0800 Subject: [PATCH 0316/1133] XRMaps app builds with buck2 Summary: TSIA Reviewed By: mzlee Differential Revision: D53674063 fbshipit-source-id: cbe0cb295d45ef6ec25ca96804d503182b137c4a --- prelude/genrule_local_labels.bzl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/prelude/genrule_local_labels.bzl b/prelude/genrule_local_labels.bzl index 428f61d76..0fec9753b 100644 --- a/prelude/genrule_local_labels.bzl +++ b/prelude/genrule_local_labels.bzl @@ -203,6 +203,9 @@ _GENRULE_LOCAL_LABELS = {label: True for label in [ # Uses Apple's codesign command which might not be in RE "uses_codesign", + + # Uses jf which is not on RE + "uses_jf", ]} def genrule_labels_require_local(labels): From bbd324789bfbe916cd5a4500bc81cd49aaa8f15c Mon Sep 17 00:00:00 2001 From: Loren Arthur Date: Tue, 20 Feb 2024 14:37:49 -0800 Subject: [PATCH 0317/1133] Add SharedOnlyLibraryInfo. Summary: Mark cxx_libraries that only support dynamic linking and make sure we include them with cxx_python_extensions. 
Reviewed By: zsol Differential Revision: D53861184 fbshipit-source-id: 313c4d6dd7efa2f6d4633d087524fb1b64f363ef --- prelude/cxx/cxx.bzl | 3 +++ prelude/cxx/cxx_library.bzl | 5 +++++ prelude/linking/linkable_graph.bzl | 3 +++ prelude/python/native_python_util.bzl | 12 ++++-------- 4 files changed, 15 insertions(+), 8 deletions(-) diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index 2fe804beb..bdc43031f 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -47,6 +47,7 @@ load( "@prelude//linking:linkable_graph.bzl", "DlopenableLibraryInfo", "LinkableGraph", + "SharedOnlyLibraryInfo", "create_linkable_graph", "create_linkable_graph_node", "create_linkable_node", @@ -355,6 +356,8 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx.attrs.platform_header_dirs, ) preferred_linkage = _prebuilt_linkage(ctx) + if preferred_linkage == Linkage("shared"): + providers.append(SharedOnlyLibraryInfo()) # Prepare the stripped static lib. static_lib_stripped = None diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 184d31dde..af38e66c3 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -87,6 +87,7 @@ load( "@prelude//linking:linkable_graph.bzl", "DlopenableLibraryInfo", "LinkableRootInfo", + "SharedOnlyLibraryInfo", "create_linkable_graph", "create_linkable_graph_node", "create_linkable_node", @@ -380,6 +381,10 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc sub_targets = {} providers = [] + # Mark the library as shared only if preferred linkage is set + if preferred_linkage == Linkage("shared"): + providers.append(SharedOnlyLibraryInfo()) + if len(ctx.attrs.tests) > 0 and impl_params.generate_providers.preprocessor_for_tests: providers.append( CPreprocessorForTestsInfo( diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index 1f73160ed..7da630583 100644 --- a/prelude/linking/linkable_graph.bzl +++ 
b/prelude/linking/linkable_graph.bzl @@ -129,6 +129,9 @@ LinkableGraph = provider(fields = { # dynamically, at runtime (e.g. via `dlopen`). DlopenableLibraryInfo = provider(fields = {}) +# Used to tag a rule as providing only a shared native library that may be loaded at runtime +SharedOnlyLibraryInfo = provider(fields = {}) + def _get_required_outputs_for_linkage(linkage: Linkage) -> list[LibOutputStyle]: if linkage == Linkage("shared"): return [LibOutputStyle("shared_lib")] diff --git a/prelude/python/native_python_util.bzl b/prelude/python/native_python_util.bzl index e049465d4..166e3a82c 100644 --- a/prelude/python/native_python_util.bzl +++ b/prelude/python/native_python_util.bzl @@ -12,13 +12,12 @@ load( "LibOutputStyle", "LinkInfo", "LinkInfos", - "MergedLinkInfo", "ObjectsLinkable", ) load( "@prelude//linking:linkable_graph.bzl", "DlopenableLibraryInfo", - "LinkableRootInfo", + "SharedOnlyLibraryInfo", ) load( "@prelude//linking:linkables.bzl", @@ -65,17 +64,14 @@ def merge_cxx_extension_info( dlopen_deps[dep.label] = linkable(dep) continue - # Try to detect prebuilt, shared-only libraries. - # TODO(agallagher): We need a more general way to support this, which - # should *just* use `preferred_linkage` (and so it supports non-prebuilt - # libs too), but this will require hoisting the rules first-order deps - # up the tree as `dlopen_deps` so that we link them properly. 
- if MergedLinkInfo in dep and LinkableRootInfo not in dep: + if SharedOnlyLibraryInfo in dep: shared_only_libs[dep.label] = linkable(dep) for dep in deps: cxx_extension_info = dep.get(CxxExtensionLinkInfo) if cxx_extension_info == None: + if SharedOnlyLibraryInfo in dep: + shared_only_libs[dep.label] = linkable(dep) continue linkable_provider_children.append(cxx_extension_info.linkable_providers) artifacts.update(cxx_extension_info.artifacts) From 93147b877851b8053b44cc659ca13149d10bcd23 Mon Sep 17 00:00:00 2001 From: Arsen Tumanyan Date: Tue, 20 Feb 2024 16:43:54 -0800 Subject: [PATCH 0318/1133] Add jar post processing option Summary: **Context** There are cases when the jar file generated by the java rules (java_library, kotlin_library) needs modifications. For example, in waandroid repo, where Hilt is used, these is a need to modify the bytecode of the generated jar file. This is currently done with [a chain of rules](https://www.internalfb.com/code/whatsapp-android/[master%3Acc4b5df5c1a7]/tools/buck/build_defs/build_config.bzl?lines=329) and creates issues when enabling dep_files, locating owners of the main targets, IDE integration etc. **This diff** - Adds an option to provide a post processor binary to `android_library`, `java_library` and `kotlin_library` rules. 
- The new action which runs the post_processor outputs the new jar into `"post_processed/{}.jar"` **Example Usage** ``` android_library( name = "a", srcs = ["A.kt"], language = "KOTLIN", jar_postprocessor = ":post_processor" ) java_binary( name = "post_processor", main_class = "ByteCodeModifier", visibility = ["PUBLIC"], deps = [":post_processor_jar"], ) ``` Reviewed By: IanChilds Differential Revision: D53842308 fbshipit-source-id: ce384edddb6964ec4d974bede5a08c8adea283de --- prelude/decls/android_rules.bzl | 1 + prelude/decls/java_rules.bzl | 1 + prelude/decls/kotlin_rules.bzl | 1 + prelude/java/java_library.bzl | 6 ++++- prelude/java/javacd_jar_creator.bzl | 12 ++++++++- prelude/jvm/cd_jar_creator_util.bzl | 35 ++++++++++++++++++++++--- prelude/kotlin/kotlin_library.bzl | 1 + prelude/kotlin/kotlincd_jar_creator.bzl | 4 ++- 8 files changed, 54 insertions(+), 7 deletions(-) diff --git a/prelude/decls/android_rules.bzl b/prelude/decls/android_rules.bzl index 3c15a772a..4182f28b4 100644 --- a/prelude/decls/android_rules.bzl +++ b/prelude/decls/android_rules.bzl @@ -752,6 +752,7 @@ android_library = prelude_rule( "extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []), "friend_paths": attrs.list(attrs.dep(), default = []), "java_version": attrs.option(attrs.string(), default = None), + "jar_postprocessor": attrs.option(attrs.exec_dep(), default = None), "kotlin_compiler_plugins": attrs.dict(key = attrs.source(), value = attrs.dict(key = attrs.string(), value = attrs.string(), sorted = False), sorted = False, default = {}), "labels": attrs.list(attrs.string(), default = []), "language": attrs.option(attrs.enum(JvmLanguage), default = None), diff --git a/prelude/decls/java_rules.bzl b/prelude/decls/java_rules.bzl index 2f8a0424c..53f03a474 100644 --- a/prelude/decls/java_rules.bzl +++ b/prelude/decls/java_rules.bzl @@ -281,6 +281,7 @@ java_library = prelude_rule( "annotation_processors": attrs.list(attrs.string(), default = []), 
"contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), + "jar_postprocessor": attrs.option(attrs.exec_dep(), default = None), "labels": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "manifest_file": attrs.option(attrs.source(), default = None), diff --git a/prelude/decls/kotlin_rules.bzl b/prelude/decls/kotlin_rules.bzl index b1550992a..ddd4d92a8 100644 --- a/prelude/decls/kotlin_rules.bzl +++ b/prelude/decls/kotlin_rules.bzl @@ -181,6 +181,7 @@ kotlin_library = prelude_rule( "extra_non_source_only_abi_kotlinc_arguments": attrs.list(attrs.string(), default = []), "java_version": attrs.option(attrs.string(), default = None), "javac": attrs.option(attrs.source(), default = None), + "jar_postprocessor": attrs.option(attrs.exec_dep(), default = None), "licenses": attrs.list(attrs.source(), default = []), "manifest_file": attrs.option(attrs.source(), default = None), "maven_coords": attrs.option(attrs.string(), default = None), diff --git a/prelude/java/java_library.bzl b/prelude/java/java_library.bzl index 512b6c503..e5265b722 100644 --- a/prelude/java/java_library.bzl +++ b/prelude/java/java_library.bzl @@ -31,6 +31,7 @@ load( ) load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") load("@prelude//java/utils:java_utils.bzl", "declare_prefixed_name", "derive_javac", "get_abi_generation_mode", "get_class_to_source_map_info", "get_default_info", "get_java_version_attributes", "to_java_version") +load("@prelude//jvm:cd_jar_creator_util.bzl", "post_process_jar") load("@prelude//jvm:nullsafe.bzl", "get_nullsafe_info") load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo") load("@prelude//utils:expect.bzl", "expect") @@ -433,8 +434,11 @@ def _create_jar_artifact( abi = None if (not srcs and not additional_compiled_srcs) or abi_generation_mode == AbiGenerationMode("none") or 
java_toolchain.is_bootstrap_toolchain else create_abi(ctx.actions, java_toolchain.class_abi_generator, jar_out) + has_postprocessor = hasattr(ctx.attrs, "jar_postprocessor") and ctx.attrs.jar_postprocessor + final_jar = post_process_jar(ctx.actions, ctx.attrs.jar_postprocessor[RunInfo], jar_out, actions_identifier) if has_postprocessor else jar_out + return make_compile_outputs( - full_library = jar_out, + full_library = final_jar, class_abi = abi, required_for_source_only_abi = required_for_source_only_abi, annotation_processor_output = generated_sources_dir, diff --git a/prelude/java/javacd_jar_creator.bzl b/prelude/java/javacd_jar_creator.bzl index 1806facc6..d9344e72a 100644 --- a/prelude/java/javacd_jar_creator.bzl +++ b/prelude/java/javacd_jar_creator.bzl @@ -305,7 +305,17 @@ def create_jar_artifact_javacd( path_to_class_hashes_out, is_creating_subtarget, ) - final_jar = prepare_final_jar(actions, actions_identifier, output, output_paths, additional_compiled_srcs, java_toolchain.jar_builder) + jar_postprocessor = ctx.attrs.jar_postprocessor[RunInfo] if hasattr(ctx.attrs, "jar_postprocessor") and ctx.attrs.jar_postprocessor else None + final_jar = prepare_final_jar( + actions = actions, + actions_identifier = actions_identifier, + output = output, + output_paths = output_paths, + additional_compiled_srcs = additional_compiled_srcs, + jar_builder = java_toolchain.jar_builder, + jar_postprocessor = jar_postprocessor, + ) + if not is_creating_subtarget: class_abi, source_abi, source_only_abi, classpath_abi, classpath_abi_dir = generate_abi_jars( actions, diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl index b5f7eafae..edef56774 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl +++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -443,12 +443,18 @@ def prepare_final_jar( output: [Artifact, None], output_paths: OutputPaths, additional_compiled_srcs: [Artifact, None], - jar_builder: RunInfo) -> Artifact: + jar_builder: RunInfo, + 
jar_postprocessor: [RunInfo, None]) -> Artifact: if not additional_compiled_srcs: + output_jar = output_paths.jar if output: actions.copy_file(output.as_output(), output_paths.jar) - return output - return output_paths.jar + output_jar = output + + if jar_postprocessor: + return post_process_jar(actions, jar_postprocessor, output_jar, actions_identifier) + else: + return output_jar merged_jar = output if not merged_jar: @@ -466,7 +472,11 @@ def prepare_final_jar( category = "merge_additional_srcs", identifier = actions_identifier, ) - return merged_jar + + if jar_postprocessor: + return post_process_jar(actions, jar_postprocessor, merged_jar, actions_identifier) + else: + return merged_jar def generate_abi_jars( actions: AnalysisActions, @@ -556,3 +566,20 @@ def generate_abi_jars( classpath_abi_dir = class_abi_output_dir return class_abi, source_abi, source_only_abi, classpath_abi, classpath_abi_dir + +def post_process_jar( + actions: AnalysisActions, + jar_postprocessor: RunInfo, + original_jar: Artifact, + actions_identifier: [str, None]) -> Artifact: + post_processed_output = actions.declare_output("post_processed_{}".format(original_jar.short_path)) + processor_cmd_args = cmd_args( + jar_postprocessor, + original_jar, + post_processed_output.as_output(), + ) + + identifier = actions_identifier if actions_identifier else "" + actions.run(processor_cmd_args, category = "post_processed{}".format(identifier)) + + return post_processed_output diff --git a/prelude/kotlin/kotlin_library.bzl b/prelude/kotlin/kotlin_library.bzl index cf3dbb3b3..71f09eb39 100644 --- a/prelude/kotlin/kotlin_library.bzl +++ b/prelude/kotlin/kotlin_library.bzl @@ -349,6 +349,7 @@ def build_kotlin_library( "extra_kotlinc_arguments": ctx.attrs.extra_kotlinc_arguments, "friend_paths": ctx.attrs.friend_paths, "is_building_android_binary": ctx.attrs._is_building_android_binary, + "jar_postprocessor": ctx.attrs.jar_postprocessor[RunInfo] if hasattr(ctx.attrs, "jar_postprocessor") and 
ctx.attrs.jar_postprocessor else None, "java_toolchain": ctx.attrs._java_toolchain[JavaToolchainInfo], "javac_tool": derive_javac(ctx.attrs.javac) if ctx.attrs.javac else None, "k2": ctx.attrs.k2, diff --git a/prelude/kotlin/kotlincd_jar_creator.bzl b/prelude/kotlin/kotlincd_jar_creator.bzl index 24bfa1531..c9d8a6225 100644 --- a/prelude/kotlin/kotlincd_jar_creator.bzl +++ b/prelude/kotlin/kotlincd_jar_creator.bzl @@ -76,7 +76,8 @@ def create_jar_artifact_kotlincd( extra_kotlinc_arguments: list[str], k2: bool, is_creating_subtarget: bool = False, - optional_dirs: list[OutputArtifact] = []) -> JavaCompileOutputs: + optional_dirs: list[OutputArtifact] = [], + jar_postprocessor: [RunInfo, None] = None) -> JavaCompileOutputs: resources_map = get_resources_map( java_toolchain = java_toolchain, package = label.package, @@ -356,6 +357,7 @@ def create_jar_artifact_kotlincd( output_paths = output_paths, additional_compiled_srcs = None, jar_builder = java_toolchain.jar_builder, + jar_postprocessor = jar_postprocessor, ) if not is_creating_subtarget: From 49606ddb376b7a460a7faea865c69e69f365bea9 Mon Sep 17 00:00:00 2001 From: Jason Lu Date: Tue, 20 Feb 2024 22:14:33 -0800 Subject: [PATCH 0319/1133] Back out "Add SharedOnlyLibraryInfo." 
Summary: Original commit changeset: 313c4d6dd7ef Original Phabricator Diff: D53861184 Reviewed By: BrandonTheBuilder Differential Revision: D53983496 fbshipit-source-id: b726560fb33741e467e1c4e41a9f934b4b7e12df --- prelude/cxx/cxx.bzl | 3 --- prelude/cxx/cxx_library.bzl | 5 ----- prelude/linking/linkable_graph.bzl | 3 --- prelude/python/native_python_util.bzl | 12 ++++++++---- 4 files changed, 8 insertions(+), 15 deletions(-) diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index bdc43031f..2fe804beb 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -47,7 +47,6 @@ load( "@prelude//linking:linkable_graph.bzl", "DlopenableLibraryInfo", "LinkableGraph", - "SharedOnlyLibraryInfo", "create_linkable_graph", "create_linkable_graph_node", "create_linkable_node", @@ -356,8 +355,6 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx.attrs.platform_header_dirs, ) preferred_linkage = _prebuilt_linkage(ctx) - if preferred_linkage == Linkage("shared"): - providers.append(SharedOnlyLibraryInfo()) # Prepare the stripped static lib. 
static_lib_stripped = None diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index af38e66c3..184d31dde 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -87,7 +87,6 @@ load( "@prelude//linking:linkable_graph.bzl", "DlopenableLibraryInfo", "LinkableRootInfo", - "SharedOnlyLibraryInfo", "create_linkable_graph", "create_linkable_graph_node", "create_linkable_node", @@ -381,10 +380,6 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc sub_targets = {} providers = [] - # Mark the library as shared only if preferred linkage is set - if preferred_linkage == Linkage("shared"): - providers.append(SharedOnlyLibraryInfo()) - if len(ctx.attrs.tests) > 0 and impl_params.generate_providers.preprocessor_for_tests: providers.append( CPreprocessorForTestsInfo( diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index 7da630583..1f73160ed 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -129,9 +129,6 @@ LinkableGraph = provider(fields = { # dynamically, at runtime (e.g. via `dlopen`). 
DlopenableLibraryInfo = provider(fields = {}) -# Used to tag a rule as providing only a shared native library that may be loaded at runtime -SharedOnlyLibraryInfo = provider(fields = {}) - def _get_required_outputs_for_linkage(linkage: Linkage) -> list[LibOutputStyle]: if linkage == Linkage("shared"): return [LibOutputStyle("shared_lib")] diff --git a/prelude/python/native_python_util.bzl b/prelude/python/native_python_util.bzl index 166e3a82c..e049465d4 100644 --- a/prelude/python/native_python_util.bzl +++ b/prelude/python/native_python_util.bzl @@ -12,12 +12,13 @@ load( "LibOutputStyle", "LinkInfo", "LinkInfos", + "MergedLinkInfo", "ObjectsLinkable", ) load( "@prelude//linking:linkable_graph.bzl", "DlopenableLibraryInfo", - "SharedOnlyLibraryInfo", + "LinkableRootInfo", ) load( "@prelude//linking:linkables.bzl", @@ -64,14 +65,17 @@ def merge_cxx_extension_info( dlopen_deps[dep.label] = linkable(dep) continue - if SharedOnlyLibraryInfo in dep: + # Try to detect prebuilt, shared-only libraries. + # TODO(agallagher): We need a more general way to support this, which + # should *just* use `preferred_linkage` (and so it supports non-prebuilt + # libs too), but this will require hoisting the rules first-order deps + # up the tree as `dlopen_deps` so that we link them properly. 
+ if MergedLinkInfo in dep and LinkableRootInfo not in dep: shared_only_libs[dep.label] = linkable(dep) for dep in deps: cxx_extension_info = dep.get(CxxExtensionLinkInfo) if cxx_extension_info == None: - if SharedOnlyLibraryInfo in dep: - shared_only_libs[dep.label] = linkable(dep) continue linkable_provider_children.append(cxx_extension_info.linkable_providers) artifacts.update(cxx_extension_info.artifacts) From f5f505d6fea2aea3da268274f2079022c5d31b15 Mon Sep 17 00:00:00 2001 From: Jia Chen Date: Tue, 20 Feb 2024 22:25:12 -0800 Subject: [PATCH 0320/1133] Add a BXL script to batch-run `[typecheck]` subtarget on a list of target expressions Reviewed By: zsol Differential Revision: D53874679 fbshipit-source-id: 7219975050d45a46f5fe02ea888aad1da1430f00 --- prelude/python/typecheck/batch.bxl | 37 ++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 prelude/python/typecheck/batch.bxl diff --git a/prelude/python/typecheck/batch.bxl b/prelude/python/typecheck/batch.bxl new file mode 100644 index 000000000..3ba17c491 --- /dev/null +++ b/prelude/python/typecheck/batch.bxl @@ -0,0 +1,37 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//utils:utils.bzl", "flatten") +load("@prelude//python/sourcedb/filter.bxl", "do_filter") + +def _run_entry_point(ctx: bxl.Context) -> None: + targets = flatten(ctx.cli_args.target) + uquery_universe = ctx.target_universe(targets).target_set() + checked_targets = do_filter(ctx.cquery(), uquery_universe) + + build_result = ctx.build([ + target.with_sub_target("typecheck") + for target in checked_targets + ]) + output = ctx.output.ensure_multiple(build_result) + + ctx.output.print_json({ + label.raw_target(): [artifact.rel_path() for artifact in artifacts] + for label, artifacts in output.items() + }) + +run = bxl_main( + doc = "Run [typecheck] on a set of targets or target patterns.", + impl = _run_entry_point, + cli_args = { + "target": cli_args.list( + cli_args.target_expr( + doc = "Target pattern to run type checking on", + ), + ), + }, +) From 7b48f5628ec180318104305684283644dc1fa857 Mon Sep 17 00:00:00 2001 From: Navid Qaragozlou Date: Wed, 21 Feb 2024 07:12:07 -0800 Subject: [PATCH 0321/1133] Add `buck2_compatibility` field to all rules Summary: D53692037 added buck2_compatibility everywhere for buck1. This change does it for buck2. Reviewed By: hick209 Differential Revision: D53956541 fbshipit-source-id: 12187c7e29ed4380ed06768436c2010fe9a9f55c --- prelude/android/android.bzl | 6 ------ prelude/apple/apple_rules_impl.bzl | 3 --- prelude/apple/apple_rules_impl_utility.bzl | 2 -- prelude/rules.bzl | 5 ++++- 4 files changed, 4 insertions(+), 12 deletions(-) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index cc332236d..1fdd96830 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -5,7 +5,6 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//:buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE") load("@prelude//android:cpu_filters.bzl", "ALL_CPU_FILTERS") load("@prelude//java:java.bzl", "AbiGenerationMode", "dex_min_sdk_version") load("@prelude//decls/android_rules.bzl", "AaptMode", "DuplicateResourceBehaviour") @@ -97,7 +96,6 @@ extra_attributes = { "_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)), "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)), "_java_toolchain": toolchains_common.java_for_android(), - BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, }, "android_build_config": { "_android_toolchain": toolchains_common.android(), @@ -126,7 +124,6 @@ extra_attributes = { "_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)), "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)), "_java_toolchain": toolchains_common.java_for_android(), - BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, }, "android_instrumentation_apk": { "aapt_mode": attrs.enum(AaptMode, default = "aapt1"), # Match default in V1 @@ -153,7 +150,6 @@ extra_attributes = { "_android_toolchain": toolchains_common.android(), "_exec_os_type": buck.exec_os_type_arg(), "_java_toolchain": toolchains_common.java_for_android(), - BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, }, "android_library": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), @@ -167,7 +163,6 @@ extra_attributes = { "_is_building_android_binary": is_building_android_binary_attr(), "_java_toolchain": toolchains_common.java_for_android(), "_kotlin_toolchain": toolchains_common.kotlin(), - BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, }, "android_manifest": { "_android_toolchain": toolchains_common.android(), @@ -220,6 +215,5 @@ extra_attributes = { 
"_java_test_toolchain": toolchains_common.java_test(), "_java_toolchain": toolchains_common.java_for_host_test(), "_kotlin_toolchain": toolchains_common.kotlin(), - BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, }, } diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 61a151423..894ba0dd4 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -5,7 +5,6 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//:buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE") load("@prelude//apple/swift:swift_incremental_support.bzl", "SwiftCompilationMode") load("@prelude//apple/swift:swift_toolchain.bzl", "swift_toolchain_impl") load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftObjectFormat") @@ -95,7 +94,6 @@ def _apple_binary_extra_attrs(): "_apple_xctoolchain": get_apple_xctoolchain_attr(), "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), "_stripped_default": attrs.bool(default = False), - BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, APPLE_VALIDATION_DEPS_ATTR_NAME: APPLE_VALIDATION_DEPS_ATTR_TYPE, } attribs.update(apple_dsymutil_attrs()) @@ -122,7 +120,6 @@ def _apple_library_extra_attrs(): "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), "_stripped_default": attrs.bool(default = False), APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), - BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, APPLE_VALIDATION_DEPS_ATTR_NAME: APPLE_VALIDATION_DEPS_ATTR_TYPE, } attribs.update(apple_dsymutil_attrs()) diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index a377db959..398b821fb 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -5,7 +5,6 @@ # 
License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//:buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE") load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_system_identification_attrs") load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo", "AppleBundleTypeAttributeType") load("@prelude//apple:apple_code_signing_types.bzl", "CodeSignType") @@ -73,7 +72,6 @@ def _apple_bundle_like_common_attrs(): APPLE_USE_ENTITLEMENTS_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME: attrs.bool(default = False), APPLE_EMBED_PROVISIONING_PROFILE_WHEN_ADHOC_CODE_SIGNING_CONFIG_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), APPLE_EMBED_PROVISIONING_PROFILE_WHEN_ADHOC_CODE_SIGNING_ATTR_NAME: attrs.bool(default = False), - BUCK2_COMPATIBILITY_ATTRIB_NAME: BUCK2_COMPATIBILITY_ATTRIB_TYPE, APPLE_VALIDATION_DEPS_ATTR_NAME: APPLE_VALIDATION_DEPS_ATTR_TYPE, } attribs.update(get_apple_info_plist_build_system_identification_attrs()) diff --git a/prelude/rules.bzl b/prelude/rules.bzl index 9653302b8..64ab5e5f3 100644 --- a/prelude/rules.bzl +++ b/prelude/rules.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//:buck2_compatibility.bzl", "check_buck2_compatibility") +load("@prelude//:buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "BUCK2_COMPATIBILITY_ATTRIB_TYPE", "check_buck2_compatibility") load("@prelude//configurations:rules.bzl", _config_implemented_rules = "implemented_rules") load("@prelude//decls/common.bzl", "prelude_rule") load("@prelude//is_full_meta_repo.bzl", "is_full_meta_repo") @@ -37,6 +37,9 @@ def _mk_rule(rule_spec: typing.Any, extra_attrs: dict[str, typing.Any] = dict(), if toolchain_attr in attributes: fat_platform_compatible = False + #Add buck2_compatibility attribute to all rules + extra_attrs[BUCK2_COMPATIBILITY_ATTRIB_NAME] = BUCK2_COMPATIBILITY_ATTRIB_TYPE + # Fat platforms is an idea specific to our toolchains, so doesn't apply to # open source. Ideally this restriction would be done at the toolchain level. if not is_full_meta_repo(): From 27b12dcbae0341a0ec2a8e7ee05c65ea164c5629 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Wed, 21 Feb 2024 10:21:36 -0800 Subject: [PATCH 0322/1133] provide AppleBundleInfo from apple_xcuitest Summary: Return an AppleBundleInfo provider from the apple_xcuitest rule. This is required to be able to package it via apple_package. 
Reviewed By: milend Differential Revision: D53947225 fbshipit-source-id: c731ce16722ebbd4245d32aaad89710ebbc6b742 --- prelude/apple/apple_xcuitest.bzl | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/prelude/apple/apple_xcuitest.bzl b/prelude/apple/apple_xcuitest.bzl index 14941bfd5..265a49d0c 100644 --- a/prelude/apple/apple_xcuitest.bzl +++ b/prelude/apple/apple_xcuitest.bzl @@ -9,6 +9,7 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") load(":apple_bundle_destination.bzl", "AppleBundleDestination") load(":apple_bundle_part.bzl", "AppleBundlePart", "assemble_bundle") +load(":apple_bundle_types.bzl", "AppleBundleInfo", "AppleBundleType") load(":apple_info_plist.bzl", "process_info_plist") def apple_xcuitest_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: @@ -27,7 +28,17 @@ def apple_xcuitest_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: swift_stdlib_args = None, ) - return [DefaultInfo(default_output = output_bundle)] + return [ + DefaultInfo(default_output = output_bundle), + AppleBundleInfo( + bundle = output_bundle, + bundle_type = AppleBundleType("default"), + binary_name = ctx.attrs.name, + contains_watchapp = False, + # The test runner binary does not contain Swift + skip_copying_swift_stdlib = True, + ), + ] def _get_uitest_bundle(ctx: AnalysisContext) -> AppleBundlePart: return AppleBundlePart( From c87fefaf203b6eea9f0f19c1c25fdeddcb2f22b6 Mon Sep 17 00:00:00 2001 From: Jacob Rodal Date: Wed, 21 Feb 2024 12:20:59 -0800 Subject: [PATCH 0323/1133] class_to_srcs_map use argsfile Summary: I'm seeing a lot of `class_to_srcs_map` "command line is too long" errors on Windows while migrating targets to buck2. The rest of the actions in this file are already using an argsfile - let's do the same for `class_to_srcs_map`. 
Reviewed By: IanChilds Differential Revision: D53833212 fbshipit-source-id: 6b75803d1d098c6b4b5d495d4c20ebe3af28d0ae --- prelude/java/class_to_srcs.bzl | 5 +++-- prelude/java/tools/gen_class_to_source_map.py | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/prelude/java/class_to_srcs.bzl b/prelude/java/class_to_srcs.bzl index 352a67feb..d253cab69 100644 --- a/prelude/java/class_to_srcs.bzl +++ b/prelude/java/class_to_srcs.bzl @@ -78,8 +78,9 @@ def create_class_to_source_map_from_jar( cmd = cmd_args(java_toolchain.gen_class_to_source_map[RunInfo]) cmd.add("-o", output.as_output()) cmd.add(jar) - for src in srcs: - cmd.add(cmd_args(src)) + inputs_file = actions.write("class_to_srcs_map_argsfile.txt", srcs) + cmd.add(cmd_args(inputs_file, format = "@{}")) + cmd.hidden(srcs) actions.run(cmd, category = "class_to_srcs_map") return output diff --git a/prelude/java/tools/gen_class_to_source_map.py b/prelude/java/tools/gen_class_to_source_map.py index eed5c8473..8e58557d4 100644 --- a/prelude/java/tools/gen_class_to_source_map.py +++ b/prelude/java/tools/gen_class_to_source_map.py @@ -13,7 +13,7 @@ def main(argv): - parser = argparse.ArgumentParser() + parser = argparse.ArgumentParser(fromfile_prefix_chars="@") parser.add_argument( "--output", "-o", type=argparse.FileType("w"), default=sys.stdin ) From a0f45f6a7f796a40eabe539c7b0d608c64dee9b1 Mon Sep 17 00:00:00 2001 From: Kyle Into Date: Wed, 21 Feb 2024 12:56:49 -0800 Subject: [PATCH 0324/1133] Set enabled_typing default to False for reduced Buck memory usages Reviewed By: samwgoldman Differential Revision: D53972296 fbshipit-source-id: 69eff32b5870f705d1bc8eea06b09513dc90893d --- prelude/decls/python_rules.bzl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/prelude/decls/python_rules.bzl b/prelude/decls/python_rules.bzl index 424f4f8c2..53721a538 100644 --- a/prelude/decls/python_rules.bzl +++ b/prelude/decls/python_rules.bzl @@ -17,8 +17,10 @@ def _typing_arg(): 
"py_version_for_type_checking": attrs.option(attrs.string(), default = None, doc = """ This option will force the type checker to perform checking under a specific version of Python interpreter. """), - "typing": attrs.bool(default = True, doc = """ - Determines whether to perform type checking on the given target. Default is True. + # NOTE(grievejia): Setting default to True here may have non-trivial impact on build memory + # usage (see S395002) + "typing": attrs.bool(default = False, doc = """ + Determines whether to perform type checking on the given target. Default is False. """), } From 4db3ba5f22da3a064d345082fa70467fe8ef05e2 Mon Sep 17 00:00:00 2001 From: Overhatted <15021741+Overhatted@users.noreply.github.com> Date: Wed, 21 Feb 2024 14:21:44 -0800 Subject: [PATCH 0325/1133] Added example using Buck with Visual Studio Code on Windows Summary: 1. I made the default platform in the prelude public so it can be inherited from. I don't know why it wasn't already like that. Maybe I'm misunderstanding how to use it 2. Not sure if I'm using the config_setting and constraint_setting properly since it seems duplicated. 3. The script to generate the compile_commands.json was more or less copied from https://github.com/facebook/buck2/issues/510. There are still a lot of improvements to be made but I think this is good enough to be merged: 1. With MSVC the compilation should probably be done with /Z7 by default and then, if the "stripped" sub-target is asked for, that flag is removed. 2. The cxx toolchain needs the compilation options looked at, not sure if Linux's defaults work properly. 3. The Install.ps1 and Copy.ps1 need to be ported to Linux. 
X-link: https://github.com/facebook/buck2/pull/517 Reviewed By: KapJI Differential Revision: D53692733 Pulled By: JakobDegen fbshipit-source-id: 8d5962084831a3cccde128f0aa0360a1aaf72004 --- prelude/platforms/BUCK.v2 | 1 + prelude/toolchains/cxx.bzl | 1 - 2 files changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/platforms/BUCK.v2 b/prelude/platforms/BUCK.v2 index 6e45ed1d2..d10b161f8 100644 --- a/prelude/platforms/BUCK.v2 +++ b/prelude/platforms/BUCK.v2 @@ -9,6 +9,7 @@ execution_platform( cpu_configuration = host_configuration.cpu, os_configuration = host_configuration.os, use_windows_path_separators = host_info().os.is_windows, + visibility = ["PUBLIC"], ) prelude.constraint_setting( diff --git a/prelude/toolchains/cxx.bzl b/prelude/toolchains/cxx.bzl index 8863bce95..682ea8ca3 100644 --- a/prelude/toolchains/cxx.bzl +++ b/prelude/toolchains/cxx.bzl @@ -67,7 +67,6 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): shared_library_name_default_prefix = "" shared_library_name_format = "{}.dll" shared_library_versioned_name_format = "{}.dll" - additional_linker_flags = ["msvcrt.lib"] pic_behavior = PicBehavior("not_supported") elif ctx.attrs.linker == "g++" or ctx.attrs.cxx_compiler == "g++": pass From b6adcca945402733bbccb12fc2bf0114a5408fa9 Mon Sep 17 00:00:00 2001 From: Joshua Selbo Date: Thu, 22 Feb 2024 07:24:16 -0800 Subject: [PATCH 0326/1133] Use exec_dep for javac attribute Reviewed By: IanChilds Differential Revision: D54051621 fbshipit-source-id: e011be983278ecb53577e1a6f9420be990db02cb --- prelude/android/android.bzl | 6 +++--- prelude/java/java.bzl | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 1fdd96830..3166ac7e0 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -58,7 +58,7 @@ extra_attributes = { "compress_asset_libraries": attrs.default_only(attrs.bool(default = False)), "cpu_filters": attrs.list(attrs.enum(TargetCpuType), 
default = ALL_CPU_FILTERS), "deps": attrs.list(attrs.split_transition_dep(cfg = cpu_split_transition), default = []), - "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), + "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "min_sdk_version": attrs.option(attrs.int(), default = None), "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), "package_asset_libraries": attrs.default_only(attrs.bool(default = True)), @@ -153,7 +153,7 @@ extra_attributes = { }, "android_library": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), - "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), + "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), "_android_toolchain": toolchains_common.android(), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), @@ -203,7 +203,7 @@ extra_attributes = { }, "robolectric_test": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), - "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), + "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), "robolectric_runtime_dependencies": attrs.list(attrs.source(), default = []), "test_class_names_file": attrs.option(attrs.source(), default = None), diff --git a/prelude/java/java.bzl b/prelude/java/java.bzl index 92137a352..f3be03b5a 100644 --- a/prelude/java/java.bzl +++ b/prelude/java/java.bzl @@ -64,7 +64,7 @@ extra_attributes = { }, "java_library": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), - "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), + 
"javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_dex_min_sdk_version": attrs.option(attrs.int(), default = dex_min_sdk_version()), @@ -78,7 +78,7 @@ extra_attributes = { }, "java_test": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), - "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), + "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), "test_class_names_file": attrs.option(attrs.source(), default = None), "unbundled_resources_root": attrs.option(attrs.source(allow_directory = True), default = None), From ead3f883070b2ab809c3dde15ca45d100e02636c Mon Sep 17 00:00:00 2001 From: David Richey Date: Thu, 22 Feb 2024 08:07:07 -0800 Subject: [PATCH 0327/1133] Add comment pointing to upstream doc-coverage json bug Summary: For posterity Reviewed By: diliop, JakobDegen Differential Revision: D53953042 fbshipit-source-id: b0757909ff0ba6c19c59f54c271d270180d39e0b --- prelude/rust/tools/rustdoc_coverage.py | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/rust/tools/rustdoc_coverage.py b/prelude/rust/tools/rustdoc_coverage.py index a662a53e0..adaec1564 100755 --- a/prelude/rust/tools/rustdoc_coverage.py +++ b/prelude/rust/tools/rustdoc_coverage.py @@ -38,6 +38,7 @@ def main(): stdout = subprocess.run(args.cmd, capture_output=True, text=True).stdout with open(args.out, "w") as f: + # not using json output until https://github.com/rust-lang/rust/issues/117291 is fixed # stdout looks like... 
# +--------+------------+------------+------------+------------+ # | File | Documented | Percentage | Examples | Percentage | From 62679c1f7451bf0d6a4cf54a8c51df499f27e79a Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Thu, 22 Feb 2024 09:07:57 -0800 Subject: [PATCH 0328/1133] Only generate separate R.java for strings and ids if we're doing pre-dexing Summary: If we're pre-dexing, we want to write any strings and ids we have to a separate Java library, since when we're pre-dexing we have a limit of 65K fields and that gives us more headroom. If we're not pre-dexing, then we don't need to do that since we're just going to pass every Java library to `d8` at the same time anyway. This now matches `buck1` behavior, where this writing to a separate Java library is done in `SplitUberRDotJava` and is only invoked for pre-dexing. Reviewed By: navidqar Differential Revision: D54065428 fbshipit-source-id: b2e28e8ed53cecf5fdb75bac90dc9e44d669f981 --- prelude/android/android_binary.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/android/android_binary.bzl b/prelude/android/android_binary.bzl index 2573ccb0d..a47e4e4e7 100644 --- a/prelude/android/android_binary.bzl +++ b/prelude/android/android_binary.bzl @@ -82,6 +82,7 @@ def get_binary_info(ctx: AnalysisContext, use_proto_format: bool) -> AndroidBina use_proto_format = use_proto_format, referenced_resources_lists = referenced_resources_lists, manifest_entries = ctx.attrs.manifest_entries, + generate_strings_and_ids_separately = should_pre_dex, aapt2_preferred_density = ctx.attrs.aapt2_preferred_density, ) android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] From 166435f303545e6bc0bece24cdd2d6fa4a2be39b Mon Sep 17 00:00:00 2001 From: "Chen Xie (Reality Labs)" Date: Thu, 22 Feb 2024 10:08:04 -0800 Subject: [PATCH 0329/1133] parse ABI info and pass to external runner info Summary: RL instrumentation tests require ABI information to be passed to RE, this is not supported in Buck2. 
Add the support to pass the ABI info based on context labels. See more details in https://fb.workplace.com/groups/tpx.users/permalink/1775085046290925/ Reviewed By: navidqar Differential Revision: D53987626 fbshipit-source-id: 6fbc713db157339790735a037b9fa8dbccb9c5af --- .../android/android_instrumentation_test.bzl | 22 +++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/prelude/android/android_instrumentation_test.bzl b/prelude/android/android_instrumentation_test.bzl index f14709ac3..3b64d4599 100644 --- a/prelude/android/android_instrumentation_test.bzl +++ b/prelude/android/android_instrumentation_test.bzl @@ -13,6 +13,7 @@ load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os load("@prelude//utils:expect.bzl", "expect") load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") +ANDROID_EMULATOR_ABI_LABEL_PREFIX = "tpx-re-config::" DEFAULT_ANDROID_SUBPLATFORM = "android-30" DEFAULT_ANDROID_PLATFORM = "android-emulator" DEFAULT_ANDROID_INSTRUMENTATION_TESTS_USE_CASE = "instrumentation-tests" @@ -80,6 +81,14 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): ], ) + remote_execution_properties = { + "platform": _compute_emulator_platform(ctx.attrs.labels or []), + "subplatform": _compute_emulator_subplatform(ctx.attrs.labels or []), + } + re_emulator_abi = _compute_emulator_abi(ctx.attrs.labels or []) + if re_emulator_abi != None: + remote_execution_properties["abi"] = re_emulator_abi + test_info = ExternalRunnerTestInfo( type = "android_instrumentation", command = cmd, @@ -92,10 +101,7 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): "android-emulator": CommandExecutorConfig( local_enabled = android_toolchain.instrumentation_test_can_run_locally, remote_enabled = True, - remote_execution_properties = { - "platform": _compute_emulator_platform(ctx.attrs.labels or []), - "subplatform": _compute_emulator_subplatform(ctx.attrs.labels or []), - }, + 
remote_execution_properties = remote_execution_properties, remote_execution_use_case = _compute_re_use_case(ctx.attrs.labels or []), ), "static-listing": CommandExecutorConfig( @@ -118,6 +124,14 @@ def android_instrumentation_test_impl(ctx: AnalysisContext): DefaultInfo(), ] + classmap_source_info +def _compute_emulator_abi(labels: list[str]): + emulator_abi_labels = [label for label in labels if label.startswith(ANDROID_EMULATOR_ABI_LABEL_PREFIX)] + expect(len(emulator_abi_labels) <= 1, "multiple '{}' labels were found:[{}], there must be only one!".format(ANDROID_EMULATOR_ABI_LABEL_PREFIX, ", ".join(emulator_abi_labels))) + if len(emulator_abi_labels) == 0: + return None + else: # len(emulator_abi_labels) == 1: + return emulator_abi_labels[0].replace(ANDROID_EMULATOR_ABI_LABEL_PREFIX, "") + # replicating the logic in https://fburl.com/code/1fqowxu4 to match buck1's behavior def _compute_emulator_subplatform(labels: list[str]) -> str: emulator_subplatform_labels = [label for label in labels if label.startswith("re_emulator_")] From b2b79c69aeb5bb2bf6ba6470f15094eeb3415a74 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Thu, 22 Feb 2024 12:00:58 -0800 Subject: [PATCH 0330/1133] Ensure that target json files are used to scrub SelectedDebugPaths.txt Summary: Context: https://fb.workplace.com/groups/buck2apple/posts/724583839460778/ TL:DR; Users are complaining that they are stopping in the debugger for every single breakpoint in the app. The root cause is that the `focused_targets.json` file (which is populated by the iOS OnDemand tool) is not used to filter the `SelectedDebugPaths.txt` in `apple_bundle.bzl` it only looks at variable. As a result, almost all paths in the binary are included (yikes). This diff updates the `apple_bundle` logic so that it can parse the `focused_targets.json` and use it to further scrub the `SelectedDebugPaths.txt`. 
Reviewed By: milend Differential Revision: D54032576 fbshipit-source-id: eb0d4b6ff0e33e633b72c8e9ce95d1bc146092f8 --- prelude/apple/apple_bundle.bzl | 10 ++++-- .../apple/user/apple_selective_debugging.bzl | 34 ++++++++++++++++++- 2 files changed, 41 insertions(+), 3 deletions(-) diff --git a/prelude/apple/apple_bundle.bzl b/prelude/apple/apple_bundle.bzl index 357e8c1f0..4e56f711a 100644 --- a/prelude/apple/apple_bundle.bzl +++ b/prelude/apple/apple_bundle.bzl @@ -266,12 +266,18 @@ def _get_all_agg_debug_info(ctx: AnalysisContext, binary_output: AppleBundleBina all_debug_infos = all_debug_infos + [binary_debuggable_info] return get_aggregated_debug_info(ctx, all_debug_infos) +def _maybe_scrub_selected_debug_paths_file(ctx: AnalysisContext, package_names: list[str]) -> Artifact: + if not ctx.attrs.selective_debugging: + return ctx.actions.write(SELECTED_DEBUG_PATH_FILE_NAME, sorted(set(package_names).list())) + + selective_debugging_info = ctx.attrs.selective_debugging[AppleSelectiveDebuggingInfo] + return selective_debugging_info.scrub_selected_debug_paths_file(ctx, package_names, SELECTED_DEBUG_PATH_FILE_NAME) + def _get_selected_debug_targets_part(ctx: AnalysisContext, agg_debug_info: AggregatedAppleDebugInfo) -> [AppleBundlePart, None]: # Only app bundle need this, and this file is searched by FBReport at the bundle root if ctx.attrs.extension == "app" and agg_debug_info.debug_info.filtered_map: package_names = [label.package for label in agg_debug_info.debug_info.filtered_map.keys()] - package_names = set(package_names).list() - output = ctx.actions.write(SELECTED_DEBUG_PATH_FILE_NAME, package_names) + output = _maybe_scrub_selected_debug_paths_file(ctx, package_names) return AppleBundlePart(source = output, destination = AppleBundleDestination("bundleroot"), new_name = SELECTED_DEBUG_PATH_FILE_NAME) else: return None diff --git a/prelude/apple/user/apple_selective_debugging.bzl b/prelude/apple/user/apple_selective_debugging.bzl index 66fe2b27b..054a731c9 
100644 --- a/prelude/apple/user/apple_selective_debugging.bzl +++ b/prelude/apple/user/apple_selective_debugging.bzl @@ -20,6 +20,7 @@ load( "parse_build_target_pattern", ) load("@prelude//utils:lazy.bzl", "lazy") +load("@prelude//utils:set.bzl", "set") _SelectionCriteria = record( include_build_target_patterns = field(list[BuildTargetPattern], []), @@ -33,6 +34,7 @@ AppleSelectiveDebuggingInfo = provider( fields = { "scrub_binary": provider_field(typing.Callable), "filter": provider_field(typing.Callable), + "scrub_selected_debug_paths_file": provider_field(typing.Callable), }, ) @@ -72,10 +74,10 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: scrubber = ctx.attrs._apple_tools[AppleToolsInfo].selective_debugging_scrubber + targets_json_file = ctx.attrs.targets_json_file or ctx.actions.write_json("targets.json", {"targets": []}) cmd = cmd_args(scrubber) if json_type == _SelectiveDebuggingJsonType("targets"): # If a targets json file is not provided, write an empty json file: - targets_json_file = ctx.attrs.targets_json_file or ctx.actions.write_json("targets_json.txt", {"targets": []}) cmd.add("--targets-file") cmd.add(targets_json_file) elif json_type == _SelectiveDebuggingJsonType("spec"): @@ -98,6 +100,35 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: exclude_regular_expressions = exclude_regular_expressions, ) + def scrub_selected_debug_paths_file(inner_ctx: AnalysisContext, package_names: list[str], output_name: str) -> Artifact: + # In the event that _SelectiveDebuggingJsonType was "spec", we expect that `package_names` + # was already filtered as part of scrubbing the binary in the apple_bundle. 
+ # + # See `_maybe_scrub_binary()` in apple_bundle.bzl + if json_type != _SelectiveDebuggingJsonType("targets"): + return inner_ctx.actions.write(output_name, sorted(set(package_names).list())) + + def scrub_selected_debug_paths_action(dynamic_ctx: AnalysisContext, artifacts, outputs): + packages = [ + # "cell//path/to/some/thing:target" -> "path/to/some/thing" + target.split("//")[1].split(":")[0] + for target in artifacts[targets_json_file].read_json()["targets"] + ] + dynamic_ctx.actions.write( + outputs.values()[0], + sorted(set(filter(lambda p: p in packages, package_names)).list()), + ) + + output = inner_ctx.actions.declare_output(output_name) + inner_ctx.actions.dynamic_output( + dynamic = [targets_json_file], + inputs = [], + outputs = [output], + f = scrub_selected_debug_paths_action, + ) + + return output + def scrub_binary(inner_ctx, executable: Artifact, executable_link_execution_preference: LinkExecutionPreference, adhoc_codesign_tool: [RunInfo, None]) -> Artifact: inner_cmd = cmd_args(cmd) output = inner_ctx.actions.declare_output("debug_scrubbed/{}".format(executable.short_path)) @@ -155,6 +186,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: AppleSelectiveDebuggingInfo( scrub_binary = scrub_binary, filter = filter_debug_info, + scrub_selected_debug_paths_file = scrub_selected_debug_paths_file, ), LinkExecutionPreferenceDeterminatorInfo(preference_for_links = preference_for_links), ] From b1b1d7c969cbed19df2b888f0108545d57db0cee Mon Sep 17 00:00:00 2001 From: Jacob Rodal Date: Thu, 22 Feb 2024 16:40:26 -0800 Subject: [PATCH 0331/1133] reduce aapt2_link filepaths with dummy custom package Summary: **This might be the dumbest code I've ever written, so RFC.** The aapt2 link action generates really long `rdotjava` paths and is causing some problems migrating windows builds to buck2. 
I previously shortened them in D53201812 by tweaking this aapt2_link.bzl file a bit, but I found at least another 30 targets that still fail due to long path issues today. This is an example long path: ` buck-out\v2\gen\fbsource\ab13b903b30df809\arvr\libraries\spatial_persistence\scene_manager\test\__scene_anchor_state_runtime_ipc_test-test-apk__\not_proto\init-rjava\com\facebook\oculus\jni_test\scene_anchor_state_runtime_ipc_test\test` The package, `com\facebook\oculus\jni_test\scene_anchor_state_runtime_ipc_test\test`, is quite long. Per the code comments in this file, "# We don't need the R.java output, but aapt2 won't output R.txt unless we also request R.java." **If we don't need the R.java output, can we specify a dummy custom package to drastically reduce the path length?** This would be a lot less cumbersome than finding and fixing each individual target. Things seem to build, and the `R.txt` doesn't seem to have any references to the package, but I'm not too familiar with android development and don't know if I'm trying something really dumb. Reviewed By: IanChilds Differential Revision: D54024420 fbshipit-source-id: dc410c1ecdf6ca3bbc57cf645dc8772b0d1fca96 --- prelude/android/aapt2_link.bzl | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/prelude/android/aapt2_link.bzl b/prelude/android/aapt2_link.bzl index 4044e2d3f..4126c11c6 100644 --- a/prelude/android/aapt2_link.bzl +++ b/prelude/android/aapt2_link.bzl @@ -48,8 +48,13 @@ def get_aapt2_link( aapt2_command.add(["--proguard", proguard_config.as_output()]) # We don't need the R.java output, but aapt2 won't output R.txt unless we also request R.java. - r_dot_java = ctx.actions.declare_output("{}/init-rjava".format(identifier), dir = True) + # A drawback of this is that the directory structure for the R.java output is deep, resulting + # in long path issues on Windows. 
The structure is //unused-rjava//R.java + # We can declare a custom dummy package to drastically shorten , which is sketchy, but effective + r_dot_java = ctx.actions.declare_output("{}/unused-rjava".format(identifier), dir = True) aapt2_command.add(["--java", r_dot_java.as_output()]) + aapt2_command.add(["--custom-package", "dummy.package"]) + r_dot_txt = ctx.actions.declare_output("{}/R.txt".format(identifier)) aapt2_command.add(["--output-text-symbols", r_dot_txt.as_output()]) From beb58fe2a99e9da821e4be39061f3a58416585e4 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Thu, 22 Feb 2024 20:31:54 -0800 Subject: [PATCH 0332/1133] Conditionally create targets.json file. Summary: Per feedback on the last diff... lets only make this file if we are in `json_type == _SelectiveDebuggingJsonType("targets")` Reviewed By: chatura-atapattu Differential Revision: D54085368 fbshipit-source-id: 4a43ea32e089730d056c2b7683a7179ce5a72a25 --- prelude/apple/user/apple_selective_debugging.bzl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/prelude/apple/user/apple_selective_debugging.bzl b/prelude/apple/user/apple_selective_debugging.bzl index 054a731c9..6df13c4ee 100644 --- a/prelude/apple/user/apple_selective_debugging.bzl +++ b/prelude/apple/user/apple_selective_debugging.bzl @@ -74,9 +74,11 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: scrubber = ctx.attrs._apple_tools[AppleToolsInfo].selective_debugging_scrubber - targets_json_file = ctx.attrs.targets_json_file or ctx.actions.write_json("targets.json", {"targets": []}) + targets_json_file = None cmd = cmd_args(scrubber) if json_type == _SelectiveDebuggingJsonType("targets"): + targets_json_file = ctx.attrs.targets_json_file or ctx.actions.write_json("targets.json", {"targets": []}) + # If a targets json file is not provided, write an empty json file: cmd.add("--targets-file") cmd.add(targets_json_file) From e1d20f49a9150bde1690b9bcef833a584e86e957 Mon Sep 17 00:00:00 2001 From: Jia Chen Date: 
Thu, 22 Feb 2024 21:16:18 -0800 Subject: [PATCH 0333/1133] Persist typeshed manifests directly on the toolchain Reviewed By: connernilsen Differential Revision: D54023079 fbshipit-source-id: 7dec9b67768525ee77f8ff5cc94afce07518ca95 --- prelude/python/python_binary.bzl | 2 +- prelude/python/python_library.bzl | 2 +- prelude/python/typing.bzl | 23 ++++------------------- 3 files changed, 6 insertions(+), 21 deletions(-) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 2fba56ddb..b84d4da14 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -411,7 +411,7 @@ def python_executable( type_checker, src_manifest, python_deps, - typeshed_stubs = python_toolchain.typeshed_stubs, + typeshed = python_toolchain.typeshed_stubs, py_version = ctx.attrs.py_version_for_type_checking, typing_enabled = ctx.attrs.typing, ), diff --git a/prelude/python/python_library.bzl b/prelude/python/python_library.bzl index 295a076ca..16ff1ff41 100644 --- a/prelude/python/python_library.bzl +++ b/prelude/python/python_library.bzl @@ -321,7 +321,7 @@ def python_library_impl(ctx: AnalysisContext) -> list[Provider]: type_checker, src_type_manifest, deps, - typeshed_stubs = python_toolchain.typeshed_stubs, + typeshed = python_toolchain.typeshed_stubs, py_version = ctx.attrs.py_version_for_type_checking, typing_enabled = ctx.attrs.typing, ), diff --git a/prelude/python/typing.bzl b/prelude/python/typing.bzl index 22b38d919..e66e083b4 100644 --- a/prelude/python/typing.bzl +++ b/prelude/python/typing.bzl @@ -5,35 +5,21 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//:artifacts.bzl", "ArtifactGroupInfo") load("@prelude//python:python.bzl", "PythonLibraryInfo") load( ":manifest.bzl", "ManifestInfo", # @unused Used as a type - "create_manifest_for_source_map", ) load(":python.bzl", "PythonLibraryManifestsTSet") DEFAULT_PY_VERSION = "3.10" -def create_typeshed_manifest_info( - ctx: AnalysisContext, - typeshed_deps: list[Dependency]) -> ManifestInfo: - # NOTE(grievejia): This assumes that if multiple typeshed targets offer - # the same stub file, the target that comes later wins. - srcs = { - artifact.short_path: artifact - for typeshed_dep in typeshed_deps - for artifact in typeshed_dep[ArtifactGroupInfo].artifacts - } - return create_manifest_for_source_map(ctx, "typeshed", srcs) - def create_per_target_type_check( ctx: AnalysisContext, executable: RunInfo, srcs: ManifestInfo | None, deps: list[PythonLibraryInfo], - typeshed_stubs: list[Dependency], + typeshed: ManifestInfo | None, py_version: str | None, typing_enabled: bool) -> DefaultInfo: output_file_name = "type_check_result.json" @@ -56,10 +42,9 @@ def create_per_target_type_check( cmd.hidden(dep_manifest_tset.project_as_args("source_type_artifacts")) # Typeshed artifacts - if len(typeshed_stubs) > 0: - typeshed_manifest_info = create_typeshed_manifest_info(ctx, typeshed_stubs) - cmd.hidden([a for a, _ in typeshed_manifest_info.artifacts]) - typeshed_manifest = typeshed_manifest_info.manifest + if typeshed != None: + cmd.hidden([a for a, _ in typeshed.artifacts]) + typeshed_manifest = typeshed.manifest else: typeshed_manifest = None From da18e83fad7c8cf2b28289850f335c4c11d143af Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Thu, 22 Feb 2024 21:55:13 -0800 Subject: [PATCH 0334/1133] Add shared_object_targets.txt to native_merge_debug subtarget Reviewed By: yozhu Differential Revision: D54096032 fbshipit-source-id: 65c3ca04c1f2474cf4d22ca674405c01148d0293 --- .../android_binary_native_library_rules.bzl | 14 ++++++++++++++ 1 file changed, 14 insertions(+) 
diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 5dbaf94c3..ef19f0050 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -288,6 +288,17 @@ def get_android_binary_native_library_info( ) debug_data_json = ctx.actions.write_json("native_merge_debug.json", merged_linkables.debug_info, pretty = True) native_library_merge_debug_outputs["native_merge_debug.json"] = debug_data_json + + shared_object_targets = {} + for _, merged_libs in merged_linkables.shared_libs_by_platform.items(): + for soname, lib in merged_libs.items(): + shared_object_targets[soname] = [str(target.raw_target()) for target in lib.primary_constituents] + shared_object_targets_lines = "" + for soname, targets in shared_object_targets.items(): + shared_object_targets_lines += soname + " " + " ".join(targets) + "\n" + shared_object_targets_txt = ctx.actions.write("shared_object_targets.txt", shared_object_targets_lines) + native_library_merge_debug_outputs["shared_object_targets.txt"] = shared_object_targets_txt + if mergemap_gencode_jar: merged_library_map = write_merged_library_map(ctx, merged_linkables) mergemap_gencode = run_mergemap_codegen(ctx, merged_library_map) @@ -731,6 +742,7 @@ MergedSharedLibrary = record( # this only includes solib constituents that are included in the android merge map solib_constituents = list[str], is_actually_merged = bool, + primary_constituents = list[Label], ) # Output of the linkables merge process, the list of shared libs for each platform and @@ -1077,6 +1089,7 @@ def _get_merged_linkables( apk_module = group_data.apk_module, solib_constituents = [], is_actually_merged = False, + primary_constituents = [target], ) continue @@ -1173,6 +1186,7 @@ def _get_merged_linkables( apk_module = group_data.apk_module, solib_constituents = solib_constituents, is_actually_merged = is_actually_merged, + 
primary_constituents = group_data.constituents, ) debug_info.group_debug.setdefault( From fb97d5fa38bc8feeb97b4a36a0c51c24958cd62f Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Fri, 23 Feb 2024 09:00:00 -0800 Subject: [PATCH 0335/1133] Validate if coverage_mode is atomic on race=True Summary: As per `go build` code ``` if cfg.BuildRace && cfg.BuildCoverMode != "atomic" { base.Fatalf(`-covermode must be "atomic", not %q, when -race is enabled`, cfg.BuildCoverMode) } ``` Reviewed By: abulimov Differential Revision: D54083373 fbshipit-source-id: c5df9186e0013c4c5de33d86d626b84e5b12e40f --- prelude/go/go_test.bzl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index ecfaedd35..7aef22fd4 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -74,6 +74,8 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: coverage_vars = {} pkgs = {} if ctx.attrs.coverage_mode != None: + if ctx.attrs._race and ctx.attrs.coverage_mode != "atomic": + fail("`coverage_mode` must be `atomic` when `race=True`") coverage_mode = GoCoverageMode(ctx.attrs.coverage_mode) cov_res = cover_srcs(ctx, pkg_name, coverage_mode, srcs, False) srcs = cov_res.srcs From 34095a14933bd6b9e9c50fe395947a91721303c5 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Fri, 23 Feb 2024 09:00:26 -0800 Subject: [PATCH 0336/1133] Remove apk_under_test_ prefix from AndroidBinaryNativeLibsInfo fields Summary: I want to use this for something else in the next diff, and in any case, `apk_under_test` doesn't really make a ton of sense as part of `AndroidBinaryNativeLibsInfo`. 
Reviewed By: navidqar Differential Revision: D53949301 fbshipit-source-id: 4e583ac46b30d8262a964a99c1ec90832d897fa1 --- prelude/android/android_apk.bzl | 4 ++-- prelude/android/android_binary_native_library_rules.bzl | 8 ++++---- prelude/android/android_providers.bzl | 4 ++-- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/prelude/android/android_apk.bzl b/prelude/android/android_apk.bzl index c9004529e..dff79ff7c 100644 --- a/prelude/android/android_apk.bzl +++ b/prelude/android/android_apk.bzl @@ -58,12 +58,12 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: java_packaging_deps = set([dep.label.raw_target() for dep in java_packaging_deps]), keystore = keystore, manifest_entries = ctx.attrs.manifest_entries, - prebuilt_native_library_dirs = set([native_lib.raw_target for native_lib in native_library_info.apk_under_test_prebuilt_native_library_dirs]), + prebuilt_native_library_dirs = set([native_lib.raw_target for native_lib in native_library_info.prebuilt_native_library_dirs]), platforms = android_binary_info.deps_by_platform.keys(), primary_platform = android_binary_info.primary_platform, resource_infos = set([info.raw_target for info in resources_info.unfiltered_resource_infos]), r_dot_java_packages = set([info.specified_r_dot_java_package for info in resources_info.unfiltered_resource_infos if info.specified_r_dot_java_package]), - shared_libraries = set(native_library_info.apk_under_test_shared_libraries), + shared_libraries = set(native_library_info.shared_libraries), ), DefaultInfo(default_output = output_apk, other_outputs = _get_exopackage_outputs(exopackage_info) + android_binary_info.materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets), get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info, has_native_libs = has_native_libs), diff --git a/prelude/android/android_binary_native_library_rules.bzl 
b/prelude/android/android_binary_native_library_rules.bzl index ef19f0050..d15f870b6 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -121,8 +121,8 @@ def get_android_binary_native_library_info( enhance_ctx.debug_output("unstripped_native_libraries", ctx.actions.write("unstripped_native_libraries", [])) enhance_ctx.debug_output("unstripped_native_libraries_json", ctx.actions.write_json("unstripped_native_libraries_json", {})) return AndroidBinaryNativeLibsInfo( - apk_under_test_prebuilt_native_library_dirs = [], - apk_under_test_shared_libraries = [], + prebuilt_native_library_dirs = [], + shared_libraries = [], native_libs_for_primary_apk = [], exopackage_info = None, root_module_native_lib_assets = [], @@ -370,8 +370,8 @@ def get_android_binary_native_library_info( native_libs_for_primary_apk, exopackage_info = _get_exopackage_info(ctx, native_libs_always_in_primary_apk, native_libs, native_libs_metadata) return AndroidBinaryNativeLibsInfo( - apk_under_test_prebuilt_native_library_dirs = all_prebuilt_native_library_dirs, - apk_under_test_shared_libraries = included_shared_lib_targets, + prebuilt_native_library_dirs = all_prebuilt_native_library_dirs, + shared_libraries = included_shared_lib_targets, native_libs_for_primary_apk = native_libs_for_primary_apk, exopackage_info = exopackage_info, root_module_native_lib_assets = [native_lib_assets_for_primary_apk, stripped_native_linkable_assets_for_primary_apk, root_module_metadata_assets, root_module_compressed_lib_assets], diff --git a/prelude/android/android_providers.bzl b/prelude/android/android_providers.bzl index d93c12a3c..bf899c595 100644 --- a/prelude/android/android_providers.bzl +++ b/prelude/android/android_providers.bzl @@ -47,10 +47,10 @@ RDotJavaInfo = record( ) AndroidBinaryNativeLibsInfo = record( - apk_under_test_prebuilt_native_library_dirs = list[PrebuiltNativeLibraryDir], + prebuilt_native_library_dirs = 
list[PrebuiltNativeLibraryDir], # Indicates which shared lib producing targets are included in the binary. Used by instrumentation tests # to exclude those from the test apk. - apk_under_test_shared_libraries = list[TargetLabel], + shared_libraries = list[TargetLabel], exopackage_info = ["ExopackageNativeInfo", None], root_module_native_lib_assets = list[Artifact], non_root_module_native_lib_assets = list[Artifact], From 4b7ff8880c01dd5e851b23a79a086e61d12d0c0d Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Fri, 23 Feb 2024 09:00:26 -0800 Subject: [PATCH 0337/1133] Only check cpu_filters if we are sure the APK has native libs Summary: If we only have `prebuilt_native_library_dirs`, then we might not actually have any native libs since those dirs can be empty (e.g. `android_prebuilt_aar` creates one regardless of whether there are any native libs in the prebuilt AAR). If we have no native libs then we don't want to do the check to see whether the native libs are compatible with the device we are installing to. Therefore, only do that check when we have shared libraries, since in that case we do know that we are going to have native libs in the APK. 
Reviewed By: navidqar Differential Revision: D53951566 fbshipit-source-id: f0fa3b388270e71b9094cba5e50788eb3f2b997d --- prelude/android/android_apk.bzl | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/prelude/android/android_apk.bzl b/prelude/android/android_apk.bzl index dff79ff7c..c5c14ac1c 100644 --- a/prelude/android/android_apk.bzl +++ b/prelude/android/android_apk.bzl @@ -33,13 +33,6 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: compress_resources_dot_arsc = ctx.attrs.resource_compression == "enabled" or ctx.attrs.resource_compression == "enabled_with_strings_as_assets", ) - has_native_libs = bool( - native_library_info.exopackage_info or - native_library_info.native_libs_for_primary_apk or - native_library_info.root_module_native_lib_assets or - native_library_info.non_root_module_native_lib_assets, - ) - exopackage_info = ExopackageInfo( secondary_dex_info = dex_files_info.secondary_dex_exopackage_info, native_library_info = native_library_info.exopackage_info, @@ -52,6 +45,9 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: deps = android_binary_info.deps_by_platform[android_binary_info.primary_platform], ) + # We can only be sure that an APK has native libs if it has any shared libraries. Prebuilt native libraries dirs can exist but be empty. 
+ definitely_has_native_libs = bool(native_library_info.shared_libraries) + return [ AndroidApkInfo(apk = output_apk, manifest = resources_info.manifest, materialized_artifacts = android_binary_info.materialized_artifacts), AndroidApkUnderTestInfo( @@ -66,7 +62,7 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: shared_libraries = set(native_library_info.shared_libraries), ), DefaultInfo(default_output = output_apk, other_outputs = _get_exopackage_outputs(exopackage_info) + android_binary_info.materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets), - get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info, has_native_libs = has_native_libs), + get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info, definitely_has_native_libs = definitely_has_native_libs), TemplatePlaceholderInfo( keyed_variables = { "classpath": cmd_args([dep.jar for dep in java_packaging_deps if dep.jar], delimiter = get_path_separator_for_exec_os(ctx)), @@ -145,7 +141,7 @@ def get_install_info( output_apk: Artifact, manifest: Artifact, exopackage_info: [ExopackageInfo, None], - has_native_libs: bool = True) -> InstallInfo: + definitely_has_native_libs: bool = True) -> InstallInfo: files = { ctx.attrs.name: output_apk, "manifest": manifest, @@ -180,7 +176,7 @@ def get_install_info( if secondary_dex_exopackage_info or native_library_exopackage_info or resources_info: files["exopackage_agent_apk"] = ctx.attrs._android_toolchain[AndroidToolchainInfo].exopackage_agent_apk - if has_native_libs and hasattr(ctx.attrs, "cpu_filters"): + if definitely_has_native_libs and hasattr(ctx.attrs, "cpu_filters"): files["cpu_filters"] = ctx.actions.write("cpu_filters.txt", ctx.attrs.cpu_filters) return InstallInfo( From e72a1845fab78b10cdc4eecee013618da411e0aa Mon Sep 17 00:00:00 2001 From: Michael Lee Date: Fri, 23 Feb 2024 16:27:28 -0800 Subject: [PATCH 
0338/1133] Fix the `hmaptool` shebang Summary: Because the shebang is not at the top of the file, you cannot actually just execute this directly. (You can `python3 hmaptool`, but then why make it executable?) Reviewed By: andrewjcg Differential Revision: D54139101 fbshipit-source-id: a2b6df2e41c9dc673794db2af98bc6b5b02fcd4f --- prelude/third-party/hmaptool/hmaptool | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/third-party/hmaptool/hmaptool b/prelude/third-party/hmaptool/hmaptool index 581e77d40..a85bc5177 100755 --- a/prelude/third-party/hmaptool/hmaptool +++ b/prelude/third-party/hmaptool/hmaptool @@ -1,3 +1,5 @@ +#!/usr/bin/env python3 + # ===----------------------------------------------------------------------=== # # # Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. @@ -6,7 +8,6 @@ # # ===----------------------------------------------------------------------=== # -#!/usr/bin/env python3 from __future__ import absolute_import, division, print_function import json From c376d0d1b8fff64e131b6ccf92536873069e5a9f Mon Sep 17 00:00:00 2001 From: Nate Stedman Date: Fri, 23 Feb 2024 17:01:40 -0800 Subject: [PATCH 0339/1133] Improve reusability of validation_deps code Summary: The `validation_deps` feature is currently only supported on Apple targets. It's a broadly useful concept and I want to apply it to Android apps. 1. Rename everything to remove `apple`. 2. Move file out of `apple` folder. 
Reviewed By: milend Differential Revision: D54097351 fbshipit-source-id: f08ebc9ee8517dc9c680a5ca93c2e72ceb2268a6 --- prelude/apple/apple_binary.bzl | 4 ++-- prelude/apple/apple_bundle.bzl | 4 ++-- prelude/apple/apple_library.bzl | 4 ++-- prelude/apple/apple_rules_impl.bzl | 11 +++++++---- .../apple_validation_deps.bzl => validation_deps.bzl} | 9 +++++---- 5 files changed, 18 insertions(+), 14 deletions(-) rename prelude/{apple/apple_validation_deps.bzl => validation_deps.bzl} (59%) diff --git a/prelude/apple/apple_binary.bzl b/prelude/apple/apple_binary.bzl index fd38b7ff9..fce5397e4 100644 --- a/prelude/apple/apple_binary.bzl +++ b/prelude/apple/apple_binary.bzl @@ -6,6 +6,7 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//apple:apple_stripping.bzl", "apple_strip_args") # @oss-disable: load("@prelude//apple/meta_only:linker_outputs.bzl", "add_extra_linker_outputs") load( @@ -65,7 +66,6 @@ load(":apple_entitlements.bzl", "entitlements_link_flags") load(":apple_frameworks.bzl", "get_framework_search_path_flags") load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags") load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_apple_stripped_attr_value_with_default_fallback") -load(":apple_validation_deps.bzl", "get_apple_validation_deps_outputs") load(":debug.bzl", "AppleDebuggableInfo") load(":resource_groups.bzl", "create_resource_graph") load(":xcode.bzl", "apple_populate_xcode_attributes") @@ -109,7 +109,7 @@ def apple_binary_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: swift_compile, ) - validation_deps_outputs = get_apple_validation_deps_outputs(ctx) + validation_deps_outputs = get_validation_deps_outputs(ctx) stripped = get_apple_stripped_attr_value_with_default_fallback(ctx) constructor_params = 
CxxRuleConstructorParams( rule_type = "apple_binary", diff --git a/prelude/apple/apple_bundle.bzl b/prelude/apple/apple_bundle.bzl index 4e56f711a..b461efff0 100644 --- a/prelude/apple/apple_bundle.bzl +++ b/prelude/apple/apple_bundle.bzl @@ -12,6 +12,7 @@ load( "project_artifacts", ) load("@prelude//:paths.bzl", "paths") +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") # @oss-disable: load("@prelude//apple/meta_only:linker_outputs.bzl", "subtargets_for_apple_bundle_extra_outputs") load("@prelude//apple/user:apple_selected_debug_path_file.bzl", "SELECTED_DEBUG_PATH_FILE_NAME") @@ -62,7 +63,6 @@ load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_default_ load(":apple_dsym.bzl", "DSYM_INFO_SUBTARGET", "DSYM_SUBTARGET", "get_apple_dsym", "get_apple_dsym_ext", "get_apple_dsym_info") load(":apple_sdk.bzl", "get_apple_sdk_name") load(":apple_universal_binaries.bzl", "create_universal_binary") -load(":apple_validation_deps.bzl", "get_apple_validation_deps_outputs") load( ":debug.bzl", "AggregatedAppleDebugInfo", @@ -324,7 +324,7 @@ def apple_bundle_impl(ctx: AnalysisContext) -> list[Provider]: primary_binary_rel_path = get_apple_bundle_part_relative_destination_path(ctx, primary_binary_part) - validation_deps_outputs = get_apple_validation_deps_outputs(ctx) + validation_deps_outputs = get_validation_deps_outputs(ctx) sub_targets = assemble_bundle( ctx, bundle, diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index afe9e6f34..ccb920d5e 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -9,6 +9,7 @@ load( "@prelude//:artifact_tset.bzl", "project_artifacts", ) +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//apple:apple_dsym.bzl", "DSYM_SUBTARGET", "get_apple_dsym") load("@prelude//apple:apple_stripping.bzl", "apple_strip_args") 
load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") @@ -73,7 +74,6 @@ load(":apple_frameworks.bzl", "get_framework_search_path_flags") load(":apple_modular_utility.bzl", "MODULE_CACHE_PATH") load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags") load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_apple_stripped_attr_value_with_default_fallback", "get_module_name") -load(":apple_validation_deps.bzl", "get_apple_validation_deps_outputs") load( ":debug.bzl", "AppleDebuggableInfo", @@ -218,7 +218,7 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte relative_args = CPreprocessorArgs(args = [framework_search_paths_flags]), ) - validation_deps_outputs = get_apple_validation_deps_outputs(ctx) + validation_deps_outputs = get_validation_deps_outputs(ctx) return CxxRuleConstructorParams( rule_type = params.rule_type, is_test = (params.rule_type == "apple_test"), diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 894ba0dd4..f7aded0d3 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -5,6 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load( + "@prelude//:validation_deps.bzl", + "VALIDATION_DEPS_ATTR_NAME", + "VALIDATION_DEPS_ATTR_TYPE", +) load("@prelude//apple/swift:swift_incremental_support.bzl", "SwiftCompilationMode") load("@prelude//apple/swift:swift_toolchain.bzl", "swift_toolchain_impl") load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftObjectFormat") @@ -26,8 +31,6 @@ load(":apple_resource.bzl", "apple_resource_impl") load( ":apple_rules_impl_utility.bzl", "APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME", - "APPLE_VALIDATION_DEPS_ATTR_NAME", - "APPLE_VALIDATION_DEPS_ATTR_TYPE", "apple_bundle_extra_attrs", "apple_dsymutil_attrs", "apple_test_extra_attrs", @@ -94,7 +97,7 @@ def _apple_binary_extra_attrs(): "_apple_xctoolchain": get_apple_xctoolchain_attr(), "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), "_stripped_default": attrs.bool(default = False), - APPLE_VALIDATION_DEPS_ATTR_NAME: APPLE_VALIDATION_DEPS_ATTR_TYPE, + VALIDATION_DEPS_ATTR_NAME: VALIDATION_DEPS_ATTR_TYPE, } attribs.update(apple_dsymutil_attrs()) return attribs @@ -120,7 +123,7 @@ def _apple_library_extra_attrs(): "_apple_xctoolchain_bundle_id": get_apple_xctoolchain_bundle_id_attr(), "_stripped_default": attrs.bool(default = False), APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME: attrs.option(attrs.bool(), default = None), - APPLE_VALIDATION_DEPS_ATTR_NAME: APPLE_VALIDATION_DEPS_ATTR_TYPE, + VALIDATION_DEPS_ATTR_NAME: VALIDATION_DEPS_ATTR_TYPE, } attribs.update(apple_dsymutil_attrs()) return attribs diff --git a/prelude/apple/apple_validation_deps.bzl b/prelude/validation_deps.bzl similarity index 59% rename from prelude/apple/apple_validation_deps.bzl rename to prelude/validation_deps.bzl index fa948a4f6..a057e353d 100644 --- a/prelude/apple/apple_validation_deps.bzl +++ b/prelude/validation_deps.bzl @@ -5,12 +5,13 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-_VALIDATION_DEPS_ATTR_NAME = "validation_deps" +VALIDATION_DEPS_ATTR_NAME = "validation_deps" +VALIDATION_DEPS_ATTR_TYPE = attrs.set(attrs.dep(), sorted = True, default = []) -def get_apple_validation_deps_outputs(ctx: AnalysisContext) -> list[Artifact]: +def get_validation_deps_outputs(ctx: AnalysisContext) -> list[Artifact]: artifacts = [] - if hasattr(ctx.attrs, _VALIDATION_DEPS_ATTR_NAME): - validation_deps = getattr(ctx.attrs, _VALIDATION_DEPS_ATTR_NAME) + if hasattr(ctx.attrs, VALIDATION_DEPS_ATTR_NAME): + validation_deps = getattr(ctx.attrs, VALIDATION_DEPS_ATTR_NAME) for dep in validation_deps: default_info = dep[DefaultInfo] artifacts += default_info.default_outputs From ce8926afe609c0e8f065c960be874ec89b5236e7 Mon Sep 17 00:00:00 2001 From: Nate Stedman Date: Fri, 23 Feb 2024 17:01:40 -0800 Subject: [PATCH 0340/1133] Add support for validation_deps to android_binary Summary: Now that `validation_deps` is not coupled (by name) to Apple targets, we can reuse it in `android_binary` so that build-time validation rules (e.g. dependency rules) can be added to Android apps as well. This could also apply to `android_library` etc. but I don't have a need for it there and I don't know if we'd really want to add them, as it would create a lot of additional targets in the graph. So I'm only adding it to `android_binary`. Reviewed By: IanChilds Differential Revision: D54097352 fbshipit-source-id: e8db6d34d36f75ec76914bf4d8830e5adedb9ac3 --- prelude/android/android.bzl | 5 +++++ prelude/android/android_apk.bzl | 10 +++++++++- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 3166ac7e0..261530310 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -5,6 +5,10 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load( + "@prelude//:validation_deps.bzl", + "VALIDATION_DEPS_ATTR_NAME", +) load("@prelude//android:cpu_filters.bzl", "ALL_CPU_FILTERS") load("@prelude//java:java.bzl", "AbiGenerationMode", "dex_min_sdk_version") load("@prelude//decls/android_rules.bzl", "AaptMode", "DuplicateResourceBehaviour") @@ -96,6 +100,7 @@ extra_attributes = { "_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)), "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)), "_java_toolchain": toolchains_common.java_for_android(), + VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.transition_dep(cfg = cpu_transition), sorted = True, default = []), }, "android_build_config": { "_android_toolchain": toolchains_common.android(), diff --git a/prelude/android/android_apk.bzl b/prelude/android/android_apk.bzl index c5c14ac1c..6dd0662f0 100644 --- a/prelude/android/android_apk.bzl +++ b/prelude/android/android_apk.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_binary.bzl", "get_binary_info") load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidApkUnderTestInfo", "AndroidBinaryNativeLibsInfo", "AndroidBinaryResourcesInfo", "DexFilesInfo", "ExopackageInfo") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") @@ -31,6 +32,7 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: native_library_info = native_library_info, resources_info = resources_info, compress_resources_dot_arsc = ctx.attrs.resource_compression == "enabled" or ctx.attrs.resource_compression == "enabled_with_strings_as_assets", + validation_deps_outputs = get_validation_deps_outputs(ctx), ) exopackage_info = ExopackageInfo( @@ -80,7 +82,8 @@ def build_apk( dex_files_info: DexFilesInfo, native_library_info: AndroidBinaryNativeLibsInfo, resources_info: AndroidBinaryResourcesInfo, - compress_resources_dot_arsc: bool = False) -> Artifact: + compress_resources_dot_arsc: bool = False, + validation_deps_outputs: [list[Artifact], None] = None) -> Artifact: output_apk = actions.declare_output("{}.apk".format(label.name)) apk_builder_args = cmd_args([ @@ -99,6 +102,11 @@ def build_apk( android_toolchain.zipalign[RunInfo], ]) + # The outputs of validation_deps need to be added as hidden arguments + # to an action for the validation_deps targets to be built and enforced. + if validation_deps_outputs: + apk_builder_args.hidden(validation_deps_outputs) + if android_toolchain.package_meta_inf_version_files: apk_builder_args.add("--package-meta-inf-version-files") if compress_resources_dot_arsc: From fcf38f9bd18798dd59ebfa65ed19623dbac21f8d Mon Sep 17 00:00:00 2001 From: Ben Chang Date: Sat, 24 Feb 2024 13:11:51 -0800 Subject: [PATCH 0341/1133] piping through enabled_voltron_non_asset_libs flag to android_toolchain Summary: see title. 
Reviewed By: IanChilds Differential Revision: D54041721 fbshipit-source-id: 9dcd9936dfd975f54529140259de5f7d48b337e8 --- prelude/android/android_toolchain.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/android/android_toolchain.bzl b/prelude/android/android_toolchain.bzl index b7da69cc7..89059e948 100644 --- a/prelude/android/android_toolchain.bzl +++ b/prelude/android/android_toolchain.bzl @@ -26,6 +26,7 @@ AndroidToolchainInfo = provider( "combine_native_library_dirs": provider_field(typing.Any, default = None), "compress_libraries": provider_field(typing.Any, default = None), "d8_command": provider_field(typing.Any, default = None), + "enabled_voltron_non_asset_libs": provider_field(typing.Any, default = None), "exo_resources_rewriter": provider_field(typing.Any, default = None), "exopackage_agent_apk": provider_field(typing.Any, default = None), "filter_dex_class_names": provider_field(typing.Any, default = None), From 2d497636f1c09cbc0494d675b86ca13919df34e0 Mon Sep 17 00:00:00 2001 From: Ben Chang Date: Sat, 24 Feb 2024 13:11:51 -0800 Subject: [PATCH 0342/1133] add native support for modules with non-asset cxx_library with gating Summary: This stack of diffs is config-erized version of D53624135 Buck was written to support asset native libs but not non-asset native libs. We've had a need for this with helium that we supported via hacks in our voltron postprocessor. In preparation to enable Voltron for WA, we need to build a similar solution as they do not use asset libraries at all. - We can force WA to use asset libs - We can port over existing postprocessor hack to WA as well as write similar support for AABs - We can add support to Buck to handle this correctly (we're going with this) The expected behavior for non-asset native libs is for the .so artifact to be placed in /lib// in both the APK as well as the AAB. For AABs, we should also expect a native.pb file to be generated. 
===== when this is fully rolled out, consider reverting this stack and just pushing D53624135 for clean up Reviewed By: IanChilds Differential Revision: D53968378 fbshipit-source-id: b423fd60e707bb753ef575f1bd37e48705ffd47a --- .../android_binary_native_library_rules.bzl | 22 ++++++++++++++++--- prelude/android/android_bundle.bzl | 18 ++++++++++++--- 2 files changed, 34 insertions(+), 6 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index d15f870b6..ec419b265 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -492,7 +492,10 @@ def _get_native_libs_and_assets( for module, native_lib_assets in native_lib_module_assets_map.items(): metadata_file, native_library_paths = _get_native_libs_as_assets_metadata(ctx, native_lib_assets, module) - non_root_module_metadata_srcs[paths.join(_get_native_libs_as_assets_dir(module), "libs.txt")] = metadata_file + libs_metadata_path = "libs.txt" + if ctx.attrs._android_toolchain[AndroidToolchainInfo].enabled_voltron_non_asset_libs: + libs_metadata_path = paths.join("assets", "libs.txt") + non_root_module_metadata_srcs[paths.join(_get_native_libs_as_assets_dir(module), libs_metadata_path)] = metadata_file if ctx.attrs.compress_asset_libraries: compressed_lib_dir = _get_compressed_native_libs_as_assets(ctx, native_lib_assets, native_library_paths, module) non_root_module_compressed_lib_srcs[_get_native_libs_as_assets_dir(module)] = compressed_lib_dir @@ -582,6 +585,7 @@ def _get_native_linkables( platform_to_native_linkables: dict[str, dict[str, SharedLibrary]], get_module_from_target: typing.Callable, package_native_libs_as_assets_enabled: bool) -> _StrippedNativeLinkables: + enabled_voltron_non_asset_libs = ctx.attrs._android_toolchain[AndroidToolchainInfo].enabled_voltron_non_asset_libs stripped_native_linkables_srcs = {} 
stripped_native_linkables_always_in_primary_apk_srcs = {} stripped_native_linkable_assets_for_primary_apk_srcs = {} @@ -606,11 +610,23 @@ def _get_native_linkables( "{} which is marked as needing to be in the primary APK cannot be an asset".format(native_linkable_target), ) if native_linkable.can_be_asset and not is_root_module(module): - so_name_path = paths.join(_get_native_libs_as_assets_dir(module), abi_directory, so_name) + if enabled_voltron_non_asset_libs: + native_libs_assets_dir = paths.join(_get_native_libs_as_assets_dir(module), "assets") + else: + native_libs_assets_dir = _get_native_libs_as_assets_dir(module) + so_name_path = paths.join(native_libs_assets_dir, abi_directory, so_name) stripped_native_linkable_module_assets_srcs.setdefault(module, {})[so_name_path] = native_linkable.stripped_lib elif native_linkable.can_be_asset and package_native_libs_as_assets_enabled: - so_name_path = paths.join(_get_native_libs_as_assets_dir(module), abi_directory, so_name) + if enabled_voltron_non_asset_libs: + native_libs_assets_dir = paths.join(_get_native_libs_as_assets_dir(module), "assets") + else: + native_libs_assets_dir = _get_native_libs_as_assets_dir(module) + so_name_path = paths.join(native_libs_assets_dir, abi_directory, so_name) stripped_native_linkable_assets_for_primary_apk_srcs[so_name_path] = native_linkable.stripped_lib + elif (enabled_voltron_non_asset_libs and # TODO: when cleaning up this code, restructure if statements to be more clear (start with root module, then non-root module cases) + not native_linkable.can_be_asset and not is_root_module(module)): + so_name_path = paths.join(_get_native_libs_as_assets_dir(module), "lib", abi_directory, so_name) + stripped_native_linkable_module_assets_srcs.setdefault(module, {})[so_name_path] = native_linkable.stripped_lib else: so_name_path = paths.join(abi_directory, so_name) if native_linkable.for_primary_apk: diff --git a/prelude/android/android_bundle.bzl b/prelude/android/android_bundle.bzl index 
f8ae6f285..f0a7d4e05 100644 --- a/prelude/android/android_bundle.bzl +++ b/prelude/android/android_bundle.bzl @@ -59,9 +59,19 @@ def build_bundle( root_module_asset_directories = native_library_info.root_module_native_lib_assets + dex_files_info.root_module_secondary_dex_dirs root_module_asset_directories_file = actions.write("root_module_asset_directories.txt", root_module_asset_directories) bundle_builder_args.hidden(root_module_asset_directories) - non_root_module_asset_directories = resources_info.module_manifests + native_library_info.non_root_module_native_lib_assets + dex_files_info.non_root_module_secondary_dex_dirs - non_root_module_asset_directories_file = actions.write("non_root_module_asset_directories.txt", non_root_module_asset_directories) - bundle_builder_args.hidden(non_root_module_asset_directories) + + if android_toolchain.enabled_voltron_non_asset_libs: + non_root_module_asset_directories = resources_info.module_manifests + dex_files_info.non_root_module_secondary_dex_dirs + non_root_module_asset_directories_file = actions.write("non_root_module_asset_directories.txt", non_root_module_asset_directories) + bundle_builder_args.hidden(non_root_module_asset_directories) + non_root_module_asset_native_lib_directories = actions.write("non_root_module_asset_native_lib_directories.txt", native_library_info.non_root_module_native_lib_assets) + bundle_builder_args.hidden(native_library_info.non_root_module_native_lib_assets) + else: + non_root_module_asset_directories = resources_info.module_manifests + native_library_info.non_root_module_native_lib_assets + dex_files_info.non_root_module_secondary_dex_dirs + non_root_module_asset_directories_file = actions.write("non_root_module_asset_directories.txt", non_root_module_asset_directories) + bundle_builder_args.hidden(non_root_module_asset_directories) + non_root_module_asset_native_lib_directories = actions.write("non_root_module_asset_native_lib_directories.txt", "") + native_library_directories = 
actions.write("native_library_directories", native_library_info.native_libs_for_primary_apk) bundle_builder_args.hidden(native_library_info.native_libs_for_primary_apk) all_zip_files = [resources_info.packaged_string_assets] if resources_info.packaged_string_assets else [] @@ -75,6 +85,8 @@ def build_bundle( root_module_asset_directories_file, "--non-root-module-asset-directories-list", non_root_module_asset_directories_file, + "--non-root-module-asset-native-lib-directories-list", + non_root_module_asset_native_lib_directories, "--native-libraries-directories-list", native_library_directories, "--zip-files-list", From 10ab27a909d5a66d9adbd2cb31f4b84a1ea84258 Mon Sep 17 00:00:00 2001 From: Ben Chang Date: Sat, 24 Feb 2024 13:11:51 -0800 Subject: [PATCH 0343/1133] adding support for prebuilt native libraries in voltron with gating Summary: ensures that prebuilt native libs are properly allocated into voltron artifacts Reviewed By: IanChilds Differential Revision: D54084390 fbshipit-source-id: bd055d9420a1567a60292b632781008fac814e94 --- .../android_binary_native_library_rules.bzl | 27 ++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index ec419b265..b9160df2b 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -412,11 +412,13 @@ def _get_native_libs_and_assets( all_prebuilt_native_library_dirs: list[PrebuiltNativeLibraryDir], platform_to_native_linkables: dict[str, dict[str, SharedLibrary]]) -> _NativeLibsAndAssetsInfo: is_packaging_native_libs_as_assets_supported = getattr(ctx.attrs, "package_asset_libraries", False) + enabled_voltron_non_asset_libs = ctx.attrs._android_toolchain[AndroidToolchainInfo].enabled_voltron_non_asset_libs prebuilt_native_library_dirs = [] prebuilt_native_library_dirs_always_in_primary_apk = [] 
prebuilt_native_library_dir_assets_for_primary_apk = [] prebuilt_native_library_dir_module_assets_map = {} + prebuilt_native_library_dir_module_libs_map = {} for native_lib in all_prebuilt_native_library_dirs: native_lib_target = str(native_lib.raw_target) module = get_module_from_target(native_lib_target) @@ -431,6 +433,8 @@ def _get_native_libs_and_assets( if not is_root_module(module): if native_lib.is_asset: prebuilt_native_library_dir_module_assets_map.setdefault(module, []).append(native_lib) + elif enabled_voltron_non_asset_libs: + prebuilt_native_library_dir_module_libs_map.setdefault(module, []).append(native_lib) else: prebuilt_native_library_dirs.append(native_lib) elif native_lib.is_asset and is_packaging_native_libs_as_assets_supported: @@ -460,13 +464,22 @@ def _get_native_libs_and_assets( ) if prebuilt_native_library_dir_assets_for_primary_apk else None native_lib_module_assets_map = {} for module, native_lib_dir in prebuilt_native_library_dir_module_assets_map.items(): - native_lib_module_assets_map[module] = [_filter_prebuilt_native_library_dir( + native_lib_module_assets_map.setdefault(module, []).append(_filter_prebuilt_native_library_dir( ctx, native_lib_dir, "native_lib_assets_for_module_{}".format(module), package_as_assets = True, module = module, - )] + )) + if enabled_voltron_non_asset_libs: + for module, native_lib_dir in prebuilt_native_library_dir_module_libs_map.items(): + native_lib_module_assets_map.setdefault(module, []).append(_filter_prebuilt_native_library_dir( + ctx, + native_lib_dir, + "native_lib_libs_for_module_{}".format(module), + package_as_assets = False, + module = module, + )) stripped_linkables = _get_native_linkables(ctx, platform_to_native_linkables, get_module_from_target, is_packaging_native_libs_as_assets_supported) for module, native_linkable_assets in stripped_linkables.linkable_module_assets_map.items(): @@ -564,7 +577,15 @@ def _filter_prebuilt_native_library_dir( native_libs_dirs = [native_lib.dir for 
native_lib in native_libs] native_libs_dirs_file = ctx.actions.write("{}_list.txt".format(identifier), native_libs_dirs) base_output_dir = ctx.actions.declare_output(identifier, dir = True) - output_dir = base_output_dir.project(_get_native_libs_as_assets_dir(module)) if package_as_assets else base_output_dir + if ctx.attrs._android_toolchain[AndroidToolchainInfo].enabled_voltron_non_asset_libs: + if module == ROOT_MODULE: + output_dir = base_output_dir.project(_get_native_libs_as_assets_dir(module)) if package_as_assets else base_output_dir + elif package_as_assets: + output_dir = base_output_dir.project(paths.join(_get_native_libs_as_assets_dir(module), "assets")) + else: + output_dir = base_output_dir.project(paths.join(_get_native_libs_as_assets_dir(module), "lib")) + else: + output_dir = base_output_dir.project(_get_native_libs_as_assets_dir(module)) if package_as_assets else base_output_dir ctx.actions.run( cmd_args([filter_tool, native_libs_dirs_file, output_dir.as_output(), "--abis"] + abis).hidden(native_libs_dirs), category = "filter_prebuilt_native_library_dir", From ff3cf4edc5de7f2a5a97d7075df8d4e4c5c0d76c Mon Sep 17 00:00:00 2001 From: Rainer Dreyer Date: Mon, 26 Feb 2024 09:11:13 -0800 Subject: [PATCH 0344/1133] Make stripping configurable in Android APKs Summary: This allows us to build unstripped .apks and have fully symbolicated crashes in logcat without having to symbolicate the crashes manually. This is mostly a proof of concept for now. I'm happy to do additional cleanup (rename all stripped_* variables, etc) if we decide to go ahead with this. Note for reviewers: I'm curious if you agree that this is worth doing. 
Reviewed By: IanChilds Differential Revision: D53854006 fbshipit-source-id: 35a92d56bed0bf00c76d4299fe559e4ac0ef8fbc --- prelude/android/android.bzl | 3 +++ .../android/android_binary_native_library_rules.bzl | 12 +++++++----- prelude/decls/android_rules.bzl | 1 + 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 261530310..484b25b3a 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -55,6 +55,7 @@ implemented_rules = { # Can't load `read_bool` here because it will cause circular load. FORCE_SINGLE_CPU = read_root_config("buck2", "android_force_single_cpu") in ("True", "true") FORCE_SINGLE_DEFAULT_CPU = read_root_config("buck2", "android_force_single_default_cpu") in ("True", "true") +DISABLE_STRIPPING = read_root_config("android", "disable_stripping") in ("True", "true") extra_attributes = { "android_aar": { @@ -67,6 +68,7 @@ extra_attributes = { "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), "package_asset_libraries": attrs.default_only(attrs.bool(default = True)), "resources_root": attrs.option(attrs.string(), default = None), + "strip_libraries": attrs.default_only(attrs.bool(default = not DISABLE_STRIPPING)), "_android_toolchain": toolchains_common.android(), "_cxx_toolchain": attrs.split_transition_dep(cfg = cpu_split_transition, default = "toolchains//:android-hack"), "_is_building_android_binary": attrs.default_only(attrs.bool(default = True)), @@ -92,6 +94,7 @@ extra_attributes = { "module_manifest_skeleton": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None), "native_library_merge_code_generator": attrs.option(attrs.exec_dep(), default = None), "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), + "strip_libraries": attrs.bool(default = not DISABLE_STRIPPING), 
"_android_toolchain": toolchains_common.android(), "_cxx_toolchain": attrs.split_transition_dep(cfg = cpu_split_transition, default = "toolchains//:android-hack"), "_dex_toolchain": toolchains_common.dex(), diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index b9160df2b..b8523dcc5 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -611,6 +611,7 @@ def _get_native_linkables( stripped_native_linkables_always_in_primary_apk_srcs = {} stripped_native_linkable_assets_for_primary_apk_srcs = {} stripped_native_linkable_module_assets_srcs = {} + strip_libraries = getattr(ctx.attrs, "strip_libraries", True) cpu_filters = ctx.attrs.cpu_filters for platform, native_linkables in platform_to_native_linkables.items(): @@ -621,6 +622,7 @@ def _get_native_linkables( for so_name, native_linkable in native_linkables.items(): native_linkable_target = str(native_linkable.label.raw_target()) module = get_module_from_target(native_linkable_target) + lib = native_linkable.stripped_lib if strip_libraries else native_linkable.lib.output expect( not native_linkable.for_primary_apk or is_root_module(module), @@ -636,24 +638,24 @@ def _get_native_linkables( else: native_libs_assets_dir = _get_native_libs_as_assets_dir(module) so_name_path = paths.join(native_libs_assets_dir, abi_directory, so_name) - stripped_native_linkable_module_assets_srcs.setdefault(module, {})[so_name_path] = native_linkable.stripped_lib + stripped_native_linkable_module_assets_srcs.setdefault(module, {})[so_name_path] = lib elif native_linkable.can_be_asset and package_native_libs_as_assets_enabled: if enabled_voltron_non_asset_libs: native_libs_assets_dir = paths.join(_get_native_libs_as_assets_dir(module), "assets") else: native_libs_assets_dir = _get_native_libs_as_assets_dir(module) so_name_path = paths.join(native_libs_assets_dir, abi_directory, so_name) 
- stripped_native_linkable_assets_for_primary_apk_srcs[so_name_path] = native_linkable.stripped_lib + stripped_native_linkable_assets_for_primary_apk_srcs[so_name_path] = lib elif (enabled_voltron_non_asset_libs and # TODO: when cleaning up this code, restructure if statements to be more clear (start with root module, then non-root module cases) not native_linkable.can_be_asset and not is_root_module(module)): so_name_path = paths.join(_get_native_libs_as_assets_dir(module), "lib", abi_directory, so_name) - stripped_native_linkable_module_assets_srcs.setdefault(module, {})[so_name_path] = native_linkable.stripped_lib + stripped_native_linkable_module_assets_srcs.setdefault(module, {})[so_name_path] = lib else: so_name_path = paths.join(abi_directory, so_name) if native_linkable.for_primary_apk: - stripped_native_linkables_always_in_primary_apk_srcs[so_name_path] = native_linkable.stripped_lib + stripped_native_linkables_always_in_primary_apk_srcs[so_name_path] = lib else: - stripped_native_linkables_srcs[so_name_path] = native_linkable.stripped_lib + stripped_native_linkables_srcs[so_name_path] = lib stripped_native_linkables = ctx.actions.symlinked_dir( "stripped_native_linkables", diff --git a/prelude/decls/android_rules.bzl b/prelude/decls/android_rules.bzl index 4182f28b4..eeed2d27d 100644 --- a/prelude/decls/android_rules.bzl +++ b/prelude/decls/android_rules.bzl @@ -264,6 +264,7 @@ android_binary = prelude_rule( "secondary_dex_weight_limit": attrs.option(attrs.int(), default = None), "skip_crunch_pngs": attrs.option(attrs.bool(), default = None), "skip_proguard": attrs.bool(default = False), + "strip_libraries": attrs.bool(default = True), "trim_resource_ids": attrs.bool(default = False), "use_split_dex": attrs.bool(default = False), "xz_compression_level": attrs.int(default = 4), From c747ef3e289e7c6d00e2a615dab9e43c26ef28f3 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Mon, 26 Feb 2024 11:50:52 -0800 Subject: [PATCH 0345/1133] Refactoring: 
extract common transition parameters to variables Reviewed By: awalterschulze Differential Revision: D54042135 fbshipit-source-id: 18bf9fef9580ab2a6a69802c897144aed34de3d9 --- prelude/go/transitions/defs.bzl | 45 +++++++++++++++------------------ 1 file changed, 21 insertions(+), 24 deletions(-) diff --git a/prelude/go/transitions/defs.bzl b/prelude/go/transitions/defs.bzl index 128d894e1..0f6086a8b 100644 --- a/prelude/go/transitions/defs.bzl +++ b/prelude/go/transitions/defs.bzl @@ -83,43 +83,40 @@ def _chain_transitions(transitions): return tr +_tansitions = [_cgo_enabled_transition, _compile_shared_transition, _race_transition] + +_refs = { + "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", + "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", + "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", + "race_false": "prelude//go/constraints:race_false", + "race_true": "prelude//go/constraints:race_true", +} + +_attrs = ["cgo_enabled", "race"] + go_binary_transition = transition( - impl = _chain_transitions([_cgo_enabled_transition, _compile_shared_transition, _race_transition]), - refs = { - "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", - "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", - "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", + impl = _chain_transitions(_tansitions), + refs = _refs | { "compile_shared_value": "prelude//go/constraints:compile_shared_false", - "race_false": "prelude//go/constraints:race_false", - "race_true": "prelude//go/constraints:race_true", }, - attrs = ["cgo_enabled", "race"], + attrs = _attrs, ) go_test_transition = transition( - impl = _chain_transitions([_cgo_enabled_transition, _compile_shared_transition, _race_transition]), - refs = { - "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", - "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", - "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", + impl = 
_chain_transitions(_tansitions), + refs = _refs | { "compile_shared_value": "prelude//go/constraints:compile_shared_false", - "race_false": "prelude//go/constraints:race_false", - "race_true": "prelude//go/constraints:race_true", }, - attrs = ["cgo_enabled", "race"], + attrs = _attrs, ) go_exported_library_transition = transition( - impl = _chain_transitions([_cgo_enabled_transition, _compile_shared_transition, _race_transition]), - refs = { - "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", - "cgo_enabled_false": "prelude//go/constraints:cgo_enabled_false", - "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", + impl = _chain_transitions(_tansitions), + refs = _refs | { "compile_shared_value": "prelude//go/constraints:compile_shared_true", - "race_false": "prelude//go/constraints:race_false", - "race_true": "prelude//go/constraints:race_true", }, - attrs = ["cgo_enabled", "race"], + attrs = _attrs, ) cgo_enabled_attr = attrs.default_only(attrs.option(attrs.bool(), default = select({ From 425b1dc3ddea79f3e094256521cab532967a7b71 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Mon, 26 Feb 2024 11:50:52 -0800 Subject: [PATCH 0346/1133] Support buildtags Summary: Buck2 currently doesn't allow us to implement propagation arrays of arbitrary strings from top-level targets to their dependencies. However stepancheg suggested a brilliant idea how to avoid this limitation using configuration_modifiers, fbcode_macro and pre-generated finite set of constraint_value. Usage: 1. Build tag to .buildconfig attribute`go.allowed_tags` 2. 
Use the tag set on `tags` attribute of a top-level target (go_binary/etc) UPDATE: scottcao suggested to implement it inside prelude in a different manner Implementation details: - Read supported tags from `go.allowed_tags` - Generate a constraint setting and a constraint value for each tag (we have to generate a constraint setting for each tag, because each setting can have only one value) - Use configuration transition to put constraint values into `ConfigurationInfo` using `tags` attribute and define selects on `go_library/cgo_library` Reviewed By: scottcao Differential Revision: D53954453 fbshipit-source-id: ac034ac407a23eaccb07b7b1a803660d0310d7bb --- prelude/decls/go_common.bzl | 8 +++++++ prelude/decls/go_rules.bzl | 3 +++ prelude/go/compile.bzl | 2 +- prelude/go/constraints/BUCK.v2 | 4 ++++ prelude/go/constraints/defs.bzl | 23 +++++++++++++++++++ prelude/go/transitions/defs.bzl | 31 ++++++++++++++++++++++++-- prelude/go/transitions/tags_helper.bzl | 23 +++++++++++++++++++ prelude/rules_impl.bzl | 8 ++++++- 8 files changed, 98 insertions(+), 4 deletions(-) create mode 100644 prelude/go/constraints/defs.bzl create mode 100644 prelude/go/transitions/tags_helper.bzl diff --git a/prelude/decls/go_common.bzl b/prelude/decls/go_common.bzl index 00c0391f8..dc7042a3e 100644 --- a/prelude/decls/go_common.bzl +++ b/prelude/decls/go_common.bzl @@ -139,6 +139,13 @@ def _race_arg(): """), } +def _tags_arg(): + return { + "tags": attrs.list(attrs.string(), default = [], doc = """ + Build tags to apply to this target and its dependencies. 
+"""), + } + go_common = struct( deps_arg = _deps_arg, srcs_arg = _srcs_arg, @@ -153,4 +160,5 @@ go_common = struct( embedcfg_arg = _embedcfg_arg, cgo_enabled_arg = _cgo_enabled_arg, race_arg = _race_arg, + tags_arg = _tags_arg, ) diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index 9cb4653f2..3f56acaf0 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -186,6 +186,7 @@ go_binary = prelude_rule( go_common.embedcfg_arg() | go_common.cgo_enabled_arg() | go_common.race_arg() | + go_common.tags_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files to be symlinked into the working directory of the test. You can access these in your @@ -272,6 +273,7 @@ go_exported_library = prelude_rule( go_common.external_linker_flags_arg() | go_common.cgo_enabled_arg() | go_common.race_arg() | + go_common.tags_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files to be symlinked into the working directory of the test. 
You can access these in your @@ -423,6 +425,7 @@ go_test = prelude_rule( go_common.embedcfg_arg() | go_common.cgo_enabled_arg() | go_common.race_arg() | + go_common.tags_arg() | { "resources": attrs.list(attrs.source(), default = [], doc = """ Static files that are symlinked into the working directory of the diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index a5646aa13..a3565e29d 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -66,7 +66,7 @@ def get_filtered_srcs(ctx: AnalysisContext, srcs: list[Artifact], tests: bool = filter_cmd.add(cmd_args(go_toolchain.go, format = "--go={}")) if tests: filter_cmd.add("--tests") - filter_cmd.add(cmd_args(",".join(go_toolchain.tags), format = "--tags={}")) + filter_cmd.add(cmd_args(",".join(go_toolchain.tags + ctx.attrs._tags), format = "--tags={}")) filter_cmd.add(cmd_args(filtered_srcs.as_output(), format = "--output={}")) filter_cmd.add(srcs_dir) ctx.actions.run(filter_cmd, category = "go_filter_srcs") diff --git a/prelude/go/constraints/BUCK.v2 b/prelude/go/constraints/BUCK.v2 index 0e0c0fb76..6476119c1 100644 --- a/prelude/go/constraints/BUCK.v2 +++ b/prelude/go/constraints/BUCK.v2 @@ -1,3 +1,5 @@ +load(":defs.bzl", "generate_tag_constraints") + constraint_setting( name = "cgo_enabled", visibility = ["PUBLIC"], @@ -54,3 +56,5 @@ constraint_value( constraint_setting = ":race", visibility = ["PUBLIC"], ) + +generate_tag_constraints() diff --git a/prelude/go/constraints/defs.bzl b/prelude/go/constraints/defs.bzl new file mode 100644 index 000000000..8da0b3e81 --- /dev/null +++ b/prelude/go/constraints/defs.bzl @@ -0,0 +1,23 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//:native.bzl", "native") +load("@prelude//go/transitions:tags_helper.bzl", "allowed_tags") + +def generate_tag_constraints(): + for tag in allowed_tags: + setting_name = "tag_" + tag + native.constraint_setting( + name = "tag_" + tag, + visibility = ["PUBLIC"], + ) + + native.constraint_value( + name = setting_name + "__value", + constraint_setting = ":" + setting_name, + visibility = ["PUBLIC"], + ) diff --git a/prelude/go/transitions/defs.bzl b/prelude/go/transitions/defs.bzl index 0f6086a8b..8c063e240 100644 --- a/prelude/go/transitions/defs.bzl +++ b/prelude/go/transitions/defs.bzl @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load(":tags_helper.bzl", "selects_for_tags", "tag_to_constrant_value") + def _cgo_enabled_transition(platform, refs, attrs): constraints = platform.configuration.constraints @@ -75,6 +77,26 @@ def _race_transition(platform, refs, attrs): configuration = new_cfg, ) +def _tags_transition(platform, refs, attrs): + constraints = platform.configuration.constraints + for tag in attrs.tags: + ref_name = "tag_{}__value".format(tag) + if not hasattr(refs, ref_name): + fail("Add tags to .buckconfig attrubute `go.allowed_tags` to allow using it") + + tag_value = getattr(refs, ref_name)[ConstraintValueInfo] + constraints[tag_value.setting.label] = tag_value + + new_cfg = ConfigurationInfo( + constraints = constraints, + values = platform.configuration.values, + ) + + return PlatformInfo( + label = platform.label, + configuration = new_cfg, + ) + def _chain_transitions(transitions): def tr(platform, refs, attrs): for t in transitions: @@ -83,7 +105,7 @@ def _chain_transitions(transitions): return tr -_tansitions = [_cgo_enabled_transition, _compile_shared_transition, _race_transition] +_tansitions = [_cgo_enabled_transition, _compile_shared_transition, _race_transition, _tags_transition] _refs = { "cgo_enabled_auto": "prelude//go/constraints:cgo_enabled_auto", @@ 
-91,9 +113,12 @@ _refs = { "cgo_enabled_true": "prelude//go/constraints:cgo_enabled_true", "race_false": "prelude//go/constraints:race_false", "race_true": "prelude//go/constraints:race_true", +} | { + "tag_{}__value".format(tag): constrant_value + for tag, constrant_value in tag_to_constrant_value().items() } -_attrs = ["cgo_enabled", "race"] +_attrs = ["cgo_enabled", "race", "tags"] go_binary_transition = transition( impl = _chain_transitions(_tansitions), @@ -137,3 +162,5 @@ race_attr = attrs.default_only(attrs.bool(default = select({ "prelude//go/constraints:race_false": False, "prelude//go/constraints:race_true": True, }))) + +tags_attr = attrs.default_only(attrs.list(attrs.string(), default = selects_for_tags())) diff --git a/prelude/go/transitions/tags_helper.bzl b/prelude/go/transitions/tags_helper.bzl new file mode 100644 index 000000000..966fbb69d --- /dev/null +++ b/prelude/go/transitions/tags_helper.bzl @@ -0,0 +1,23 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//utils:buckconfig.bzl", "read_list") + +allowed_tags = read_list("go", "allowed_tags", default = [], root_cell = True) + +def tag_to_constrant_value(): + return {tag: "prelude//go/constraints:tag_{}__value".format(tag) for tag in allowed_tags} + +def selects_for_tags(): + selects = [] + for tag in allowed_tags: + selects += select({ + "DEFAULT": [], + "prelude//go/constraints:tag_{}__value".format(tag): [tag], + }) + + return selects diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index c94c1fa28..99f87191a 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -28,7 +28,7 @@ load("@prelude//go:go_stdlib.bzl", "go_stdlib_impl") load("@prelude//go:go_test.bzl", "go_test_impl") load("@prelude//haskell:compile.bzl", "HaskellLibraryProvider") load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl", "haskell_toolchain_library_impl") -load("@prelude//go/transitions:defs.bzl", "cgo_enabled_attr", "compile_shared_attr", "go_binary_transition", "go_exported_library_transition", "go_test_transition", "race_attr") +load("@prelude//go/transitions:defs.bzl", "cgo_enabled_attr", "compile_shared_attr", "go_binary_transition", "go_exported_library_transition", "go_test_transition", "race_attr", "tags_attr") load("@prelude//haskell:haskell_ghci.bzl", "haskell_ghci_impl") load("@prelude//haskell:haskell_haddock.bzl", "haskell_haddock_impl") load("@prelude//haskell:haskell_ide.bzl", "haskell_ide_impl") @@ -387,6 +387,7 @@ inlined_extra_attributes = { "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), "_race": race_attr, + "_tags": tags_attr, }, # csharp "csharp_library": { @@ -434,6 +435,7 @@ inlined_extra_attributes = { "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), "_race": race_attr, + "_tags": tags_attr, }, "go_exported_library": 
{ "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), @@ -441,6 +443,7 @@ inlined_extra_attributes = { "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), "_race": race_attr, + "_tags": tags_attr, }, "go_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), @@ -449,6 +452,7 @@ inlined_extra_attributes = { "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), "_race": race_attr, + "_tags": tags_attr, }, "go_stdlib": { "_cgo_enabled": cgo_enabled_attr, @@ -456,6 +460,7 @@ inlined_extra_attributes = { "_exec_os_type": buck.exec_os_type_arg(), "_go_toolchain": toolchains_common.go(), "_race": race_attr, + "_tags": tags_attr, }, "go_test": { "coverage_mode": attrs.option(attrs.enum(GoCoverageMode.values()), default = None), @@ -465,6 +470,7 @@ inlined_extra_attributes = { "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), "_race": race_attr, + "_tags": tags_attr, "_testmaingen": attrs.default_only(attrs.exec_dep(default = "prelude//go/tools:testmaingen")), }, From e31fb7b8463dadf647ef54ee605c7c62a68b81bc Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Mon, 26 Feb 2024 11:50:52 -0800 Subject: [PATCH 0347/1133] Fix e2e test for build tags Summary: - Recover broken test for go tags - Use new tags API Reviewed By: scottcao Differential Revision: D54128794 fbshipit-source-id: 3ac960b99deb533698519cff317859d9e77462d6 --- prelude/go/transitions/defs.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/go/transitions/defs.bzl b/prelude/go/transitions/defs.bzl index 8c063e240..742fda139 100644 --- a/prelude/go/transitions/defs.bzl +++ b/prelude/go/transitions/defs.bzl @@ -82,7 +82,7 @@ def _tags_transition(platform, refs, attrs): for tag in attrs.tags: ref_name = 
"tag_{}__value".format(tag) if not hasattr(refs, ref_name): - fail("Add tags to .buckconfig attrubute `go.allowed_tags` to allow using it") + fail("Add tag '{}' to .buckconfig attrubute `go.allowed_tags` to allow using it".format(tag)) tag_value = getattr(refs, ref_name)[ConstraintValueInfo] constraints[tag_value.setting.label] = tag_value From c95f832917cecc4e3c64bf265943e621c66f922c Mon Sep 17 00:00:00 2001 From: Steven Myers Date: Mon, 26 Feb 2024 13:28:49 -0800 Subject: [PATCH 0348/1133] Add matlab as a toolchain along with matlab_program rule Summary: This is a first attempt at adding Matlab as a toolchain to Buck2 along with the rule `matlab_program`. The toolchain currently only supports Windows as I test against my local installation of Matlab. The end goal here is to make Matlab programs runnable from Buck to take advantage of RE both from local workstations and in Sandcastle CI. The RE tier of machines is currently being set up with the necessary licensing provisioning. We may end up with a couple of rules here: 1. `matlab_program` which invokes the Matlab exe and passes in the entry file (Matlab is more of an interpreter and doesn't produce artifacts like a compiler, rather it just takes a source file, runs the contents and spits out the results to stdout) 2. 
`matlab_test` which ultimately will hook into tpx Reviewed By: JakobDegen Differential Revision: D53296346 fbshipit-source-id: 9e26aedeba7b533b56cfcaf30ab6174ae480f24d --- prelude/matlab/matlab.bzl | 20 ++++++++++++++++++++ prelude/matlab/matlab_info.bzl | 10 ++++++++++ prelude/matlab/matlab_program.bzl | 23 +++++++++++++++++++++++ prelude/matlab/matlab_toolchain.bzl | 16 ++++++++++++++++ prelude/rules_impl.bzl | 3 +++ 5 files changed, 72 insertions(+) create mode 100644 prelude/matlab/matlab.bzl create mode 100644 prelude/matlab/matlab_info.bzl create mode 100644 prelude/matlab/matlab_program.bzl create mode 100644 prelude/matlab/matlab_toolchain.bzl diff --git a/prelude/matlab/matlab.bzl b/prelude/matlab/matlab.bzl new file mode 100644 index 000000000..f1fedd9d0 --- /dev/null +++ b/prelude/matlab/matlab.bzl @@ -0,0 +1,20 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load(":matlab_program.bzl", "matlab_program_impl") +load(":matlab_toolchain.bzl", "matlab_toolchain") + +implemented_rules = { + "matlab_program": matlab_program_impl, +} + +extra_attributes = { + "matlab_program": { + "main": attrs.source(), + "_matlab_toolchain": matlab_toolchain(), + }, +} diff --git a/prelude/matlab/matlab_info.bzl b/prelude/matlab/matlab_info.bzl new file mode 100644 index 000000000..3e15fc162 --- /dev/null +++ b/prelude/matlab/matlab_info.bzl @@ -0,0 +1,10 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +MatlabToolchainInfo = provider(fields = { + "matlab_exe": RunInfo, +}) diff --git a/prelude/matlab/matlab_program.bzl b/prelude/matlab/matlab_program.bzl new file mode 100644 index 000000000..42b150e61 --- /dev/null +++ b/prelude/matlab/matlab_program.bzl @@ -0,0 +1,23 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load(":matlab_info.bzl", "MatlabToolchainInfo") + +def matlab_program_impl(ctx: AnalysisContext) -> list[Provider]: + toolchain = ctx.attrs._matlab_toolchain[MatlabToolchainInfo] + + cmd = cmd_args(toolchain.matlab_exe) + cmd.add( + "-batch", + cmd_args( + ctx.attrs.main.basename.rstrip(".m"), + quote = "shell", + ), + ) + cmd.add("-sd", cmd_args(ctx.attrs.main).parent()) + + return [DefaultInfo(default_output = None, other_outputs = [cmd]), RunInfo(cmd)] diff --git a/prelude/matlab/matlab_toolchain.bzl b/prelude/matlab/matlab_toolchain.bzl new file mode 100644 index 000000000..23456cf56 --- /dev/null +++ b/prelude/matlab/matlab_toolchain.bzl @@ -0,0 +1,16 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load(":matlab_info.bzl", "MatlabToolchainInfo") + +def matlab_toolchain(): + return attrs.default_only( + attrs.toolchain_dep( + default = "toolchains//:matlab", + providers = [MatlabToolchainInfo], + ), + ) diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 99f87191a..334d1823b 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -42,6 +42,7 @@ load("@prelude//linking:link_info.bzl", "LinkOrdering") load("@prelude//lua:cxx_lua_extension.bzl", "cxx_lua_extension_impl") load("@prelude//lua:lua_binary.bzl", "lua_binary_impl") load("@prelude//lua:lua_library.bzl", "lua_library_impl") +load("@prelude//matlab:matlab.bzl", _matlab_extra_attributes = "extra_attributes", _matlab_implemented_rules = "implemented_rules") load("@prelude//ocaml:attrs.bzl", _ocaml_extra_attributes = "ocaml_extra_attributes") load("@prelude//ocaml:ocaml.bzl", "ocaml_binary_impl", "ocaml_library_impl", "ocaml_object_impl", "ocaml_shared_impl", "prebuilt_ocaml_library_impl") load("@prelude//python:cxx_python_extension.bzl", "cxx_python_extension_impl") @@ -215,6 +216,7 @@ extra_implemented_rules = struct( _js_implemented_rules, _julia_implemented_rules, _kotlin_implemented_rules, + _matlab_implemented_rules, _zip_file_implemented_rules, ]) ) @@ -591,6 +593,7 @@ all_extra_attributes = _merge_dictionaries([ _js_extra_attributes, _julia_extra_attributes, _kotlin_extra_attributes, + _matlab_extra_attributes, _ocaml_extra_attributes, _zip_file_extra_attributes, ]) From 69aa8dbfa1c2cb937cb0c181c52dd4079df93bf4 Mon Sep 17 00:00:00 2001 From: Jon Janzen Date: Mon, 26 Feb 2024 15:42:51 -0800 Subject: [PATCH 0349/1133] Rename/remove BUCK.v2 files in fbcode Summary: These are the only such files in fbcode outside of test cases. There are 2 things going on here: 1. Deleted the libbpfgo BUCK.v2 file. This file originated from a fork but the fork has gone away and this is leftover from that. We can delete it. 2. 
There are 2 files in the buck2 prelude that don't need the v2 suffix. They were imported from PRs so I suspect the rewrite rule regexes did it this way (can't wait to delete them! Reviewed By: JakobDegen Differential Revision: D54211996 fbshipit-source-id: ec4c81045ddebaea693e9b9da9a2a4ea56a8cbf3 --- prelude/toolchains/conan/{BUCK.v2 => BUCK} | 28 ++++++++++---------- prelude/toolchains/cxx/zig/{BUCK.v2 => BUCK} | 0 2 files changed, 14 insertions(+), 14 deletions(-) rename prelude/toolchains/conan/{BUCK.v2 => BUCK} (77%) rename prelude/toolchains/cxx/zig/{BUCK.v2 => BUCK} (100%) diff --git a/prelude/toolchains/conan/BUCK.v2 b/prelude/toolchains/conan/BUCK similarity index 77% rename from prelude/toolchains/conan/BUCK.v2 rename to prelude/toolchains/conan/BUCK index a7d9546de..8c665881e 100644 --- a/prelude/toolchains/conan/BUCK.v2 +++ b/prelude/toolchains/conan/BUCK @@ -1,57 +1,57 @@ -native.export_file( +export_file( name = "buckler", src = "buckler/conanfile.py", visibility = ["PUBLIC"], ) -native.python_bootstrap_library( +python_bootstrap_library( name = "conan_common", srcs = ["conan_common.py"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "conan_generate", main = "conan_generate.py", - deps = [":conan_common"], visibility = ["PUBLIC"], + deps = [":conan_common"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "conan_init", main = "conan_init.py", - deps = [":conan_common"], visibility = ["PUBLIC"], + deps = [":conan_common"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "conan_lock", main = "conan_lock.py", - deps = [":conan_common"], visibility = ["PUBLIC"], + deps = [":conan_common"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "conan_package", main = "conan_package.py", - deps = [":conan_common"], visibility = ["PUBLIC"], + deps = [":conan_common"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "conan_package_extract", main = 
"conan_package_extract.py", visibility = ["PUBLIC"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "conan_update", main = "conan_update.py", visibility = ["PUBLIC"], ) -native.python_bootstrap_binary( +python_bootstrap_binary( name = "lock_generate", main = "lock_generate.py", - deps = [":conan_common"], visibility = ["PUBLIC"], + deps = [":conan_common"], ) diff --git a/prelude/toolchains/cxx/zig/BUCK.v2 b/prelude/toolchains/cxx/zig/BUCK similarity index 100% rename from prelude/toolchains/cxx/zig/BUCK.v2 rename to prelude/toolchains/cxx/zig/BUCK From 3614e4ca9e86b7c8d7ae77f0e88cbd74810b0b0b Mon Sep 17 00:00:00 2001 From: Balaji S Date: Tue, 27 Feb 2024 08:07:16 -0800 Subject: [PATCH 0350/1133] Handle failures in cthooks in init_per_* functions correctly Summary: ## Context init_per_* functions can return `{fail, some_reason}` or `{skip, some_reason}` to cause the test to be marked as failed or skipped. This information is passed on to cthooks post_init_per functions as `Result` param (see e.g. https://www.erlang.org/doc/man/ct_hooks#Module:post_init_per_suite-4) Cthooks like wa_ct_sanitizer also can return these values test shell mimics the bahaviour of ct and runs these hooks itself. 
However there was an inconsistency here - ct uses `{fail, ...}` and `{skip, ...}` but test shell handled these as `{failed, ...}` and `{skipped, ...}` ## This diff * First verified that the Return value passed to post hooks are indeed `{fail, ..` in ct by adding some logs to ct_wa_sanitizer: {F1459313497} * This confirms that the failures are indeed passed {fail and {skip * We now modify `ct_daemon_hooks` the file that emulates ct's hooks behaviour to match this Reviewed By: jcpetruzza Differential Revision: D53998603 fbshipit-source-id: fefdbb692f629c5be0a32c5cdce07719d46cc5ca --- .../test_exec/src/ct_daemon_core.erl | 8 ++-- .../test_exec/src/ct_daemon_hooks.erl | 43 ++++++++----------- 2 files changed, 23 insertions(+), 28 deletions(-) diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl index b38b0a9e8..dd3c560f2 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl @@ -235,8 +235,8 @@ test_part(Config, Suite, Test, Path) -> InitResult = case safe_call(wrap_ct_hook(init_per_testcase, Path, fun Suite:init_per_testcase/2), [Config]) of {error, not_exported} -> Config; - {skipped, Reason} -> {error, {skip, init_per_testcase, Reason}}; - {failed, InitErrReason} -> {error, {skip, init_per_testcase, InitErrReason}}; + {skip, Reason} -> {error, {skip, init_per_testcase, Reason}}; + {fail, InitErrReason} -> {error, {skip, init_per_testcase, InitErrReason}}; {error, InitErrReason} -> {error, {skip, init_per_testcase, InitErrReason}}; InitOutConfig -> InitOutConfig end, @@ -328,10 +328,10 @@ do_part_safe(Id, Fun, Config, TimeTrap) -> end, {name, FunName} = erlang:fun_info(Fun, name), try Fun(Config) of - {skipped, Reason} -> + {skip, Reason} -> ?LOG_DEBUG("got skip for ~p because of: ~p", [Id, Reason]), ParentPid ! 
{RspRef, {skip, {FunName, Id}, Reason}}; - {failed, Reason} -> + {fail, Reason} -> ?LOG_DEBUG("got fail for ~p because of: ~p", [Id, Reason]), ParentPid ! {RspRef, {fail, {FunName, Id}, Reason}}; {skip_and_save, Reason, _} -> diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl index 8a8ef4399..f9eed57cb 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl @@ -198,9 +198,9 @@ wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> end, case call_if_exists_with_fallback_store_state(Hook, pre(Part), PathArg ++ [ConfigArg0], ok) of {skip, SkipReason} -> - {skipped, SkipReason}; + {skip, SkipReason}; {fail, FailReason} -> - {failed, FailReason}; + {fail, FailReason}; HookCallbackResult -> ConfigArg1 = case is_list(HookCallbackResult) of @@ -219,12 +219,12 @@ wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> {tc_status, {skipped, SkipReason}} | lists:keydelete(tc_status, 1, ConfigArg1) ], - {skipped, SkipReason} + {skip, SkipReason} }; {fail, FailReason} -> { [{tc_status, {failed, FailReason}} | lists:keydelete(tc_status, 1, ConfigArg1)], - {failed, FailReason} + {fail, FailReason} }; OkResult -> ConfigArg2 = @@ -256,15 +256,10 @@ wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> [Suite | _] = PathArg, Result = try WrappedWithPreAndPost(PathArg, ConfigArg) of - Skip = {skipped, _Reason} -> - Skip; - Fail = {failed, _Reason} -> - Fail; - %% if we don't have a hook setup, we still need to do the conversion from skip/fail to skipped/failed {skip, SkipReason} -> - {skipped, SkipReason}; + {skip, SkipReason}; {fail, FailReason} -> - {failed, FailReason}; + {fail, FailReason}; MaybeConfig -> case init_or_end(Part) of 'end' -> @@ -279,7 +274,7 @@ wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> end end catch - Class:Reason:Stacktrace -> {failed, 
{'EXIT', {{Class, Reason}, Stacktrace}}} + Class:Reason:Stacktrace -> {fail, {'EXIT', {{Class, Reason}, Stacktrace}}} end, handle_post_result(HooksInInstallationOrder, build_test_name(Part, PathArg), Suite, Result) end. @@ -287,42 +282,42 @@ wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> handle_post_result(Hooks, TestName, Suite, Result) -> ReverseHooks = lists:reverse(Hooks), case Result of - SkipResult = {skipped, _} -> + {skip, SkipReason} -> [ call_if_exists_with_fallback_store_state( - Hook, on_tc_skip, [Suite, TestName, SkipResult], ok + Hook, on_tc_skip, [Suite, TestName, {tc_user_skip, SkipReason}], ok ) || Hook <- ReverseHooks ], - SkipResult; - FailResult = {failed, _} -> + {skip, SkipReason}; + {fail, FailReason} -> [ call_if_exists_with_fallback_store_state( - Hook, on_tc_fail, [Suite, TestName, FailResult], ok + Hook, on_tc_fail, [Suite, TestName, FailReason], ok ) || Hook <- ReverseHooks ], - FailResult; + {fail, FailReason}; {ok, Config} -> case lists:keyfind(tc_status, 1, Config) of false -> Config; - {tc_status, SkipResult = {skipped, _}} -> + {tc_status, {skipped, SkipReason}} -> [ call_if_exists_with_fallback_store_state( - Hook, on_tc_skip, [Suite, TestName, SkipResult], ok + Hook, on_tc_skip, [Suite, TestName, {tc_user_skip, SkipReason}], ok ) || Hook <- ReverseHooks ], - SkipResult; - {tc_status, FailResult = {failed, _}} -> + {skip, SkipReason}; + {tc_status, {failed, FailReason}} -> [ call_if_exists_with_fallback_store_state( - Hook, on_tc_fail, [Suite, TestName, FailResult], ok + Hook, on_tc_fail, [Suite, TestName, FailReason], ok ) || Hook <- ReverseHooks ], - FailResult + {fail, FailReason} end end. From e4ce7ed38f67454bca061cecf36a21aeeb87a6ba Mon Sep 17 00:00:00 2001 From: Arsen Tumanyan Date: Tue, 27 Feb 2024 09:00:49 -0800 Subject: [PATCH 0351/1133] Fix naming of jar_postprocessor variables Summary: Removes "_" from all naming definitions with jar_post_processor. 
All references now use jar_postprocessor or postprocessor Reviewed By: cjhopman Differential Revision: D54211821 fbshipit-source-id: 114263c49ea6db140cbc0208b91bb39fc8812dd7 --- prelude/java/java_library.bzl | 4 ++-- prelude/jvm/cd_jar_creator_util.bzl | 14 +++++++------- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/prelude/java/java_library.bzl b/prelude/java/java_library.bzl index e5265b722..5c3571633 100644 --- a/prelude/java/java_library.bzl +++ b/prelude/java/java_library.bzl @@ -31,7 +31,7 @@ load( ) load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") load("@prelude//java/utils:java_utils.bzl", "declare_prefixed_name", "derive_javac", "get_abi_generation_mode", "get_class_to_source_map_info", "get_default_info", "get_java_version_attributes", "to_java_version") -load("@prelude//jvm:cd_jar_creator_util.bzl", "post_process_jar") +load("@prelude//jvm:cd_jar_creator_util.bzl", "postprocess_jar") load("@prelude//jvm:nullsafe.bzl", "get_nullsafe_info") load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo") load("@prelude//utils:expect.bzl", "expect") @@ -435,7 +435,7 @@ def _create_jar_artifact( abi = None if (not srcs and not additional_compiled_srcs) or abi_generation_mode == AbiGenerationMode("none") or java_toolchain.is_bootstrap_toolchain else create_abi(ctx.actions, java_toolchain.class_abi_generator, jar_out) has_postprocessor = hasattr(ctx.attrs, "jar_postprocessor") and ctx.attrs.jar_postprocessor - final_jar = post_process_jar(ctx.actions, ctx.attrs.jar_postprocessor[RunInfo], jar_out, actions_identifier) if has_postprocessor else jar_out + final_jar = postprocess_jar(ctx.actions, ctx.attrs.jar_postprocessor[RunInfo], jar_out, actions_identifier) if has_postprocessor else jar_out return make_compile_outputs( full_library = final_jar, diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl index edef56774..1a013f056 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl 
+++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -452,7 +452,7 @@ def prepare_final_jar( output_jar = output if jar_postprocessor: - return post_process_jar(actions, jar_postprocessor, output_jar, actions_identifier) + return postprocess_jar(actions, jar_postprocessor, output_jar, actions_identifier) else: return output_jar @@ -474,7 +474,7 @@ def prepare_final_jar( ) if jar_postprocessor: - return post_process_jar(actions, jar_postprocessor, merged_jar, actions_identifier) + return postprocess_jar(actions, jar_postprocessor, merged_jar, actions_identifier) else: return merged_jar @@ -567,19 +567,19 @@ def generate_abi_jars( return class_abi, source_abi, source_only_abi, classpath_abi, classpath_abi_dir -def post_process_jar( +def postprocess_jar( actions: AnalysisActions, jar_postprocessor: RunInfo, original_jar: Artifact, actions_identifier: [str, None]) -> Artifact: - post_processed_output = actions.declare_output("post_processed_{}".format(original_jar.short_path)) + postprocessed_output = actions.declare_output("postprocessed_{}".format(original_jar.short_path)) processor_cmd_args = cmd_args( jar_postprocessor, original_jar, - post_processed_output.as_output(), + postprocessed_output.as_output(), ) identifier = actions_identifier if actions_identifier else "" - actions.run(processor_cmd_args, category = "post_processed{}".format(identifier)) + actions.run(processor_cmd_args, category = "postprocessed{}".format(identifier)) - return post_processed_output + return postprocessed_output From d44d1ac03ae5b32244497e37d16348ae2bba4f2b Mon Sep 17 00:00:00 2001 From: Joshua Selbo Date: Tue, 27 Feb 2024 11:39:53 -0800 Subject: [PATCH 0352/1133] Expose jdk.compiler for CD workers Reviewed By: hick209 Differential Revision: D54266892 fbshipit-source-id: 7892a3ccffd6840a088490dee9fee7710a155ed7 --- prelude/jvm/cd_jar_creator_util.bzl | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl 
index 1a013f056..37841cbd2 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl +++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -415,6 +415,23 @@ def prepare_cd_exe( else: jvm_args = jvm_args + extra_jvm_args + # Allow JVM compiler daemon to access internal jdk.compiler APIs + jvm_args += [ + "--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.jvm=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED", + "--add-opens=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED", + "--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED", + "--add-opens=jdk.compiler/com.sun.tools.javac.jvm=ALL-UNNAMED", + "--add-opens=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED", + "--add-opens=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED", + ] + if debug_port and qualified_name == qualified_name_with_subtarget(debug_target): # Do not use a worker when debugging is enabled local_only = True From 6247d13c27a3f810ef5d4c0b0ef3001140670338 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Tue, 27 Feb 2024 13:53:11 -0800 Subject: [PATCH 0353/1133] Set correct type hints for runnabe fields of GoToolchainInfo Summary: To unblock an OSS contribution D54233342 Reviewed By: stepancheg Differential Revision: D54263379 fbshipit-source-id: 0c8e2caaf1cb66137c3603f3b140c79a81134d99 --- prelude/go/cgo_library.bzl | 2 +- prelude/go/compile.bzl | 4 ++-- prelude/go/coverage.bzl | 2 +- prelude/go/toolchain.bzl | 32 +++++++++++++++++--------------- 4 files changed, 21 insertions(+), 19 deletions(-) diff --git a/prelude/go/cgo_library.bzl 
b/prelude/go/cgo_library.bzl index 046d90ba0..dd200d898 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -96,7 +96,7 @@ def _cgo( cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] cmd = get_toolchain_cmd_args(go_toolchain, go_root = False) - cmd.add(go_toolchain.cgo_wrapper[RunInfo]) + cmd.add(go_toolchain.cgo_wrapper) args = cmd_args() args.add(cmd_args(go_toolchain.cgo, format = "--cgo={}")) diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index a3565e29d..10e9f3549 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -62,7 +62,7 @@ def get_filtered_srcs(ctx: AnalysisContext, srcs: list[Artifact], tests: bool = {src.short_path: src for src in srcs}, ) filter_cmd = get_toolchain_cmd_args(go_toolchain, go_root = True, force_disable_cgo = force_disable_cgo) - filter_cmd.add(go_toolchain.filter_srcs[RunInfo]) + filter_cmd.add(go_toolchain.filter_srcs) filter_cmd.add(cmd_args(go_toolchain.go, format = "--go={}")) if tests: filter_cmd.add("--tests") @@ -146,7 +146,7 @@ def compile( output = ctx.actions.declare_output(root, paths.basename(pkg_name) + ".a") cmd = get_toolchain_cmd_args(go_toolchain) - cmd.add(go_toolchain.compile_wrapper[RunInfo]) + cmd.add(go_toolchain.compile_wrapper) cmd.add(cmd_args(output.as_output(), format = "--output={}")) cmd.add(cmd_args(_compile_cmd(ctx, root, pkg_name, pkgs, deps, compile_flags, shared = shared, race = race, coverage_mode = coverage_mode), format = "--compiler={}")) cmd.add(cmd_args(_assemble_cmd(ctx, pkg_name, assemble_flags, shared = shared), format = "--assembler={}")) diff --git a/prelude/go/coverage.bzl b/prelude/go/coverage.bzl index 102a3502a..ba5707f97 100644 --- a/prelude/go/coverage.bzl +++ b/prelude/go/coverage.bzl @@ -33,7 +33,7 @@ def cover_srcs(ctx: AnalysisContext, pkg_name: str, mode: GoCoverageMode, srcs: go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] cmd = cmd_args() - cmd.add(go_toolchain.cover_srcs[RunInfo]) + 
cmd.add(go_toolchain.cover_srcs) cmd.add("--cover", go_toolchain.cover) cmd.add("--coverage-mode", mode.value) cmd.add("--coverage-var-argsfile", out_coverage_vars_argsfile.as_output()) diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index 64aa4818b..978b5e2ad 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -5,34 +5,36 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") + GoToolchainInfo = provider( # @unsorted-dict-items fields = { - "assembler": provider_field(typing.Any, default = None), + "assembler": provider_field(RunInfo), "assembler_flags": provider_field(typing.Any, default = None), "c_compiler_flags": provider_field(typing.Any, default = None), - "cgo": provider_field(typing.Any, default = None), - "cgo_wrapper": provider_field(typing.Any, default = None), - "gen_stdlib_importcfg": provider_field(typing.Any, default = None), - "go_wrapper": provider_field(typing.Any, default = None), - "compile_wrapper": provider_field(typing.Any, default = None), - "compiler": provider_field(typing.Any, default = None), + "cgo": provider_field(RunInfo | None, default = None), + "cgo_wrapper": provider_field(RunInfo), + "gen_stdlib_importcfg": provider_field(RunInfo), + "go_wrapper": provider_field(RunInfo), + "compile_wrapper": provider_field(RunInfo), + "compiler": provider_field(RunInfo), "compiler_flags": provider_field(typing.Any, default = None), - "concat_files": provider_field(typing.Any, default = None), - "cover": provider_field(typing.Any, default = None), - "cover_srcs": provider_field(typing.Any, default = None), - "cxx_toolchain_for_linking": provider_field(typing.Any, default = None), + "concat_files": provider_field(RunInfo), + "cover": provider_field(RunInfo), + "cover_srcs": provider_field(RunInfo), + "cxx_toolchain_for_linking": provider_field(CxxToolchainInfo | None, default = None), 
"env_go_arch": provider_field(typing.Any, default = None), "env_go_os": provider_field(typing.Any, default = None), "env_go_arm": provider_field(typing.Any, default = None), "env_go_root": provider_field(typing.Any, default = None), "env_go_debug": provider_field(dict[str, str], default = {}), "external_linker_flags": provider_field(typing.Any, default = None), - "filter_srcs": provider_field(typing.Any, default = None), - "go": provider_field(typing.Any, default = None), - "linker": provider_field(typing.Any, default = None), + "filter_srcs": provider_field(RunInfo), + "go": provider_field(RunInfo), + "linker": provider_field(RunInfo), "linker_flags": provider_field(typing.Any, default = None), - "packer": provider_field(typing.Any, default = None), + "packer": provider_field(RunInfo), "tags": provider_field(typing.Any, default = None), }, ) From f2bd51720082854121d28c702362e71157b80b98 Mon Sep 17 00:00:00 2001 From: Andrew Krieger Date: Tue, 27 Feb 2024 14:23:25 -0800 Subject: [PATCH 0354/1133] buck2 should respect cxx.link_weight in case of fallback Summary: The default cxx_toolchain_impl assumed a link_weight of 1. Most places were already updated to propagate link_weight through from the cxx_toolchain. For CI though we pass a custom cxx.link_weight on Windows machines to avoid OOMs (as well as some other platforms), and this was not respected in buck2. Although unlikely, because it depends on RE falling over causing a thundering herd of link actions to run locally, we should at least not OOM as a result. 
Reviewed By: cjhopman Differential Revision: D54141738 fbshipit-source-id: 2285bb94f7d6cd5fee9d974d4088a40ad67b79bf --- prelude/cxx/cxx_toolchain.bzl | 3 ++- prelude/cxx/cxx_toolchain_types.bzl | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl index 55fb4576c..c256f623a 100644 --- a/prelude/cxx/cxx_toolchain.bzl +++ b/prelude/cxx/cxx_toolchain.bzl @@ -84,7 +84,7 @@ def cxx_toolchain_impl(ctx): link_binaries_locally = not value_or(ctx.attrs.cache_links, True), link_libraries_locally = False, link_style = LinkStyle(ctx.attrs.link_style), - link_weight = 1, + link_weight = ctx.attrs.link_weight, link_ordering = ctx.attrs.link_ordering, linker = ctx.attrs.linker[RunInfo], linker_flags = cmd_args(ctx.attrs.linker_flags).add(c_lto_flags), @@ -178,6 +178,7 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): "generate_linker_maps": attrs.bool(default = False), "hip_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), "link_ordering": attrs.enum(LinkOrdering.values(), default = "preorder"), + "link_weight": attrs.int(default = 1), "linker": dep_type(providers = [RunInfo]), "llvm_link": attrs.option(dep_type(providers = [RunInfo]), default = None), "lto_mode": attrs.enum(LtoMode.values(), default = "none"), diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index acebc51b0..9582e4983 100644 --- a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -35,7 +35,7 @@ LinkerInfo = provider( # GiBs of object files (which can also lead to RE errors/timesouts etc). 
"link_libraries_locally": provider_field(typing.Any, default = None), "link_style": provider_field(typing.Any, default = None), # LinkStyle - "link_weight": provider_field(typing.Any, default = None), # int + "link_weight": provider_field(int, default = 1), # int "link_ordering": provider_field(typing.Any, default = None), # LinkOrdering "linker": provider_field(typing.Any, default = None), "linker_flags": provider_field(typing.Any, default = None), From 2a9968f2a242074ccbe9f960ae38887439348b6b Mon Sep 17 00:00:00 2001 From: Jia Chen Date: Tue, 27 Feb 2024 15:29:34 -0800 Subject: [PATCH 0355/1133] Include Buck root in BXL output Summary: This was suggested in D54145100: We could simply include Buck root dir in the output of BXL to save us a round trip of `buck root` cli command Reviewed By: zertosh Differential Revision: D54231045 fbshipit-source-id: cac45bab60b4637478087b05100640e7001b1d88 --- prelude/python/typecheck/batch.bxl | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/prelude/python/typecheck/batch.bxl b/prelude/python/typecheck/batch.bxl index 3ba17c491..1cd94a1ee 100644 --- a/prelude/python/typecheck/batch.bxl +++ b/prelude/python/typecheck/batch.bxl @@ -20,8 +20,11 @@ def _run_entry_point(ctx: bxl.Context) -> None: output = ctx.output.ensure_multiple(build_result) ctx.output.print_json({ - label.raw_target(): [artifact.rel_path() for artifact in artifacts] - for label, artifacts in output.items() + "artifacts": { + label.raw_target(): [artifact.rel_path() for artifact in artifacts] + for label, artifacts in output.items() + }, + "root": ctx.root(), }) run = bxl_main( From 4b144b5d02855d5d73103d22a7ee5481a03b8a43 Mon Sep 17 00:00:00 2001 From: Kevin Vigor Date: Tue, 27 Feb 2024 18:55:35 -0800 Subject: [PATCH 0356/1133] pass CARGO_ENCODED_RUSTFLAGS to build scripts. Summary: The riscv-rt package has a build.rs that consults CARGO_ENCODED_RUSTFLAGS to determine the enabled features. 
Currently reindeer does not generate this variable and instead we bang in an emptry string. This causes improper config when the features are disabled with -C target-feature. Now we pass the configured rustc flags. Reviewed By: dtolnay Differential Revision: D54285019 fbshipit-source-id: 69de0ae5deeeb67130397b806071093628079f9f --- prelude/rust/cargo_buildscript.bzl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/prelude/rust/cargo_buildscript.bzl b/prelude/rust/cargo_buildscript.bzl index 7ef47487d..3602bab0a 100644 --- a/prelude/rust/cargo_buildscript.bzl +++ b/prelude/rust/cargo_buildscript.bzl @@ -116,6 +116,10 @@ def _cargo_buildscript_impl(ctx: AnalysisContext) -> list[Provider]: env["RUST_BACKTRACE"] = "1" env["TARGET"] = toolchain_info.rustc_target_triple + # \037 == \x1f == the magic delimiter specified in the environment variable + # reference above. + env["CARGO_ENCODED_RUSTFLAGS"] = cmd_args(toolchain_info.rustc_flags, delimiter = "\037") + host_triple = targets.exec_triple(ctx) if host_triple: env["HOST"] = host_triple From f15118941e67f00473241ad463eee5563ac27893 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 28 Feb 2024 04:44:53 -0800 Subject: [PATCH 0357/1133] Add Coverage transition Summary: - Add coverage_mode constraint_setting - Add transition by coverage_mode attribute - Add field _coverage_mode to go_library, cgo_library and go_test Reviewed By: awalterschulze Differential Revision: D54214732 fbshipit-source-id: 83efbe5d1aebfd936afc9b1de2ba522129e7a498 --- prelude/go/constraints/BUCK.v2 | 23 ++++++++++++++++ prelude/go/transitions/defs.bzl | 48 +++++++++++++++++++++++++++++++-- prelude/rules_impl.bzl | 5 +++- 3 files changed, 73 insertions(+), 3 deletions(-) diff --git a/prelude/go/constraints/BUCK.v2 b/prelude/go/constraints/BUCK.v2 index 6476119c1..e19d48243 100644 --- a/prelude/go/constraints/BUCK.v2 +++ b/prelude/go/constraints/BUCK.v2 @@ -57,4 +57,27 @@ constraint_value( visibility = ["PUBLIC"], ) 
+constraint_setting( + name = "coverage_mode", + visibility = ["PUBLIC"], +) + +constraint_value( + name = "coverage_mode_set", + constraint_setting = ":coverage_mode", + visibility = ["PUBLIC"], +) + +constraint_value( + name = "coverage_mode_count", + constraint_setting = ":coverage_mode", + visibility = ["PUBLIC"], +) + +constraint_value( + name = "coverage_mode_atomic", + constraint_setting = ":coverage_mode", + visibility = ["PUBLIC"], +) + generate_tag_constraints() diff --git a/prelude/go/transitions/defs.bzl b/prelude/go/transitions/defs.bzl index 742fda139..1ad7f390c 100644 --- a/prelude/go/transitions/defs.bzl +++ b/prelude/go/transitions/defs.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//go:coverage.bzl", "GoCoverageMode") load(":tags_helper.bzl", "selects_for_tags", "tag_to_constrant_value") def _cgo_enabled_transition(platform, refs, attrs): @@ -77,6 +78,39 @@ def _race_transition(platform, refs, attrs): configuration = new_cfg, ) +def _coverage_mode_transition(platform, refs, attrs): + constraints = platform.configuration.constraints + + # Cancel transition if the value already set + # to enable using configuration modifiers for overiding this option + coverage_mode_setting = refs.coverage_mode_set[ConstraintValueInfo].setting + if coverage_mode_setting.label in constraints: + return platform + + if attrs.coverage_mode == None: + return platform + elif attrs.coverage_mode == "set": + coverage_mode_ref = refs.coverage_mode_set + elif attrs.coverage_mode == "count": + coverage_mode_ref = refs.coverage_mode_count + elif attrs.coverage_mode == "atomic": + coverage_mode_ref = refs.coverage_mode_atomic + else: + fail("`coverage_mode` can be either: 'set', 'count', 'atomic' or None, got: {}".format(attrs.coverage_mode)) + + coverage_mode_value = coverage_mode_ref[ConstraintValueInfo] + constraints[coverage_mode_value.setting.label] = coverage_mode_value + + new_cfg = 
ConfigurationInfo( + constraints = constraints, + values = platform.configuration.values, + ) + + return PlatformInfo( + label = platform.label, + configuration = new_cfg, + ) + def _tags_transition(platform, refs, attrs): constraints = platform.configuration.constraints for tag in attrs.tags: @@ -129,11 +163,14 @@ go_binary_transition = transition( ) go_test_transition = transition( - impl = _chain_transitions(_tansitions), + impl = _chain_transitions(_tansitions + [_coverage_mode_transition]), refs = _refs | { "compile_shared_value": "prelude//go/constraints:compile_shared_false", + "coverage_mode_atomic": "prelude//go/constraints:coverage_mode_atomic", + "coverage_mode_count": "prelude//go/constraints:coverage_mode_count", + "coverage_mode_set": "prelude//go/constraints:coverage_mode_set", }, - attrs = _attrs, + attrs = _attrs + ["coverage_mode"], ) go_exported_library_transition = transition( @@ -163,4 +200,11 @@ race_attr = attrs.default_only(attrs.bool(default = select({ "prelude//go/constraints:race_true": True, }))) +coverage_mode_attr = attrs.default_only(attrs.option(attrs.enum(GoCoverageMode.values()), default = select({ + "DEFAULT": None, + "prelude//go/constraints:coverage_mode_atomic": "atomic", + "prelude//go/constraints:coverage_mode_count": "count", + "prelude//go/constraints:coverage_mode_set": "set", +}))) + tags_attr = attrs.default_only(attrs.list(attrs.string(), default = selects_for_tags())) diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 334d1823b..d80865e85 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -28,7 +28,7 @@ load("@prelude//go:go_stdlib.bzl", "go_stdlib_impl") load("@prelude//go:go_test.bzl", "go_test_impl") load("@prelude//haskell:compile.bzl", "HaskellLibraryProvider") load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl", "haskell_toolchain_library_impl") -load("@prelude//go/transitions:defs.bzl", "cgo_enabled_attr", 
"compile_shared_attr", "go_binary_transition", "go_exported_library_transition", "go_test_transition", "race_attr", "tags_attr") +load("@prelude//go/transitions:defs.bzl", "cgo_enabled_attr", "compile_shared_attr", "coverage_mode_attr", "go_binary_transition", "go_exported_library_transition", "go_test_transition", "race_attr", "tags_attr") load("@prelude//haskell:haskell_ghci.bzl", "haskell_ghci_impl") load("@prelude//haskell:haskell_haddock.bzl", "haskell_haddock_impl") load("@prelude//haskell:haskell_ide.bzl", "haskell_ide_impl") @@ -384,6 +384,7 @@ inlined_extra_attributes = { "cgo_library": { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), "_compile_shared": compile_shared_attr, + "_coverage_mode": coverage_mode_attr, "_cxx_toolchain": toolchains_common.cxx(), "_exec_os_type": buck.exec_os_type_arg(), "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), @@ -451,6 +452,7 @@ inlined_extra_attributes = { "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), "_cgo_enabled": cgo_enabled_attr, "_compile_shared": compile_shared_attr, + "_coverage_mode": coverage_mode_attr, "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), "_race": race_attr, @@ -468,6 +470,7 @@ inlined_extra_attributes = { "coverage_mode": attrs.option(attrs.enum(GoCoverageMode.values()), default = None), "embedcfg": attrs.option(attrs.source(allow_directory = False), default = None), "resources": attrs.list(attrs.source(allow_directory = True), default = []), + "_coverage_mode": coverage_mode_attr, "_exec_os_type": buck.exec_os_type_arg(), "_go_stdlib": attrs.default_only(attrs.dep(default = "prelude//go/tools:stdlib")), "_go_toolchain": toolchains_common.go(), From 010a18003ca3e5b3be6b611b4b903331ddddc017 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 28 Feb 2024 04:44:53 -0800 Subject: [PATCH 0358/1133] Get rid of 
creating action for compilation with all possible coverage_mode options Summary: Use _covegare_mode property (added in D54214732) instead of producing actions for all possible coverage modes Reviewed By: awalterschulze Differential Revision: D54252350 fbshipit-source-id: ab6e6ae7c0674e32aa24ae04b945134d6169ae5e --- prelude/go/cgo_library.bzl | 36 ++++++++++---------------- prelude/go/compile.bzl | 22 +++++++++++----- prelude/go/coverage.bzl | 6 ++--- prelude/go/go_binary.bzl | 2 +- prelude/go/go_exported_library.bzl | 2 +- prelude/go/go_library.bzl | 33 +++++------------------- prelude/go/go_test.bzl | 41 +++++++++++++----------------- prelude/go/link.bzl | 10 ++------ prelude/go/packages.bzl | 21 +++------------ 9 files changed, 63 insertions(+), 110 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index dd200d898..0b2e3fc51 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -49,7 +49,7 @@ load( "map_idx", ) load(":compile.bzl", "GoPkgCompileInfo", "compile", "get_filtered_srcs", "get_inherited_compile_pkgs") -load(":coverage.bzl", "GoCoverageMode", "cover_srcs") +load(":coverage.bzl", "GoCoverageMode") load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs") load(":packages.bzl", "GoPkg", "go_attr_pkg_name", "merge_pkgs") load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") @@ -157,21 +157,6 @@ def _cgo( return go_srcs, c_headers, c_srcs -def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, coverage_mode: GoCoverageMode, shared: bool, race: bool) -> (Artifact, cmd_args): - cov_res = cover_srcs(ctx, pkg_name, coverage_mode, srcs, shared) - srcs = cov_res.srcs - coverage_vars = cov_res.variables - coverage_pkg = compile( - ctx, - pkg_name, - srcs = srcs, - deps = ctx.attrs.deps + ctx.attrs.exported_deps, - coverage_mode = coverage_mode, - shared = shared, - race = race, - ) - return (coverage_pkg, coverage_vars) - def cgo_library_impl(ctx: AnalysisContext) -> 
list[Provider]: pkg_name = go_attr_pkg_name(ctx) @@ -240,6 +225,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: shared = ctx.attrs._compile_shared race = ctx.attrs._race + coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None # Build Go library. compiled_pkg = compile( @@ -249,14 +235,18 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: deps = ctx.attrs.deps + ctx.attrs.exported_deps, shared = shared, race = race, + coverage_mode = coverage_mode, + ) + + # Temporarily hack, it seems like we can update record, so create new one + compiled_pkg = GoPkg( + cgo = True, + pkg = compiled_pkg.pkg, + coverage_vars = compiled_pkg.coverage_vars, ) - pkg_with_coverage = {mode: _compile_with_coverage(ctx, pkg_name, all_srcs, mode, shared, race = race) for mode in GoCoverageMode} + pkgs = { - pkg_name: GoPkg( - cgo = True, - pkg = compiled_pkg, - pkg_with_coverage = pkg_with_coverage, - ), + pkg_name: compiled_pkg, } # We need to keep pre-processed cgo source files, @@ -264,7 +254,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: # to work with cgo. And when nearly every FB service client is cgo, # we need to support it well. return [ - DefaultInfo(default_output = compiled_pkg, other_outputs = go_srcs), + DefaultInfo(default_output = compiled_pkg.pkg, other_outputs = go_srcs), GoPkgCompileInfo(pkgs = merge_pkgs([ pkgs, get_inherited_compile_pkgs(ctx.attrs.exported_deps), diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index 10e9f3549..bf9a0f055 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -10,7 +10,7 @@ load("@prelude//:paths.bzl", "paths") # @unused this comment is to make the linter happy. The linter thinks # GoCoverageMode is unused despite it being used in the function signature of # multiple functions. 
-load(":coverage.bzl", "GoCoverageMode") +load(":coverage.bzl", "GoCoverageMode", "cover_srcs") load( ":packages.bzl", "GoPkg", # @Unused used as type @@ -98,8 +98,7 @@ def _compile_cmd( deps: list[Dependency] = [], flags: list[str] = [], shared: bool = False, - race: bool = False, - coverage_mode: [GoCoverageMode, None] = None) -> cmd_args: + race: bool = False) -> cmd_args: go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] cmd = cmd_args() @@ -121,7 +120,7 @@ def _compile_cmd( # Add Go pkgs inherited from deps to compiler search path. all_pkgs = merge_pkgs([ pkgs, - pkg_artifacts(get_inherited_compile_pkgs(deps), coverage_mode = coverage_mode), + pkg_artifacts(get_inherited_compile_pkgs(deps)), ]) importcfg = make_importcfg(ctx, root, pkg_name, all_pkgs, with_importmap = True) @@ -140,7 +139,7 @@ def compile( assemble_flags: list[str] = [], shared: bool = False, race: bool = False, - coverage_mode: [GoCoverageMode, None] = None) -> Artifact: + coverage_mode: GoCoverageMode | None = None) -> GoPkg: go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] root = _out_root(shared, coverage_mode) output = ctx.actions.declare_output(root, paths.basename(pkg_name) + ".a") @@ -148,13 +147,22 @@ def compile( cmd = get_toolchain_cmd_args(go_toolchain) cmd.add(go_toolchain.compile_wrapper) cmd.add(cmd_args(output.as_output(), format = "--output={}")) - cmd.add(cmd_args(_compile_cmd(ctx, root, pkg_name, pkgs, deps, compile_flags, shared = shared, race = race, coverage_mode = coverage_mode), format = "--compiler={}")) + cmd.add(cmd_args(_compile_cmd(ctx, root, pkg_name, pkgs, deps, compile_flags, shared = shared, race = race), format = "--compiler={}")) cmd.add(cmd_args(_assemble_cmd(ctx, pkg_name, assemble_flags, shared = shared), format = "--assembler={}")) cmd.add(cmd_args(go_toolchain.packer, format = "--packer={}")) if ctx.attrs.embedcfg: cmd.add(cmd_args(ctx.attrs.embedcfg, format = "--embedcfg={}")) argsfile = ctx.actions.declare_output(root, pkg_name + 
".go.argsfile") + + coverage_vars = None + if coverage_mode != None: + if race and coverage_mode != GoCoverageMode("atomic"): + fail("`coverage_mode` must be `atomic` when `race=True`") + cov_res = cover_srcs(ctx, pkg_name, coverage_mode, srcs, shared) + srcs = cov_res.srcs + coverage_vars = cov_res.variables + srcs_args = cmd_args(srcs) ctx.actions.write(argsfile.as_output(), srcs_args, allow_args = True) @@ -168,4 +176,4 @@ def compile( ctx.actions.run(cmd, category = "go_compile", identifier = identifier) - return output + return GoPkg(pkg = output, coverage_vars = coverage_vars) diff --git a/prelude/go/coverage.bzl b/prelude/go/coverage.bzl index ba5707f97..872ee437d 100644 --- a/prelude/go/coverage.bzl +++ b/prelude/go/coverage.bzl @@ -24,9 +24,9 @@ GoCoverResult = record( ) def cover_srcs(ctx: AnalysisContext, pkg_name: str, mode: GoCoverageMode, srcs: cmd_args, shared: bool) -> GoCoverResult: - path = "static_" + mode.value + path = pkg_name + "_static_" + mode.value if shared: - path = "shared_" + mode.value + path = pkg_name + "shared_" + mode.value out_covered_src_dir = ctx.actions.declare_output("__covered_" + path + "_srcs__", dir = True) out_srcs_argsfile = ctx.actions.declare_output("covered_" + path + "_srcs.txt") out_coverage_vars_argsfile = ctx.actions.declare_output("coverage_" + path + "_vars.txt") @@ -41,7 +41,7 @@ def cover_srcs(ctx: AnalysisContext, pkg_name: str, mode: GoCoverageMode, srcs: cmd.add("--out-srcs-argsfile", out_srcs_argsfile.as_output()) cmd.add("--pkg-name", pkg_name) cmd.add(srcs) - ctx.actions.run(cmd, category = "go_cover_" + path) + ctx.actions.run(cmd, category = "go_cover", identifier = path) return GoCoverResult( srcs = cmd_args(out_srcs_argsfile, format = "@{}").hidden(out_covered_src_dir).hidden(srcs), diff --git a/prelude/go/go_binary.bzl b/prelude/go/go_binary.bzl index 6d8628651..f5ee6c7ac 100644 --- a/prelude/go/go_binary.bzl +++ b/prelude/go/go_binary.bzl @@ -30,7 +30,7 @@ def go_binary_impl(ctx: AnalysisContext) 
-> list[Provider]: ) (bin, runtime_files, external_debug_info) = link( ctx, - lib, + lib.pkg, deps = ctx.attrs.deps, link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static")), linker_flags = ctx.attrs.linker_flags, diff --git a/prelude/go/go_exported_library.bzl b/prelude/go/go_exported_library.bzl index b069f85f4..5210f1742 100644 --- a/prelude/go/go_exported_library.bzl +++ b/prelude/go/go_exported_library.bzl @@ -29,7 +29,7 @@ def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: ) (bin, runtime_files, _external_debug_info) = link( ctx, - lib, + lib.pkg, deps = ctx.attrs.deps, build_mode = GoBuildMode(ctx.attrs.build_mode), link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static_pic")), diff --git a/prelude/go/go_library.bzl b/prelude/go/go_library.bzl index 3c5eb2fdd..00309adaf 100644 --- a/prelude/go/go_library.bzl +++ b/prelude/go/go_library.bzl @@ -24,25 +24,9 @@ load( "map_idx", ) load(":compile.bzl", "GoPkgCompileInfo", "GoTestInfo", "compile", "get_filtered_srcs", "get_inherited_compile_pkgs") -load(":coverage.bzl", "GoCoverageMode", "cover_srcs") +load(":coverage.bzl", "GoCoverageMode") load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs") -load(":packages.bzl", "GoPkg", "go_attr_pkg_name", "merge_pkgs") - -def _compile_with_coverage(ctx: AnalysisContext, pkg_name: str, srcs: cmd_args, coverage_mode: GoCoverageMode, shared: bool, race: bool) -> (Artifact, cmd_args): - cov_res = cover_srcs(ctx, pkg_name, coverage_mode, srcs, shared) - srcs = cov_res.srcs - coverage_vars = cov_res.variables - coverage_pkg = compile( - ctx, - pkg_name, - srcs = srcs, - deps = ctx.attrs.deps + ctx.attrs.exported_deps, - compile_flags = ctx.attrs.compiler_flags, - coverage_mode = coverage_mode, - shared = shared, - race = race, - ) - return (coverage_pkg, coverage_vars) +load(":packages.bzl", "go_attr_pkg_name", "merge_pkgs") def go_library_impl(ctx: AnalysisContext) -> list[Provider]: pkgs = {} @@ 
-55,8 +39,9 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: srcs = get_filtered_srcs(ctx, ctx.attrs.srcs, force_disable_cgo = True) shared = ctx.attrs._compile_shared race = ctx.attrs._race + coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None - compiled_pkg = compile( + pkg = compile( ctx, pkg_name, srcs = srcs, @@ -65,15 +50,11 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: assemble_flags = ctx.attrs.assembler_flags, shared = shared, race = race, + coverage_mode = coverage_mode, ) - pkg_with_coverage = {mode: _compile_with_coverage(ctx, pkg_name, srcs, mode, shared, race = race) for mode in GoCoverageMode} - - default_output = compiled_pkg - pkgs[pkg_name] = GoPkg( - pkg = compiled_pkg, - pkg_with_coverage = pkg_with_coverage, - ) + default_output = pkg.pkg + pkgs[pkg_name] = pkg return [ DefaultInfo(default_output = default_output), diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index 7aef22fd4..2c8759d28 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -20,9 +20,9 @@ load( ) load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") load(":compile.bzl", "GoTestInfo", "compile", "get_filtered_srcs", "get_inherited_compile_pkgs") -load(":coverage.bzl", "GoCoverageMode", "cover_srcs") +load(":coverage.bzl", "GoCoverageMode") load(":link.bzl", "link") -load(":packages.bzl", "go_attr_pkg_name", "pkg_artifact", "pkg_coverage_vars") +load(":packages.bzl", "go_attr_pkg_name") def _gen_test_main( ctx: AnalysisContext, @@ -36,8 +36,9 @@ def _gen_test_main( output = ctx.actions.declare_output("main.go") cmd = cmd_args() cmd.add(ctx.attrs._testmaingen[RunInfo]) - if ctx.attrs.coverage_mode: - cmd.add(cmd_args(ctx.attrs.coverage_mode, format = "--cover-mode={}")) + + # if ctx.attrs.coverage_mode: + # cmd.add(cmd_args(ctx.attrs.coverage_mode, format = "--cover-mode={}")) cmd.add(cmd_args(output.as_output(), format = "--output={}")) 
cmd.add(cmd_args(pkg_name, format = "--import-path={}")) if coverage_mode != None: @@ -70,24 +71,9 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: # If coverage is enabled for this test, we need to preprocess the sources # with the Go cover tool. - coverage_mode = None + coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None coverage_vars = {} pkgs = {} - if ctx.attrs.coverage_mode != None: - if ctx.attrs._race and ctx.attrs.coverage_mode != "atomic": - fail("`coverage_mode` must be `atomic` when `race=True`") - coverage_mode = GoCoverageMode(ctx.attrs.coverage_mode) - cov_res = cover_srcs(ctx, pkg_name, coverage_mode, srcs, False) - srcs = cov_res.srcs - coverage_vars[pkg_name] = cov_res.variables - - # Get all packages that are linked to the test (i.e. the entire dependency tree) - for name, pkg in get_inherited_compile_pkgs(deps).items(): - if ctx.label != None and is_subpackage_of(name, ctx.label.package): - artifact = pkg_artifact(pkg, coverage_mode) - vars = pkg_coverage_vars("", pkg, coverage_mode) - coverage_vars[name] = vars - pkgs[name] = artifact # Compile all tests into a package. tests = compile( @@ -101,22 +87,31 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: race = ctx.attrs._race, ) + if coverage_mode != None: + coverage_vars[pkg_name] = tests.coverage_vars + + # Get all packages that are linked to the test (i.e. the entire dependency tree) + for name, pkg in get_inherited_compile_pkgs(deps).items(): + if ctx.label != None and is_subpackage_of(name, ctx.label.package): + coverage_vars[name] = pkg.coverage_vars + pkgs[name] = pkg.pkg + + pkgs[pkg_name] = tests.pkg + # Generate a main function which runs the tests and build that into another # package. 
gen_main = _gen_test_main(ctx, pkg_name, coverage_mode, coverage_vars, srcs) - pkgs[pkg_name] = tests main = compile(ctx, "main", cmd_args(gen_main), pkgs = pkgs, coverage_mode = coverage_mode, race = ctx.attrs._race) # Link the above into a Go binary. (bin, runtime_files, external_debug_info) = link( ctx = ctx, - main = main, + main = main.pkg, pkgs = pkgs, deps = deps, link_style = value_or(map_val(LinkStyle, ctx.attrs.link_style), LinkStyle("static")), linker_flags = ctx.attrs.linker_flags, shared = False, - coverage_mode = coverage_mode, race = ctx.attrs._race, ) diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 6bf2ca797..9d55072e4 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -29,11 +29,6 @@ load( "@prelude//utils:utils.bzl", "map_idx", ) - -# @unused this comment is to make the linter happy. The linter thinks -# GoCoverageMode is unused despite it being used in the function signature of -# link. -load(":coverage.bzl", "GoCoverageMode") load( ":packages.bzl", "GoPkg", # @Unused used as type @@ -114,8 +109,7 @@ def link( linker_flags: list[typing.Any] = [], external_linker_flags: list[typing.Any] = [], shared: bool = False, - race: bool = False, - coverage_mode: [GoCoverageMode, None] = None): + race: bool = False): go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] if go_toolchain.env_go_os == "windows": executable_extension = ".exe" @@ -141,7 +135,7 @@ def link( # Add inherited Go pkgs to library search path. 
all_pkgs = merge_pkgs([ pkgs, - pkg_artifacts(get_inherited_link_pkgs(deps), coverage_mode = coverage_mode), + pkg_artifacts(get_inherited_link_pkgs(deps)), ]) importcfg = make_importcfg(ctx, "", "", all_pkgs, with_importmap = False) diff --git a/prelude/go/packages.bzl b/prelude/go/packages.bzl index 6ca43871f..8010ea162 100644 --- a/prelude/go/packages.bzl +++ b/prelude/go/packages.bzl @@ -7,12 +7,11 @@ load("@prelude//go:toolchain.bzl", "GoToolchainInfo") load("@prelude//utils:utils.bzl", "value_or") -load(":coverage.bzl", "GoCoverageMode") GoPkg = record( cgo = field(bool, default = False), pkg = field(Artifact), - pkg_with_coverage = field(dict[GoCoverageMode, (Artifact, cmd_args)]), + coverage_vars = field(cmd_args | None, default = None), ) GoStdlib = provider( @@ -44,26 +43,12 @@ def merge_pkgs(pkgss: list[dict[str, typing.Any]]) -> dict[str, typing.Any]: return all_pkgs -def pkg_artifact(pkg: GoPkg, coverage_mode: [GoCoverageMode, None]) -> Artifact: - if coverage_mode: - artifact = pkg.pkg_with_coverage - return artifact[coverage_mode][0] - return pkg.pkg - -def pkg_coverage_vars(name: str, pkg: GoPkg, coverage_mode: [GoCoverageMode, None]) -> [cmd_args, None]: - if coverage_mode: - artifact = pkg.pkg_with_coverage - if coverage_mode not in artifact: - fail("coverage variables don't exist for {}".format(name)) - return artifact[coverage_mode][1] - fail("coverage variables were requested but coverage_mode is None") - -def pkg_artifacts(pkgs: dict[str, GoPkg], coverage_mode: [GoCoverageMode, None] = None) -> dict[str, Artifact]: +def pkg_artifacts(pkgs: dict[str, GoPkg]) -> dict[str, Artifact]: """ Return a map package name to a `shared` or `static` package artifact. 
""" return { - name: pkg_artifact(pkg, coverage_mode) + name: pkg.pkg for name, pkg in pkgs.items() } From 8ffaaf3949118546f46f98e3688fd5b243d89953 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 28 Feb 2024 04:44:53 -0800 Subject: [PATCH 0359/1133] Get rid of coverage/shared prefixes for artifact names Summary: We don't need it anymore, since we don't create artifacts for all possible compilation modes Reviewed By: awalterschulze Differential Revision: D54261203 fbshipit-source-id: 9a02b8fd3af13535ff5c0b8541df00e1acc32c8c --- prelude/go/compile.bzl | 18 ++++-------------- prelude/go/link.bzl | 2 +- prelude/go/packages.bzl | 7 +++---- 3 files changed, 8 insertions(+), 19 deletions(-) diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index bf9a0f055..86f7f7133 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -36,14 +36,6 @@ GoTestInfo = provider( }, ) -def _out_root(shared: bool = False, coverage_mode: [GoCoverageMode, None] = None): - path = "static" - if shared: - path = "shared" - if coverage_mode: - path += "__coverage_" + coverage_mode.value + "__" - return path - def get_inherited_compile_pkgs(deps: list[Dependency]) -> dict[str, GoPkg]: return merge_pkgs([d[GoPkgCompileInfo].pkgs for d in deps if GoPkgCompileInfo in d]) @@ -92,7 +84,6 @@ def _assemble_cmd( def _compile_cmd( ctx: AnalysisContext, - root: str, pkg_name: str, pkgs: dict[str, Artifact] = {}, deps: list[Dependency] = [], @@ -123,7 +114,7 @@ def _compile_cmd( pkg_artifacts(get_inherited_compile_pkgs(deps)), ]) - importcfg = make_importcfg(ctx, root, pkg_name, all_pkgs, with_importmap = True) + importcfg = make_importcfg(ctx, pkg_name, all_pkgs, with_importmap = True) cmd.add("-importcfg", importcfg) @@ -141,19 +132,18 @@ def compile( race: bool = False, coverage_mode: GoCoverageMode | None = None) -> GoPkg: go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - root = _out_root(shared, coverage_mode) - output = ctx.actions.declare_output(root, 
paths.basename(pkg_name) + ".a") + output = ctx.actions.declare_output(paths.basename(pkg_name) + ".a") cmd = get_toolchain_cmd_args(go_toolchain) cmd.add(go_toolchain.compile_wrapper) cmd.add(cmd_args(output.as_output(), format = "--output={}")) - cmd.add(cmd_args(_compile_cmd(ctx, root, pkg_name, pkgs, deps, compile_flags, shared = shared, race = race), format = "--compiler={}")) + cmd.add(cmd_args(_compile_cmd(ctx, pkg_name, pkgs, deps, compile_flags, shared = shared, race = race), format = "--compiler={}")) cmd.add(cmd_args(_assemble_cmd(ctx, pkg_name, assemble_flags, shared = shared), format = "--assembler={}")) cmd.add(cmd_args(go_toolchain.packer, format = "--packer={}")) if ctx.attrs.embedcfg: cmd.add(cmd_args(ctx.attrs.embedcfg, format = "--embedcfg={}")) - argsfile = ctx.actions.declare_output(root, pkg_name + ".go.argsfile") + argsfile = ctx.actions.declare_output(pkg_name + ".go.argsfile") coverage_vars = None if coverage_mode != None: diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 9d55072e4..3c3c4d93d 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -138,7 +138,7 @@ def link( pkg_artifacts(get_inherited_link_pkgs(deps)), ]) - importcfg = make_importcfg(ctx, "", "", all_pkgs, with_importmap = False) + importcfg = make_importcfg(ctx, "", all_pkgs, with_importmap = False) cmd.add("-importcfg", importcfg) diff --git a/prelude/go/packages.bzl b/prelude/go/packages.bzl index 8010ea162..a07ba3f4f 100644 --- a/prelude/go/packages.bzl +++ b/prelude/go/packages.bzl @@ -54,7 +54,6 @@ def pkg_artifacts(pkgs: dict[str, GoPkg]) -> dict[str, Artifact]: def make_importcfg( ctx: AnalysisContext, - root: str, pkg_name: str, own_pkgs: dict[str, typing.Any], with_importmap: bool) -> cmd_args: @@ -71,10 +70,10 @@ def make_importcfg( real_name_ = name_.removeprefix("third-party-source/go/") content.append(cmd_args("importmap ", real_name_, "=", name_, delimiter = "")) - own_importcfg = ctx.actions.declare_output(root, 
"{}.importcfg".format(pkg_name)) + own_importcfg = ctx.actions.declare_output("{}.importcfg".format(pkg_name)) ctx.actions.write(own_importcfg, content) - final_importcfg = ctx.actions.declare_output(root, "{}.final.importcfg".format(pkg_name)) + final_importcfg = ctx.actions.declare_output("{}.final.importcfg".format(pkg_name)) ctx.actions.run( [ go_toolchain.concat_files, @@ -84,7 +83,7 @@ def make_importcfg( own_importcfg, ], category = "concat_importcfgs", - identifier = "{}/{}".format(root, pkg_name), + identifier = pkg_name, ) return cmd_args(final_importcfg).hidden(stdlib.pkgdir).hidden(own_pkgs.values()) From 856c6b89218e9507df9747e6bda1a44caca6422d Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Wed, 28 Feb 2024 14:24:03 -0800 Subject: [PATCH 0360/1133] Add more somerge test targets Summary: Used to test later diffs in this stack. Made one small behavioral tweak to fix the tests - allow setting `native_library_merge_sequence` without setting `native_library_merge_sequence_blocklist` (set it to []). 
Reviewed By: IanChilds Differential Revision: D53834066 fbshipit-source-id: dfd059e0677d8754114e944460da6a4120e15843 --- prelude/android/android_binary_native_library_rules.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index b8523dcc5..af90f0bd6 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -203,7 +203,7 @@ def get_android_binary_native_library_info( native_library_merge_input_file = ctx.actions.write_json("mergemap.input", { "linkable_graphs_by_platform": encode_linkable_graph_for_mergemap(flattened_linkable_graphs_by_platform), "native_library_merge_sequence": ctx.attrs.native_library_merge_sequence, - "native_library_merge_sequence_blocklist": ctx.attrs.native_library_merge_sequence_blocklist, + "native_library_merge_sequence_blocklist": ctx.attrs.native_library_merge_sequence_blocklist or [], }) mergemap_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].mergemap_tool) mergemap_cmd.add(cmd_args(native_library_merge_input_file, format = "--mergemap-input={}")) From e67951ae0ce26bce86233c8eed16c0ba00a6a085 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nivaldo=20Bondan=C3=A7a?= Date: Wed, 28 Feb 2024 15:31:21 -0800 Subject: [PATCH 0361/1133] Actually check config for used classes being enabled Summary: Currently changing the config `kotlin.track_class_usage` does not result in any change for Buck2 and that's was because it was not set to check the config. In this diff I actually update that to check the value of the config. 
Reviewed By: nghuiqin Differential Revision: D54310801 fbshipit-source-id: 656bde2f74d044fb0477d167b5c994c9d0414fd8 --- prelude/kotlin/kotlincd_jar_creator.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/kotlin/kotlincd_jar_creator.bzl b/prelude/kotlin/kotlincd_jar_creator.bzl index c9d8a6225..07788429b 100644 --- a/prelude/kotlin/kotlincd_jar_creator.bzl +++ b/prelude/kotlin/kotlincd_jar_creator.bzl @@ -146,7 +146,7 @@ def create_jar_artifact_kotlincd( compiling_deps_tset = get_compiling_deps_tset(actions, deps, additional_classpath_entries) # external javac does not support used classes - track_class_usage = javac_tool == None + track_class_usage = javac_tool == None and kotlin_toolchain.track_class_usage_plugin != None def encode_library_command( output_paths: OutputPaths, From 2007b3f0b6e1dda3845c8b64e4945a0d65377e39 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Wed, 28 Feb 2024 19:03:04 -0800 Subject: [PATCH 0362/1133] prelude: Add `audit_providers_universe` bxl script Summary: `buck2 audit providers` has no `--universe` parameter. I could go and add that, but that's a lot more work than writing a small BXL script which is already capable of this functionality Reviewed By: scottcao Differential Revision: D54046242 fbshipit-source-id: 50ca282d40c7722a2745c5964a9e962aca54503d --- prelude/tools/audit_providers_universe.bxl | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 prelude/tools/audit_providers_universe.bxl diff --git a/prelude/tools/audit_providers_universe.bxl b/prelude/tools/audit_providers_universe.bxl new file mode 100644 index 000000000..6916ed7a3 --- /dev/null +++ b/prelude/tools/audit_providers_universe.bxl @@ -0,0 +1,18 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +def _impl(ctx): + ts = ctx.target_universe(ctx.cli_args.universe).lookup(ctx.cli_args.target) + ctx.output.print(pstr({t: a.providers() for t, a in ctx.analysis(ts).items()})) + +run = bxl_main( + impl = _impl, + cli_args = { + "target": cli_args.target_label(), + "universe": cli_args.target_label(), + }, +) From 062b9adae5cbd5f193db2995f97329ad25cfbe6b Mon Sep 17 00:00:00 2001 From: Tunahan Karlibas Date: Thu, 29 Feb 2024 05:45:56 -0800 Subject: [PATCH 0363/1133] Fix system_go_toolchain Summary: Fix system_go_toolchain by adding the missing tool references X-link: https://github.com/facebook/buck2/pull/577 Reviewed By: JakobDegen Differential Revision: D54233342 Pulled By: podtserkovskiy fbshipit-source-id: 7c14920a8d681558ec90a368c6a8b7cb2ce636c7 --- prelude/toolchains/go.bzl | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/prelude/toolchains/go.bzl b/prelude/toolchains/go.bzl index ad9f97127..de535b606 100644 --- a/prelude/toolchains/go.bzl +++ b/prelude/toolchains/go.bzl @@ -34,6 +34,7 @@ def _system_go_toolchain_impl(ctx): cgo = get_go_tool("cgo"), cgo_wrapper = ctx.attrs.cgo_wrapper, compile_wrapper = ctx.attrs.compile_wrapper, + concat_files = ctx.attrs.concat_files, compiler = get_go_tool("compile"), cover = get_go_tool("cover"), cover_srcs = ctx.attrs.cover_srcs, @@ -43,10 +44,15 @@ def _system_go_toolchain_impl(ctx): env_go_root = go_root, external_linker_flags = None, filter_srcs = ctx.attrs.filter_srcs, + gen_stdlib_importcfg = ctx.attrs.gen_stdlib_importcfg, go = go_binary, + go_wrapper = ctx.attrs.go_wrapper, linker = get_go_tool("link"), packer = get_go_tool("pack"), tags = [], + linker_flags = [], + assembler_flags = [], + compiler_flags = [], ), ] @@ -61,9 +67,12 @@ system_go_toolchain 
= rule( attrs = { "cgo_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:cgo_wrapper")), "compile_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:compile_wrapper")), + "concat_files": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:concat_files")), "cover_srcs": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:cover_srcs")), "filter_srcs": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:filter_srcs")), + "gen_stdlib_importcfg": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:gen_stdlib_importcfg")), "go_root": attrs.string(), + "go_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:go_wrapper")), }, is_toolchain_rule = True, ) From d9c0bd5ceed69ccbf548d295f7addbf65059116c Mon Sep 17 00:00:00 2001 From: Balaji S Date: Thu, 29 Feb 2024 08:05:42 -0800 Subject: [PATCH 0364/1133] eqwalize ct_daemon_hooks Summary: * Mostly straightforward, defined all types separately Reviewed By: TheGeorge Differential Revision: D54355798 fbshipit-source-id: 8870239b473941c5259adb1bba253e6203120dfc --- .../test_exec/src/ct_daemon_hooks.erl | 80 +++++++++++++++---- 1 file changed, 64 insertions(+), 16 deletions(-) diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl index f9eed57cb..d1026c559 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl @@ -7,12 +7,13 @@ %%%------------------------------------------------------------------- %%% @doc -%%% Implementation of hooks functionality +%%% Implementation of hooks functionality. We mimic the behaviour of +%%% common test hooks so that they can run in test shell %%% @end %%% % @format -module(ct_daemon_hooks). 
--compile(warn_missing_spec). +-compile(warn_missing_spec_all). -behaviour(gen_server). @@ -63,16 +64,50 @@ | on_tc_fail | on_tc_skip. +-type post_hook_call() :: + post_init_per_suite + | post_init_per_group + | post_init_per_testcase + | post_end_per_suite + | post_end_per_group + | post_end_per_testcase. + +-type pre_hook_call() :: + pre_init_per_suite + | pre_init_per_group + | pre_init_per_testcase + | pre_end_per_suite + | pre_end_per_group + | pre_end_per_testcase. + +-type hook_level() :: + suite + | group + | testcase. + +-type hook_response() :: + [config()] + | {skip, term()} + | {fail, term()}. + +-type hook_config() :: + module() + | {module(), Options :: [term()]} + | {module(), Options :: [term()], Priority :: integer()}. + %%-------------------------------------------------------------------- %%% API -spec set_state(id(), hook_state()) -> ok. set_state(Id, State) -> - gen_server:call(?MODULE, {set_state, Id, State}). + ok = gen_server:call(?MODULE, {set_state, Id, State}). --spec get_state(id()) -> {ok, hook_state()} | {error, not_found}. +-spec get_state(id()) -> {ok, hook_state()} | {error, {not_found, list()}}. get_state(Id) -> - gen_server:call(?MODULE, {get_state, Id}). + case gen_server:call(?MODULE, {get_state, Id}) of + {ok, State} -> {ok, State}; + Error = {error, {not_found, Details}} when is_list(Details) -> Error + end. -spec wrap(part(), [atom()], fun()) -> fun(). wrap(Part, Path, Fun) -> @@ -86,7 +121,7 @@ get_hooks() -> %% @doc %% Starts the server within supervision tree --spec start_monitor() -> gen_server:start_ret(). +-spec start_monitor() -> gen_server:start_mon_ret(). start_monitor() -> gen_server:start_monitor({local, ?MODULE}, ?MODULE, [], []). @@ -97,12 +132,13 @@ start_monitor() -> init([]) -> {ok, initialize_hooks()}. --spec handle_call(Request :: term(), From :: gen_server:from(), State :: state()) -> - no_return(). 
+-spec handle_call({get_state, id()}, gen_server:from(), state()) -> {reply, {ok, hook_state()}, state()} | {error, {not_found, list()}}; + ({set_state, id(), hook_state()}, gen_server:from(), state()) -> {reply, ok, state()}; + ({wrap, part(), fun()}, gen_server:from(), state()) -> {reply, fun(([atom() | config()]) -> term()), state()}. handle_call({get_state, Id}, _From, State = #{states := HookStates}) -> case HookStates of #{Id := HookState} -> {reply, {ok, HookState}, State}; - _ -> {error, not_found, [{state, State}, {id, Id}]} + _ -> {error, {not_found, [{state, State}, {id, Id}]}} end; handle_call({set_state, Id, HookState}, _From, State = #{states := HookStates}) -> {reply, ok, State#{states => HookStates#{Id => HookState}}}; @@ -156,6 +192,7 @@ initialize_hooks() -> hooks => [Hook || {_Priority, Hook} <- SortedHooks] }. +-spec get_hooks_config() -> [hook_config()]. get_hooks_config() -> application:get_env(test_exec, ct_daemon_hooks, []) ++ proplists:get_value(ct_hooks, application:get_env(test_exec, daemon_options, []), []). @@ -164,6 +201,7 @@ get_hooks_config() -> wrap_part(Part, Fun, State) -> wrap_init_end(Part, Fun, State). +-spec wrap_init_end(part(), fun(), state()) -> fun(([atom() | config()]) -> term()). wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> %% NOTE ON EXECUTION ORDER: %% @@ -279,6 +317,7 @@ wrap_init_end(Part, Fun, #{hooks := HooksInInstallationOrder}) -> handle_post_result(HooksInInstallationOrder, build_test_name(Part, PathArg), Suite, Result) end. +-spec handle_post_result([hook()], test_name(), module(), {ok, [config()]} | {skip, term()} | {fail, term()}) -> hook_response(). handle_post_result(Hooks, TestName, Suite, Result) -> ReverseHooks = lists:reverse(Hooks), case Result of @@ -354,22 +393,21 @@ build_test_name(end_per_testcase, Path) -> [Test, Group | _] = lists:reverse(Path), {Group, Test}. 
--spec get_hook_module(module() | {module(), Options} | {module(), Options, Priority}) -> module() when - Options :: list(), Priority :: integer(). +-spec get_hook_module(hook_config()) -> module(). get_hook_module({Mod, _, _}) -> Mod; get_hook_module({Mod, _}) -> Mod; get_hook_module(Mod) -> Mod. --spec get_hook_opts(module() | {module(), Options} | {module(), Options, Priority}) -> Options when - Options :: list(), Priority :: integer(). + +-spec get_hook_opts(hook_config()) -> [term()]. get_hook_opts({_, Opts, _}) -> Opts; get_hook_opts({_, Opts}) -> Opts; get_hook_opts(_) -> []. --spec get_hook_priority(module() | {module(), Options} | {module(), Options, Priority}) -> Priority when - Options :: list(), Priority :: integer(). +-spec get_hook_priority(hook_config()) -> integer() | undefined. get_hook_priority({_, _, Prio}) -> Prio; get_hook_priority(_) -> undefined. +-spec normalize_part(part(), fun()) -> fun(). normalize_part(Part, Fun) -> SafeFun = get_safe_part(Part, Fun), case level(Part) of @@ -379,21 +417,24 @@ normalize_part(Part, Fun) -> end. %% wrappers because most calls are optional +-spec call_if_exists(module(), atom(), [term()], Default :: {'$lazy', LazyFun :: fun(() -> term())} | term()) -> term(). call_if_exists(Mod, Fun, Args, Default) -> case erlang:function_exported(Mod, Fun, erlang:length(Args)) of true -> erlang:apply(Mod, Fun, Args); false -> case Default of - {'$lazy', LazyFun} -> LazyFun(); + {'$lazy', LazyFun} when is_function(LazyFun, 0) -> LazyFun(); _ -> Default end end. +-spec call_if_exists_with_fallback(module(), atom(), [term()], term()) -> term(). call_if_exists_with_fallback(Mod, Fun, Args, ReturnDefault) -> [_ | FallbackArgs] = Args, call_if_exists(Mod, Fun, Args, {'$lazy', fun() -> call_if_exists(Mod, Fun, FallbackArgs, ReturnDefault) end}). +-spec call_if_exists_with_fallback_store_state({module(), term()}, atom(), [term()], term()) -> term(). 
call_if_exists_with_fallback_store_state({Mod, Id}, Fun, Args, ReturnDefault) -> {ok, State} = get_state(Id), Default = @@ -431,6 +472,7 @@ wrapped_init({Mod, Id}, Opts, ConfiguredPriority) -> _ -> {ConfiguredPriority, InitState} end. +-spec pre(part()) -> pre_hook_call(). pre(init_per_suite) -> pre_init_per_suite; pre(init_per_group) -> pre_init_per_group; pre(init_per_testcase) -> pre_init_per_testcase; @@ -438,6 +480,7 @@ pre(end_per_suite) -> pre_end_per_suite; pre(end_per_group) -> pre_end_per_group; pre(end_per_testcase) -> pre_end_per_testcase. +-spec post(part()) -> post_hook_call(). post(init_per_suite) -> post_init_per_suite; post(init_per_group) -> post_init_per_group; post(init_per_testcase) -> post_init_per_testcase; @@ -445,6 +488,7 @@ post(end_per_suite) -> post_end_per_suite; post(end_per_group) -> post_end_per_group; post(end_per_testcase) -> post_end_per_testcase. +-spec level(part()) -> hook_level(). level(init_per_suite) -> suite; level(init_per_group) -> group; level(init_per_testcase) -> testcase; @@ -452,6 +496,7 @@ level(end_per_suite) -> suite; level(end_per_group) -> group; level(end_per_testcase) -> testcase. +-spec init_or_end(part()) -> init | 'end'. init_or_end(init_per_suite) -> init; init_or_end(init_per_group) -> init; init_or_end(init_per_testcase) -> init; @@ -459,12 +504,14 @@ init_or_end(end_per_suite) -> 'end'; init_or_end(end_per_group) -> 'end'; init_or_end(end_per_testcase) -> 'end'. +-spec get_safe_part(part(), fun()) -> fun(). get_safe_part(Part, Fun) -> case is_exported(Fun) of true -> Fun; false -> dummy(Part) end. +-spec dummy(part()) -> fun(). dummy(init_per_suite) -> fun(Config) -> Config end; dummy(init_per_group) -> fun(_, Config) -> Config end; dummy(init_per_testcase) -> fun(_, Config) -> Config end; @@ -472,6 +519,7 @@ dummy(end_per_suite) -> fun(_) -> ok end; dummy(end_per_group) -> fun(_, _) -> ok end; dummy(end_per_testcase) -> fun(_, _) -> ok end. +-spec is_exported(fun()) -> boolean(). 
is_exported(Fun) -> case maps:from_list(erlang:fun_info(Fun)) of #{ From fefd24be62c9aa276f8bfbc3babd0f62d96a4490 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Thu, 29 Feb 2024 11:44:00 -0800 Subject: [PATCH 0365/1133] Add KotlinCD and JavaCD support for manifest_file Summary: We had the param already, but we weren't plumbing it through anywhere. This just adds the plumbing to pass it through to JavaCD and KotlinCD so that it is accounted for. Reviewed By: mdzyuba Differential Revision: D54357786 fbshipit-source-id: ccccf46a2572c9bfbfae71a4a78b181800539f9f --- prelude/java/javacd_jar_creator.bzl | 7 +++---- prelude/jvm/cd_jar_creator_util.bzl | 6 ++++-- prelude/kotlin/kotlin_library.bzl | 1 + prelude/kotlin/kotlincd_jar_creator.bzl | 5 ++++- 4 files changed, 12 insertions(+), 7 deletions(-) diff --git a/prelude/java/javacd_jar_creator.bzl b/prelude/java/javacd_jar_creator.bzl index d9344e72a..8b8e582ce 100644 --- a/prelude/java/javacd_jar_creator.bzl +++ b/prelude/java/javacd_jar_creator.bzl @@ -84,9 +84,6 @@ def create_jar_artifact_javacd( actions = ctx.actions resources_map = get_resources_map(java_toolchain, label.package, resources, resources_root) - # TODO(cjhopman): Handle manifest file. 
- _ = manifest_file # buildifier: disable=unused-variable - bootclasspath_entries = add_java_7_8_bootclasspath(target_level, bootclasspath_entries, java_toolchain) abi_generation_mode = get_abi_generation_mode(abi_generation_mode, java_toolchain, srcs, annotation_processor_properties) @@ -132,6 +129,7 @@ def create_jar_artifact_javacd( resources_map, annotation_processor_properties, plugin_params, + manifest_file, extra_arguments, source_only_abi_compiling_deps = [], track_class_usage = track_class_usage, @@ -171,11 +169,12 @@ def create_jar_artifact_javacd( resources_map, annotation_processor_properties, plugin_params, + manifest_file, extra_arguments, source_only_abi_compiling_deps = source_only_abi_compiling_deps, track_class_usage = track_class_usage, ) - abi_params = encode_jar_params(remove_classes, output_paths) + abi_params = encode_jar_params(remove_classes, output_paths, manifest_file) abi_command = struct( baseJarCommand = base_jar_command, diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl index 37841cbd2..0e13eb1e1 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl +++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -158,13 +158,14 @@ def encode_output_paths(label: Label, paths: OutputPaths, target_type: TargetTyp libraryTargetFullyQualifiedName = base_qualified_name(label), ) -def encode_jar_params(remove_classes: list[str], output_paths: OutputPaths) -> struct: +def encode_jar_params(remove_classes: list[str], output_paths: OutputPaths, manifest_file: [Artifact, None]) -> struct: return struct( jarPath = output_paths.jar.as_output(), removeEntryPredicate = struct( patterns = remove_classes, ), entriesToJar = [output_paths.classes.as_output()], + manifestFile = manifest_file, duplicatesLogLevel = "FINE", ) @@ -288,10 +289,11 @@ def encode_base_jar_command( resources_map: dict[str, Artifact], annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], + manifest_file: [Artifact, None], 
extra_arguments: cmd_args, source_only_abi_compiling_deps: list[JavaClasspathEntry], track_class_usage: bool) -> struct: - library_jar_params = encode_jar_params(remove_classes, output_paths) + library_jar_params = encode_jar_params(remove_classes, output_paths, manifest_file) qualified_name = get_qualified_name(label, target_type) if target_type == TargetType("source_only_abi"): compiling_classpath = classpath_jars_tag.tag_artifacts([dep.abi for dep in source_only_abi_compiling_deps]) diff --git a/prelude/kotlin/kotlin_library.bzl b/prelude/kotlin/kotlin_library.bzl index 71f09eb39..b58fd9fbc 100644 --- a/prelude/kotlin/kotlin_library.bzl +++ b/prelude/kotlin/kotlin_library.bzl @@ -356,6 +356,7 @@ def build_kotlin_library( "kotlin_compiler_plugins": ctx.attrs.kotlin_compiler_plugins, "kotlin_toolchain": kotlin_toolchain, "label": ctx.label, + "manifest_file": ctx.attrs.manifest_file, "remove_classes": ctx.attrs.remove_classes, "required_for_source_only_abi": ctx.attrs.required_for_source_only_abi, "resources": ctx.attrs.resources, diff --git a/prelude/kotlin/kotlincd_jar_creator.bzl b/prelude/kotlin/kotlincd_jar_creator.bzl index 07788429b..0eaf5681c 100644 --- a/prelude/kotlin/kotlincd_jar_creator.bzl +++ b/prelude/kotlin/kotlincd_jar_creator.bzl @@ -62,6 +62,7 @@ def create_jar_artifact_kotlincd( resources_root: [str, None], annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], + manifest_file: [Artifact, None], source_level: int, target_level: int, deps: list[Dependency], @@ -169,6 +170,7 @@ def create_jar_artifact_kotlincd( resources_map, annotation_processor_properties = annotation_processor_properties, plugin_params = plugin_params, + manifest_file = manifest_file, extra_arguments = cmd_args(extra_arguments), source_only_abi_compiling_deps = [], track_class_usage = track_class_usage, @@ -212,11 +214,12 @@ def create_jar_artifact_kotlincd( resources_map, annotation_processor_properties, plugin_params, + 
manifest_file, cmd_args(extra_arguments), source_only_abi_compiling_deps = source_only_abi_compiling_deps, track_class_usage = True, ) - abi_params = encode_jar_params(remove_classes, output_paths) + abi_params = encode_jar_params(remove_classes, output_paths, manifest_file) abi_command = struct( kotlinExtraParams = kotlin_extra_params, baseJarCommand = base_jar_command, From ab91c9d8bca46be611ca65585c5514bdc663c597 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 29 Feb 2024 17:42:03 -0800 Subject: [PATCH 0366/1133] Pass correct rpath origin setting to linker on Darwin for Haskell Summary: This is a follow-up to https://github.com/facebook/buck2/issues/536 which fixes a problem on Darwin when trying to execute a haskell_binary fails with: ``` dyld[13654]: Library not loaded: rpath/lib_backend.dylib Referenced from: /Users/runner/work/buck-out/v2/gen/root/6e975d09ae66fe71/backend/one-off-task/__one-off-task__/one-off-task Reason: tried: '$ORIGIN/__one-off-task__shared_libs_symlink_tree/lib_backend.dylib' (no such file), '$ORIGIN/__one-off-task__shared_libs_symlink_tree/lib_backend.dylib' (no such file), '/usr/local/lib/lib_backend.dylib' (no such file), '/usr/lib/lib_backend.dylib' (no such file, not in dyld cache) ``` X-link: https://github.com/facebook/buck2/pull/567 Reviewed By: simonmar Differential Revision: D54346421 Pulled By: JakobDegen fbshipit-source-id: 00d5d9fd44fb52b24f80bfecdf0cf3683b1e44b1 --- prelude/haskell/haskell.bzl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index ceb40af50..7e84f453b 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -13,6 +13,10 @@ load( "@prelude//cxx:cxx.bzl", "get_auto_link_group_specs", ) +load( + "@prelude//cxx:cxx_context.bzl", + "get_cxx_toolchain_info", +) load( "@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo", @@ -40,6 +44,7 @@ load( load( "@prelude//cxx:linker.bzl", "LINKERS", + "get_rpath_origin", 
"get_shared_library_flags", ) load( From ec6d5ed674c030c1577b6710f54b8088de54d0fb Mon Sep 17 00:00:00 2001 From: Ben Chang Date: Thu, 29 Feb 2024 21:59:52 -0800 Subject: [PATCH 0367/1133] enabled bundle config in buck2 Summary: we're not piping this value through to the AAB builder executable. Reviewed By: IanChilds Differential Revision: D54380912 fbshipit-source-id: 8a658226f9245a7c058336d83c8c4b0cb3bbc9d7 --- prelude/android/android_bundle.bzl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/prelude/android/android_bundle.bzl b/prelude/android/android_bundle.bzl index f0a7d4e05..072489d81 100644 --- a/prelude/android/android_bundle.bzl +++ b/prelude/android/android_bundle.bzl @@ -20,6 +20,7 @@ def android_bundle_impl(ctx: AnalysisContext) -> list[Provider]: dex_files_info = android_binary_info.dex_files_info, native_library_info = android_binary_info.native_library_info, resources_info = android_binary_info.resources_info, + bundle_config = ctx.attrs.bundle_config_file, ) java_packaging_deps = android_binary_info.java_packaging_deps @@ -40,7 +41,8 @@ def build_bundle( android_toolchain: AndroidToolchainInfo, dex_files_info: DexFilesInfo, native_library_info: AndroidBinaryNativeLibsInfo, - resources_info: AndroidBinaryResourcesInfo) -> Artifact: + resources_info: AndroidBinaryResourcesInfo, + bundle_config: [Artifact, None]) -> Artifact: output_bundle = actions.declare_output("{}.aab".format(label.name)) bundle_builder_args = cmd_args([ @@ -52,6 +54,8 @@ def build_bundle( "--dex-file", dex_files_info.primary_dex, ]) + if bundle_config: + bundle_builder_args.add(["--path-to-bundle-config-file", bundle_config]) if android_toolchain.package_meta_inf_version_files: bundle_builder_args.add("--package-meta-inf-version-files") From 4d2d3ee7613a16001dfe0ae9d25ae1da391f82cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20L=C3=B6scher?= Date: Fri, 1 Mar 2024 05:57:44 -0800 Subject: [PATCH 0368/1133] increase timeout for outer cth_tpx server 
Summary: We areseeing sometimes timeouts when applying the update function. Since the update function also does some gen_server calls, we want to see them crash/timeout instead the outer gen_server. Reviewed By: Balajiganapathi Differential Revision: D54414474 fbshipit-source-id: 7dd9fc779fcbf5c1f540cc86017e833660fe48c4 --- prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl index f719e0807..443902106 100644 --- a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl +++ b/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl @@ -31,11 +31,11 @@ start_link(InitialState) -> -spec get(Handle :: handle()) -> CurrentState :: term(). get(Handle) -> - gen_server:call(Handle, get). + gen_server:call(Handle, get, 6000). -spec modify(Handle :: handle(), Fun :: fun((State) -> {A, State})) -> A. modify(Handle, Fun) -> - gen_server:call(Handle, {modify, Fun}). + gen_server:call(Handle, {modify, Fun}, 6000). %% ---- gen_server callbacks ---------- From 06b0308b3826c04991fd15a5f03bd641d5a30357 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Fri, 1 Mar 2024 08:28:33 -0800 Subject: [PATCH 0369/1133] Support ndk.cpu_abis for android_aar Summary: Same as we have for `android_binary`, we can use this config setting for `android_aar` to restrict the cpus that we build for the aar. 
Reviewed By: christolliday Differential Revision: D54358751 fbshipit-source-id: f4e222ad7f71c65170e5fbb0c522573e62501e2a --- prelude/native.bzl | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/prelude/native.bzl b/prelude/native.bzl index 1bbadd2ab..89b493a40 100644 --- a/prelude/native.bzl +++ b/prelude/native.bzl @@ -182,6 +182,14 @@ def _get_valid_cpu_filters(cpu_filters: [list[str], None]) -> list[str]: return [cpu_filter for cpu_filter in cpu_filters if cpu_filter in cpu_abis] +def _android_aar_macro_stub( + cpu_filters = None, + **kwargs): + __rules__["android_aar"]( + cpu_filters = _get_valid_cpu_filters(cpu_filters), + **kwargs + ) + def _android_binary_macro_stub( allow_r_dot_java_in_secondary_dex = False, cpu_filters = None, @@ -426,6 +434,7 @@ def _rust_test_macro_stub(**kwargs): # Probably good if they were defined to take in the base rule that # they are wrapping and return the wrapped one. __extra_rules__ = { + "android_aar": _android_aar_macro_stub, "android_binary": _android_binary_macro_stub, "android_instrumentation_apk": _android_instrumentation_apk_macro_stub, "apple_binary": _apple_binary_macro_stub, From d3723e4a33d460cc04a6b851854cb02b2eac6a17 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Fri, 1 Mar 2024 14:50:31 -0800 Subject: [PATCH 0370/1133] Fix `system_go_toolchain` and add examples Summary: X-link: https://github.com/facebook/buck2/pull/588 - Enabled `system_go_toolchain` to work with `go` binary from PATH for Linux and MacOS - Added examples of go_library, go_binary and go_test to `examples/with_prelude/go/hello` - Added the same version of Go as we use at Meta to CI - Added go ~1.22.0 to Github actions CI Reviewed By: ndmitchell Differential Revision: D54364160 fbshipit-source-id: b3c0d9b9ff322158d961e7e095651d6e77ab79ed --- prelude/go/tools/go_wrapper.py | 4 +++- prelude/toolchains/demo.bzl | 6 +++++ prelude/toolchains/go.bzl | 44 +++++++++++++++++----------------- 3 files changed, 31 insertions(+), 23 
deletions(-) diff --git a/prelude/go/tools/go_wrapper.py b/prelude/go/tools/go_wrapper.py index bb830da97..83b3c9ef5 100644 --- a/prelude/go/tools/go_wrapper.py +++ b/prelude/go/tools/go_wrapper.py @@ -32,7 +32,9 @@ def main(argv): env = os.environ.copy() # Make paths absolute, otherwise go build will fail. - env["GOROOT"] = os.path.realpath(env["GOROOT"]) + if "GOROOT" in env: + env["GOROOT"] = os.path.realpath(env["GOROOT"]) + env["GOCACHE"] = os.path.realpath(env["BUCK_SCRATCH_PATH"]) cwd = os.getcwd() diff --git a/prelude/toolchains/demo.bzl b/prelude/toolchains/demo.bzl index 51bec438a..17ee3f5e7 100644 --- a/prelude/toolchains/demo.bzl +++ b/prelude/toolchains/demo.bzl @@ -7,6 +7,7 @@ load("@prelude//toolchains:cxx.bzl", "system_cxx_toolchain") load("@prelude//toolchains:genrule.bzl", "system_genrule_toolchain") +load("@prelude//toolchains:go.bzl", "system_go_toolchain") load("@prelude//toolchains:haskell.bzl", "system_haskell_toolchain") load("@prelude//toolchains:ocaml.bzl", "system_ocaml_toolchain") load("@prelude//toolchains:python.bzl", "system_python_bootstrap_toolchain", "system_python_toolchain") @@ -28,6 +29,11 @@ def system_demo_toolchains(): visibility = ["PUBLIC"], ) + system_go_toolchain( + name = "go", + visibility = ["PUBLIC"], + ) + system_haskell_toolchain( name = "haskell", visibility = ["PUBLIC"], diff --git a/prelude/toolchains/go.bzl b/prelude/toolchains/go.bzl index de535b606..176203a63 100644 --- a/prelude/toolchains/go.bzl +++ b/prelude/toolchains/go.bzl @@ -6,11 +6,9 @@ # of this source tree. 
load("@prelude//go:toolchain.bzl", "GoToolchainInfo") +load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script") def _system_go_toolchain_impl(ctx): - go_root = ctx.attrs.go_root - go_binary = go_root + "/bin/go" - arch = host_info().arch if arch.is_aarch64: go_arch = "arm64" @@ -18,37 +16,41 @@ def _system_go_toolchain_impl(ctx): go_arch = "amd64" else: fail("Unsupported go arch: {}".format(arch)) + os = host_info().os if os.is_macos: go_os = "darwin" elif os.is_linux: go_os = "linux" + elif os.is_windows: + go_os = "windows" else: fail("Unsupported go os: {}".format(os)) - get_go_tool = lambda go_tool: "{}/pkg/tool/{}_{}/{}".format(go_root, go_os, go_arch, go_tool) + script_os = ScriptOs("windows" if os.is_windows else "unix") + go = "go.exe" if os.is_windows else "go" + return [ DefaultInfo(), GoToolchainInfo( - assembler = get_go_tool("asm"), - cgo = get_go_tool("cgo"), - cgo_wrapper = ctx.attrs.cgo_wrapper, - compile_wrapper = ctx.attrs.compile_wrapper, - concat_files = ctx.attrs.concat_files, - compiler = get_go_tool("compile"), - cover = get_go_tool("cover"), - cover_srcs = ctx.attrs.cover_srcs, + assembler = RunInfo(cmd_script(ctx, "asm", cmd_args(go, "tool", "asm"), script_os)), + cgo = RunInfo(cmd_script(ctx, "cgo", cmd_args(go, "tool", "cgo"), script_os)), + cgo_wrapper = ctx.attrs.cgo_wrapper[RunInfo], + compile_wrapper = ctx.attrs.compile_wrapper[RunInfo], + concat_files = ctx.attrs.concat_files[RunInfo], + compiler = RunInfo(cmd_script(ctx, "compile", cmd_args(go, "tool", "compile"), script_os)), + cover = RunInfo(cmd_script(ctx, "cover", cmd_args(go, "tool", "cover"), script_os)), + cover_srcs = ctx.attrs.cover_srcs[RunInfo], cxx_toolchain_for_linking = None, env_go_arch = go_arch, env_go_os = go_os, - env_go_root = go_root, - external_linker_flags = None, - filter_srcs = ctx.attrs.filter_srcs, - gen_stdlib_importcfg = ctx.attrs.gen_stdlib_importcfg, - go = go_binary, - go_wrapper = ctx.attrs.go_wrapper, - linker = get_go_tool("link"), - 
packer = get_go_tool("pack"), + external_linker_flags = [], + filter_srcs = ctx.attrs.filter_srcs[RunInfo], + gen_stdlib_importcfg = ctx.attrs.gen_stdlib_importcfg[RunInfo], + go = RunInfo(cmd_script(ctx, "go", cmd_args(go), script_os)), + go_wrapper = ctx.attrs.go_wrapper[RunInfo], + linker = RunInfo(cmd_script(ctx, "link", cmd_args(go, "tool", "link"), script_os)), + packer = RunInfo(cmd_script(ctx, "pack", cmd_args(go, "tool", "pack"), script_os)), tags = [], linker_flags = [], assembler_flags = [], @@ -61,7 +63,6 @@ system_go_toolchain = rule( doc = """Example system go toolchain rules (WIP). Usage: system_go_toolchain( name = "go", - go_root = "/opt/homebrew/Cellar/go/1.20.4/libexec", visibility = ["PUBLIC"], )""", attrs = { @@ -71,7 +72,6 @@ system_go_toolchain = rule( "cover_srcs": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:cover_srcs")), "filter_srcs": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:filter_srcs")), "gen_stdlib_importcfg": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:gen_stdlib_importcfg")), - "go_root": attrs.string(), "go_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:go_wrapper")), }, is_toolchain_rule = True, From 978203a23b59907eb8ba3a24e28ecbc3405726ad Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Fri, 1 Mar 2024 20:43:53 -0800 Subject: [PATCH 0371/1133] _get_merged_linkables -> _get_merged_linkables_for_platform Summary: Make this function operate on a single platform and remove LinkableMergeData and MergedLinkables so that the input/output types are in the function signature, just to make it a bit more readable. 
Reviewed By: IanChilds Differential Revision: D53834067 fbshipit-source-id: 96c81fb7805309044e4982244d3364bbfb37566f --- .../android_binary_native_library_rules.bzl | 525 +++++++++--------- 1 file changed, 253 insertions(+), 272 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index af90f0bd6..b1893607a 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -273,26 +273,28 @@ def get_android_binary_native_library_info( else: fail("unreachable") - merged_linkables = _get_merged_linkables( - ctx, - { - platform: LinkableMergeData( - glue_linkable = glue_linkables[platform] if glue_linkables else None, - default_shared_libs = platform_to_original_native_linkables[platform], - linkable_nodes = flattened_linkable_graphs_by_platform[platform], - merge_map = merge_map_by_platform[platform], - apk_module_graph = get_module_from_target, - ) - for platform in platform_to_original_native_linkables - }, - ) - debug_data_json = ctx.actions.write_json("native_merge_debug.json", merged_linkables.debug_info, pretty = True) - native_library_merge_debug_outputs["native_merge_debug.json"] = debug_data_json - shared_object_targets = {} - for _, merged_libs in merged_linkables.shared_libs_by_platform.items(): - for soname, lib in merged_libs.items(): + debug_info_by_platform = {} + shared_libs_by_platform = {} + for platform in platform_to_original_native_linkables: + shared_libs, debug_info = _get_merged_linkables_for_platform( + ctx, + ctx.attrs._cxx_toolchain[platform][CxxToolchainInfo], + platform if len(platform_to_original_native_linkables) > 1 else None, + glue_linkable = glue_linkables[platform] if glue_linkables else None, + default_shared_libs = platform_to_original_native_linkables[platform], + linkable_nodes = flattened_linkable_graphs_by_platform[platform], + merge_map = merge_map_by_platform[platform], + 
apk_module_graph = get_module_from_target, + ) + debug_info_by_platform[platform] = debug_info + shared_libs_by_platform[platform] = shared_libs + for soname, lib in shared_libs.items(): shared_object_targets[soname] = [str(target.raw_target()) for target in lib.primary_constituents] + + debug_data_json = ctx.actions.write_json("native_merge_debug.json", debug_info_by_platform, pretty = True) + native_library_merge_debug_outputs["native_merge_debug.json"] = debug_data_json + shared_object_targets_lines = "" for soname, targets in shared_object_targets.items(): shared_object_targets_lines += soname + " " + " ".join(targets) + "\n" @@ -300,7 +302,7 @@ def get_android_binary_native_library_info( native_library_merge_debug_outputs["shared_object_targets.txt"] = shared_object_targets_txt if mergemap_gencode_jar: - merged_library_map = write_merged_library_map(ctx, merged_linkables) + merged_library_map = write_merged_library_map(ctx, shared_libs_by_platform) mergemap_gencode = run_mergemap_codegen(ctx, merged_library_map) compile_to_jar(ctx, [mergemap_gencode], output = outputs[mergemap_gencode_jar]) native_library_merge_debug_outputs["NativeLibraryMergeGeneratedCode.java"] = mergemap_gencode @@ -311,7 +313,7 @@ def get_android_binary_native_library_info( final_platform_to_native_linkables = { platform: {soname: d.lib for soname, d in merged_shared_libs.items()} - for platform, merged_shared_libs in merged_linkables.shared_libs_by_platform.items() + for platform, merged_shared_libs in shared_libs_by_platform.items() } elif enable_relinker: final_platform_to_native_linkables, native_library_merge_debug_outputs = _create_all_relinkable_links( @@ -784,23 +786,6 @@ MergedSharedLibrary = record( primary_constituents = list[Label], ) -# Output of the linkables merge process, the list of shared libs for each platform and -# debug information about the merge process itself. 
-MergedLinkables = record( - # dict[platform, dict[final_soname, MergedSharedLibrary]] - shared_libs_by_platform = dict[str, dict[str, MergedSharedLibrary]], - debug_info = dict[str, MergedLinkablesDebugInfo], -) - -# Input data to the linkables merge process -LinkableMergeData = record( - glue_linkable = [(Label, LinkInfo), None], - default_shared_libs = dict[str, SharedLibrary], - linkable_nodes = dict[Label, LinkableNode], - merge_map = dict[str, [str, None]], - apk_module_graph = typing.Callable, -) - # information about a link group derived from the merge mapping LinkGroupData = record( group_name = [str, Label], @@ -832,7 +817,7 @@ LinkGroupLinkableNode = record( exported_linker_flags = [(list[typing.Any], list[typing.Any]), None], ) -def write_merged_library_map(ctx: AnalysisContext, merged_linkables: MergedLinkables) -> Artifact: +def write_merged_library_map(ctx: AnalysisContext, shared_libs_by_platform: dict[str, dict[str, MergedSharedLibrary]]) -> Artifact: """ Writes the "merged library map". 
This is a map of original soname to final soname of the form: @@ -844,7 +829,7 @@ def write_merged_library_map(ctx: AnalysisContext, merged_linkables: MergedLinka ``` """ solib_map = {} # dict[final_soname, set[original_soname]] - for _, shared_libs in merged_linkables.shared_libs_by_platform.items(): + for _, shared_libs in shared_libs_by_platform.items(): for soname in shared_libs.keys(): merged_shared_lib = shared_libs[soname] if merged_shared_lib.is_actually_merged: @@ -953,9 +938,15 @@ def _shared_lib_for_prebuilt_shared( label = target, ) -def _get_merged_linkables( +def _get_merged_linkables_for_platform( ctx: AnalysisContext, - merged_data_by_platform: dict[str, LinkableMergeData]) -> MergedLinkables: + cxx_toolchain: CxxToolchainInfo, + platform: str | None, + glue_linkable: [(Label, LinkInfo), None], + default_shared_libs: dict[str, SharedLibrary], + linkable_nodes: dict[Label, LinkableNode], + merge_map: dict[str, [str, None]], + apk_module_graph: typing.Callable) -> (dict[str, MergedSharedLibrary], MergedLinkablesDebugInfo): """ This takes the merge mapping and constructs the resulting merged shared libraries. @@ -997,257 +988,247 @@ def _get_merged_linkables( of a primary constituent. A public node is linked via "link whole". 2. 
linker_flags of primary constituents are included in the link, for non primary they are not """ - debug_info_by_platform = {} - shared_libs_by_platform = {} - for platform, merge_data in merged_data_by_platform.items(): - debug_info = debug_info_by_platform.setdefault(platform, MergedLinkablesDebugInfo( - unmerged_statics = [], - group_debug = {}, - with_default_soname = [], - missing_default_solibs = [], - )) - linkable_nodes = merge_data.linkable_nodes - - linkable_nodes_graph = {k: dedupe(v.deps + v.exported_deps) for k, v in linkable_nodes.items()} - topo_sorted_targets = pre_order_traversal(linkable_nodes_graph) - - # first we collect basic information about each link group, this will populate the fields in LinkGroupData and - # map target labels to their link group name. - link_groups = {} - target_to_link_group = {} - - # Because we cannot attach this to the LinkableNode after the fact, declare a cache for each platform - transitive_linkable_cache = {} - - for target in topo_sorted_targets: - expect(target not in target_to_link_group, "prelude internal error, target seen twice?") - target_apk_module = merge_data.apk_module_graph(str(target.raw_target())) - - link_group = merge_data.merge_map.get(str(target), None) - if not link_group: - link_group = str(target) - link_groups[link_group] = LinkGroupData( - group_name = target, - constituents = [target], - apk_module = target_apk_module, + debug_info = MergedLinkablesDebugInfo( + unmerged_statics = [], + group_debug = {}, + with_default_soname = [], + missing_default_solibs = [], + ) + + linkable_nodes_graph = {k: dedupe(v.deps + v.exported_deps) for k, v in linkable_nodes.items()} + topo_sorted_targets = pre_order_traversal(linkable_nodes_graph) + + # first we collect basic information about each link group, this will populate the fields in LinkGroupData and + # map target labels to their link group name. 
+ link_groups = {} + target_to_link_group = {} + + # Because we cannot attach this to the LinkableNode after the fact, declare a cache for each platform + transitive_linkable_cache = {} + + for target in topo_sorted_targets: + expect(target not in target_to_link_group, "prelude internal error, target seen twice?") + target_apk_module = apk_module_graph(str(target.raw_target())) + + link_group = merge_map.get(str(target), None) + if not link_group: + link_group = str(target) + link_groups[link_group] = LinkGroupData( + group_name = target, + constituents = [target], + apk_module = target_apk_module, + ) + elif link_group in link_groups: + link_group_data = link_groups[link_group] + + # TODO(cjhopman): buck1 provides a more useful error here in that it lists the module mappings for all + # constituents of the merge group (rather than just one conflict). That allows users to resolve all the + # issues at once. With merge sequence merging (the replacement for merge map), this error shouldn't ever be hit + # and so maybe it's not necessary to improve it. + expect( + link_group_data.apk_module == target_apk_module, + "Native library merge of {} has inconsistent application module mappings:\n{} is in module {}\n{} is in module {}", + link_group_data.group_name, + target, + target_apk_module, + link_group_data.constituents[0], + link_group_data.apk_module, + ) + link_groups[link_group].constituents.append(target) + else: + link_groups[link_group] = LinkGroupData( + group_name = link_group, + constituents = [target], + apk_module = target_apk_module, + ) + + target_to_link_group[target] = link_group + + # Now that all targets are assigned to a link group, build up the link group graph. 
+ link_groups_graph_builder = {} + for target in topo_sorted_targets: + target_group = target_to_link_group[target] + group_deps = link_groups_graph_builder.setdefault(target_group, {}) + for dep in linkable_nodes_graph[target]: + dep_group = target_to_link_group[dep] + if target_group != dep_group: + group_deps[dep_group] = True + link_groups_graph = {k: list(v.keys()) for k, v in link_groups_graph_builder.items()} + + archive_output_style = LibOutputStyle("pic_archive") + shlib_output_style = LibOutputStyle("shared_lib") + + link_group_linkable_nodes = {} + group_shared_libs = {} + included_default_solibs = {} + + # Now we will traverse from the leaves up the graph (the link groups graph). As we traverse, we will produce + # a link group linkablenode for each group. + for group in post_order_traversal(link_groups_graph): + group_data = link_groups[group] + is_actually_merged = len(group_data.constituents) > 1 + + can_be_asset = True + for target in group_data.constituents: + if not linkable_nodes[target].can_be_asset: + can_be_asset = False + break + + if not is_actually_merged: + target = group_data.constituents[0] + node_data = linkable_nodes[target] + + if node_data.preferred_linkage == Linkage("static") or not _has_linkable(node_data): + debug_info.unmerged_statics.append(target) + link_group_linkable_nodes[group] = LinkGroupLinkableNode( + link = node_data.link_infos[archive_output_style].default, + deps = dedupe_by_value([target_to_link_group[t] for t in node_data.deps]), + exported_deps = dedupe_by_value([target_to_link_group[t] for t in node_data.exported_deps]), + shared_lib = None, + exported_linker_flags = (node_data.linker_flags.exported_flags, node_data.linker_flags.exported_post_flags), ) - elif link_group in link_groups: - link_group_data = link_groups[link_group] - - # TODO(cjhopman): buck1 provides a more useful error here in that it lists the module mappings for all - # constituents of the merge group (rather than just one conflict). 
That allows users to resolve all the - # issues at once. With merge sequence merging (the replacement for merge map), this error shouldn't ever be hit - # and so maybe it's not necessary to improve it. - expect( - link_group_data.apk_module == target_apk_module, - "Native library merge of {} has inconsistent application module mappings:\n{} is in module {}\n{} is in module {}", - link_group_data.group_name, + continue + + if _is_prebuilt_shared(node_data): + shared_lib = _shared_lib_for_prebuilt_shared( + ctx, + cxx_toolchain, target, - target_apk_module, - link_group_data.constituents[0], - link_group_data.apk_module, + node_data, + linkable_nodes, + transitive_linkable_cache, + platform, ) - link_groups[link_group].constituents.append(target) - else: - link_groups[link_group] = LinkGroupData( - group_name = link_group, - constituents = [target], - apk_module = target_apk_module, + link_group_linkable_nodes[group] = LinkGroupLinkableNode( + link = node_data.link_infos[shlib_output_style].default, + deps = [], + exported_deps = [], + shared_lib = shared_lib, + # exported linker flags for shared libs are in their linkinfo itself and are not exported from dependents + exported_linker_flags = None, ) + group_shared_libs[shared_lib.soname] = MergedSharedLibrary( + soname = shared_lib.soname, + lib = shared_lib, + apk_module = group_data.apk_module, + solib_constituents = [], + is_actually_merged = False, + primary_constituents = [target], + ) + continue - target_to_link_group[target] = link_group + exported_linker_flags = [] + exported_linker_post_flags = [] + links = [] - # Now that all targets are assigned to a link group, build up the link group graph. 
- link_groups_graph_builder = {} - for target in topo_sorted_targets: - target_group = target_to_link_group[target] - group_deps = link_groups_graph_builder.setdefault(target_group, {}) - for dep in linkable_nodes_graph[target]: - dep_group = target_to_link_group[dep] - if target_group != dep_group: - group_deps[dep_group] = True - link_groups_graph = {k: list(v.keys()) for k, v in link_groups_graph_builder.items()} + if is_actually_merged and glue_linkable: + links.append(set_link_info_link_whole(glue_linkable[1])) - archive_output_style = LibOutputStyle("pic_archive") - shlib_output_style = LibOutputStyle("shared_lib") + solib_constituents = [] + group_deps = [] + group_exported_deps = [] + for key in group_data.constituents: + expect(target_to_link_group[key] == group) + node = linkable_nodes[key] - cxx_toolchain = ctx.attrs._cxx_toolchain[platform][CxxToolchainInfo] + default_solibs = list(node.shared_libs.keys()) + if not default_solibs and node.preferred_linkage == Linkage("static"): + default_solibs = [node.default_soname] - link_group_linkable_nodes = {} - group_shared_libs = {} - included_default_solibs = {} - - # Now we will traverse from the leaves up the graph (the link groups graph). As we traverse, we will produce - # a link group linkablenode for each group. 
- for group in post_order_traversal(link_groups_graph): - group_data = link_groups[group] - is_actually_merged = len(group_data.constituents) > 1 - - can_be_asset = True - for target in group_data.constituents: - if not linkable_nodes[target].can_be_asset: - can_be_asset = False - break - - if not is_actually_merged: - target = group_data.constituents[0] - node_data = linkable_nodes[target] - - if node_data.preferred_linkage == Linkage("static") or not _has_linkable(node_data): - debug_info.unmerged_statics.append(target) - link_group_linkable_nodes[group] = LinkGroupLinkableNode( - link = node_data.link_infos[archive_output_style].default, - deps = dedupe_by_value([target_to_link_group[t] for t in node_data.deps]), - exported_deps = dedupe_by_value([target_to_link_group[t] for t in node_data.exported_deps]), - shared_lib = None, - exported_linker_flags = (node_data.linker_flags.exported_flags, node_data.linker_flags.exported_post_flags), - ) - continue - - if _is_prebuilt_shared(node_data): - shared_lib = _shared_lib_for_prebuilt_shared( - ctx, - cxx_toolchain, - target, - node_data, - linkable_nodes, - transitive_linkable_cache, - platform if len(merged_data_by_platform) > 1 else None, - ) - link_group_linkable_nodes[group] = LinkGroupLinkableNode( - link = node_data.link_infos[shlib_output_style].default, - deps = [], - exported_deps = [], - shared_lib = shared_lib, - # exported linker flags for shared libs are in their linkinfo itself and are not exported from dependents - exported_linker_flags = None, - ) - group_shared_libs[shared_lib.soname] = MergedSharedLibrary( - soname = shared_lib.soname, - lib = shared_lib, - apk_module = group_data.apk_module, - solib_constituents = [], - is_actually_merged = False, - primary_constituents = [target], - ) - continue - - exported_linker_flags = [] - exported_linker_post_flags = [] - links = [] - - if is_actually_merged and merge_data.glue_linkable: - links.append(set_link_info_link_whole(merge_data.glue_linkable[1])) - 
- solib_constituents = [] - group_deps = [] - group_exported_deps = [] - for key in group_data.constituents: - expect(target_to_link_group[key] == group) - node = linkable_nodes[key] - - default_solibs = list(node.shared_libs.keys()) - if not default_solibs and node.preferred_linkage == Linkage("static"): - default_solibs = [node.default_soname] - - for soname in default_solibs: - included_default_solibs[soname] = True - if node.include_in_android_mergemap: - solib_constituents.append(soname) - - node = linkable_nodes[key] - link_info = node.link_infos[archive_output_style].default - - # the propagated link info should already be wrapped with exported flags. - link_info = wrap_link_info( - link_info, - pre_flags = node.linker_flags.flags, - post_flags = node.linker_flags.post_flags, - ) - exported_linker_flags.extend(node.linker_flags.exported_flags) - exported_linker_post_flags.extend(node.linker_flags.exported_post_flags) - links.append(set_link_info_link_whole(link_info)) + for soname in default_solibs: + included_default_solibs[soname] = True + if node.include_in_android_mergemap: + solib_constituents.append(soname) - dep_groups = [target_to_link_group[dep] for dep in node.deps] - group_deps.extend([dep_group for dep_group in dep_groups if dep_group != group]) + node = linkable_nodes[key] + link_info = node.link_infos[archive_output_style].default - exported_dep_groups = [target_to_link_group[dep] for dep in node.exported_deps] - group_exported_deps.extend([dep_group for dep_group in exported_dep_groups if dep_group != group]) + # the propagated link info should already be wrapped with exported flags. 
+ link_info = wrap_link_info( + link_info, + pre_flags = node.linker_flags.flags, + post_flags = node.linker_flags.post_flags, + ) + exported_linker_flags.extend(node.linker_flags.exported_flags) + exported_linker_post_flags.extend(node.linker_flags.exported_post_flags) + links.append(set_link_info_link_whole(link_info)) - soname = group - if not is_actually_merged: - soname = linkable_nodes[group_data.constituents[0]].default_soname - debug_info.with_default_soname.append((soname, group_data.constituents[0])) + dep_groups = [target_to_link_group[dep] for dep in node.deps] + group_deps.extend([dep_group for dep_group in dep_groups if dep_group != group]) - output_path = _platform_output_path(soname, platform if len(merged_data_by_platform) > 1 else None) + exported_dep_groups = [target_to_link_group[dep] for dep in node.exported_deps] + group_exported_deps.extend([dep_group for dep_group in exported_dep_groups if dep_group != group]) - link_merge_info = LinkGroupMergeInfo( - label = group, - deps = dedupe_by_value(group_deps), - exported_deps = dedupe_by_value(group_exported_deps), - constituent_link_infos = links, - ) - link_args, shlib_deps, link_deps_graph = _create_merged_link_args( - root_target = link_merge_info, - linkable_nodes = link_group_linkable_nodes, - cxx_toolchain = cxx_toolchain, - ) + soname = group + if not is_actually_merged: + soname = linkable_nodes[group_data.constituents[0]].default_soname + debug_info.with_default_soname.append((soname, group_data.constituents[0])) - shared_lib = create_shared_lib( - ctx, - output_path = output_path, - soname = soname, - link_args = [link_args], - cxx_toolchain = cxx_toolchain, - shared_lib_deps = [link_group_linkable_nodes[label].shared_lib.soname for label in shlib_deps], - label = group_data.constituents[0], - can_be_asset = can_be_asset, - ) + output_path = _platform_output_path(soname, platform) - link_group_linkable_nodes[group] = LinkGroupLinkableNode( - link = LinkInfo( - name = soname, - pre_flags 
= exported_linker_flags, - linkables = [SharedLibLinkable( - lib = shared_lib.lib.output, - )], - post_flags = exported_linker_post_flags, - ), - deps = link_merge_info.deps, - exported_deps = link_merge_info.exported_deps, - shared_lib = shared_lib, - # exported linker flags for shared libs are in their linkinfo itself and are not exported from dependents - exported_linker_flags = None, - ) - group_shared_libs[soname] = MergedSharedLibrary( + link_merge_info = LinkGroupMergeInfo( + label = group, + deps = dedupe_by_value(group_deps), + exported_deps = dedupe_by_value(group_exported_deps), + constituent_link_infos = links, + ) + link_args, shlib_deps, link_deps_graph = _create_merged_link_args( + root_target = link_merge_info, + linkable_nodes = link_group_linkable_nodes, + cxx_toolchain = cxx_toolchain, + ) + + shared_lib = create_shared_lib( + ctx, + output_path = output_path, + soname = soname, + link_args = [link_args], + cxx_toolchain = cxx_toolchain, + shared_lib_deps = [link_group_linkable_nodes[label].shared_lib.soname for label in shlib_deps], + label = group_data.constituents[0], + can_be_asset = can_be_asset, + ) + + link_group_linkable_nodes[group] = LinkGroupLinkableNode( + link = LinkInfo( + name = soname, + pre_flags = exported_linker_flags, + linkables = [SharedLibLinkable( + lib = shared_lib.lib.output, + )], + post_flags = exported_linker_post_flags, + ), + deps = link_merge_info.deps, + exported_deps = link_merge_info.exported_deps, + shared_lib = shared_lib, + # exported linker flags for shared libs are in their linkinfo itself and are not exported from dependents + exported_linker_flags = None, + ) + group_shared_libs[soname] = MergedSharedLibrary( + soname = soname, + lib = shared_lib, + apk_module = group_data.apk_module, + solib_constituents = solib_constituents, + is_actually_merged = is_actually_merged, + primary_constituents = group_data.constituents, + ) + + debug_info.group_debug.setdefault( + group, + struct( soname = soname, - lib = 
shared_lib, - apk_module = group_data.apk_module, - solib_constituents = solib_constituents, - is_actually_merged = is_actually_merged, + merged = is_actually_merged, primary_constituents = group_data.constituents, - ) - - debug_info.group_debug.setdefault( - group, - struct( - soname = soname, - merged = is_actually_merged, - primary_constituents = group_data.constituents, - real_constituents = link_deps_graph.keys(), - shlib_deps = shlib_deps, - exported_linker_flags = exported_linker_flags, - exported_linker_post_flags = exported_linker_post_flags, - ), - ) + real_constituents = link_deps_graph.keys(), + shlib_deps = shlib_deps, + exported_linker_flags = exported_linker_flags, + exported_linker_post_flags = exported_linker_post_flags, + ), + ) - shared_libs_by_platform[platform] = group_shared_libs - debug_info.missing_default_solibs.extend([d for d in merge_data.default_shared_libs if d not in included_default_solibs]) + debug_info.missing_default_solibs.extend([d for d in default_shared_libs if d not in included_default_solibs]) - return MergedLinkables( - shared_libs_by_platform = shared_libs_by_platform, - debug_info = debug_info_by_platform, - ) + return group_shared_libs, debug_info # The current default link strategies don't produce enough information in the # SharedLibrary objects to perform relinking. To do that best, linking should be based on From c16299dd7896b89ea98a4d006058b72dcbeb34be Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Fri, 1 Mar 2024 20:43:53 -0800 Subject: [PATCH 0372/1133] Renaming in android_binary_native_library_rules Summary: Trying to make variables names more consistent for readability, just reusing names we already use for these in different contexts. 
Reviewed By: IanChilds Differential Revision: D53834065 fbshipit-source-id: 555348ea051877ebbf65b0cbaa8b9975ab962b5f --- .../android_binary_native_library_rules.bzl | 56 +++++++++---------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index b1893607a..2a9906496 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -108,14 +108,14 @@ def get_android_binary_native_library_info( ] included_shared_lib_targets = [] - platform_to_original_native_linkables = {} + original_shared_libs_by_platform = {} # dict[str, dict[str (soname), list[SharedLibrary]]] for platform, deps in deps_by_platform.items(): if platform == CPU_FILTER_FOR_PRIMARY_PLATFORM and platform not in ctx.attrs.cpu_filters: continue - native_linkables = get_native_linkables_by_default(ctx, platform, deps, shared_libraries_to_exclude) - included_shared_lib_targets.extend([lib.label.raw_target() for lib in native_linkables.values()]) - platform_to_original_native_linkables[platform] = native_linkables + shared_libs = get_default_shared_libs(ctx, deps, shared_libraries_to_exclude) + included_shared_lib_targets.extend([lib.label.raw_target() for lib in shared_libs.values()]) + original_shared_libs_by_platform[platform] = shared_libs if not all_prebuilt_native_library_dirs and not included_shared_lib_targets: enhance_ctx.debug_output("unstripped_native_libraries", ctx.actions.write("unstripped_native_libraries", [])) @@ -181,7 +181,7 @@ def get_android_binary_native_library_info( expect(glue_linkable.preferred_linkage == Linkage("static"), "buck2 currently only supports preferred_linkage='static' native_library_merge_glue") glue_linkables[platform] = (glue.label, glue_linkable.link_infos[LibOutputStyle("pic_archive")].default) - flattened_linkable_graphs_by_platform = {} + linkable_nodes_by_platform = {} 
native_library_merge_sequence = getattr(ctx.attrs, "native_library_merge_sequence", None) has_native_merging = native_library_merge_sequence or getattr(ctx.attrs, "native_library_merge_map", None) enable_relinker = getattr(ctx.attrs, "enable_relinker", False) @@ -197,11 +197,11 @@ def get_android_binary_native_library_info( graph_node_map = get_linkable_graph_node_map_func(linkable_graph)() linkables_debug = ctx.actions.write("linkables." + platform, list(graph_node_map.keys())) enhance_ctx.debug_output("linkables." + platform, linkables_debug) - flattened_linkable_graphs_by_platform[platform] = graph_node_map + linkable_nodes_by_platform[platform] = graph_node_map if native_library_merge_sequence: native_library_merge_input_file = ctx.actions.write_json("mergemap.input", { - "linkable_graphs_by_platform": encode_linkable_graph_for_mergemap(flattened_linkable_graphs_by_platform), + "linkable_graphs_by_platform": encode_linkable_graph_for_mergemap(linkable_nodes_by_platform), "native_library_merge_sequence": ctx.attrs.native_library_merge_sequence, "native_library_merge_sequence_blocklist": ctx.attrs.native_library_merge_sequence_blocklist or [], }) @@ -250,7 +250,7 @@ def get_android_binary_native_library_info( native_library_merge_debug_outputs["merge_sequence_output"] = native_library_merge_dir elif ctx.attrs.native_library_merge_map: merge_map_by_platform = {} - for platform, linkable_nodes in flattened_linkable_graphs_by_platform.items(): + for platform, linkable_nodes in linkable_nodes_by_platform.items(): merge_map = merge_map_by_platform.setdefault(platform, {}) merge_lib_to_fancy_regexes = { merge_lib: [regex(pattern, fancy = True) for pattern in patterns] @@ -274,22 +274,22 @@ def get_android_binary_native_library_info( fail("unreachable") shared_object_targets = {} - debug_info_by_platform = {} - shared_libs_by_platform = {} - for platform in platform_to_original_native_linkables: - shared_libs, debug_info = _get_merged_linkables_for_platform( + 
debug_info_by_platform = {} # dict[str, MergedLinkablesDebugInfo] + merged_shared_libs_by_platform = {} # dict[str, dict[str, MergedSharedLibrary]] + for platform in original_shared_libs_by_platform: + merged_shared_libs, debug_info = _get_merged_linkables_for_platform( ctx, ctx.attrs._cxx_toolchain[platform][CxxToolchainInfo], - platform if len(platform_to_original_native_linkables) > 1 else None, + platform if len(original_shared_libs_by_platform) > 1 else None, glue_linkable = glue_linkables[platform] if glue_linkables else None, - default_shared_libs = platform_to_original_native_linkables[platform], - linkable_nodes = flattened_linkable_graphs_by_platform[platform], + default_shared_libs = original_shared_libs_by_platform[platform], + linkable_nodes = linkable_nodes_by_platform[platform], merge_map = merge_map_by_platform[platform], apk_module_graph = get_module_from_target, ) debug_info_by_platform[platform] = debug_info - shared_libs_by_platform[platform] = shared_libs - for soname, lib in shared_libs.items(): + merged_shared_libs_by_platform[platform] = merged_shared_libs + for soname, lib in merged_shared_libs.items(): shared_object_targets[soname] = [str(target.raw_target()) for target in lib.primary_constituents] debug_data_json = ctx.actions.write_json("native_merge_debug.json", debug_info_by_platform, pretty = True) @@ -302,7 +302,7 @@ def get_android_binary_native_library_info( native_library_merge_debug_outputs["shared_object_targets.txt"] = shared_object_targets_txt if mergemap_gencode_jar: - merged_library_map = write_merged_library_map(ctx, shared_libs_by_platform) + merged_library_map = write_merged_library_map(ctx, merged_shared_libs_by_platform) mergemap_gencode = run_mergemap_codegen(ctx, merged_library_map) compile_to_jar(ctx, [mergemap_gencode], output = outputs[mergemap_gencode_jar]) native_library_merge_debug_outputs["NativeLibraryMergeGeneratedCode.java"] = mergemap_gencode @@ -311,26 +311,26 @@ def get_android_binary_native_library_info( 
ctx.actions.symlinked_dir(outputs[native_merge_debug], native_library_merge_debug_outputs) - final_platform_to_native_linkables = { + final_shared_libs_by_platform = { platform: {soname: d.lib for soname, d in merged_shared_libs.items()} - for platform, merged_shared_libs in shared_libs_by_platform.items() + for platform, merged_shared_libs in merged_shared_libs_by_platform.items() } elif enable_relinker: - final_platform_to_native_linkables, native_library_merge_debug_outputs = _create_all_relinkable_links( + final_shared_libs_by_platform, native_library_merge_debug_outputs = _create_all_relinkable_links( ctx, - platform_to_original_native_linkables, - flattened_linkable_graphs_by_platform, + original_shared_libs_by_platform, + linkable_nodes_by_platform, ) ctx.actions.symlinked_dir(outputs[native_merge_debug], native_library_merge_debug_outputs) else: - final_platform_to_native_linkables = platform_to_original_native_linkables + final_shared_libs_by_platform = original_shared_libs_by_platform if enable_relinker: - final_platform_to_native_linkables = relink_libraries(ctx, final_platform_to_native_linkables) + final_shared_libs_by_platform = relink_libraries(ctx, final_shared_libs_by_platform) unstripped_libs = {} - for platform, libs in final_platform_to_native_linkables.items(): + for platform, libs in final_shared_libs_by_platform.items(): for lib in libs.values(): unstripped_libs[lib.lib.output] = platform ctx.actions.write(outputs[unstripped_native_libraries], unstripped_libs.keys()) @@ -344,7 +344,7 @@ def get_android_binary_native_library_info( ctx, get_module_from_target, all_prebuilt_native_library_dirs, - final_platform_to_native_linkables, + final_shared_libs_by_platform, ) # Since we are using a dynamic action, we need to declare the outputs in advance. 
@@ -727,7 +727,7 @@ def _get_compressed_native_libs_as_assets( def _get_native_libs_as_assets_dir(module: str) -> str: return "assets/{}".format("lib" if is_root_module(module) else module) -def get_native_linkables_by_default(ctx: AnalysisContext, _platform: str, deps: list[Dependency], shared_libraries_to_exclude) -> dict[str, SharedLibrary]: +def get_default_shared_libs(ctx: AnalysisContext, deps: list[Dependency], shared_libraries_to_exclude) -> dict[str, SharedLibrary]: shared_library_info = merge_shared_libraries( ctx.actions, deps = filter(None, [x.get(SharedLibraryInfo) for x in deps]), From d53703d5ec6d1e8ad357bc3fa907e137213048bb Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Fri, 1 Mar 2024 20:43:53 -0800 Subject: [PATCH 0373/1133] Create split group mapping for merge sequences Summary: Create a mapping so that we can link original merge groups to final split group outputs when creating debug subtargets (without trying to guess based on split group names). Example mapping: `["libcoldstart_1.so": "libcoldstart.so"]` Reviewed By: IanChilds Differential Revision: D54226627 fbshipit-source-id: 11658138a9be83101237829578e25e128a2bae9c --- prelude/android/android_binary_native_library_rules.bzl | 2 ++ prelude/android/tools/merge_sequence.py | 6 ++++++ 2 files changed, 8 insertions(+) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 2a9906496..b2963a576 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -211,11 +211,13 @@ def get_android_binary_native_library_info( mergemap_cmd.add(cmd_args(apk_module_graph_file, format = "--apk-module-graph={}")) native_library_merge_dir = ctx.actions.declare_output("merge_sequence_output") native_library_merge_map = native_library_merge_dir.project("merge.map") + split_groups_map = native_library_merge_dir.project("split_groups.map") 
mergemap_cmd.add(cmd_args(native_library_merge_dir.as_output(), format = "--output={}")) ctx.actions.run(mergemap_cmd, category = "compute_mergemap") enhance_ctx.debug_output("compute_merge_sequence", native_library_merge_dir) dynamic_inputs.append(native_library_merge_map) + dynamic_inputs.append(split_groups_map) mergemap_gencode_jar = None if has_native_merging and ctx.attrs.native_library_merge_code_generator: diff --git a/prelude/android/tools/merge_sequence.py b/prelude/android/tools/merge_sequence.py index 4478e681b..6050467b2 100644 --- a/prelude/android/tools/merge_sequence.py +++ b/prelude/android/tools/merge_sequence.py @@ -683,6 +683,7 @@ def main() -> int: # noqa: C901 final_result = {} debug_results = {} + split_groups = {} mergemap_input = read_mergemap_input(args.mergemap_input) for platform, nodes in mergemap_input.nodes_by_platform.items(): ( @@ -704,6 +705,9 @@ def main() -> int: # noqa: C901 final_mapping[target] = None else: final_mapping[target] = final_lib_names[node.final_lib_key] + split_groups[ + final_lib_names[node.final_lib_key] + ] = node.base_library_name else: final_mapping[target] = str(target) debug_results[platform] = ( @@ -717,6 +721,8 @@ def main() -> int: # noqa: C901 pathlib.Path(args.output).mkdir(parents=True, exist_ok=True) with open(os.path.join(args.output, "merge.map"), "w") as outfile: json.dump(final_result, outfile, indent=2) + with open(os.path.join(args.output, "split_groups.map"), "w") as outfile: + json.dump(split_groups, outfile, indent=2) # When writing an output dir we also produce some debugging information. 
for platform, result in final_result.items(): From ec0e1edb47537e10e64206589d6df32b3d8a7e30 Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Fri, 1 Mar 2024 20:43:53 -0800 Subject: [PATCH 0374/1133] Add subtargets for individual merged native libraries Summary: For debugging It's useful to build individual merged (or unmerged) libraries as they are built for an app instead of waiting for all of them to build. Due to the fact that the final merged outputs are produced as dynamic outputs (determined after analysis), we aren't able to match subtarget names to final outputs exactly. - Split groups are not known at analysis time, so we expose a subtarget for each merge group, that builds all the split groups for a merge group - The libraries that are unmerged in the final output are unknown, so building ones that end up in merge group will build that merge group instead - Some merge groups aren't in the final output because they don't include anything, building these builds the native merge debug output Instead of linking to alternative outputs, we could trigger an action that always fails when building subtargets that aren't in the final output, but the error that would be produced is a bit hard to parse and not ergonomic to generate, so this seems a bit better. 
Reviewed By: IanChilds Differential Revision: D53834064 fbshipit-source-id: c633698605833c8664a634ee03a29572ccbc53f7 --- .../android_binary_native_library_rules.bzl | 104 +++++++++++++++++- prelude/android/util.bzl | 4 +- 2 files changed, 103 insertions(+), 5 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index b2963a576..1e106c443 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -165,7 +165,7 @@ def get_android_binary_native_library_info( dynamic_inputs = [fake_input] if apk_module_graph_file: dynamic_inputs.append(apk_module_graph_file) - native_library_merge_map = None + split_groups_map = None native_library_merge_dir = None native_merge_debug = None generated_java_code = [] @@ -183,7 +183,8 @@ def get_android_binary_native_library_info( linkable_nodes_by_platform = {} native_library_merge_sequence = getattr(ctx.attrs, "native_library_merge_sequence", None) - has_native_merging = native_library_merge_sequence or getattr(ctx.attrs, "native_library_merge_map", None) + native_library_merge_map = getattr(ctx.attrs, "native_library_merge_map", None) + has_native_merging = native_library_merge_sequence or native_library_merge_map enable_relinker = getattr(ctx.attrs, "enable_relinker", False) if has_native_merging or enable_relinker: @@ -199,6 +200,8 @@ def get_android_binary_native_library_info( enhance_ctx.debug_output("linkables." 
+ platform, linkables_debug) linkable_nodes_by_platform[platform] = graph_node_map + lib_outputs_by_platform = _declare_library_subtargets(ctx, dynamic_outputs, original_shared_libs_by_platform, native_library_merge_map, native_library_merge_sequence) + if native_library_merge_sequence: native_library_merge_input_file = ctx.actions.write_json("mergemap.input", { "linkable_graphs_by_platform": encode_linkable_graph_for_mergemap(linkable_nodes_by_platform), @@ -242,6 +245,8 @@ def get_android_binary_native_library_info( if apk_module_graph_file: get_module_from_target = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts).target_to_module_mapping_function + split_groups = None + merged_shared_lib_targets_by_platform = {} # dict[str, dict[Label, str]] if has_native_merging: native_library_merge_debug_outputs = {} @@ -249,6 +254,7 @@ def get_android_binary_native_library_info( # then set it as the binary's precomputed_apk_module_graph attr. if ctx.attrs.native_library_merge_sequence: merge_map_by_platform = artifacts[native_library_merge_map].read_json() + split_groups = artifacts[split_groups_map].read_json() native_library_merge_debug_outputs["merge_sequence_output"] = native_library_merge_dir elif ctx.attrs.native_library_merge_map: merge_map_by_platform = {} @@ -291,9 +297,14 @@ def get_android_binary_native_library_info( ) debug_info_by_platform[platform] = debug_info merged_shared_libs_by_platform[platform] = merged_shared_libs + merged_shared_lib_targets = {} for soname, lib in merged_shared_libs.items(): shared_object_targets[soname] = [str(target.raw_target()) for target in lib.primary_constituents] + for target in lib.primary_constituents: + merged_shared_lib_targets[target] = soname + merged_shared_lib_targets_by_platform[platform] = merged_shared_lib_targets + debug_data_json = ctx.actions.write_json("native_merge_debug.json", debug_info_by_platform, pretty = True) native_library_merge_debug_outputs["native_merge_debug.json"] = debug_data_json 
@@ -331,6 +342,8 @@ def get_android_binary_native_library_info( if enable_relinker: final_shared_libs_by_platform = relink_libraries(ctx, final_shared_libs_by_platform) + _link_library_subtargets(ctx, outputs, lib_outputs_by_platform, original_shared_libs_by_platform, final_shared_libs_by_platform, merged_shared_lib_targets_by_platform, split_groups, native_merge_debug) + unstripped_libs = {} for platform, libs in final_shared_libs_by_platform.items(): for lib in libs.values(): @@ -365,7 +378,8 @@ def get_android_binary_native_library_info( ctx.actions.dynamic_output(dynamic = dynamic_inputs, inputs = [], outputs = dynamic_outputs, f = dynamic_native_libs_info) all_native_libs = ctx.actions.symlinked_dir("debug_all_native_libs", {"others": native_libs, "primary": native_libs_always_in_primary_apk}) - enhance_ctx.debug_output("debug_native_libs", all_native_libs) + lib_subtargets = _create_library_subtargets(lib_outputs_by_platform, native_libs) + enhance_ctx.debug_output("native_libs", all_native_libs, sub_targets = lib_subtargets) if native_merge_debug: enhance_ctx.debug_output("native_merge_debug", native_merge_debug) @@ -383,6 +397,90 @@ def get_android_binary_native_library_info( generated_java_code = generated_java_code, ) +# Merged libraries are dynamic dependencies, but outputs need to be declared in advance to be used by subtargets. +# This means we have to declare outputs for all possible merged libs (every merged name and every unmerged library name). 
+def _declare_library_subtargets( + ctx: AnalysisContext, + dynamic_outputs: list[Artifact], + original_shared_libs_by_platform: dict[str, dict[str, SharedLibrary]], + native_library_merge_map, + native_library_merge_sequence) -> dict[str, dict[str, Artifact]]: + lib_outputs_by_platform = {} + for platform, original_shared_libs in original_shared_libs_by_platform.items(): + sonames = set() + sonames.update(original_shared_libs.keys()) + if native_library_merge_map: + sonames.update(native_library_merge_map.keys()) + elif native_library_merge_sequence: + sonames.update([soname for (soname, _) in native_library_merge_sequence]) + + lib_outputs = {} + for soname in sonames.list(): + output_path = _platform_output_path(soname, platform if len(original_shared_libs_by_platform) > 1 else None) + lib_output = ctx.actions.declare_output(output_path, dir = True) + dynamic_outputs.append(lib_output) + lib_outputs[soname] = lib_output + lib_outputs_by_platform[platform] = lib_outputs + return lib_outputs_by_platform + +# Bind debug library subtarget outputs to actual outputs. +# For individual libraries, link to either the unmerged or merged output. +# For merged libraries, link to either the merged output, or a symlinked dir of all merged split group outputs. 
+def _link_library_subtargets( + ctx: AnalysisContext, + outputs, # IndexSet[OutputArtifact] + lib_outputs_by_platform: dict[str, dict[str, Artifact]], # dict[platform, dict[soname, Artifact]] + original_shared_libs_by_platform: dict[str, dict[str, SharedLibrary]], + final_shared_libs_by_platform: dict[str, dict[str, SharedLibrary]], + merged_shared_lib_targets_by_platform: dict[str, dict[Label, str]], + split_groups: dict[str, str] | None, + native_merge_debug): + for platform, final_shared_libs in final_shared_libs_by_platform.items(): + merged_lib_outputs = {} + for soname, lib in final_shared_libs.items(): + base_soname = soname + if split_groups and soname in split_groups: + base_soname = split_groups[soname] + + group_outputs = merged_lib_outputs.setdefault(base_soname, {}) + group_outputs[soname] = lib.lib.output + + for soname, _ in lib_outputs_by_platform[platform].items(): + if soname in merged_lib_outputs: + group_outputs = merged_lib_outputs[soname] + elif soname in original_shared_libs_by_platform[platform]: + # link unmerged soname to merged output + original_shared_lib = original_shared_libs_by_platform[platform][soname] + merged_soname = merged_shared_lib_targets_by_platform[platform][original_shared_lib.label] + if split_groups and merged_soname in split_groups: + merged_soname = split_groups[merged_soname] + group_outputs = merged_lib_outputs[merged_soname] + else: + # merged group name has no constituents, link to debug output + group_outputs = {soname: native_merge_debug} + + ctx.actions.symlinked_dir(outputs[lib_outputs_by_platform[platform][soname]], group_outputs) + +def _create_library_subtargets(lib_outputs_by_platform: dict[str, dict[str, Artifact]], native_libs: Artifact): + if len(lib_outputs_by_platform) > 1: + return { + platform: [DefaultInfo(default_outputs = [native_libs], sub_targets = { + soname: [DefaultInfo(default_outputs = [output])] + for soname, output in lib_outputs.items() + })] + for platform, lib_outputs in 
lib_outputs_by_platform.items() + } + elif len(lib_outputs_by_platform) == 1: + lib_outputs = list(lib_outputs_by_platform.values())[0] + return { + soname: [DefaultInfo(default_outputs = [output])] + for soname, output in lib_outputs.items() + } + else: + # TODO(ctolliday) at this point we should have thrown an error earlier if no libraries matched cpu_filters + # (or returned earlier if there are no native library deps) + return {} + # We could just return two artifacts of libs (one for the primary APK, one which can go # either into the primary APK or be exopackaged), and one artifact of assets, # but we'd need an extra action in order to combine them (we can't use `symlinked_dir` since diff --git a/prelude/android/util.bzl b/prelude/android/util.bzl index 484b2cdda..6c6454452 100644 --- a/prelude/android/util.bzl +++ b/prelude/android/util.bzl @@ -21,9 +21,9 @@ EnhancementContext = record( def create_enhancement_context(ctx: AnalysisContext) -> EnhancementContext: extra_sub_targets = {} - def debug_output(name: str, output: Artifact, other_outputs = []): + def debug_output(name: str, output: Artifact, other_outputs = [], sub_targets: dict[str, typing.Any] = {}): """Adds a subtarget to expose debugging outputs.""" - extra_sub_targets[name] = [DefaultInfo(default_outputs = [output], other_outputs = other_outputs)] + extra_sub_targets[name] = [DefaultInfo(default_outputs = [output], other_outputs = other_outputs, sub_targets = sub_targets)] def get_sub_targets(): return extra_sub_targets From 830e7e37e27fdc9c4607465a3580a679c20f9437 Mon Sep 17 00:00:00 2001 From: Mark Vitale Date: Sat, 2 Mar 2024 07:42:29 -0800 Subject: [PATCH 0375/1133] Add flag for future provisioning profile parsing performance improvements Summary: We currently spin up a shell and run openssl on each provisioning profile in order to extract the plist content from a provisioning profile, which is quite slow. Let's introduce a flag that will allow us to speed up this process when supplied. 
Differential Revision: D54441264 fbshipit-source-id: 02cb52d8742ac84cf2f3fe7f8baccc058d001606 --- prelude/apple/apple_bundle_config.bzl | 1 + prelude/apple/apple_bundle_part.bzl | 3 +++ prelude/apple/apple_rules_impl_utility.bzl | 1 + prelude/apple/tools/bundling/main.py | 5 +++++ prelude/apple/tools/code_signing/main.py | 5 +++++ 5 files changed, 15 insertions(+) diff --git a/prelude/apple/apple_bundle_config.bzl b/prelude/apple/apple_bundle_config.bzl index 522bd6e0f..47376252f 100644 --- a/prelude/apple/apple_bundle_config.bzl +++ b/prelude/apple/apple_bundle_config.bzl @@ -23,6 +23,7 @@ def apple_bundle_config() -> dict[str, typing.Any]: # This is a kill switch for the feature, it can also be disabled by setting # `apple.fast_adhoc_signing_enabled=false` in a global buckconfig file. "_fast_adhoc_signing_enabled": _maybe_get_bool("fast_adhoc_signing_enabled", True), + "_fast_provisioning_profile_parsing_enabled": _maybe_get_bool("fast_provisioning_profile_parsing_enabled", False), "_incremental_bundling_enabled": _maybe_get_bool("incremental_bundling_enabled", True), "_info_plist_identify_build_system_default": _maybe_get_bool("info_plist_identify_build_system", True), "_profile_bundling_enabled": _maybe_get_bool("profile_bundling_enabled", False), diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index ff6427821..9ad42106c 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -173,6 +173,9 @@ def assemble_bundle( profile_output = ctx.actions.declare_output("bundling_profile.txt").as_output() command.add("--profile-output", profile_output) + if ctx.attrs._fast_provisioning_profile_parsing_enabled: + command.add("--fast-provisioning-profile-parsing") + subtargets = {} if ctx.attrs._bundling_log_file_enabled: bundling_log_output = ctx.actions.declare_output("bundling_log.txt") diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 
398b821fb..57fe14c1b 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -63,6 +63,7 @@ def _apple_bundle_like_common_attrs(): "_compile_resources_locally_override": attrs.option(attrs.bool(), default = None), "_dry_run_code_signing": attrs.bool(default = False), "_fast_adhoc_signing_enabled": attrs.bool(default = False), + "_fast_provisioning_profile_parsing_enabled": attrs.bool(default = False), "_incremental_bundling_enabled": attrs.bool(default = False), "_profile_bundling_enabled": attrs.bool(default = False), # FIXME: prelude// should be standalone (not refer to fbsource//) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index 5f9beec8d..72f948832 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -237,6 +237,11 @@ def _args_parser() -> argparse.ArgumentParser: action="store_true", help="Check there are no path conflicts between different source parts of the bundle if enabled.", ) + parser.add_argument( + "--fast-provisioning-profile-parsing", + action="store_true", + help="Uses experimental faster provisioning profile parsing.", + ) return parser diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index c0faf2d74..d634785bd 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -78,6 +78,11 @@ def _args_parser() -> argparse.ArgumentParser: required=False, help="Bundle relative path that should be codesigned prior to result bundle.", ) + parser.add_argument( + "--fast-provisioning-profile-parsing", + action="store_true", + help="Uses experimental faster provisioning profile parsing.", + ) return parser From 2fbabca07af574c65fa1f75b06a5b7e6e3763316 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sat, 2 Mar 2024 17:12:31 -0800 Subject: [PATCH 0376/1133] docs: Fix some unintended octal escapes Summary: Pointed out by a user in 
https://github.com/facebook/buck2/issues/579 Reviewed By: lmvasquezg Differential Revision: D54454923 fbshipit-source-id: 01ac931314a4f1ef7aaf340060fead7b2434bcbb --- prelude/decls/core_rules.bzl | 2 +- prelude/decls/cxx_rules.bzl | 4 ++-- prelude/decls/ios_rules.bzl | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/prelude/decls/core_rules.bzl b/prelude/decls/core_rules.bzl index 342f1e7e8..3fe843546 100644 --- a/prelude/decls/core_rules.bzl +++ b/prelude/decls/core_rules.bzl @@ -752,7 +752,7 @@ http_archive = prelude_rule( * foo\\_prime/bar-0.1.2 Only `data.dat` will be extracted, and it will be extracted into the output - directory specified in\302\240`http\\_archive()out`. + directory specified in `out`. """), "excludes": attrs.list(attrs.regex(), default = [], doc = """ An optional list of regex patterns. All file paths in the extracted archive which match diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index 8234ff2d6..d0c9771b2 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -136,8 +136,8 @@ cxx_genrule = prelude_rule( name = "cxx_genrule", docs = """ A `cxx_genrule()` enables you to run shell commands as part - of the Buck build process. A `cxx_genrule()` exposes\342\200\224through - a set of string parameter macros and variables\342\200\224information about the + of the Buck build process. A `cxx_genrule()` exposes - through + a set of string parameter macros and variables - information about the tools and configuration options used by the Buck environment, specifically those related to the C/C++ toolchain. 
diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index 4a4608d7a..44fee26c4 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -83,7 +83,7 @@ apple_asset_catalog = prelude_rule( apple_binary = prelude_rule( name = "apple_binary", docs = """ - An `apple_binary()` rule builds a native executable\342\200\224such as an iOS or OSX app\342\200\224from + An `apple_binary()` rule builds a native executable - such as an iOS or OSX app - from the supplied set of Objective-C/C++ source files and dependencies. It is similar to a `cxx\\_binary()`rule with which it shares many attributes. In addition to those common attributes, `apple_binary()` has a some additional attributes From 454ccf8ad8c06434e07632430afb47979c8bcf3a Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Sat, 2 Mar 2024 17:31:19 -0800 Subject: [PATCH 0377/1133] apply Black 2024 style in fbcode (10/16) Summary: Formats the covered files with pyfmt. paintitblack Reviewed By: aleivag Differential Revision: D54447733 fbshipit-source-id: 11ac742489579bb1dfec025514aa956159cf4959 --- prelude/apple/tools/bundling/incremental_state.py | 6 +++--- prelude/apple/tools/bundling/incremental_utils.py | 2 +- prelude/apple/tools/code_signing/identity.py | 8 +++++--- .../prepare_code_signing_entitlements.py | 1 + .../apple/tools/code_signing/prepare_info_plist.py | 1 + .../code_signing/provisioning_profile_diagnostics.py | 8 ++++++-- .../code_signing/provisioning_profile_selection.py | 2 +- .../apple/tools/info_plist_processor/preprocess.py | 12 +++++++----- prelude/apple/tools/info_plist_processor/process.py | 1 + prelude/cxx/dist_lto/tools/dist_lto_opt.py | 1 + prelude/cxx/tools/show_headers_to_dep_file.py | 1 + prelude/cxx/tools/show_includes_to_dep_file.py | 2 ++ prelude/python/tools/make_par/__run_lpar_main__.py | 1 + 13 files changed, 31 insertions(+), 15 deletions(-) diff --git a/prelude/apple/tools/bundling/incremental_state.py 
b/prelude/apple/tools/bundling/incremental_state.py index e2bd67fbb..bcc890117 100644 --- a/prelude/apple/tools/bundling/incremental_state.py +++ b/prelude/apple/tools/bundling/incremental_state.py @@ -54,9 +54,9 @@ def default(self, o: object) -> object: return { "items": [self.default(i) for i in o.items], "codesigned": o.codesigned, - "codesign_configuration": o.codesign_configuration.value - if o.codesign_configuration - else None, + "codesign_configuration": ( + o.codesign_configuration.value if o.codesign_configuration else None + ), "codesign_on_copy_paths": [str(p) for p in o.codesign_on_copy_paths], "codesign_identity": o.codesign_identity, "swift_stdlib_paths": [str(p) for p in o.swift_stdlib_paths], diff --git a/prelude/apple/tools/bundling/incremental_utils.py b/prelude/apple/tools/bundling/incremental_utils.py index df5af7584..bb1f18d1d 100644 --- a/prelude/apple/tools/bundling/incremental_utils.py +++ b/prelude/apple/tools/bundling/incremental_utils.py @@ -111,7 +111,7 @@ def calculate_incremental_state( """ result = [] source_with_destination_files = _source_with_destination_files(spec) - for (src, dst) in source_with_destination_files: + for src, dst in source_with_destination_files: is_symlink = src.is_symlink() new_digest = _get_new_digest(action_metadata, src) if not is_symlink else None resolved_symlink = Path(os.readlink(src)) if is_symlink else None diff --git a/prelude/apple/tools/code_signing/identity.py b/prelude/apple/tools/code_signing/identity.py index ed6ba5827..35e5006bd 100644 --- a/prelude/apple/tools/code_signing/identity.py +++ b/prelude/apple/tools/code_signing/identity.py @@ -22,9 +22,11 @@ class _ReGroupName(str, Enum): fingerprint = "fingerprint" subject_common_name = "subject_common_name" - _re_string: str = '(?P<{fingerprint}>[A-F0-9]{{40}}) "(?P<{subject_common_name}>.+)"(?!.*CSSMERR_.+)'.format( - fingerprint=_ReGroupName.fingerprint, - subject_common_name=_ReGroupName.subject_common_name, + _re_string: str = ( + 
'(?P<{fingerprint}>[A-F0-9]{{40}}) "(?P<{subject_common_name}>.+)"(?!.*CSSMERR_.+)'.format( + fingerprint=_ReGroupName.fingerprint, + subject_common_name=_ReGroupName.subject_common_name, + ) ) _pattern: re.Pattern[str] = re.compile(_re_string) diff --git a/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py b/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py index 90ffe1c21..a1e92299c 100644 --- a/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py +++ b/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py @@ -15,6 +15,7 @@ from .provisioning_profile_metadata import ProvisioningProfileMetadata + # Buck v1 corresponding code is in `ProvisioningProfileCopyStep::execute` in `ProvisioningProfileCopyStep.java` def prepare_code_signing_entitlements( entitlements_path: Optional[Path], diff --git a/prelude/apple/tools/code_signing/prepare_info_plist.py b/prelude/apple/tools/code_signing/prepare_info_plist.py index 8130a949e..6bd03d505 100644 --- a/prelude/apple/tools/code_signing/prepare_info_plist.py +++ b/prelude/apple/tools/code_signing/prepare_info_plist.py @@ -15,6 +15,7 @@ from .info_plist_metadata import InfoPlistMetadata from .provisioning_profile_metadata import ProvisioningProfileMetadata + # Buck v1 corresponding code is in `ProvisioningProfileCopyStep::execute` in `ProvisioningProfileCopyStep.java` def prepare_info_plist( info_plist: Path, diff --git a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py index 625a850a7..fa207321e 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py @@ -15,11 +15,15 @@ from .provisioning_profile_metadata import ProvisioningProfileMetadata -META_IOS_DEVELOPER_CERTIFICATE_LINK: str = 
"https://www.internalfb.com/intern/qa/5198/how-do-i-get-the-fb-ios-developer-certificate" +META_IOS_DEVELOPER_CERTIFICATE_LINK: str = ( + "https://www.internalfb.com/intern/qa/5198/how-do-i-get-the-fb-ios-developer-certificate" +) META_IOS_PROVISIONING_PROFILES_LINK: str = ( "https://www.internalfb.com/intern/apple/download-provisioning-profile/" ) -META_IOS_BUILD_AND_RUN_ON_DEVICE_LINK: str = "https://www.internalfb.com/intern/wiki/Ios-first-steps/running-on-device/#2-register-your-device-i" +META_IOS_BUILD_AND_RUN_ON_DEVICE_LINK: str = ( + "https://www.internalfb.com/intern/wiki/Ios-first-steps/running-on-device/#2-register-your-device-i" +) class IProvisioningProfileDiagnostics(metaclass=ABCMeta): diff --git a/prelude/apple/tools/code_signing/provisioning_profile_selection.py b/prelude/apple/tools/code_signing/provisioning_profile_selection.py index 7358b2d53..e0747a581 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_selection.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_selection.py @@ -98,7 +98,7 @@ def _check_entitlements_match( ) -> Tuple[bool, Optional[EntitlementsMismatch]]: if expected_entitlements is None: return (True, None) - for (key, value) in expected_entitlements.items(): + for key, value in expected_entitlements.items(): profile_entitlement = profile.entitlements.get(key) if (key not in _IGNORE_MISMATCH_ENTITLEMENTS_KEYS) and ( not _matches_or_array_is_subset_of( diff --git a/prelude/apple/tools/info_plist_processor/preprocess.py b/prelude/apple/tools/info_plist_processor/preprocess.py index 36cf9b231..cdf87febf 100644 --- a/prelude/apple/tools/info_plist_processor/preprocess.py +++ b/prelude/apple/tools/info_plist_processor/preprocess.py @@ -18,11 +18,13 @@ class _ReGroupName(str, Enum): closeparen = "closeparen" -_re_string: str = "\\$(?P<{openparen}>[\\{{\\(])(?P<{variable}>[^\\}}\\):]+)(?::(?P<{modifier}>[^\\}}\\)]+))?(?P<{closeparen}>[\\}}\\)])".format( - openparen=_ReGroupName.openparen, - 
variable=_ReGroupName.variable, - modifier=_ReGroupName.modifier, - closeparen=_ReGroupName.closeparen, +_re_string: str = ( + "\\$(?P<{openparen}>[\\{{\\(])(?P<{variable}>[^\\}}\\):]+)(?::(?P<{modifier}>[^\\}}\\)]+))?(?P<{closeparen}>[\\}}\\)])".format( + openparen=_ReGroupName.openparen, + variable=_ReGroupName.variable, + modifier=_ReGroupName.modifier, + closeparen=_ReGroupName.closeparen, + ) ) diff --git a/prelude/apple/tools/info_plist_processor/process.py b/prelude/apple/tools/info_plist_processor/process.py index bca05d93a..9161063f8 100644 --- a/prelude/apple/tools/info_plist_processor/process.py +++ b/prelude/apple/tools/info_plist_processor/process.py @@ -11,6 +11,7 @@ from apple.tools.plistlib_utils import detect_format_and_load + # Corresponding v1 code is contained in `com/facebook/buck/apple/PlistProcessStep.java`, `PlistProcessStep::execute` method. def _merge_plist_dicts( source: Dict[str, Any], destination: Dict[str, Any], override: bool = False diff --git a/prelude/cxx/dist_lto/tools/dist_lto_opt.py b/prelude/cxx/dist_lto/tools/dist_lto_opt.py index bd8c7d4e4..2fcad7f4e 100644 --- a/prelude/cxx/dist_lto/tools/dist_lto_opt.py +++ b/prelude/cxx/dist_lto/tools/dist_lto_opt.py @@ -24,6 +24,7 @@ EXIT_SUCCESS, EXIT_FAILURE = 0, 1 + # Filter opt related flags def _filter_flags(clang_flags: List[str]) -> List[str]: # noqa: C901 # List of llvm flags to be ignored. diff --git a/prelude/cxx/tools/show_headers_to_dep_file.py b/prelude/cxx/tools/show_headers_to_dep_file.py index b2bf4900e..7b668c46c 100644 --- a/prelude/cxx/tools/show_headers_to_dep_file.py +++ b/prelude/cxx/tools/show_headers_to_dep_file.py @@ -14,6 +14,7 @@ import dep_file_utils + # output_path -> path to write the dep file to # cmd_args -> command to be run to get dependencies from compiler # input_file -> Path to the file we're generating the dep file for. 
We need this since diff --git a/prelude/cxx/tools/show_includes_to_dep_file.py b/prelude/cxx/tools/show_includes_to_dep_file.py index a525789a9..ff25b3327 100644 --- a/prelude/cxx/tools/show_includes_to_dep_file.py +++ b/prelude/cxx/tools/show_includes_to_dep_file.py @@ -11,6 +11,8 @@ import dep_file_utils DEP_PREFIX = "Note: including file:" + + # output_path -> path to write the dep field to # cmd_args -> command to be run to get dependencies from compiler # source_file -> Path to the file we're generating the dep file for. We need this since diff --git a/prelude/python/tools/make_par/__run_lpar_main__.py b/prelude/python/tools/make_par/__run_lpar_main__.py index 13de11aaa..48da2bbdf 100644 --- a/prelude/python/tools/make_par/__run_lpar_main__.py +++ b/prelude/python/tools/make_par/__run_lpar_main__.py @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. + # # Put everything inside an __invoke_main() function. # This way anything we define won't pollute globals(), since runpy From b7a7a99ae68b5c9f44e19aa00b45067daa603497 Mon Sep 17 00:00:00 2001 From: Mark Vitale Date: Sat, 2 Mar 2024 19:47:11 -0800 Subject: [PATCH 0378/1133] Fast provisioning profile reading Summary: We currently spin up a shell and run openssl on each provisioning profile in order to extract the plist content from a provisioning profile, which is quite slow. In practice, we can just extract the bytes that occur between `<?xml` and `</plist>` and achieve the same thing much faster. This isn't the most elegant solution, but by avoiding spinning up ~400+ shells invoking openssl on each profile, we can see meaningful performance improvements in codesigning.
Differential Revision: D54390550 fbshipit-source-id: 0facf313a78958d931b2eb87b3ec08386016aad1 --- prelude/apple/tools/bundling/main.py | 2 + .../tools/code_signing/codesign_bundle.py | 42 ++++++++++++++++++- prelude/apple/tools/code_signing/main.py | 1 + 3 files changed, 43 insertions(+), 2 deletions(-) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index 72f948832..19378bec5 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -308,6 +308,7 @@ def _main() -> None: subject_common_name=args.ad_hoc_codesign_identity, ), log_file_path=args.log_file, + should_use_fast_provisioning_profile_parsing=args.fast_provisioning_profile_parsing, ) else: profile_selection_context = None @@ -329,6 +330,7 @@ def _main() -> None: platform=args.platform, list_codesign_identities=list_codesign_identities, log_file_path=args.log_file, + should_use_fast_provisioning_profile_parsing=args.fast_provisioning_profile_parsing, ) selected_identity_argument = ( signing_context.selected_profile_info.identity.fingerprint diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index ebc3f1e8b..501033f72 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -59,12 +59,15 @@ def _select_provisioning_profile( entitlements_path: Optional[Path], platform: ApplePlatform, list_codesign_identities: IListCodesignIdentities, + should_use_fast_provisioning_profile_parsing: bool, read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory = _default_read_provisioning_profile_command_factory, log_file_path: Optional[Path] = None, ) -> SelectedProvisioningProfileInfo: identities = list_codesign_identities.list_codesign_identities() provisioning_profiles = _read_provisioning_profiles( - provisioning_profiles_dir, read_provisioning_profile_command_factory + 
provisioning_profiles_dir, + read_provisioning_profile_command_factory, + should_use_fast_provisioning_profile_parsing, ) if not provisioning_profiles: raise CodeSignProvisioningError( @@ -125,6 +128,7 @@ def signing_context_with_profile_selection( platform: ApplePlatform, list_codesign_identities: IListCodesignIdentities, log_file_path: Optional[Path] = None, + should_use_fast_provisioning_profile_parsing: bool = False, ) -> SigningContextWithProfileSelection: with open(info_plist_source, mode="rb") as info_plist_file: info_plist_metadata = InfoPlistMetadata.from_file(info_plist_file) @@ -135,6 +139,7 @@ def signing_context_with_profile_selection( platform=platform, list_codesign_identities=list_codesign_identities, log_file_path=log_file_path, + should_use_fast_provisioning_profile_parsing=should_use_fast_provisioning_profile_parsing, ) return SigningContextWithProfileSelection( @@ -263,11 +268,16 @@ def _prepare_entitlements_and_info_plist( def _read_provisioning_profiles( dirpath: Path, read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, + should_use_fast_provisioning_profile_parsing: bool, ) -> List[ProvisioningProfileMetadata]: + _LOGGER.info( + f"Fast provisioning profile parsing enabled: {should_use_fast_provisioning_profile_parsing}" + ) return [ _provisioning_profile_from_file_path( dirpath / f, read_provisioning_profile_command_factory, + should_use_fast_provisioning_profile_parsing, ) for f in os.listdir(dirpath) if (f.endswith(".mobileprovision") or f.endswith(".provisionprofile")) @@ -277,8 +287,36 @@ def _read_provisioning_profiles( def _provisioning_profile_from_file_path( path: Path, read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, + should_use_fast_provisioning_profile_parsing: bool, +) -> ProvisioningProfileMetadata: + if should_use_fast_provisioning_profile_parsing: + # Provisioning profiles have a plist embedded in them that we can extract directly. 
+ # This is much faster than calling an external command like openssl. + with open(path, "rb") as f: + content = f.read() + start_index = content.find(b"<?xml") + end_index = content.find(b"</plist>", start_index) + len(b"</plist>") + if start_index >= 0 and end_index >= 0: + plist_data = content[start_index:end_index] + return ProvisioningProfileMetadata.from_provisioning_profile_file_content( + path, plist_data + ) + else: + _LOGGER.warning( + f"Failed to find plist in provisioning profile at {path}. Falling back to slow parsing." + ) + + # Fallback to slow parsing if fast parsing is disabled or fails + return _provisioning_profile_from_file_path_using_factory( + path, read_provisioning_profile_command_factory + ) + + +def _provisioning_profile_from_file_path_using_factory( + path: Path, + read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, ) -> ProvisioningProfileMetadata: - output = subprocess.check_output( + output: bytes = subprocess.check_output( read_provisioning_profile_command_factory.read_provisioning_profile_command( path ), diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index d634785bd..98dfbafe9 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -110,6 +110,11 @@ def _main() -> None: entitlements_path=args.entitlements, list_codesign_identities=ListCodesignIdentities.default(), platform=args.platform, + should_use_fast_provisioning_profile_parsing=args.fast_provisioning_profile_parsing, ) codesign_bundle( bundle_path=args.bundle_path, From 08b78516fcb21599b2542b3998eb7472dcb993c9 Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Sun, 3 Mar 2024 20:23:44 -0800 Subject: [PATCH 0379/1133] Add native_library_merge_linker_args for android_binary Summary: `native_library_merge_linker_args` allows specifying a map that looks like `{"libmerged.so": ["--linker-arg"]}` to pass linker args to final merged outputs which can be merge group or split group names.
To supply mip profile data to the linker, we need a way to specify linker args for merged shared libraries. Currently mip profiles use some automation to identify a representative library in each shared library, and pass the args to individual libraries, but it's a bit fragile and depends on debug outputs. Reviewed By: IanChilds Differential Revision: D53821751 fbshipit-source-id: 7ea420b5c3f2c115953858435c3ac68b5baf844c --- prelude/android/android.bzl | 2 ++ prelude/android/android_binary_native_library_rules.bzl | 7 ++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 484b25b3a..19456b1cb 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -94,6 +94,7 @@ extra_attributes = { "module_manifest_skeleton": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None), "native_library_merge_code_generator": attrs.option(attrs.exec_dep(), default = None), "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), + "native_library_merge_linker_args": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.arg())), default = None), "strip_libraries": attrs.bool(default = not DISABLE_STRIPPING), "_android_toolchain": toolchains_common.android(), "_cxx_toolchain": attrs.split_transition_dep(cfg = cpu_split_transition, default = "toolchains//:android-hack"), @@ -124,6 +125,7 @@ extra_attributes = { "module_manifest_skeleton": attrs.option(attrs.one_of(attrs.transition_dep(cfg = cpu_transition), attrs.source()), default = None), "native_library_merge_code_generator": attrs.option(attrs.exec_dep(), default = None), "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), + "native_library_merge_linker_args": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.arg())), default = None), 
"_android_toolchain": toolchains_common.android(), "_cxx_toolchain": attrs.split_transition_dep(cfg = cpu_split_transition, default = "toolchains//:android-hack"), "_dex_toolchain": toolchains_common.dex(), diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 1e106c443..88a8f85e1 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -293,6 +293,7 @@ def get_android_binary_native_library_info( default_shared_libs = original_shared_libs_by_platform[platform], linkable_nodes = linkable_nodes_by_platform[platform], merge_map = merge_map_by_platform[platform], + merge_linker_args = ctx.attrs.native_library_merge_linker_args or {}, apk_module_graph = get_module_from_target, ) debug_info_by_platform[platform] = debug_info @@ -1046,6 +1047,7 @@ def _get_merged_linkables_for_platform( default_shared_libs: dict[str, SharedLibrary], linkable_nodes: dict[Label, LinkableNode], merge_map: dict[str, [str, None]], + merge_linker_args: dict[str, typing.Any], apk_module_graph: typing.Callable) -> (dict[str, MergedSharedLibrary], MergedLinkablesDebugInfo): """ This takes the merge mapping and constructs the resulting merged shared libraries. 
@@ -1277,12 +1279,15 @@ def _get_merged_linkables_for_platform( linkable_nodes = link_group_linkable_nodes, cxx_toolchain = cxx_toolchain, ) + link_args = [link_args] + if soname in merge_linker_args: + link_args += [LinkArgs(infos = [LinkInfo(pre_flags = merge_linker_args[soname])])] shared_lib = create_shared_lib( ctx, output_path = output_path, soname = soname, - link_args = [link_args], + link_args = link_args, cxx_toolchain = cxx_toolchain, shared_lib_deps = [link_group_linkable_nodes[label].shared_lib.soname for label in shlib_deps], label = group_data.constituents[0], From 1e040452d14354999b482773944c706af2e2ed34 Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Sun, 3 Mar 2024 20:23:44 -0800 Subject: [PATCH 0380/1133] Add 'unrelinked' native lib subtargets Summary: It's useful to build individual native libraries both before and after running the relinker (suggested by smeenai) Reviewed By: IanChilds Differential Revision: D54402651 fbshipit-source-id: d4e32bb9d072d9d3f3a9bd21667da6d522b5e582 --- .../android_binary_native_library_rules.bzl | 57 ++++++++++++++----- 1 file changed, 44 insertions(+), 13 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 88a8f85e1..58b46e697 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -200,7 +200,7 @@ def get_android_binary_native_library_info( enhance_ctx.debug_output("linkables." 
+ platform, linkables_debug) linkable_nodes_by_platform[platform] = graph_node_map - lib_outputs_by_platform = _declare_library_subtargets(ctx, dynamic_outputs, original_shared_libs_by_platform, native_library_merge_map, native_library_merge_sequence) + lib_outputs_by_platform = _declare_library_subtargets(ctx, dynamic_outputs, original_shared_libs_by_platform, native_library_merge_map, native_library_merge_sequence, enable_relinker) if native_library_merge_sequence: native_library_merge_input_file = ctx.actions.write_json("mergemap.input", { @@ -341,7 +341,9 @@ def get_android_binary_native_library_info( final_shared_libs_by_platform = original_shared_libs_by_platform if enable_relinker: + unrelinked_shared_libs_by_platform = final_shared_libs_by_platform final_shared_libs_by_platform = relink_libraries(ctx, final_shared_libs_by_platform) + _link_library_subtargets(ctx, outputs, lib_outputs_by_platform, original_shared_libs_by_platform, unrelinked_shared_libs_by_platform, merged_shared_lib_targets_by_platform, split_groups, native_merge_debug, unrelinked = True) _link_library_subtargets(ctx, outputs, lib_outputs_by_platform, original_shared_libs_by_platform, final_shared_libs_by_platform, merged_shared_lib_targets_by_platform, split_groups, native_merge_debug) @@ -398,6 +400,11 @@ def get_android_binary_native_library_info( generated_java_code = generated_java_code, ) +_NativeLibSubtargetArtifacts = record( + default = Artifact, + unrelinked = Artifact | None, +) + # Merged libraries are dynamic dependencies, but outputs need to be declared in advance to be used by subtargets. # This means we have to declare outputs for all possible merged libs (every merged name and every unmerged library name). 
def _declare_library_subtargets( @@ -405,7 +412,8 @@ def _declare_library_subtargets( dynamic_outputs: list[Artifact], original_shared_libs_by_platform: dict[str, dict[str, SharedLibrary]], native_library_merge_map, - native_library_merge_sequence) -> dict[str, dict[str, Artifact]]: + native_library_merge_sequence, + enable_relinker: bool) -> dict[str, dict[str, _NativeLibSubtargetArtifacts]]: lib_outputs_by_platform = {} for platform, original_shared_libs in original_shared_libs_by_platform.items(): sonames = set() @@ -420,7 +428,20 @@ def _declare_library_subtargets( output_path = _platform_output_path(soname, platform if len(original_shared_libs_by_platform) > 1 else None) lib_output = ctx.actions.declare_output(output_path, dir = True) dynamic_outputs.append(lib_output) - lib_outputs[soname] = lib_output + if enable_relinker: + output_path = output_path + ".unrelinked" + unrelinked_lib_output = ctx.actions.declare_output(output_path, dir = True) + dynamic_outputs.append(unrelinked_lib_output) + lib_outputs[soname] = _NativeLibSubtargetArtifacts( + default = lib_output, + unrelinked = unrelinked_lib_output, + ) + else: + lib_outputs[soname] = _NativeLibSubtargetArtifacts( + default = lib_output, + unrelinked = None, + ) + lib_outputs_by_platform[platform] = lib_outputs return lib_outputs_by_platform @@ -430,12 +451,13 @@ def _declare_library_subtargets( def _link_library_subtargets( ctx: AnalysisContext, outputs, # IndexSet[OutputArtifact] - lib_outputs_by_platform: dict[str, dict[str, Artifact]], # dict[platform, dict[soname, Artifact]] + lib_outputs_by_platform: dict[str, dict[str, _NativeLibSubtargetArtifacts]], # dict[platform, dict[soname, _NativeLibSubtargetArtifacts]] original_shared_libs_by_platform: dict[str, dict[str, SharedLibrary]], final_shared_libs_by_platform: dict[str, dict[str, SharedLibrary]], merged_shared_lib_targets_by_platform: dict[str, dict[Label, str]], split_groups: dict[str, str] | None, - native_merge_debug): + native_merge_debug, + 
unrelinked: bool = False): for platform, final_shared_libs in final_shared_libs_by_platform.items(): merged_lib_outputs = {} for soname, lib in final_shared_libs.items(): @@ -446,7 +468,7 @@ def _link_library_subtargets( group_outputs = merged_lib_outputs.setdefault(base_soname, {}) group_outputs[soname] = lib.lib.output - for soname, _ in lib_outputs_by_platform[platform].items(): + for soname, lib_outputs in lib_outputs_by_platform[platform].items(): if soname in merged_lib_outputs: group_outputs = merged_lib_outputs[soname] elif soname in original_shared_libs_by_platform[platform]: @@ -460,13 +482,22 @@ def _link_library_subtargets( # merged group name has no constituents, link to debug output group_outputs = {soname: native_merge_debug} - ctx.actions.symlinked_dir(outputs[lib_outputs_by_platform[platform][soname]], group_outputs) + output = lib_outputs.default + if unrelinked: + output = lib_outputs.unrelinked + ctx.actions.symlinked_dir(outputs[output], group_outputs) + +def _create_library_subtargets(lib_outputs_by_platform: dict[str, dict[str, _NativeLibSubtargetArtifacts]], native_libs: Artifact): + def create_library_subtarget(output: _NativeLibSubtargetArtifacts): + if output.unrelinked: + sub_targets = {"unrelinked": [DefaultInfo(default_outputs = [output.unrelinked])]} + return [DefaultInfo(default_outputs = [output.default], sub_targets = sub_targets)] + return [DefaultInfo(default_outputs = [output.default])] -def _create_library_subtargets(lib_outputs_by_platform: dict[str, dict[str, Artifact]], native_libs: Artifact): if len(lib_outputs_by_platform) > 1: return { platform: [DefaultInfo(default_outputs = [native_libs], sub_targets = { - soname: [DefaultInfo(default_outputs = [output])] + soname: create_library_subtarget(output) for soname, output in lib_outputs.items() })] for platform, lib_outputs in lib_outputs_by_platform.items() @@ -474,7 +505,7 @@ def _create_library_subtargets(lib_outputs_by_platform: dict[str, dict[str, Arti elif 
len(lib_outputs_by_platform) == 1: lib_outputs = list(lib_outputs_by_platform.values())[0] return { - soname: [DefaultInfo(default_outputs = [output])] + soname: create_library_subtarget(output) for soname, output in lib_outputs.items() } else: @@ -1645,7 +1676,7 @@ def relink_libraries(ctx: AnalysisContext, libraries_by_platform: dict[str, dict create_relinker_version_script( ctx.actions, output = relinker_version_script, - relinker_blocklist = [regex(s) for s in ctx.attrs.relinker_whitelist], + relinker_allowlist = [regex(s) for s in ctx.attrs.relinker_whitelist], provided_symbols = provided_symbols_file, needed_symbols = needed_symbols_for_this, ) @@ -1673,7 +1704,7 @@ def relink_libraries(ctx: AnalysisContext, libraries_by_platform: dict[str, dict def extract_provided_symbols(ctx: AnalysisContext, toolchain: CxxToolchainInfo, lib: Artifact) -> Artifact: return extract_global_syms(ctx, toolchain, lib, "relinker_extract_provided_symbols") -def create_relinker_version_script(actions: AnalysisActions, relinker_blocklist: list[regex], output: Artifact, provided_symbols: Artifact, needed_symbols: list[Artifact]): +def create_relinker_version_script(actions: AnalysisActions, relinker_allowlist: list[regex], output: Artifact, provided_symbols: Artifact, needed_symbols: list[Artifact]): def create_version_script(ctx, artifacts, outputs): all_needed_symbols = {} for symbols_file in needed_symbols: @@ -1690,7 +1721,7 @@ def create_relinker_version_script(actions: AnalysisActions, relinker_blocklist: elif "Java_" in symbol: keep_symbol = True else: - for pattern in relinker_blocklist: + for pattern in relinker_allowlist: if pattern.match(symbol): keep_symbol = True break From e5d2f05a43dfb6683cdef288b78907c199b52a74 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Mon, 4 Mar 2024 04:09:26 -0800 Subject: [PATCH 0381/1133] Give MergeAssetsExecutableMain the ability to write modular assets APKs Summary: This changes `MergeAssetsUtils` to accept a map of modules-to-assets. 
Assets in the "root" module are treated the same as they are now (i.e. put into the APK that is being merged). For assets in a non-root module, we create a new `assets.ap_` and put it into `/module`. Reviewed By: bchang7 Differential Revision: D54419324 fbshipit-source-id: 4da870f53a4842283697f0785cf6aba77cb44865 --- prelude/android/android_binary_resources_rules.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl index 0ba6d6e75..830cf86d6 100644 --- a/prelude/android/android_binary_resources_rules.bzl +++ b/prelude/android/android_binary_resources_rules.bzl @@ -563,7 +563,7 @@ def _merge_assets( merge_assets_cmd.add(["--base-apk", base_apk]) merged_assets_output_hash = None - assets_dirs_file = ctx.actions.write("assets_dirs", assets_dirs) + assets_dirs_file = ctx.actions.write_json("assets_dirs.json", {ROOT_MODULE: assets_dirs}) merge_assets_cmd.add(["--assets-dirs", assets_dirs_file]) merge_assets_cmd.hidden(assets_dirs) From 7f1662920738cc955f6e07f4f391988c2ebcd8b4 Mon Sep 17 00:00:00 2001 From: Cameron Pickett Date: Mon, 4 Mar 2024 14:16:48 -0800 Subject: [PATCH 0382/1133] Migrate non-fbcode sysroot mechanism to new buck2 one Summary: Following the fbcode buckified sysroot migration, this diff moves the non-fbcode usages of rust in buck2 to our new toolchain-supported mechanism. With this change, the rust_sysroot_decorator is unnecessary in buck2, and remains around just to support the existing buck1 usages on android and iOS. Once those migrate in Q3, we can delete the decorator entirely. 
Reviewed By: zertosh, JakobDegen Differential Revision: D54130104 fbshipit-source-id: 0ee703008fe9fbdd8b706215eccbe526faf1f4c0 --- prelude/rust/build.bzl | 15 +++++++++++++-- prelude/rust/context.bzl | 1 + 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 1b0c06f61..07052475e 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -135,6 +135,17 @@ def compile_context(ctx: AnalysisContext) -> CompileContext: panic_runtime = toolchain_info.panic_runtime, ) + # When we pass explicit sysroot deps, we need to override the default sysroot to avoid accidentally + # linking against the prebuilt sysroot libs provided by the toolchain. Rustc requires a specific layout + # for these libs, so we need to carefully recreate the directory structure below. + if toolchain_info.explicit_sysroot_deps: + empty_dir = ctx.actions.copied_dir("empty_dir", {}) + empty_sysroot = ctx.actions.copied_dir("empty_sysroot", {"lib/rustlib/" + toolchain_info.rustc_target_triple + "/lib": empty_dir}) + + sysroot_args = cmd_args("--sysroot=", empty_sysroot, delimiter = "") + else: + sysroot_args = cmd_args() + return CompileContext( toolchain_info = toolchain_info, cxx_toolchain_info = cxx_toolchain_info, @@ -144,6 +155,7 @@ def compile_context(ctx: AnalysisContext) -> CompileContext: clippy_wrapper = clippy_wrapper, common_args = {}, transitive_dependency_dirs = {}, + sysroot_args = sysroot_args, ) def generate_rustdoc( @@ -985,7 +997,6 @@ def _compute_common_args( # TODO: SplitDebugMode("split"): ["-Csplit-debuginfo=unpacked"], }[compile_ctx.cxx_toolchain_info.split_debug_mode or SplitDebugMode("none")] - null_path = "nul" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "/dev/null" args = cmd_args( cmd_args(compile_ctx.symlinked_srcs, path_sep, crate_root, delimiter = ""), crate_name_arg, @@ -998,7 +1009,7 @@ def _compute_common_args( ["-Cprefer-dynamic=yes"] if crate_type == CrateType("dylib") else [], 
["--target={}".format(toolchain_info.rustc_target_triple)] if toolchain_info.rustc_target_triple else [], split_debuginfo_flags, - ["--sysroot=" + null_path] if toolchain_info.explicit_sysroot_deps != None else [], + compile_ctx.sysroot_args, ["-Cpanic=abort", "-Zpanic-abort-tests=yes"] if toolchain_info.panic_runtime == PanicRuntime("abort") else [], _rustc_flags(toolchain_info.rustc_flags), _rustc_flags(toolchain_info.rustc_check_flags) if is_check else [], diff --git a/prelude/rust/context.bzl b/prelude/rust/context.bzl index 6f0ced6bf..74e76837f 100644 --- a/prelude/rust/context.bzl +++ b/prelude/rust/context.bzl @@ -55,4 +55,5 @@ CompileContext = record( # Memoized common args for reuse. common_args = field(dict[(CrateType, Emit, LinkStrategy, bool), CommonArgsInfo]), transitive_dependency_dirs = field(dict[Artifact, None]), + sysroot_args = field(cmd_args), ) From d6e6eeda5e8afbc94a787d940c1215287ab27c80 Mon Sep 17 00:00:00 2001 From: Amethyst Reese Date: Mon, 4 Mar 2024 14:29:30 -0800 Subject: [PATCH 0383/1133] apply Black 2024 style in fbcode (16/17) Summary: Formats the covered files with pyfmt. 
paintitblack Reviewed By: zertosh, abesto Differential Revision: D54470824 fbshipit-source-id: 06c8f96f17045dcd46a23398274cc5b741c6e846 --- prelude/android/tools/merge_sequence.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prelude/android/tools/merge_sequence.py b/prelude/android/tools/merge_sequence.py index 6050467b2..093cdcd54 100644 --- a/prelude/android/tools/merge_sequence.py +++ b/prelude/android/tools/merge_sequence.py @@ -705,9 +705,9 @@ def main() -> int: # noqa: C901 final_mapping[target] = None else: final_mapping[target] = final_lib_names[node.final_lib_key] - split_groups[ - final_lib_names[node.final_lib_key] - ] = node.base_library_name + split_groups[final_lib_names[node.final_lib_key]] = ( + node.base_library_name + ) else: final_mapping[target] = str(target) debug_results[platform] = ( From bc84cfa63cdc9597cf50322af53643974194f36b Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Mon, 4 Mar 2024 15:23:51 -0800 Subject: [PATCH 0384/1133] Add native_library_merge_linker_args to android_aar Summary: D53821751 broke AAR builds that use merge maps, add the attribute to android_aar. 
Differential Revision: D54498405 fbshipit-source-id: 80cb2b59e5b0183dc114ec157347c495867d4331 --- prelude/android/android.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 19456b1cb..b2c34997d 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -66,6 +66,7 @@ extra_attributes = { "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "min_sdk_version": attrs.option(attrs.int(), default = None), "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), + "native_library_merge_linker_args": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.arg())), default = None), "package_asset_libraries": attrs.default_only(attrs.bool(default = True)), "resources_root": attrs.option(attrs.string(), default = None), "strip_libraries": attrs.default_only(attrs.bool(default = not DISABLE_STRIPPING)), From 995754637b6b1f09f4eb59d32e0d7841a548d596 Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Mon, 4 Mar 2024 15:27:32 -0800 Subject: [PATCH 0385/1133] Make the architecture available on AppleToolchainInfo Summary: Turns out architecture already exists on the buck1 `apple_toolchain`: https://www.internalfb.com/code/fbsource/[c59ef4ce3af3]/fbcode/buck2/prelude/decls/ios_rules.bzl?lines=802 We just didn't populate it nor set it when creating the toolchains. Do so. 
Reviewed By: rmaz Differential Revision: D54371822 fbshipit-source-id: 0f4f6b754cf961b471f59ff44ada178ce06c2ff0 --- prelude/apple/apple_toolchain.bzl | 1 + prelude/apple/apple_toolchain_types.bzl | 1 + prelude/apple/user/apple_toolchain_override.bzl | 1 + 3 files changed, 3 insertions(+) diff --git a/prelude/apple/apple_toolchain.bzl b/prelude/apple/apple_toolchain.bzl index e13b18df3..3e0802db4 100644 --- a/prelude/apple/apple_toolchain.bzl +++ b/prelude/apple/apple_toolchain.bzl @@ -16,6 +16,7 @@ def apple_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: DefaultInfo(), AppleToolchainInfo( actool = ctx.attrs.actool[RunInfo], + architecture = ctx.attrs.architecture, codesign = ctx.attrs.codesign[RunInfo], codesign_allocate = ctx.attrs.codesign_allocate[RunInfo], codesign_identities_command = ctx.attrs.codesign_identities_command[RunInfo] if ctx.attrs.codesign_identities_command else None, diff --git a/prelude/apple/apple_toolchain_types.bzl b/prelude/apple/apple_toolchain_types.bzl index 96adbaa9a..d94c9676b 100644 --- a/prelude/apple/apple_toolchain_types.bzl +++ b/prelude/apple/apple_toolchain_types.bzl @@ -9,6 +9,7 @@ AppleToolchainInfo = provider( # @unsorted-dict-items fields = { "actool": provider_field(typing.Any, default = None), # "RunInfo" + "architecture": provider_field(typing.Any, default = None), # str "codesign_allocate": provider_field(typing.Any, default = None), # "RunInfo" "codesign_identities_command": provider_field(typing.Any, default = None), # ["RunInfo", None] "codesign": provider_field(typing.Any, default = None), # "RunInfo" diff --git a/prelude/apple/user/apple_toolchain_override.bzl b/prelude/apple/user/apple_toolchain_override.bzl index 8cce54845..fce7dbfa7 100644 --- a/prelude/apple/user/apple_toolchain_override.bzl +++ b/prelude/apple/user/apple_toolchain_override.bzl @@ -16,6 +16,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: DefaultInfo(), AppleToolchainInfo( actool = base.actool, + architecture = base.architecture, 
codesign = base.codesign, codesign_allocate = base.codesign_allocate, copy_scene_kit_assets = base.copy_scene_kit_assets, From e56670c86f29a1fb22ec5e9f73c6fbc6162dda0f Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Mon, 4 Mar 2024 15:27:32 -0800 Subject: [PATCH 0386/1133] Add the arch to XcodeDataInfo provider Summary: We want the target's architecture if it is available. Reviewed By: milend Differential Revision: D54437961 fbshipit-source-id: c14c676cd31e15ec6e830148164346ba7ed53bd6 --- prelude/apple/apple_utility.bzl | 3 +++ prelude/apple/xcode.bzl | 3 ++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/prelude/apple/apple_utility.bzl b/prelude/apple/apple_utility.bzl index 81fcb867a..529793638 100644 --- a/prelude/apple/apple_utility.bzl +++ b/prelude/apple/apple_utility.bzl @@ -33,6 +33,9 @@ def get_module_name(ctx: AnalysisContext) -> str: def has_apple_toolchain(ctx: AnalysisContext) -> bool: return hasattr(ctx.attrs, "_apple_toolchain") +def get_apple_architecture(ctx: AnalysisContext) -> str: + return ctx.attrs._apple_toolchain[AppleToolchainInfo].architecture + def get_versioned_target_triple(ctx: AnalysisContext) -> str: apple_toolchain_info = ctx.attrs._apple_toolchain[AppleToolchainInfo] swift_toolchain_info = apple_toolchain_info.swift_toolchain_info diff --git a/prelude/apple/xcode.bzl b/prelude/apple/xcode.bzl index 691f23501..84ae3df5f 100644 --- a/prelude/apple/xcode.bzl +++ b/prelude/apple/xcode.bzl @@ -7,7 +7,7 @@ load("@prelude//apple:apple_sdk.bzl", "get_apple_sdk_name") load("@prelude//apple:apple_target_sdk_version.bzl", "get_min_deployment_version_for_node") -load("@prelude//apple:apple_utility.bzl", "has_apple_toolchain") +load("@prelude//apple:apple_utility.bzl", "get_apple_architecture", "has_apple_toolchain") load( "@prelude//cxx:argsfiles.bzl", "CompileArgsfile", # @unused Used as a type @@ -27,6 +27,7 @@ def apple_populate_xcode_attributes( data = cxx_populate_xcode_attributes(ctx = ctx, srcs = srcs, argsfiles = 
argsfiles, product_name = product_name) if has_apple_toolchain(ctx): + data["arch"] = get_apple_architecture(ctx) data["sdk"] = get_apple_sdk_name(ctx) data["deployment_version"] = get_min_deployment_version_for_node(ctx) From 3b50da2a40b70502bfa62b3ea38c36ece06227d7 Mon Sep 17 00:00:00 2001 From: Ben Chang Date: Mon, 4 Mar 2024 15:54:51 -0800 Subject: [PATCH 0387/1133] do not skip creating voltron linkables for arm64 abis when cpu_filters is empty Summary: i created prebuilt libs in each abi so that we can detect a mismatch for arm64. without the changes in `android_binary_native_library_rules.bzl` target `:bundle_with_mismatch_module_abis` fails to build due to this error P1191289443. i have no idea if this affects the caching mechanism described in D44956000. ==== future work - there is actually 1 more scenario that we may want to fix. specifically, if base APK only contains prebuilt native libs in cpu x86 but we have a module with a cxx library. the bundle cpu_filters contains more than just x86. we would have a mismatch because base would only have x86 libs while the module would have more than just x86. its probably unlikely that this issue comes up. and its probably unlikely that we want a build that has missing native libs in certain abis. 
Reviewed By: IanChilds Differential Revision: D54430789 fbshipit-source-id: c760e3802a5991b17f65c01af6680ca7c153f61e --- prelude/android/android_binary_native_library_rules.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 58b46e697..670a0f321 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -110,7 +110,7 @@ def get_android_binary_native_library_info( included_shared_lib_targets = [] original_shared_libs_by_platform = {} # dict[str, dict[str (soname), list[SharedLibrary]]] for platform, deps in deps_by_platform.items(): - if platform == CPU_FILTER_FOR_PRIMARY_PLATFORM and platform not in ctx.attrs.cpu_filters: + if platform == CPU_FILTER_FOR_PRIMARY_PLATFORM and platform not in ctx.attrs.cpu_filters and len(ctx.attrs.cpu_filters) != 0: continue shared_libs = get_default_shared_libs(ctx, deps, shared_libraries_to_exclude) From 90df6c063bbbf25fd1f70b5f385f557ed95a682a Mon Sep 17 00:00:00 2001 From: Jia Chen Date: Mon, 4 Mar 2024 18:05:32 -0800 Subject: [PATCH 0388/1133] Use uquery for root target filtering Reviewed By: scottcao Differential Revision: D54396401 fbshipit-source-id: 1e22c5bd50605f4e2fcf73c3aa9716df36a8536c --- prelude/python/sourcedb/classic.bxl | 2 +- prelude/python/sourcedb/code_navigation.bxl | 5 ++--- prelude/python/sourcedb/filter.bxl | 14 +++++++------- prelude/python/sourcedb/query.bxl | 10 +++++----- prelude/python/typecheck/batch.bxl | 5 ++--- 5 files changed, 17 insertions(+), 19 deletions(-) diff --git a/prelude/python/sourcedb/classic.bxl b/prelude/python/sourcedb/classic.bxl index 7504ad812..1dec7df19 100644 --- a/prelude/python/sourcedb/classic.bxl +++ b/prelude/python/sourcedb/classic.bxl @@ -16,7 +16,7 @@ def _build_entry_point(ctx: bxl.Context) -> None: ) actions = bxl_actions.actions - query = ctx.cquery() + query 
= ctx.uquery() targets = do_query(ctx, query, actions, [query.eval(target) for target in ctx.cli_args.target]) built_sourcedbs = do_build(ctx, targets) diff --git a/prelude/python/sourcedb/code_navigation.bxl b/prelude/python/sourcedb/code_navigation.bxl index fd127ec88..9272ced6a 100644 --- a/prelude/python/sourcedb/code_navigation.bxl +++ b/prelude/python/sourcedb/code_navigation.bxl @@ -16,12 +16,11 @@ def _build_entry_point(ctx: bxl.Context) -> None: ) actions = bxl_actions.actions - query = ctx.cquery() + query = ctx.uquery() root = ctx.root() sources = ["{}/{}".format(root, source) for source in ctx.cli_args.source] - target_universe = ctx.uquery().owner(sources) - targets = do_query(ctx, query, actions, query.owner(sources, target_universe)) + targets = do_query(ctx, query, actions, query.owner(sources)) built_sourcedbs = do_build(ctx, targets) # Ensure all source files are materialized diff --git a/prelude/python/sourcedb/filter.bxl b/prelude/python/sourcedb/filter.bxl index 9cbbbe214..849d8db22 100644 --- a/prelude/python/sourcedb/filter.bxl +++ b/prelude/python/sourcedb/filter.bxl @@ -13,10 +13,10 @@ BUCK_PYTHON_RULE_KINDS = [ BUCK_PYTHON_RULE_KIND_QUERY = "|".join(BUCK_PYTHON_RULE_KINDS) def filter_root_targets( - query: bxl.CqueryContext, - target_patterns: typing.Any) -> bxl.ConfiguredTargetSet: + query: bxl.UqueryContext, + target_patterns: typing.Any) -> bxl.TargetSet: # Find all Pure-Python targets - candidate_targets = ctarget_set() + candidate_targets = utarget_set() for pattern in target_patterns: candidate_targets += query.kind( BUCK_PYTHON_RULE_KIND_QUERY, @@ -39,18 +39,18 @@ def filter_root_targets( return filtered_targets def do_filter( - query: bxl.CqueryContext, - target_patterns: typing.Any) -> list[ConfiguredTargetLabel]: + query: bxl.UqueryContext, + target_patterns: typing.Any) -> list[TargetLabel]: root_targets = filter_root_targets(query, target_patterns) return [root_target.label for root_target in root_targets] def 
_do_filter_entry_point(ctx: bxl.Context) -> None: - query = ctx.cquery() + query = ctx.uquery() targets = do_filter( query, [query.eval(target) for target in ctx.cli_args.target], ) - ctx.output.print_json([target.raw_target() for target in targets]) + ctx.output.print_json(targets) filter = bxl_main( doc = ( diff --git a/prelude/python/sourcedb/query.bxl b/prelude/python/sourcedb/query.bxl index 3b79a3b69..26d4b51ac 100644 --- a/prelude/python/sourcedb/query.bxl +++ b/prelude/python/sourcedb/query.bxl @@ -21,7 +21,7 @@ def _get_python_library_manifests_from_analysis_result( def _get_python_library_manifests_from_targets( ctx: bxl.Context, - targets: bxl.ConfiguredTargetSet) -> list[PythonLibraryManifestsTSet]: + targets: bxl.TargetSet) -> list[PythonLibraryManifestsTSet]: return filter(None, [ _get_python_library_manifests_from_analysis_result(analysis_result) for analysis_result in ctx.analysis(targets).values() @@ -30,7 +30,7 @@ def _get_python_library_manifests_from_targets( def get_python_library_manifests_tset_from_targets( ctx: bxl.Context, actions: AnalysisActions, - root_targets: bxl.ConfiguredTargetSet) -> PythonLibraryManifestsTSet: + root_targets: bxl.TargetSet) -> PythonLibraryManifestsTSet: return actions.tset( PythonLibraryManifestsTSet, children = _get_python_library_manifests_from_targets(ctx, root_targets), @@ -38,7 +38,7 @@ def get_python_library_manifests_tset_from_targets( def get_python_library_manifests_tset_from_target_patterns( ctx: bxl.Context, - query: bxl.CqueryContext, + query: bxl.UqueryContext, actions: AnalysisActions, target_patterns: typing.Any) -> PythonLibraryManifestsTSet: root_targets = filter_root_targets(query, target_patterns) @@ -46,7 +46,7 @@ def get_python_library_manifests_tset_from_target_patterns( def do_query( ctx: bxl.Context, - query: bxl.CqueryContext, + query: bxl.UqueryContext, actions: AnalysisActions, target_patterns: typing.Any) -> list[ConfiguredTargetLabel]: manifests_of_transitive_dependencies = ( @@ -64,7 
+64,7 @@ def do_query( ] def _do_query_entry_point(ctx: bxl.Context) -> None: - query = ctx.cquery() + query = ctx.uquery() actions = ctx.bxl_actions().actions targets = do_query( ctx, diff --git a/prelude/python/typecheck/batch.bxl b/prelude/python/typecheck/batch.bxl index 1cd94a1ee..93733857a 100644 --- a/prelude/python/typecheck/batch.bxl +++ b/prelude/python/typecheck/batch.bxl @@ -10,11 +10,10 @@ load("@prelude//python/sourcedb/filter.bxl", "do_filter") def _run_entry_point(ctx: bxl.Context) -> None: targets = flatten(ctx.cli_args.target) - uquery_universe = ctx.target_universe(targets).target_set() - checked_targets = do_filter(ctx.cquery(), uquery_universe) + checked_targets = ctx.configured_targets(do_filter(ctx.uquery(), targets)) build_result = ctx.build([ - target.with_sub_target("typecheck") + target.label.with_sub_target("typecheck") for target in checked_targets ]) output = ctx.output.ensure_multiple(build_result) From 6872207c0e8b684340e40e5e56f5c4a1cca6269e Mon Sep 17 00:00:00 2001 From: Yaroslav Repeta Date: Mon, 4 Mar 2024 18:53:46 -0800 Subject: [PATCH 0389/1133] Add compdb target to build script Summary: `cpp_gen_cdb.bxl` (see next diff) generates compilation database for buck2 targets. In order to integrate zephyr targets with the script, adding CxxCompilationDbInfo provider with path to compile_commands.json built by cmake. 
Differential Revision: D53538955 fbshipit-source-id: 49e056d51c5fc1419eca669e69726503ec5439c9 --- prelude/cxx/comp_db.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/cxx/comp_db.bzl b/prelude/cxx/comp_db.bzl index a1daff0e5..630e0bbc3 100644 --- a/prelude/cxx/comp_db.bzl +++ b/prelude/cxx/comp_db.bzl @@ -15,6 +15,7 @@ load(":cxx_context.bzl", "get_cxx_toolchain_info") # Provider that exposes the compilation database information CxxCompilationDbInfo = provider(fields = { + "compdb": provider_field(typing.Any, default = None), # path customly built compile_commands.json (used by Zephyr projects) "info": provider_field(typing.Any, default = None), # A map of the file (an `Artifact`) to its corresponding `CxxSrcCompileCommand` "platform": provider_field(typing.Any, default = None), # platform for this compilation database "toolchain": provider_field(typing.Any, default = None), # toolchain for this compilation database From 4263a236061f164591983a5c1c13e7a5d3b2c967 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Tue, 5 Mar 2024 08:40:25 -0800 Subject: [PATCH 0390/1133] Put module assets into module/assets for Android Bundles Summary: Now, if we have an Android Bundle and a Voltron mapping, we pass a map of module-to-assets so that `merge_assets` will create a separate `assets.ap_` in our "module assets apks dir". If we produce that dir, then we pass it to `AndroidBundleBuilderExecutableMain` and for each module in the bundle, we look for an `assets.ap_` in the dir. If it exists, we use that when creating the "module" rather than just using a "fake" resources APK. 
Reviewed By: bchang7 Differential Revision: D54417876 fbshipit-source-id: 430ddad628a9f56d93864eec218600fe79e0dc8d --- .../android_binary_resources_rules.bzl | 103 +++++++++++++----- prelude/android/android_bundle.bzl | 3 + prelude/android/android_providers.bzl | 2 + 3 files changed, 82 insertions(+), 26 deletions(-) diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl index 830cf86d6..c15944d48 100644 --- a/prelude/android/android_binary_resources_rules.bzl +++ b/prelude/android/android_binary_resources_rules.bzl @@ -96,12 +96,14 @@ def get_android_binary_resources_info( cxx_resources = get_cxx_resources(ctx, deps) is_exopackaged_enabled_for_resources = "resources" in getattr(ctx.attrs, "exopackage_modes", []) - primary_resources_apk, exopackaged_assets, exopackaged_assets_hash = _merge_assets( + primary_resources_apk, exopackaged_assets, exopackaged_assets_hash, module_assets_apks_dir = _merge_assets( ctx, is_exopackaged_enabled_for_resources, aapt2_link_info.primary_resources_apk, resource_infos, cxx_resources, + use_proto_format, # indicates that this is a .aab build + apk_module_graph_file, ) if is_exopackaged_enabled_for_resources: @@ -182,6 +184,7 @@ def get_android_binary_resources_info( exopackage_info = exopackage_info, manifest = android_manifest, module_manifests = module_manifests, + module_assets = module_assets_apks_dir, packaged_string_assets = packaged_string_assets, primary_resources_apk = primary_resources_apk, proguard_config_file = aapt2_link_info.proguard_config_file, @@ -544,39 +547,87 @@ def _merge_assets( is_exopackaged_enabled_for_resources: bool, base_apk: Artifact, resource_infos: list[AndroidResourceInfo], - cxx_resources: [Artifact, None]) -> (Artifact, [Artifact, None], [Artifact, None]): - assets_dirs = [resource_info.assets for resource_info in resource_infos if resource_info.assets] - if cxx_resources != None: - assets_dirs.extend([cxx_resources]) - if len(assets_dirs) 
== 0: - return base_apk, None, None - - merge_assets_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].merge_assets[RunInfo]) + cxx_resources: [Artifact, None], + is_bundle_build: bool, + apk_module_graph_file: [Artifact, None]) -> (Artifact, [Artifact, None], [Artifact, None], [Artifact, None]): + expect( + not (is_exopackaged_enabled_for_resources and is_bundle_build), + "Cannot use exopackage-for-resources with AAB builds.", + ) + asset_resource_infos = [resource_info for resource_info in resource_infos if resource_info.assets] + if not asset_resource_infos and not cxx_resources: + return base_apk, None, None, None merged_assets_output = ctx.actions.declare_output("merged_assets.ap_") - merge_assets_cmd.add(["--output-apk", merged_assets_output.as_output()]) - if is_exopackaged_enabled_for_resources: - merged_assets_output_hash = ctx.actions.declare_output("merged_assets.ap_.hash") - merge_assets_cmd.add(["--output-apk-hash", merged_assets_output_hash.as_output()]) - else: - merge_assets_cmd.add(["--base-apk", base_apk]) - merged_assets_output_hash = None + def get_common_merge_assets_cmd( + ctx: AnalysisContext, + output_apk: Artifact) -> (cmd_args, [Artifact, None]): + merge_assets_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].merge_assets[RunInfo]) + merge_assets_cmd.add(["--output-apk", output_apk.as_output()]) - assets_dirs_file = ctx.actions.write_json("assets_dirs.json", {ROOT_MODULE: assets_dirs}) - merge_assets_cmd.add(["--assets-dirs", assets_dirs_file]) - merge_assets_cmd.hidden(assets_dirs) + if getattr(ctx.attrs, "extra_no_compress_asset_extensions", None): + merge_assets_cmd.add("--extra-no-compress-asset-extensions") + merge_assets_cmd.add(ctx.attrs.extra_no_compress_asset_extensions) - if getattr(ctx.attrs, "extra_no_compress_asset_extensions", None): - merge_assets_cmd.add("--extra-no-compress-asset-extensions") - merge_assets_cmd.add(ctx.attrs.extra_no_compress_asset_extensions) + if 
is_exopackaged_enabled_for_resources: + merged_assets_output_hash = ctx.actions.declare_output("merged_assets.ap_.hash") + merge_assets_cmd.add(["--output-apk-hash", merged_assets_output_hash.as_output()]) + else: + merge_assets_cmd.add(["--base-apk", base_apk]) + merged_assets_output_hash = None - ctx.actions.run(merge_assets_cmd, category = "merge_assets") + return merge_assets_cmd, merged_assets_output_hash + + # For Voltron AAB builds, we need to put assets into a separate "APK" for each module. + if is_bundle_build and apk_module_graph_file: + module_assets_apks_dir = ctx.actions.declare_output("module_assets_apks") + + def merge_assets_modular(ctx: AnalysisContext, artifacts, outputs): + apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) + + module_to_assets_dirs = {} + if cxx_resources != None: + module_to_assets_dirs.setdefault(ROOT_MODULE, []).extend([cxx_resources]) + for asset_resource_info in asset_resource_infos: + module_name = apk_module_graph_info.target_to_module_mapping_function(str(asset_resource_info.raw_target)) + module_to_assets_dirs.setdefault(module_name, []).append(asset_resource_info.assets) + + merge_assets_cmd, _ = get_common_merge_assets_cmd(ctx, outputs[merged_assets_output]) + + merge_assets_cmd.add(["--module-assets-apks-dir", outputs[module_assets_apks_dir].as_output()]) + + assets_dirs_file = ctx.actions.write_json("assets_dirs.json", module_to_assets_dirs) + merge_assets_cmd.add(["--assets-dirs", assets_dirs_file]) + merge_assets_cmd.hidden([resource_info.assets for resource_info in asset_resource_infos]) + + ctx.actions.run(merge_assets_cmd, category = "merge_assets") + + ctx.actions.dynamic_output( + dynamic = [apk_module_graph_file], + inputs = [], + outputs = [module_assets_apks_dir, merged_assets_output], + f = merge_assets_modular, + ) + + return merged_assets_output, None, None, module_assets_apks_dir - if is_exopackaged_enabled_for_resources: - return base_apk, merged_assets_output, 
merged_assets_output_hash else: - return merged_assets_output, None, None + merge_assets_cmd, merged_assets_output_hash = get_common_merge_assets_cmd(ctx, merged_assets_output) + + assets_dirs = [resource_info.assets for resource_info in asset_resource_infos] + if cxx_resources: + assets_dirs.extend([cxx_resources]) + assets_dirs_file = ctx.actions.write_json("assets_dirs.json", {ROOT_MODULE: assets_dirs}) + merge_assets_cmd.add(["--assets-dirs", assets_dirs_file]) + merge_assets_cmd.hidden(assets_dirs) + + ctx.actions.run(merge_assets_cmd, category = "merge_assets") + + if is_exopackaged_enabled_for_resources: + return base_apk, merged_assets_output, merged_assets_output_hash, None + else: + return merged_assets_output, None, None, None def get_effective_banned_duplicate_resource_types( duplicate_resource_behavior: str, diff --git a/prelude/android/android_bundle.bzl b/prelude/android/android_bundle.bzl index 072489d81..bf5a53e00 100644 --- a/prelude/android/android_bundle.bzl +++ b/prelude/android/android_bundle.bzl @@ -84,6 +84,9 @@ def build_bundle( jar_files_that_may_contain_resources = actions.write("jar_files_that_may_contain_resources", resources_info.jar_files_that_may_contain_resources) bundle_builder_args.hidden(resources_info.jar_files_that_may_contain_resources) + if resources_info.module_assets: + bundle_builder_args.add(["--module-assets-dir", resources_info.module_assets]) + bundle_builder_args.add([ "--root-module-asset-directories-list", root_module_asset_directories_file, diff --git a/prelude/android/android_providers.bzl b/prelude/android/android_providers.bzl index bf899c595..f6f850620 100644 --- a/prelude/android/android_providers.bzl +++ b/prelude/android/android_providers.bzl @@ -65,6 +65,8 @@ AndroidBinaryResourcesInfo = record( manifest = Artifact, # per-module manifests (packaged as assets) module_manifests = list[Artifact], + # per-module assets APKs (for .aabs only) + module_assets = [Artifact, None], # zip containing any strings 
packaged as assets packaged_string_assets = [Artifact, None], # "APK" containing resources to be used by the Android binary From bf386389c40d765d99f13e46bef3b5697dd50ec6 Mon Sep 17 00:00:00 2001 From: Steven Ware Jones Date: Tue, 5 Mar 2024 11:04:16 -0800 Subject: [PATCH 0391/1133] Allow jars with native libs to not have an inner jar MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: Our spark pipeline uses JNI and requires native libs. Per my discussion with sfilipco mtrepanier an inner.jar won't work for this purpose. This diff essentially reverts D46366872 and removes a safeguard that prevents users from creating a java binary with a main class that does not have an inner jar but does have native libs. This could result in adopters misusing this feature for environments where an inner jar is supported. Alternatively, we could alter wrapper that builds each java binary separately and includes it in the class path. Per sfilipco "The wrapper mode in java_binary is **not portable** in it’s current format. The wrapper is just a series of symlinks. The symlinks that buck builds today are absolute paths. Only when a full wrapper build locally you can guarantee that these absolute paths exist on the local machine. "I have actually seen this being an issue in regular development. When we did clean build on master, due to remote execution (+sandcastle) + artifact caching, the master builds downloads a cached wrapper which point to the absolute paths from a Sandcastle machine but the wrapper is not a cacheable artifact (I dunno if wrapper builds were marked as not cacheable at this point). If you make any changes locally and build then buck knows to rebuild the wrapper and you can run your application. "It may be possible to build relative symlinks but “packaging “of the final “artifact” has to be done before the symlinks are build. Theoretically possible but more work than what we have now. 
"Second point is about native builds. To my knowledge the fundamental idea of the inner.jar is a means towards managing all the different kinds of paths that each piece of code expects and its goal is to make the application portable. Either way, you have to figure out a way to manage all the these paths that an application has. "I use portable to mean that you can move between different machines. Not that the same artifact runs on both Mac and Win. "Hmm, then there's definitely an opportunity to either modify wrapper or add another param like ensemble or jars that instead builds each java_binary separately and includes them on the classpath. Hell, they could even just stuff them in lib as unmanaged jars" Reviewed By: IanChilds Differential Revision: D54217079 fbshipit-source-id: 9b36e1a9956a20d21bd2ff849187c9d6aa3c544f --- prelude/java/java_binary.bzl | 3 --- 1 file changed, 3 deletions(-) diff --git a/prelude/java/java_binary.bzl b/prelude/java/java_binary.bzl index 96ea62f4a..d80498ef9 100644 --- a/prelude/java/java_binary.bzl +++ b/prelude/java/java_binary.bzl @@ -79,9 +79,6 @@ def _create_fat_jar( main_class = ctx.attrs.main_class if main_class: - if do_not_create_inner_jar and native_libs: - fail("For performance reasons, java binaries with a main class and native libs should always generate an inner jar.\ - The reason for having inner.jar is so that we don't have to compress the native libraries, which is slow at compilation time and also at runtime (when decompressing).") args += ["--main_class", main_class] manifest_file = ctx.attrs.manifest_file From aeae8c16d7fb7e70d5e6664047352bc8ae9d3df9 Mon Sep 17 00:00:00 2001 From: Jingbo Yang Date: Tue, 5 Mar 2024 14:54:05 -0800 Subject: [PATCH 0392/1133] Script for KSP profiling: Ensure targets in jvm_args_targets use persistent worker Summary: We want targets in jvm_arg_targets to use persistent worker in order for profiler (jvm_args) to properly work for them. 
# Solution - Switch targets specified by jvm_arg_targets to use persistent worker, while rest of the targets use RunInfo - Ensure worker always apply jvm_args, even if jvm_arg_targets exist Reviewed By: navidqar Differential Revision: D53969767 fbshipit-source-id: 7256d700952128313a374aec90a752adb0c8b394 --- prelude/jvm/cd_jar_creator_util.bzl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl index 0e13eb1e1..c25aff571 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl +++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -409,10 +409,11 @@ def prepare_cd_exe( jvm_args = ["-XX:-MaxFDLimit"] if extra_jvm_args_target: + local_only = True for target in extra_jvm_args_target: if qualified_name == qualified_name_with_subtarget(target): jvm_args = jvm_args + extra_jvm_args - local_only = True + local_only = False break else: jvm_args = jvm_args + extra_jvm_args From 97406a4266bda9d4b0920a1ab72950d233f21c42 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Wed, 6 Mar 2024 07:45:29 -0800 Subject: [PATCH 0393/1133] specify module format with explicit modules Summary: When using explicit modules, specify the module format explicitly. 
This is the default when compiling, but not when indexing via sourcekitd: https://github.com/apple/swift/blob/a983de3d68ee8a6e8da72a87aac211a0a08e41e4/lib/IDETool/CompilerInvocation.cpp#L216-L217 https://github.com/apple/swift/blob/a983de3d68ee8a6e8da72a87aac211a0a08e41e4/include/swift/Basic/LangOptions.h#L922-L926 https://github.com/apple/swift/blob/a983de3d68ee8a6e8da72a87aac211a0a08e41e4/lib/ClangImporter/ClangImporter.cpp#L701-L710 Reviewed By: drodriguez Differential Revision: D54541572 fbshipit-source-id: 3ee2bfed6ccae1395101b314954046c7eb78c236 --- prelude/apple/swift/swift_compilation.bzl | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index f9570a5ce..7a6b5ba74 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -489,13 +489,20 @@ def _get_shared_flags( ]) if uses_explicit_modules(ctx): - # We set -fmodule-file-home-is-cwd as this is used to correctly - # set the working directory of modules when generating debug info. cmd.add([ "-Xcc", "-Xclang", "-Xcc", + # We set -fmodule-file-home-is-cwd as this is used to correctly + # set the working directory of modules when generating debug info. "-fmodule-file-home-is-cwd", + "-Xcc", + "-Xclang", + "-Xcc", + # This is the default for compilation, but not in sourcekitd. + # Set it explicitly here so that indexing will not fail with + # invalid module format errors. + "-fmodule-format=obj", ]) cmd.add(get_disable_pch_validation_flags()) From 908fd89d5df713cc7fb33fdc6e21476e0b91c16b Mon Sep 17 00:00:00 2001 From: Mark Vitale Date: Wed, 6 Mar 2024 09:04:27 -0800 Subject: [PATCH 0394/1133] Parallelize provisioning profile parsing Summary: Now that our provisioning profile reading no longer shells out for an openssl invocation, let's parallelize the remaining plist_util parsing for another speed boost. 
We get 4-5x faster than our original implementation and 2-2.5x faster than our existing "fast" path. Reviewed By: blackm00n Differential Revision: D54505580 fbshipit-source-id: 92498928b6ab07c7bcf7f547bd159bb4ddd6b140 --- .../tools/code_signing/codesign_bundle.py | 63 ++++++++++++++++--- .../provisioning_profile_metadata.py | 21 +++++-- 2 files changed, 70 insertions(+), 14 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 501033f72..7c7ceb1ad 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +import asyncio import logging import os import shutil @@ -15,7 +16,7 @@ from dataclasses import dataclass from enum import Enum from pathlib import Path -from typing import Any, Dict, List, Optional, Union +from typing import Any, cast, Dict, List, Optional, Union from apple.tools.plistlib_utils import detect_format_and_load @@ -64,11 +65,22 @@ def _select_provisioning_profile( log_file_path: Optional[Path] = None, ) -> SelectedProvisioningProfileInfo: identities = list_codesign_identities.list_codesign_identities() - provisioning_profiles = _read_provisioning_profiles( - provisioning_profiles_dir, - read_provisioning_profile_command_factory, - should_use_fast_provisioning_profile_parsing, + _LOGGER.info( + f"Fast provisioning profile parsing enabled: {should_use_fast_provisioning_profile_parsing}" ) + provisioning_profiles = [] + if should_use_fast_provisioning_profile_parsing: + provisioning_profiles = asyncio.run( + _fast_read_provisioning_profiles_async( + provisioning_profiles_dir, + read_provisioning_profile_command_factory, + ) + ) + else: + provisioning_profiles = _read_provisioning_profiles( + provisioning_profiles_dir, + read_provisioning_profile_command_factory, + ) if not 
provisioning_profiles: raise CodeSignProvisioningError( f"\n\nFailed to find any provisioning profiles. Please make sure to install required provisioning profiles and make sure they are located at '{provisioning_profiles_dir}'.\n\nPlease follow the wiki to build & run on device: {META_IOS_BUILD_AND_RUN_ON_DEVICE_LINK}.\nProvisioning profiles for your app can be downloaded from {META_IOS_PROVISIONING_PROFILES_LINK}.\n" @@ -265,19 +277,50 @@ def _prepare_entitlements_and_info_plist( return prepared_entitlements_path -def _read_provisioning_profiles( +async def _fast_read_provisioning_profiles_async( dirpath: Path, read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, - should_use_fast_provisioning_profile_parsing: bool, ) -> List[ProvisioningProfileMetadata]: - _LOGGER.info( - f"Fast provisioning profile parsing enabled: {should_use_fast_provisioning_profile_parsing}" + tasks = [] + for f in os.listdir(dirpath): + if f.endswith(".mobileprovision") or f.endswith(".provisionprofile"): + filepath = dirpath / f + tasks.append( + _provisioning_profile_from_file_path_async( + filepath, + read_provisioning_profile_command_factory, + should_use_fast_provisioning_profile_parsing=True, + ) + ) + results = await asyncio.gather(*tasks) + return cast(List[ProvisioningProfileMetadata], results) + + +async def _provisioning_profile_from_file_path_async( + path: Path, + read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, + should_use_fast_provisioning_profile_parsing: bool, +) -> ProvisioningProfileMetadata: + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, + _provisioning_profile_from_file_path, + path, + read_provisioning_profile_command_factory, + should_use_fast_provisioning_profile_parsing, ) + + +def _read_provisioning_profiles( + dirpath: Path, + read_provisioning_profile_command_factory: IReadProvisioningProfileCommandFactory, +) -> List[ProvisioningProfileMetadata]: + return [ 
_provisioning_profile_from_file_path( dirpath / f, read_provisioning_profile_command_factory, - should_use_fast_provisioning_profile_parsing, + should_use_fast_provisioning_profile_parsing=False, ) for f in os.listdir(dirpath) if (f.endswith(".mobileprovision") or f.endswith(".provisionprofile")) diff --git a/prelude/apple/tools/code_signing/provisioning_profile_metadata.py b/prelude/apple/tools/code_signing/provisioning_profile_metadata.py index d8b05ad73..436855404 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_metadata.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_metadata.py @@ -25,9 +25,9 @@ class ProvisioningProfileMetadata: uuid: str # Naïve object with ignored timezone, see https://bugs.python.org/msg110249 expiration_date: datetime - platforms: Set[str] + platforms: FrozenSet[str] # Let's agree they are uppercased - developer_certificate_fingerprints: Set[str] + developer_certificate_fingerprints: FrozenSet[str] entitlements: Dict[str, Any] _mergeable_entitlements_keys: FrozenSet[str] = frozenset( @@ -75,7 +75,20 @@ def from_provisioning_profile_file_content( file_path=file_path, uuid=root["UUID"], expiration_date=root["ExpirationDate"], - platforms=set(root["Platform"]), - developer_certificate_fingerprints=developer_certificate_fingerprints, + platforms=frozenset(root["Platform"]), + developer_certificate_fingerprints=frozenset( + developer_certificate_fingerprints + ), entitlements=root["Entitlements"], ) + + def __hash__(self) -> int: + return hash( + ( + self.file_path, + self.uuid, + self.expiration_date, + self.platforms, + self.developer_certificate_fingerprints, + ) + ) From a907dc7be4266ca8f3448a0e74e321fe3c1c4449 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Wed, 6 Mar 2024 12:05:17 -0800 Subject: [PATCH 0395/1133] fix extra MacOS Info.plist keys Reviewed By: milend Differential Revision: D54586378 fbshipit-source-id: ce3bc4aed667d8b4005dbd7cdd437a50187f7cf6 --- prelude/apple/apple_info_plist.bzl | 
2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/apple/apple_info_plist.bzl b/prelude/apple/apple_info_plist.bzl index 44a33f7a5..3b461f0b6 100644 --- a/prelude/apple/apple_info_plist.bzl +++ b/prelude/apple/apple_info_plist.bzl @@ -124,7 +124,7 @@ def _info_plist_additional_keys(ctx: AnalysisContext) -> dict[str, typing.Any]: return result def _extra_mac_info_plist_keys(sdk_metadata: AppleSdkMetadata, extension: str) -> dict[str, typing.Any]: - if sdk_metadata.name == MacOSXSdkMetadata.name and extension == "xpc": + if sdk_metadata.name == MacOSXSdkMetadata.name and extension != "xpc": return { "NSHighResolutionCapable": True, "NSSupportsAutomaticGraphicsSwitching": True, From 534270753a20458a4abc1db7df05efa8236e6774 Mon Sep 17 00:00:00 2001 From: Austin Longino Date: Wed, 6 Mar 2024 13:56:31 -0800 Subject: [PATCH 0396/1133] Support validation_deps in android_bundle Summary: In preparation for a migration to AAB we need to migrate existing functionality to android_bundle. 
This diff ports changes from D54097352 to android_bundle this will allow our derived builds to function https://www.internalfb.com/sandcastle/workflow/4332462841532626945 Reviewed By: IanChilds Differential Revision: D54565402 fbshipit-source-id: 8d6cf9c33f82a0c4f6b3430218edb6f9138690fc --- prelude/android/android.bzl | 1 + prelude/android/android_bundle.bzl | 11 ++++++++++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index b2c34997d..868eccd20 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -135,6 +135,7 @@ extra_attributes = { "_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)), "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)), "_java_toolchain": toolchains_common.java_for_android(), + VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.transition_dep(cfg = cpu_transition), sorted = True, default = []), }, "android_instrumentation_apk": { "aapt_mode": attrs.enum(AaptMode, default = "aapt1"), # Match default in V1 diff --git a/prelude/android/android_bundle.bzl b/prelude/android/android_bundle.bzl index bf5a53e00..849f242c2 100644 --- a/prelude/android/android_bundle.bzl +++ b/prelude/android/android_bundle.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_binary.bzl", "get_binary_info") load("@prelude//android:android_providers.bzl", "AndroidAabInfo", "AndroidBinaryNativeLibsInfo", "AndroidBinaryResourcesInfo", "DexFilesInfo") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") @@ -21,6 +22,7 @@ def android_bundle_impl(ctx: AnalysisContext) -> list[Provider]: native_library_info = android_binary_info.native_library_info, resources_info = android_binary_info.resources_info, bundle_config = ctx.attrs.bundle_config_file, + validation_deps_outputs = get_validation_deps_outputs(ctx), ) java_packaging_deps = android_binary_info.java_packaging_deps @@ -42,7 +44,8 @@ def build_bundle( dex_files_info: DexFilesInfo, native_library_info: AndroidBinaryNativeLibsInfo, resources_info: AndroidBinaryResourcesInfo, - bundle_config: [Artifact, None]) -> Artifact: + bundle_config: [Artifact, None], + validation_deps_outputs: [list[Artifact], None] = None) -> Artifact: output_bundle = actions.declare_output("{}.aab".format(label.name)) bundle_builder_args = cmd_args([ @@ -54,6 +57,12 @@ def build_bundle( "--dex-file", dex_files_info.primary_dex, ]) + + # The outputs of validation_deps need to be added as hidden arguments + # to an action for the validation_deps targets to be built and enforced. + if validation_deps_outputs: + bundle_builder_args.hidden(validation_deps_outputs) + if bundle_config: bundle_builder_args.add(["--path-to-bundle-config-file", bundle_config]) From 69c58517356aee46fe1cb81ec934dfd15c85a1ec Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Wed, 6 Mar 2024 15:23:27 -0800 Subject: [PATCH 0397/1133] Fail if exclude-libs,ALL is used Summary: This flag frequently causes buck2 to produce builds that crash at runtime. Let's make it a build time failure to clean up remaining uses, and prevent it from being re-introduced. 
This could be done in the macro layer but it should be cheap enough to check it for any android_binary. Since this affects shared library dependencies of binary builds only the flag can still be used by AARs or standalone library builds where it's possible this may still have some use (or may just be harder to clean up). Reviewed By: nlutsenko Differential Revision: D54439931 fbshipit-source-id: 67ef6e28a3e1a9b662659b977342dc6cc76ab6d6 --- prelude/android/android_binary_native_library_rules.bzl | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 670a0f321..f246fdcdd 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -1774,6 +1774,15 @@ def create_shared_lib( shared_lib_deps: list[str], label: Label, can_be_asset: bool) -> SharedLibrary: + for link_arg in link_args: + flags = link_arg.flags or [] + for info in link_arg.infos or []: + flags += info.pre_flags or [] + flags += info.post_flags or [] + for flag in flags: + flag = str(flag) + if flag.endswith("--exclude-libs,ALL") or flag.endswith("--exclude-libs=ALL"): + fail("The behavior of --exclude-libs,ALL is not predictable when building Android binaries and may cause runtime crashes, remove it from {} (or its merged constituents)".format(label)) link_result = cxx_link_shared_library( ctx = ctx, output = output_path, From a8a20997e3dc9ef71e76cad29d240e0523de27c4 Mon Sep 17 00:00:00 2001 From: Itamar Oren Date: Wed, 6 Mar 2024 16:56:42 -0800 Subject: [PATCH 0398/1133] Disable 3.8 EOL warning in python prelude Summary: temporarily disabling this warning to mitigate warning fatigue will bring it back when we get closer to 3.8 removal Reviewed By: cxxxs Differential Revision: D54594872 fbshipit-source-id: 32a40b9484b23ac94518cb599ce41628f9cc4412 --- prelude/python/python_binary.bzl | 26 +++++++++++++------------- 
1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index b84d4da14..850574b05 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -733,19 +733,19 @@ def python_binary_impl(ctx: AnalysisContext) -> list[Provider]: if main_module.endswith(".py"): main_module = main_module[:-3] - if "python-version=3.8" in ctx.attrs.labels: - # buildifier: disable=print - print(( - "\033[1;33m \u26A0 [Warning] " + - "{0} 3.8 is EOL, and is going away by the end of H1 2024. " + - "This build triggered //{1}:{2} which still uses {0} 3.8. " + - "Make sure someone (you or the appropriate maintainers) upgrades it to {0} 3.10 soon to avoid breakages. " + - "https://fburl.com/python-eol \033[0m" - ).format( - "Cinder" if "python-flavor=cinder" in ctx.attrs.labels else "Python", - ctx.label.package, - ctx.attrs.name, - )) + # if "python-version=3.8" in ctx.attrs.labels: + # # buildifier: disable=print + # print(( + # "\033[1;33m \u26A0 [Warning] " + + # "{0} 3.8 is EOL, and is going away by the end of H1 2024. " + + # "This build triggered //{1}:{2} which still uses {0} 3.8. " + + # "Make sure someone (you or the appropriate maintainers) upgrades it to {0} 3.10 soon to avoid breakages. " + + # "https://fburl.com/python-eol \033[0m" + # ).format( + # "Cinder" if "python-flavor=cinder" in ctx.attrs.labels else "Python", + # ctx.label.package, + # ctx.attrs.name, + # )) if main_module != None: main = (EntryPointKind("module"), main_module) From 48d11c23670990641d018f8b41b9f72cf97bff5a Mon Sep 17 00:00:00 2001 From: Boyan Li Date: Wed, 6 Mar 2024 16:59:31 -0800 Subject: [PATCH 0399/1133] add rust toolchain configs for mips64-liquidsec target platform Summary: Here is a summary of changes of this diff which are pre-requisites of building rust code to the target "mips64-unknown-linux-gnuabi64": 1. Build sysroot from source for mips64 liquidsec platform 2. 
Add a config_backed_rust_toolchain and the corresponding buck toolchain configs for mips64 liquidsec platform 3. Some fixups to have proper env var defined: STD_ENV_ARCH 4. Add proper rust flag for vesta buck targets. 5. Fix Marvell vesta code broken by recent changes 6. Add proper select keys for buck targets to have the right set of dependencies. 7. Address some other Buck2 compilation error messages Run the following to generate a proper BUCK file RUSTC_BOOTSTRAP=1 THIRD_PARTY_REPO_PATHS=xplat/rust/toolchain/sysroot/1.76.0 ./fbcode/common/rust/tools/reindeer/buckify & ./fbcode/common/rust/tools/reindeer/buckify (without env vars) Reviewed By: capickett Differential Revision: D53593912 fbshipit-source-id: 4147cc23d9e969a74a941aef2b62ca64c7c9d894 --- prelude/rust/tools/tool_rules.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/rust/tools/tool_rules.bzl b/prelude/rust/tools/tool_rules.bzl index ce4c721b7..48c507e69 100644 --- a/prelude/rust/tools/tool_rules.bzl +++ b/prelude/rust/tools/tool_rules.bzl @@ -17,6 +17,7 @@ def _get_rustc_cfg_impl(ctx: AnalysisContext) -> list[Provider]: toolchain_info.compiler, cmd_args("--print=cfg=", out.as_output(), delimiter = ""), cmd_args("--target=", toolchain_info.rustc_target_triple, delimiter = ""), + cmd_args("--sysroot="), # We do not need a sysroot here, and not all platforms we support have one available (e.g. 
mips64-unknown-linux-gnuabi64) ] ctx.actions.run(cmd, category = "rustc_cfg") From 197b429ee95d4ab15c80f6c0da8aa8aa7bf87852 Mon Sep 17 00:00:00 2001 From: Conner Nilsen Date: Wed, 6 Mar 2024 20:06:11 -0800 Subject: [PATCH 0400/1133] Pyre Configurationless migration for] [batch:22/244] Reviewed By: grievejia Differential Revision: D54558228 fbshipit-source-id: 11abf3cec53ad912904e8accd5a25271c4ae72d8 --- prelude/apple/tools/bundling/action_metadata.py | 2 ++ prelude/apple/tools/bundling/assemble_bundle.py | 2 ++ prelude/apple/tools/bundling/assemble_bundle_types.py | 2 ++ prelude/apple/tools/bundling/incremental_state.py | 2 ++ prelude/apple/tools/bundling/incremental_utils.py | 2 ++ prelude/apple/tools/bundling/main.py | 2 ++ prelude/apple/tools/bundling/swift_support.py | 2 ++ prelude/apple/tools/code_signing/app_id.py | 2 ++ prelude/apple/tools/code_signing/apple_platform.py | 2 ++ prelude/apple/tools/code_signing/codesign_command_factory.py | 2 ++ prelude/apple/tools/code_signing/fast_adhoc.py | 2 ++ prelude/apple/tools/code_signing/identity.py | 2 ++ prelude/apple/tools/code_signing/info_plist_metadata.py | 2 ++ prelude/apple/tools/code_signing/list_codesign_identities.py | 2 ++ prelude/apple/tools/code_signing/main.py | 2 ++ .../tools/code_signing/prepare_code_signing_entitlements.py | 2 ++ prelude/apple/tools/code_signing/prepare_info_plist.py | 2 ++ .../tools/code_signing/provisioning_profile_diagnostics.py | 2 ++ .../apple/tools/code_signing/provisioning_profile_selection.py | 2 ++ .../code_signing/read_provisioning_profile_command_factory.py | 2 ++ prelude/apple/tools/dry_codesign_tool.py | 2 ++ prelude/apple/tools/info_plist_processor/main.py | 2 ++ prelude/apple/tools/info_plist_processor/preprocess.py | 2 ++ prelude/apple/tools/info_plist_processor/process.py | 2 ++ prelude/apple/tools/ipa_package_maker.py | 2 ++ prelude/apple/tools/plistlib_utils.py | 2 ++ prelude/apple/tools/re_compatibility_utils/writable.py | 2 ++ 
prelude/apple/tools/resource_broker/idb_companion.py | 2 ++ prelude/apple/tools/resource_broker/idb_target.py | 2 ++ prelude/apple/tools/resource_broker/ios.py | 2 ++ prelude/apple/tools/resource_broker/macos.py | 2 ++ prelude/apple/tools/resource_broker/main.py | 2 ++ prelude/apple/tools/resource_broker/simctl_runtime.py | 2 ++ prelude/apple/tools/resource_broker/timeouts.py | 2 ++ prelude/apple/tools/resource_broker/utils.py | 2 ++ prelude/apple/tools/selective_debugging/macho.py | 2 ++ prelude/apple/tools/selective_debugging/macho_parser.py | 2 ++ prelude/apple/tools/selective_debugging/main.py | 2 ++ prelude/apple/tools/selective_debugging/scrubber.py | 2 ++ prelude/apple/tools/selective_debugging/spec.py | 2 ++ prelude/apple/tools/selective_debugging/utils.py | 2 ++ prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py | 2 ++ prelude/python/runtime/__par__/bootstrap.py | 2 ++ prelude/python/tools/create_manifest_for_source_dir.py | 2 ++ prelude/python/tools/extract.py | 2 ++ prelude/python/tools/fail_with_message.py | 2 ++ prelude/python/tools/generate_static_extension_info.py | 2 ++ prelude/python/tools/make_par/__run_lpar_main__.py | 2 ++ prelude/python/tools/make_par/sitecustomize.py | 2 ++ prelude/python/tools/make_py_package_inplace.py | 2 ++ prelude/python/tools/make_py_package_manifest_module.py | 2 ++ prelude/python/tools/make_py_package_modules.py | 2 ++ prelude/python/tools/make_source_db_no_deps.py | 2 ++ prelude/python/tools/parse_imports.py | 2 ++ prelude/python/tools/py38stdlib.py | 2 ++ prelude/python/tools/sourcedb_merger/inputs.py | 2 ++ prelude/python/tools/sourcedb_merger/legacy_merge.py | 2 ++ prelude/python/tools/sourcedb_merger/legacy_outputs.py | 2 ++ prelude/python/tools/sourcedb_merger/merge.py | 2 ++ prelude/python/tools/sourcedb_merger/outputs.py | 2 ++ prelude/python/tools/sourcedb_merger/tests/__init__.py | 2 ++ prelude/python/tools/sourcedb_merger/tests/inputs_test.py | 2 ++ 
.../python/tools/sourcedb_merger/tests/legacy_output_test.py | 2 ++ prelude/python/tools/sourcedb_merger/tests/outputs_test.py | 2 ++ prelude/python/tools/static_extension_finder.py | 2 ++ prelude/python/tools/traverse_dep_manifest.py | 2 ++ 66 files changed, 132 insertions(+) diff --git a/prelude/apple/tools/bundling/action_metadata.py b/prelude/apple/tools/bundling/action_metadata.py index 56569e8bf..ade702a70 100644 --- a/prelude/apple/tools/bundling/action_metadata.py +++ b/prelude/apple/tools/bundling/action_metadata.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import json import os from dataclasses import dataclass diff --git a/prelude/apple/tools/bundling/assemble_bundle.py b/prelude/apple/tools/bundling/assemble_bundle.py index 484d67045..6819eb1b5 100644 --- a/prelude/apple/tools/bundling/assemble_bundle.py +++ b/prelude/apple/tools/bundling/assemble_bundle.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import logging import os import shutil diff --git a/prelude/apple/tools/bundling/assemble_bundle_types.py b/prelude/apple/tools/bundling/assemble_bundle_types.py index c304c4f09..88233489f 100644 --- a/prelude/apple/tools/bundling/assemble_bundle_types.py +++ b/prelude/apple/tools/bundling/assemble_bundle_types.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from __future__ import annotations import functools diff --git a/prelude/apple/tools/bundling/incremental_state.py b/prelude/apple/tools/bundling/incremental_state.py index bcc890117..4588e8adc 100644 --- a/prelude/apple/tools/bundling/incremental_state.py +++ b/prelude/apple/tools/bundling/incremental_state.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + import json from dataclasses import dataclass from io import TextIOBase diff --git a/prelude/apple/tools/bundling/incremental_utils.py b/prelude/apple/tools/bundling/incremental_utils.py index bb1f18d1d..38f7251e1 100644 --- a/prelude/apple/tools/bundling/incremental_utils.py +++ b/prelude/apple/tools/bundling/incremental_utils.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import logging import os from pathlib import Path diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index 19378bec5..a95e148d6 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import cProfile import json diff --git a/prelude/apple/tools/bundling/swift_support.py b/prelude/apple/tools/bundling/swift_support.py index b9ecd81d8..91ef7644f 100644 --- a/prelude/apple/tools/bundling/swift_support.py +++ b/prelude/apple/tools/bundling/swift_support.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import os import shlex import shutil diff --git a/prelude/apple/tools/code_signing/app_id.py b/prelude/apple/tools/code_signing/app_id.py index 2e0e0b3b3..deac46eba 100644 --- a/prelude/apple/tools/code_signing/app_id.py +++ b/prelude/apple/tools/code_signing/app_id.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + from __future__ import annotations import re diff --git a/prelude/apple/tools/code_signing/apple_platform.py b/prelude/apple/tools/code_signing/apple_platform.py index 751f652ee..ee32486c8 100644 --- a/prelude/apple/tools/code_signing/apple_platform.py +++ b/prelude/apple/tools/code_signing/apple_platform.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from enum import Enum from typing import Optional diff --git a/prelude/apple/tools/code_signing/codesign_command_factory.py b/prelude/apple/tools/code_signing/codesign_command_factory.py index bf77cbfb1..9a90e0323 100644 --- a/prelude/apple/tools/code_signing/codesign_command_factory.py +++ b/prelude/apple/tools/code_signing/codesign_command_factory.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from abc import ABCMeta, abstractmethod from pathlib import Path from typing import List, Optional, Union diff --git a/prelude/apple/tools/code_signing/fast_adhoc.py b/prelude/apple/tools/code_signing/fast_adhoc.py index e752232da..9d79c57e8 100644 --- a/prelude/apple/tools/code_signing/fast_adhoc.py +++ b/prelude/apple/tools/code_signing/fast_adhoc.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import logging import os import subprocess diff --git a/prelude/apple/tools/code_signing/identity.py b/prelude/apple/tools/code_signing/identity.py index 35e5006bd..191e52691 100644 --- a/prelude/apple/tools/code_signing/identity.py +++ b/prelude/apple/tools/code_signing/identity.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + from __future__ import annotations import re diff --git a/prelude/apple/tools/code_signing/info_plist_metadata.py b/prelude/apple/tools/code_signing/info_plist_metadata.py index 75f666fba..7778c7def 100644 --- a/prelude/apple/tools/code_signing/info_plist_metadata.py +++ b/prelude/apple/tools/code_signing/info_plist_metadata.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from __future__ import annotations from dataclasses import dataclass diff --git a/prelude/apple/tools/code_signing/list_codesign_identities.py b/prelude/apple/tools/code_signing/list_codesign_identities.py index b75ce678c..2a4d458b9 100644 --- a/prelude/apple/tools/code_signing/list_codesign_identities.py +++ b/prelude/apple/tools/code_signing/list_codesign_identities.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from __future__ import annotations import subprocess diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index 98dfbafe9..eba6fbf2a 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import pathlib import sys diff --git a/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py b/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py index a1e92299c..2ed16222b 100644 --- a/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py +++ b/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + import os import plistlib import tempfile diff --git a/prelude/apple/tools/code_signing/prepare_info_plist.py b/prelude/apple/tools/code_signing/prepare_info_plist.py index 6bd03d505..a5e7104e6 100644 --- a/prelude/apple/tools/code_signing/prepare_info_plist.py +++ b/prelude/apple/tools/code_signing/prepare_info_plist.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import os import tempfile from pathlib import Path diff --git a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py index fa207321e..1187cab2c 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from abc import ABCMeta, abstractmethod from pathlib import Path from typing import List, Optional, Type, TypeVar diff --git a/prelude/apple/tools/code_signing/provisioning_profile_selection.py b/prelude/apple/tools/code_signing/provisioning_profile_selection.py index e0747a581..c927b1d6f 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_selection.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_selection.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + import datetime import logging from dataclasses import dataclass diff --git a/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py b/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py index ed5b01a4d..c6f09fce8 100644 --- a/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py +++ b/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from abc import ABCMeta, abstractmethod from pathlib import Path from typing import List, Union diff --git a/prelude/apple/tools/dry_codesign_tool.py b/prelude/apple/tools/dry_codesign_tool.py index 71364ddb7..38a34e799 100644 --- a/prelude/apple/tools/dry_codesign_tool.py +++ b/prelude/apple/tools/dry_codesign_tool.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import plistlib import shutil diff --git a/prelude/apple/tools/info_plist_processor/main.py b/prelude/apple/tools/info_plist_processor/main.py index 157652f60..995b38577 100644 --- a/prelude/apple/tools/info_plist_processor/main.py +++ b/prelude/apple/tools/info_plist_processor/main.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse from contextlib import ExitStack from enum import Enum diff --git a/prelude/apple/tools/info_plist_processor/preprocess.py b/prelude/apple/tools/info_plist_processor/preprocess.py index cdf87febf..937959fe8 100644 --- a/prelude/apple/tools/info_plist_processor/preprocess.py +++ b/prelude/apple/tools/info_plist_processor/preprocess.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + import json import re from enum import Enum diff --git a/prelude/apple/tools/info_plist_processor/process.py b/prelude/apple/tools/info_plist_processor/process.py index 9161063f8..178bcbfc2 100644 --- a/prelude/apple/tools/info_plist_processor/process.py +++ b/prelude/apple/tools/info_plist_processor/process.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import json import plistlib from typing import Any, Dict, IO, Optional, TextIO diff --git a/prelude/apple/tools/ipa_package_maker.py b/prelude/apple/tools/ipa_package_maker.py index 6cbd21157..ac3275d67 100644 --- a/prelude/apple/tools/ipa_package_maker.py +++ b/prelude/apple/tools/ipa_package_maker.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import os import shutil diff --git a/prelude/apple/tools/plistlib_utils.py b/prelude/apple/tools/plistlib_utils.py index 63ea7a356..39141677c 100644 --- a/prelude/apple/tools/plistlib_utils.py +++ b/prelude/apple/tools/plistlib_utils.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import plistlib from io import BytesIO from typing import Any, Dict, IO diff --git a/prelude/apple/tools/re_compatibility_utils/writable.py b/prelude/apple/tools/re_compatibility_utils/writable.py index 7fc1aec5a..b6c0ee90d 100644 --- a/prelude/apple/tools/re_compatibility_utils/writable.py +++ b/prelude/apple/tools/re_compatibility_utils/writable.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + import os import platform import stat diff --git a/prelude/apple/tools/resource_broker/idb_companion.py b/prelude/apple/tools/resource_broker/idb_companion.py index f831e32cc..aa2b450a3 100644 --- a/prelude/apple/tools/resource_broker/idb_companion.py +++ b/prelude/apple/tools/resource_broker/idb_companion.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import os import signal from dataclasses import dataclass diff --git a/prelude/apple/tools/resource_broker/idb_target.py b/prelude/apple/tools/resource_broker/idb_target.py index 37de481dc..bfa7630d6 100644 --- a/prelude/apple/tools/resource_broker/idb_target.py +++ b/prelude/apple/tools/resource_broker/idb_target.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import json from dataclasses import dataclass from enum import Enum diff --git a/prelude/apple/tools/resource_broker/ios.py b/prelude/apple/tools/resource_broker/ios.py index 379367c08..12bdcf841 100644 --- a/prelude/apple/tools/resource_broker/ios.py +++ b/prelude/apple/tools/resource_broker/ios.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import os from typing import List, Optional diff --git a/prelude/apple/tools/resource_broker/macos.py b/prelude/apple/tools/resource_broker/macos.py index d3aeaa4f9..ad103a031 100644 --- a/prelude/apple/tools/resource_broker/macos.py +++ b/prelude/apple/tools/resource_broker/macos.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + import asyncio from typing import cast, List diff --git a/prelude/apple/tools/resource_broker/main.py b/prelude/apple/tools/resource_broker/main.py index 0600a351e..15a23ba91 100644 --- a/prelude/apple/tools/resource_broker/main.py +++ b/prelude/apple/tools/resource_broker/main.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import asyncio import json diff --git a/prelude/apple/tools/resource_broker/simctl_runtime.py b/prelude/apple/tools/resource_broker/simctl_runtime.py index 55a5740d2..6787b2b5c 100644 --- a/prelude/apple/tools/resource_broker/simctl_runtime.py +++ b/prelude/apple/tools/resource_broker/simctl_runtime.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import json from dataclasses import dataclass, field from typing import List, Optional diff --git a/prelude/apple/tools/resource_broker/timeouts.py b/prelude/apple/tools/resource_broker/timeouts.py index 018044687..a5694dd67 100644 --- a/prelude/apple/tools/resource_broker/timeouts.py +++ b/prelude/apple/tools/resource_broker/timeouts.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + DEFAULT_OPERATION_TIMEOUT = 10 # Simulator boot is an expensive command and can take a long time to complete diff --git a/prelude/apple/tools/resource_broker/utils.py b/prelude/apple/tools/resource_broker/utils.py index 5128fd19b..80d36e716 100644 --- a/prelude/apple/tools/resource_broker/utils.py +++ b/prelude/apple/tools/resource_broker/utils.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + import asyncio import json import shlex diff --git a/prelude/apple/tools/selective_debugging/macho.py b/prelude/apple/tools/selective_debugging/macho.py index 9b4964891..8371cbeb6 100644 --- a/prelude/apple/tools/selective_debugging/macho.py +++ b/prelude/apple/tools/selective_debugging/macho.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from dataclasses import dataclass MH_MAGIC = 0xFEEDFACE diff --git a/prelude/apple/tools/selective_debugging/macho_parser.py b/prelude/apple/tools/selective_debugging/macho_parser.py index 76e60512f..d9717c9db 100644 --- a/prelude/apple/tools/selective_debugging/macho_parser.py +++ b/prelude/apple/tools/selective_debugging/macho_parser.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import sys from typing import BinaryIO, List, Optional, Tuple diff --git a/prelude/apple/tools/selective_debugging/main.py b/prelude/apple/tools/selective_debugging/main.py index aa5e2b4af..d1dc98de3 100644 --- a/prelude/apple/tools/selective_debugging/main.py +++ b/prelude/apple/tools/selective_debugging/main.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import sys diff --git a/prelude/apple/tools/selective_debugging/scrubber.py b/prelude/apple/tools/selective_debugging/scrubber.py index 2600ba5b8..a3f75ab16 100644 --- a/prelude/apple/tools/selective_debugging/scrubber.py +++ b/prelude/apple/tools/selective_debugging/scrubber.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + import json import os diff --git a/prelude/apple/tools/selective_debugging/spec.py b/prelude/apple/tools/selective_debugging/spec.py index 6bba9c3b8..35fa26ca8 100644 --- a/prelude/apple/tools/selective_debugging/spec.py +++ b/prelude/apple/tools/selective_debugging/spec.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import json import re from dataclasses import dataclass, field diff --git a/prelude/apple/tools/selective_debugging/utils.py b/prelude/apple/tools/selective_debugging/utils.py index e9a92803a..0d0d78230 100644 --- a/prelude/apple/tools/selective_debugging/utils.py +++ b/prelude/apple/tools/selective_debugging/utils.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + class MachOException(Exception): pass diff --git a/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py b/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py index 77ec8a7c8..f3897b936 100644 --- a/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py +++ b/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import os import shutil diff --git a/prelude/python/runtime/__par__/bootstrap.py b/prelude/python/runtime/__par__/bootstrap.py index 83bf5542b..1cf7a424a 100644 --- a/prelude/python/runtime/__par__/bootstrap.py +++ b/prelude/python/runtime/__par__/bootstrap.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + from __future__ import annotations import os diff --git a/prelude/python/tools/create_manifest_for_source_dir.py b/prelude/python/tools/create_manifest_for_source_dir.py index 96b011f3e..4f086154c 100755 --- a/prelude/python/tools/create_manifest_for_source_dir.py +++ b/prelude/python/tools/create_manifest_for_source_dir.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import json import os diff --git a/prelude/python/tools/extract.py b/prelude/python/tools/extract.py index 843241d8a..6b3eef77c 100755 --- a/prelude/python/tools/extract.py +++ b/prelude/python/tools/extract.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + """ Quick and dirty wrapper to extract zip files; python 3.6.2+ diff --git a/prelude/python/tools/fail_with_message.py b/prelude/python/tools/fail_with_message.py index dd06ee5a9..bf4dba134 100644 --- a/prelude/python/tools/fail_with_message.py +++ b/prelude/python/tools/fail_with_message.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import sys from pathlib import Path diff --git a/prelude/python/tools/generate_static_extension_info.py b/prelude/python/tools/generate_static_extension_info.py index d0679a608..da569f56d 100644 --- a/prelude/python/tools/generate_static_extension_info.py +++ b/prelude/python/tools/generate_static_extension_info.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + import argparse import sys from typing import List diff --git a/prelude/python/tools/make_par/__run_lpar_main__.py b/prelude/python/tools/make_par/__run_lpar_main__.py index 48da2bbdf..8605c5ef3 100644 --- a/prelude/python/tools/make_par/__run_lpar_main__.py +++ b/prelude/python/tools/make_par/__run_lpar_main__.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + # # Put everything inside an __invoke_main() function. diff --git a/prelude/python/tools/make_par/sitecustomize.py b/prelude/python/tools/make_par/sitecustomize.py index f36c8cbfe..152a34e17 100644 --- a/prelude/python/tools/make_par/sitecustomize.py +++ b/prelude/python/tools/make_par/sitecustomize.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from __future__ import annotations import multiprocessing.util as mp_util diff --git a/prelude/python/tools/make_py_package_inplace.py b/prelude/python/tools/make_py_package_inplace.py index eb993a155..d966770e9 100755 --- a/prelude/python/tools/make_py_package_inplace.py +++ b/prelude/python/tools/make_py_package_inplace.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + """ Create a bootstrapper pex for inplace python binaries diff --git a/prelude/python/tools/make_py_package_manifest_module.py b/prelude/python/tools/make_py_package_manifest_module.py index 1d8506864..89427237b 100755 --- a/prelude/python/tools/make_py_package_manifest_module.py +++ b/prelude/python/tools/make_py_package_manifest_module.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + """ Generate a __manifest__.py module containing build metadata for a Python package. 
""" diff --git a/prelude/python/tools/make_py_package_modules.py b/prelude/python/tools/make_py_package_modules.py index 07e247df1..db591ec71 100755 --- a/prelude/python/tools/make_py_package_modules.py +++ b/prelude/python/tools/make_py_package_modules.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + """ Create the link tree for inplace Python binaries. diff --git a/prelude/python/tools/make_source_db_no_deps.py b/prelude/python/tools/make_source_db_no_deps.py index d764f8fe5..547df4854 100644 --- a/prelude/python/tools/make_source_db_no_deps.py +++ b/prelude/python/tools/make_source_db_no_deps.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + """ Creates a Python Source DB JSON file from Python manifest JSON file (e.g. for use with Pyre). diff --git a/prelude/python/tools/parse_imports.py b/prelude/python/tools/parse_imports.py index a0c2bf9ac..6b97b5de6 100644 --- a/prelude/python/tools/parse_imports.py +++ b/prelude/python/tools/parse_imports.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import argparse import ast import json diff --git a/prelude/python/tools/py38stdlib.py b/prelude/python/tools/py38stdlib.py index 30052528f..f5d33a6b4 100644 --- a/prelude/python/tools/py38stdlib.py +++ b/prelude/python/tools/py38stdlib.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + # This is list is "borrowed" from https://github.com/amyreese/stdlibs from typing import FrozenSet diff --git a/prelude/python/tools/sourcedb_merger/inputs.py b/prelude/python/tools/sourcedb_merger/inputs.py index 8ccc78304..db2d4ed24 100644 --- a/prelude/python/tools/sourcedb_merger/inputs.py +++ b/prelude/python/tools/sourcedb_merger/inputs.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-ignore-all-errors + import dataclasses import json import pathlib diff --git a/prelude/python/tools/sourcedb_merger/legacy_merge.py b/prelude/python/tools/sourcedb_merger/legacy_merge.py index ecd6c61e1..b6d977d11 100644 --- a/prelude/python/tools/sourcedb_merger/legacy_merge.py +++ b/prelude/python/tools/sourcedb_merger/legacy_merge.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-ignore-all-errors + import argparse import pathlib import sys diff --git a/prelude/python/tools/sourcedb_merger/legacy_outputs.py b/prelude/python/tools/sourcedb_merger/legacy_outputs.py index aefebb22b..89ecd93fe 100644 --- a/prelude/python/tools/sourcedb_merger/legacy_outputs.py +++ b/prelude/python/tools/sourcedb_merger/legacy_outputs.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-ignore-all-errors + import dataclasses import json import pathlib diff --git a/prelude/python/tools/sourcedb_merger/merge.py b/prelude/python/tools/sourcedb_merger/merge.py index 6f641c040..b21a00a9e 100644 --- a/prelude/python/tools/sourcedb_merger/merge.py +++ b/prelude/python/tools/sourcedb_merger/merge.py @@ -6,6 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-ignore-all-errors + import argparse import pathlib import sys diff --git a/prelude/python/tools/sourcedb_merger/outputs.py b/prelude/python/tools/sourcedb_merger/outputs.py index 7a7e4f88e..40e6ef688 100644 --- a/prelude/python/tools/sourcedb_merger/outputs.py +++ b/prelude/python/tools/sourcedb_merger/outputs.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-ignore-all-errors + import dataclasses import json import pathlib diff --git a/prelude/python/tools/sourcedb_merger/tests/__init__.py b/prelude/python/tools/sourcedb_merger/tests/__init__.py index b404f6ed6..858c58d3c 100644 --- a/prelude/python/tools/sourcedb_merger/tests/__init__.py +++ b/prelude/python/tools/sourcedb_merger/tests/__init__.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-ignore-all-errors + from .inputs_test import * # noqa from .legacy_output_test import * # noqa from .outputs_test import * # noqa diff --git a/prelude/python/tools/sourcedb_merger/tests/inputs_test.py b/prelude/python/tools/sourcedb_merger/tests/inputs_test.py index c671e1b61..da77a5463 100644 --- a/prelude/python/tools/sourcedb_merger/tests/inputs_test.py +++ b/prelude/python/tools/sourcedb_merger/tests/inputs_test.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-ignore-all-errors + import contextlib import json import os diff --git a/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py b/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py index 267a135f2..4457ac417 100644 --- a/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py +++ b/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-ignore-all-errors + import unittest from typing import Mapping diff --git a/prelude/python/tools/sourcedb_merger/tests/outputs_test.py b/prelude/python/tools/sourcedb_merger/tests/outputs_test.py index 91affd805..7b3db01c2 100644 --- a/prelude/python/tools/sourcedb_merger/tests/outputs_test.py +++ b/prelude/python/tools/sourcedb_merger/tests/outputs_test.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-ignore-all-errors + import unittest from typing import Mapping diff --git a/prelude/python/tools/static_extension_finder.py b/prelude/python/tools/static_extension_finder.py index f3be8f919..c4c1171f7 100644 --- a/prelude/python/tools/static_extension_finder.py +++ b/prelude/python/tools/static_extension_finder.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + # Add a try except to force eager importing try: diff --git a/prelude/python/tools/traverse_dep_manifest.py b/prelude/python/tools/traverse_dep_manifest.py index cc7c5e45b..6e73c9414 100644 --- a/prelude/python/tools/traverse_dep_manifest.py +++ b/prelude/python/tools/traverse_dep_manifest.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+# pyre-strict + import argparse import json From 688ee9c062fc50dc112878e797bfcd4d9a7578c5 Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Wed, 6 Mar 2024 21:14:40 -0800 Subject: [PATCH 0401/1133] Add build_config_dot_java subtarget to android_build_config Summary: TSIA Differential Revision: D54608673 fbshipit-source-id: 5517f1db4b9a5e1ea81334d25958ae42f003ec47 --- prelude/android/android_build_config.bzl | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/prelude/android/android_build_config.bzl b/prelude/android/android_build_config.bzl index b1a32b8a3..e900add65 100644 --- a/prelude/android/android_build_config.bzl +++ b/prelude/android/android_build_config.bzl @@ -26,7 +26,7 @@ def android_build_config_impl(ctx: AnalysisContext) -> list[Provider]: providers.append(android_build_config_info) providers.append(merge_android_packageable_info(ctx.label, ctx.actions, deps = [], build_config_info = android_build_config_info)) - build_config_dot_java_library, java_packaging_info = generate_android_build_config( + build_config_dot_java_library, java_packaging_info, build_config_dot_java = generate_android_build_config( ctx, ctx.attrs.name, ctx.attrs.package, @@ -38,7 +38,14 @@ def android_build_config_impl(ctx: AnalysisContext) -> list[Provider]: providers.append(java_packaging_info) providers.append(build_config_dot_java_library) - providers.append(DefaultInfo(default_output = build_config_dot_java_library.library_output.full_library)) + providers.append( + DefaultInfo( + default_output = build_config_dot_java_library.library_output.full_library, + sub_targets = { + "build_config_dot_java": [DefaultInfo(default_output = build_config_dot_java)], + }, + ), + ) return providers def generate_android_build_config( @@ -47,7 +54,7 @@ def generate_android_build_config( java_package: str, use_constant_expressions: bool, default_values: list[BuildConfigField], - values_file: [Artifact, None]) -> (JavaLibraryInfo, JavaPackagingInfo): + 
values_file: [Artifact, None]) -> (JavaLibraryInfo, JavaPackagingInfo, Artifact): build_config_dot_java = _generate_build_config_dot_java(ctx, source, java_package, use_constant_expressions, default_values, values_file) compiled_build_config_dot_java = _compile_and_package_build_config_dot_java(ctx, java_package, build_config_dot_java) @@ -61,7 +68,7 @@ def generate_android_build_config( output_for_classpath_macro = library_output.full_library, ), JavaPackagingInfo( packaging_deps = packaging_deps, - )) + ), build_config_dot_java) def _generate_build_config_dot_java( ctx: AnalysisContext, From ba0cdd70081a4c1040f404c4896a88f141eab8b7 Mon Sep 17 00:00:00 2001 From: Cameron Pickett Date: Wed, 6 Mar 2024 21:24:47 -0800 Subject: [PATCH 0402/1133] fix Windows clippy.* subtargets Summary: The logic here resulted in a `DO ()` expression when skipping sysroot which causes a syntax error when building for windows. Fix here is to omit most of the logic setting sysroot to avoid the syntax error. Reviewed By: shayne-fletcher Differential Revision: D54597316 fbshipit-source-id: bd2d7ffb87a396fcef55ad2e45ea387de4718e85 --- prelude/rust/build.bzl | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 07052475e..b903495bc 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -1052,11 +1052,8 @@ def _clippy_wrapper( [ "@echo off", "set __CLIPPY_INTERNAL_TESTS=true", - cmd_args(rustc_print_sysroot, format = 'FOR /F "tokens=* USEBACKQ" %%F IN (`{}`) DO ('), - ] + ( - [] if skip_setting_sysroot else ["set SYSROOT=%%F"] - ) + [ - ")", + ] + [ + cmd_args(rustc_print_sysroot, format = 'FOR /F "tokens=* USEBACKQ" %%F IN (`{}`) DO (set SYSROOT=%%F)') if not skip_setting_sysroot else "", cmd_args(clippy_driver, format = "{} %*"), ], allow_args = True, From 24967ad4650e9b3e517fdd5c8d4560d7306c8045 Mon Sep 17 00:00:00 2001 From: Vincent Lee Date: Wed, 6 Mar 2024 23:10:12 -0800 Subject: [PATCH 0403/1133] 
Default android builds to arm64 Summary: I'm not sure of any good justification for keeping the default `android` mapping to `x86` especially since many app builds are moving to `arm64` as the default (e.g. [fb4a](https://fb.workplace.com/groups/androidfyi/permalink/2960957143952881/)). Most users should already be pinned to the correct target platform anyway, but for compiler engineers, we build the libraries directly (e.g. `libc++`) to verify correctness, and easier for us to specify it without modifier configs every time we need to build for the arm64 platform. Reviewed By: IanChilds Differential Revision: D54564400 fbshipit-source-id: 85d378ce55646f380f955642569fa348ed1a04e4 --- prelude/android/configuration.bzl | 2 +- prelude/android/cpu_filters.bzl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/android/configuration.bzl b/prelude/android/configuration.bzl index f383e99f0..70ad1ef96 100644 --- a/prelude/android/configuration.bzl +++ b/prelude/android/configuration.bzl @@ -23,7 +23,7 @@ load("@prelude//utils:expect.bzl", "expect") # platforms). We only use the "arm64" native libraries if it is one of the specified platforms. We # "throw away" the non-native libraries for all other configured sub-graphs. -_DEFAULT_PLATFORM = "config//platform/android:x86_32-fbsource" +_DEFAULT_PLATFORM = "config//platform/android:arm64-fbsource" _REFS = { "arm64": "config//cpu/constraints:arm64", diff --git a/prelude/android/cpu_filters.bzl b/prelude/android/cpu_filters.bzl index 796d6a16b..814a38c2d 100644 --- a/prelude/android/cpu_filters.bzl +++ b/prelude/android/cpu_filters.bzl @@ -14,7 +14,7 @@ CPU_FILTER_TO_ABI_DIRECTORY = { ALL_CPU_FILTERS = CPU_FILTER_TO_ABI_DIRECTORY.keys() -CPU_FILTER_FOR_DEFAULT_PLATFORM = "x86" +CPU_FILTER_FOR_DEFAULT_PLATFORM = "arm64" # The "primary platform" is the one that we use for all # the non-native targets. 
We keep this consistent regardless From 7f4a01dadabdd1a233c5271ca12bc4ab39139e27 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Thu, 7 Mar 2024 02:38:50 -0800 Subject: [PATCH 0404/1133] introduce possibility to build library as a mach-o bundle MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: To build as a Mach-O bundle instead of dylib you need to link with `-bundle` instead of `-shared`. Buck1 has a `#mach-o-bundle` flavor to apply on `apple_library` to get the Mach-O bundle result. Though we can't map buck1 flavor to buck2 subtarget easily — it's not the extra output and we probably want everything that root target provides (e.g. same subtargets including linker maps etc). Just reusing current `apple_library` implementation doesn't work cause we need to resolve path conflicts for almost every result and intermediate artifact in apple/cxx rule stack. So better choice would be either to introduce `apple_mach_o_bundle` rule or just introduce way to amend `apple_library` behavior. The latter is better way for backwards compatibility with buck1 which is why I implemented it in this diff. 
Reviewed By: milend Differential Revision: D54584428 fbshipit-source-id: a519219820190e36627c2d9388e97468c690ae07 --- prelude/apple/apple_library.bzl | 19 +++++++++++++++++++ prelude/apple/apple_rules_impl.bzl | 4 +++- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index ccb920d5e..2a3232e89 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -84,6 +84,13 @@ load(":resource_groups.bzl", "create_resource_graph") load(":xcode.bzl", "apple_populate_xcode_attributes") load(":xctest_swift_support.bzl", "xctest_swift_support_info") +AppleSharedLibraryMachOFileType = enum( + # dynamicly bound shared library file + "dylib", + # dynamicly bound bundle file aka Mach-O bundle + "bundle", +) + AppleLibraryAdditionalParams = record( # Name of the top level rule utilizing the apple_library rule. rule_type = str, @@ -108,6 +115,17 @@ AppleLibraryAdditionalParams = record( def apple_library_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: def get_apple_library_providers(deps_providers) -> list[Provider]: + shared_type = AppleSharedLibraryMachOFileType(ctx.attrs.shared_library_macho_file_type) + if shared_type == AppleSharedLibraryMachOFileType("bundle"): + shared_library_flags_overrides = SharedLibraryFlagOverrides( + # When `-bundle` is used we can't use the `-install_name` args, thus we keep this field empty. 
+ shared_library_name_linker_flags_format = [], + shared_library_flags = ["-bundle"], + ) + elif shared_type == AppleSharedLibraryMachOFileType("dylib"): + shared_library_flags_overrides = None + else: + fail("Unsupported `shared_library_macho_file_type` attribute value: `{}`".format(shared_type)) constructor_params = apple_library_rule_constructor_params_and_swift_providers( ctx, AppleLibraryAdditionalParams( @@ -119,6 +137,7 @@ def apple_library_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: # We generate a provider on our own, disable to avoid several providers of same type. cxx_resources_as_apple_resources = False, ), + shared_library_flags = shared_library_flags_overrides, ), deps_providers, ) diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index f7aded0d3..77c776220 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -24,7 +24,7 @@ load(":apple_binary.bzl", "apple_binary_impl") load(":apple_bundle.bzl", "apple_bundle_impl") load(":apple_bundle_types.bzl", "AppleBundleInfo") load(":apple_core_data.bzl", "apple_core_data_impl") -load(":apple_library.bzl", "apple_library_impl") +load(":apple_library.bzl", "AppleSharedLibraryMachOFileType", "apple_library_impl") load(":apple_package.bzl", "apple_package_impl") load(":apple_package_config.bzl", "IpaCompressionLevel") load(":apple_resource.bzl", "apple_resource_impl") @@ -112,6 +112,8 @@ def _apple_library_extra_attrs(): "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), "preferred_linkage": attrs.enum(Linkage, default = "any"), "serialize_debugging_options": attrs.bool(default = True), + # Mach-O file type for binary when the target is built as a shared library. 
+ "shared_library_macho_file_type": attrs.enum(AppleSharedLibraryMachOFileType.values(), default = "dylib"), "stripped": attrs.option(attrs.bool(), default = None), "supports_header_symlink_subtarget": attrs.bool(default = False), "supports_shlib_interfaces": attrs.bool(default = True), From a0deae2e1418f9e26252ef5d273b1b201563a5ac Mon Sep 17 00:00:00 2001 From: Balaji S Date: Thu, 7 Mar 2024 03:14:03 -0800 Subject: [PATCH 0405/1133] Reduce stdout summary shown in tpx results Summary: * Reduce number of lines logged to summary on test failures Reviewed By: TheGeorge Differential Revision: D54630518 fbshipit-source-id: e9786ae760b0a89e0242907f2851c331d3df0c27 --- prelude/erlang/common_test/test_exec/src/ct_executor.erl | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/prelude/erlang/common_test/test_exec/src/ct_executor.erl b/prelude/erlang/common_test/test_exec/src/ct_executor.erl index af35d10df..e7c8e0f2d 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_executor.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_executor.erl @@ -26,6 +26,9 @@ % For testing -export([split_args/1]). +-define(STDOUT_MAX_LINES, 1000). +-define(STDOUT_MAX_LINE_LENGTH, 10000). + -spec run([string()]) -> no_return(). 
run(Args) when is_list(Args) -> ExitCode = @@ -71,7 +74,9 @@ run(Args) when is_list(Args) -> erlang:system_flag(backtrace_depth, 20), ?LOG_DEBUG("ct_run called with arguments ~p ~n", [CtRunArgs]), Providers1 = [buck_ct_provider:do_pre_running(Provider) || Provider <- Providers0], - {ok, IoBuffer} = io_buffer:start_link(), + {ok, IoBuffer} = io_buffer:start_link(#{ + passthrough => true, max_elements => ?STDOUT_MAX_LINES, max_length => ?STDOUT_MAX_LINE_LENGTH + }), register(cth_tpx_io_buffer, IoBuffer), %% set global timeout Result = ct:run_test(CtRunArgs), From d2d61ca9d7cb1c03c3cc9c34faed6d8b44b0bdab Mon Sep 17 00:00:00 2001 From: Balaji S Date: Thu, 7 Mar 2024 06:00:31 -0800 Subject: [PATCH 0406/1133] Avoid copying of logs when it has reached max limit Summary: * Avoid copying logs again because of using `++` operator as we are anyway displaying tail of the log, we can have the truncated statement at the top Reviewed By: TheGeorge Differential Revision: D54630517 fbshipit-source-id: 4de291a7359bde9e9f649886e892868325da5f83 --- prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl b/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl index fe7be1216..4603643b4 100644 --- a/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl +++ b/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl @@ -497,13 +497,15 @@ add_result( filename:dirname(OutputFile), "ct_executor.stdout.txt" ) end, - Io ++ + [ io_lib:format( - "\n The std_out has been truncated, see ~s for the full suite std_out.", + "The stdout logs have been truncated, see ~s for the full suite stdout. 
Showing tail below\n", [ StdOutLocation ] - ); + ) + | Io + ]; false -> Io end From 4c3775d1d4a87793ef35f422e7efa8a9d7e64ed1 Mon Sep 17 00:00:00 2001 From: Balaji S Date: Thu, 7 Mar 2024 08:36:41 -0800 Subject: [PATCH 0407/1133] Use SANDCASTLE env variable correctly in cth_tpx Summary: * `os:getenv(` returns a string or `false` if not set. However the cth code assumes it return true / false. * We change it to be similar to https://fburl.com/code/9zzs1so8 Reviewed By: jcpetruzza Differential Revision: D54633611 fbshipit-source-id: 063907ed89d7dbcb340479beca605ab38faf9f4f --- .../erlang/common_test/cth_hooks/src/cth_tpx.erl | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl b/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl index 4603643b4..ce4479c59 100644 --- a/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl +++ b/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl @@ -489,7 +489,7 @@ add_result( case Truncated of true -> StdOutLocation = - case os:getenv("SANDCASTLE") of + case is_running_in_sandcastle() of true -> "tab Diagnostics: Artifacts/ct_executor.stdout.txt"; _ -> @@ -674,3 +674,15 @@ modify_shared_state(HookState, Caller, Action) -> {ok, Action(State)} end), NewHookState. + +-spec is_running_in_sandcastle() -> boolean(). +is_running_in_sandcastle() -> + case os:getenv("SANDCASTLE_DIFF_ID") of + [$D | _] -> + true; + _ -> + case os:getenv("SANDCASTLE") of + false -> false; + _ -> true + end + end. From 020474e9340c081a379e1339261db3f6b9dc9451 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Thu, 7 Mar 2024 10:29:40 -0800 Subject: [PATCH 0408/1133] add bundle_config() to apple_xcuitest Summary: Add a macro for `apple_xcuitest` and apple `apple_bundle_config()`. This is required to correctly configure codesigning. 
Reviewed By: blackm00n Differential Revision: D54638026 fbshipit-source-id: e6aa6f3937aa312ebba136006cc52c35de652ffb --- prelude/apple/apple_macro_layer.bzl | 6 ++++++ prelude/native.bzl | 9 ++++++++- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/prelude/apple/apple_macro_layer.bzl b/prelude/apple/apple_macro_layer.bzl index 3d39921e1..ffb2735f4 100644 --- a/prelude/apple/apple_macro_layer.bzl +++ b/prelude/apple/apple_macro_layer.bzl @@ -83,6 +83,12 @@ def apple_test_macro_impl(apple_test_rule, apple_resource_bundle_rule, **kwargs) **kwargs ) +def apple_xcuitest_macro_impl(apple_xcuitest_rule, **kwargs): + kwargs.update(apple_bundle_config()) + apple_xcuitest_rule( + **kwargs + ) + def apple_bundle_macro_impl(apple_bundle_rule, apple_resource_bundle_rule, **kwargs): info_plist_substitutions = kwargs.get("info_plist_substitutions") kwargs.update(apple_bundle_config()) diff --git a/prelude/native.bzl b/prelude/native.bzl index 89b493a40..b05f61cb1 100644 --- a/prelude/native.bzl +++ b/prelude/native.bzl @@ -12,7 +12,7 @@ # **all** interpreted files. 
load("@prelude//android:cpu_filters.bzl", "ALL_CPU_FILTERS", "CPU_FILTER_FOR_DEFAULT_PLATFORM") -load("@prelude//apple:apple_macro_layer.bzl", "apple_binary_macro_impl", "apple_bundle_macro_impl", "apple_library_macro_impl", "apple_package_macro_impl", "apple_test_macro_impl", "apple_universal_executable_macro_impl") +load("@prelude//apple:apple_macro_layer.bzl", "apple_binary_macro_impl", "apple_bundle_macro_impl", "apple_library_macro_impl", "apple_package_macro_impl", "apple_test_macro_impl", "apple_universal_executable_macro_impl", "apple_xcuitest_macro_impl") load("@prelude//apple/swift:swift_toolchain_macro_layer.bzl", "swift_toolchain_macro_impl") load("@prelude//cxx:cxx_toolchain.bzl", "cxx_toolchain_inheriting_target_platform") load("@prelude//cxx:cxx_toolchain_macro_layer.bzl", "cxx_toolchain_macro_impl") @@ -342,6 +342,12 @@ def _apple_test_macro_stub(**kwargs): **kwargs ) +def _apple_xcuitest_macro_stub(**kwargs): + apple_xcuitest_macro_impl( + apple_xcuitest_rule = __rules__["apple_xcuitest"], + **kwargs + ) + def _apple_binary_macro_stub(**kwargs): apple_binary_macro_impl( apple_binary_rule = __rules__["apple_binary"], @@ -444,6 +450,7 @@ __extra_rules__ = { "apple_test": _apple_test_macro_stub, "apple_universal_executable": _apple_universal_executable_macro_stub, "apple_watchos_bundle": _apple_watchos_bundle_macro_stub, + "apple_xcuitest": _apple_xcuitest_macro_stub, "configured_alias": _configured_alias_macro_stub, "cxx_toolchain": _cxx_toolchain_macro_stub, "cxx_toolchain_override": _cxx_toolchain_override_macro_stub, From 925313a3ed9497f2a69c3bdc66c2a5f93af55759 Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Thu, 7 Mar 2024 13:06:43 -0800 Subject: [PATCH 0409/1133] Make android_binary respect values_file for build config deps Summary: Currently the `values_file` field for an `android_build_config` is only used when the build_config target is built in isolation, not as a dependency of an android_binary. 
From the docs for `android_build_config.values_file`: `To override the values in an APK, specify build_config_values or build_config_values_file in android_binary().` Reviewed By: lcharriere Differential Revision: D54521849 fbshipit-source-id: 78519b4a551b14c2d9563897ed8deeaea55fd06d --- prelude/android/android_binary.bzl | 5 ++++- prelude/android/android_build_config.bzl | 2 +- prelude/android/android_providers.bzl | 1 + 3 files changed, 6 insertions(+), 2 deletions(-) diff --git a/prelude/android/android_binary.bzl b/prelude/android/android_binary.bzl index a47e4e4e7..f83d25a55 100644 --- a/prelude/android/android_binary.bzl +++ b/prelude/android/android_binary.bzl @@ -210,6 +210,8 @@ def get_build_config_java_libraries( default_build_config_fields = get_build_config_fields(ctx.attrs.build_config_values) + android_binary_values_file = ctx.attrs.build_config_values_file[DefaultInfo].default_outputs[0] if isinstance(ctx.attrs.build_config_values_file, Dependency) else ctx.attrs.build_config_values_file + java_libraries = [] java_packages_seen = [] for build_config_info in build_config_infos: @@ -221,13 +223,14 @@ def get_build_config_java_libraries( for build_config_field in build_config_info.build_config_fields + default_build_config_fields + build_config_constants: all_build_config_values[build_config_field.name] = build_config_field + values_file = android_binary_values_file if android_binary_values_file else build_config_info.values_file java_libraries.append(generate_android_build_config( ctx, java_package, java_package, True, # use_constant_expressions all_build_config_values.values(), - ctx.attrs.build_config_values_file[DefaultInfo].default_outputs[0] if isinstance(ctx.attrs.build_config_values_file, Dependency) else ctx.attrs.build_config_values_file, + values_file, )[1]) return java_libraries diff --git a/prelude/android/android_build_config.bzl b/prelude/android/android_build_config.bzl index e900add65..452b3132c 100644 --- 
a/prelude/android/android_build_config.bzl +++ b/prelude/android/android_build_config.bzl @@ -22,7 +22,7 @@ def android_build_config_impl(ctx: AnalysisContext) -> list[Provider]: providers = [] default_build_config_fields = get_build_config_fields(ctx.attrs.values) - android_build_config_info = AndroidBuildConfigInfo(package = ctx.attrs.package, build_config_fields = default_build_config_fields) + android_build_config_info = AndroidBuildConfigInfo(package = ctx.attrs.package, build_config_fields = default_build_config_fields, values_file = ctx.attrs.values_file) providers.append(android_build_config_info) providers.append(merge_android_packageable_info(ctx.label, ctx.actions, deps = [], build_config_info = android_build_config_info)) diff --git a/prelude/android/android_providers.bzl b/prelude/android/android_providers.bzl index f6f850620..774bb24df 100644 --- a/prelude/android/android_providers.bzl +++ b/prelude/android/android_providers.bzl @@ -97,6 +97,7 @@ AndroidBuildConfigInfo = provider( fields = { "package": str, "build_config_fields": list[BuildConfigField], + "values_file": provider_field(typing.Any, default = None), }, ) From ca1dcdf337c01e1059fdae005e352ea5a1249d71 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Thu, 7 Mar 2024 14:09:33 -0800 Subject: [PATCH 0410/1133] add logging to swift support bundling step Summary: log swift stdlib tool command Reviewed By: milend Differential Revision: D54632967 fbshipit-source-id: 84b2e1065e802c38ab4b7a294b622378aa8395a8 --- prelude/apple/tools/bundling/swift_support.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/prelude/apple/tools/bundling/swift_support.py b/prelude/apple/tools/bundling/swift_support.py index 91ef7644f..2137e897a 100644 --- a/prelude/apple/tools/bundling/swift_support.py +++ b/prelude/apple/tools/bundling/swift_support.py @@ -7,6 +7,7 @@ # pyre-strict +import logging import os import shlex import shutil @@ -16,6 +17,8 @@ from pathlib import Path from 
typing import List, Optional, Union +_LOGGER: logging.Logger = logging.getLogger(__name__) + @dataclass class SwiftSupportArguments: @@ -39,10 +42,11 @@ def run_swift_stdlib_tool( env = os.environ.copy() # xcrun doesn't like relative paths env["SDKROOT"] = os.path.abspath(args.sdk_root) - result = subprocess.run( - _execution_command(bundle_path, signing_identity, args, tmp_dir), - env=env, + cmd = _execution_command(bundle_path, signing_identity, args, tmp_dir) + _LOGGER.info( + f"Running Swift stdlib tool with command: `{cmd}` and environment `{env}`." ) + result = subprocess.run(cmd, env=env) result.check_returncode() outputs = sorted(os.listdir(tmp_dir)) frameworks_path = bundle_path / args.frameworks_destination From 3adffaaf784bce9c3d236c12bf99476a1d4479e1 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Thu, 7 Mar 2024 14:09:33 -0800 Subject: [PATCH 0411/1133] codesign swift support on copy Summary: Remove the option when Swift stdlib tool is called with `--sign` argument. Reason is it's non-deterministic and keeps files with `.original` suffix in the output directory. That's newly created temporary directory content after stdlib tool ran: ``` akozhevnikov@akozhevnikov-mbp ~ % ls -l /Users/akozhevnikov/fbsource/buck-out/v2/tmp/fbsource/1a9611a81498f620/fbobjc/Apps/Diamond/Editor/__Editor__/apple_assemble_bundle_incremental/tmpbx6z1qv_ total 29632 -rwxr-xr-x 1 akozhevnikov staff 6456320 7 Mar 11:42 libswiftCore.dylib -rwxr-xr-x 1 akozhevnikov staff 6456208 7 Mar 11:42 libswiftCore.dylib.original -rwxr-xr-x 1 akozhevnikov staff 1124624 7 Mar 11:42 libswift_Concurrency.dylib -rwxr-xr-x 1 akozhevnikov staff 1124496 7 Mar 11:42 libswift_Concurrency.dylib.original ``` (from P1193443229 you can see originals are Apple signed, while .dylib files are signed with our certificate) In some cases there might be `.original` file but no signed counterpart. To properly fix that we better to just codesign swift dylibs later on copy. 
Reviewed By: milend Differential Revision: D54633019 fbshipit-source-id: 401c2cc3cd1f5baa8e3d16dc3a970c1871c17dcf --- prelude/apple/tools/bundling/main.py | 6 ++- prelude/apple/tools/bundling/swift_support.py | 46 ++++++++----------- 2 files changed, 23 insertions(+), 29 deletions(-) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index a95e148d6..851ad0a6d 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -367,7 +367,6 @@ def _main() -> None: if swift_support_args: swift_stdlib_paths = run_swift_stdlib_tool( bundle_path=args.output, - signing_identity=selected_identity_argument, args=swift_support_args, ) else: @@ -382,12 +381,15 @@ def _main() -> None: raise RuntimeError( "Expected signing context to be created before bundling is done if codesign is requested." ) + codesign_on_copy_paths = [ + i.dst for i in spec if i.codesign_on_copy + ] + swift_stdlib_paths codesign_bundle( bundle_path=args.output, signing_context=signing_context, entitlements_path=args.entitlements, platform=args.platform, - codesign_on_copy_paths=[i.dst for i in spec if i.codesign_on_copy], + codesign_on_copy_paths=codesign_on_copy_paths, codesign_args=args.codesign_args, codesign_tool=args.codesign_tool, codesign_configuration=args.codesign_configuration, diff --git a/prelude/apple/tools/bundling/swift_support.py b/prelude/apple/tools/bundling/swift_support.py index 2137e897a..d1c53bf78 100644 --- a/prelude/apple/tools/bundling/swift_support.py +++ b/prelude/apple/tools/bundling/swift_support.py @@ -31,10 +31,8 @@ class SwiftSupportArguments: sdk_root: Path -def run_swift_stdlib_tool( - bundle_path: Path, signing_identity: Optional[str], args: SwiftSupportArguments -) -> List[Path]: - # TODO(akozhevnikov) when incremental bundling is on, binary, frameworks and plugins are not changed, signing identity is unchanged skip this step. 
+def run_swift_stdlib_tool(bundle_path: Path, args: SwiftSupportArguments) -> List[Path]: + # TODO(T181556849) when incremental bundling is on, binary, frameworks and plugins are not changed, signing identity is unchanged skip this step. bundle_relative_output_paths = [] with tempfile.TemporaryDirectory() as tmp_dir: # When signing, swift-stdlib-tool needs a proper PATH environment variable. @@ -42,7 +40,7 @@ def run_swift_stdlib_tool( env = os.environ.copy() # xcrun doesn't like relative paths env["SDKROOT"] = os.path.abspath(args.sdk_root) - cmd = _execution_command(bundle_path, signing_identity, args, tmp_dir) + cmd = _execution_command(bundle_path, args, tmp_dir) _LOGGER.info( f"Running Swift stdlib tool with command: `{cmd}` and environment `{env}`." ) @@ -62,28 +60,22 @@ def run_swift_stdlib_tool( def _execution_command( bundle_path: Path, - signing_identity: Optional[str], args: SwiftSupportArguments, tmp_dir: str, ) -> List[Union[str, Path]]: - signing_args = ["--sign", signing_identity] if signing_identity else [] - return ( - shlex.split(args.swift_stdlib_command) - + [ - "--copy", - "--strip-bitcode", - "--scan-executable", - bundle_path / args.binary_destination, - "--scan-executable", - bundle_path / args.appclips_destination, - "--scan-folder", - bundle_path / args.frameworks_destination, - "--scan-folder", - bundle_path / args.plugins_destination, - "--destination", - tmp_dir, - "--platform", - args.platform, - ] - + signing_args - ) + return shlex.split(args.swift_stdlib_command) + [ + "--copy", + "--strip-bitcode", + "--scan-executable", + bundle_path / args.binary_destination, + "--scan-executable", + bundle_path / args.appclips_destination, + "--scan-folder", + bundle_path / args.frameworks_destination, + "--scan-folder", + bundle_path / args.plugins_destination, + "--destination", + tmp_dir, + "--platform", + args.platform, + ] From 188fed7c54c42b70a38499962b88743d4bfce9cd Mon Sep 17 00:00:00 2001 From: Edward Pilatowicz Date: Thu, 7 Mar 2024 
17:47:56 -0800 Subject: [PATCH 0412/1133] Clarify genrule() $OUT requirements Summary: Clarify genrule() $OUT requirements. Reviewed By: JakobDegen Differential Revision: D54550456 fbshipit-source-id: 6176fa33cd72841a58803aaa42d47ce2bd41c1ef --- prelude/decls/genrule_common.bzl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/prelude/decls/genrule_common.bzl b/prelude/decls/genrule_common.bzl index 1b84e2808..aa142a558 100644 --- a/prelude/decls/genrule_common.bzl +++ b/prelude/decls/genrule_common.bzl @@ -72,7 +72,9 @@ def _cmd_arg(): command determine whether this filepath is treated as a file or a directory. If the filepath is a directory, then the shell command needs to create it if not using named outputs. Otherwise, it will - be automatically created. + be automatically created. All outputs (directories and files) must + be readable, writable, and (in the case of directories) executable + by the current user. The file or directory specified by this variable must always From a5c52c13feccba5dac2ddbf7c274bb19b7667d35 Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Thu, 7 Mar 2024 18:04:28 -0800 Subject: [PATCH 0413/1133] Change default output for exopackage builds Summary: Every once in a while a user tries to `adb install` an exopackage apk and hits hard to debug errors. There's no valid use case for using an exopackage apk by itself, without buck or without more metadata, so let's get rid of this footgun. 
Reviewed By: IanChilds, jlwass Differential Revision: D54643778 fbshipit-source-id: 33217935d2c7e94ce7033597d88a5b61138cbbbe --- prelude/android/android_apk.bzl | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/prelude/android/android_apk.bzl b/prelude/android/android_apk.bzl index 6dd0662f0..b5ca6d406 100644 --- a/prelude/android/android_apk.bzl +++ b/prelude/android/android_apk.bzl @@ -35,11 +35,18 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: validation_deps_outputs = get_validation_deps_outputs(ctx), ) - exopackage_info = ExopackageInfo( - secondary_dex_info = dex_files_info.secondary_dex_exopackage_info, - native_library_info = native_library_info.exopackage_info, - resources_info = resources_info.exopackage_info, - ) + if dex_files_info.secondary_dex_exopackage_info or native_library_info.exopackage_info or resources_info.exopackage_info: + exopackage_info = ExopackageInfo( + secondary_dex_info = dex_files_info.secondary_dex_exopackage_info, + native_library_info = native_library_info.exopackage_info, + resources_info = resources_info.exopackage_info, + ) + exopackage_outputs = _get_exopackage_outputs(exopackage_info) + default_output = ctx.actions.write("exopackage_apk_warning", "exopackage apks should not be used externally, try buck install or building with exopackage disabled\n") + else: + exopackage_info = None + exopackage_outputs = [] + default_output = output_apk class_to_srcs, class_to_srcs_subtargets = get_class_to_source_map_info( ctx, @@ -63,7 +70,7 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: r_dot_java_packages = set([info.specified_r_dot_java_package for info in resources_info.unfiltered_resource_infos if info.specified_r_dot_java_package]), shared_libraries = set(native_library_info.shared_libraries), ), - DefaultInfo(default_output = output_apk, other_outputs = _get_exopackage_outputs(exopackage_info) + android_binary_info.materialized_artifacts, sub_targets = 
sub_targets | class_to_srcs_subtargets), + DefaultInfo(default_output = default_output, other_outputs = exopackage_outputs + android_binary_info.materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets), get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info, definitely_has_native_libs = definitely_has_native_libs), TemplatePlaceholderInfo( keyed_variables = { From 3366266de6aa0e25b2ddddcdaedd4d55bc22c7df Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Fri, 8 Mar 2024 02:10:57 -0800 Subject: [PATCH 0414/1133] Make `allow_cache_upload` attr an optional bool Summary: `allow_cache_upload` currently exists on various rules (e.g., `cxx_library()`) and it's of type `bool`. This is problematic because we cannot distinguish whether the value was set to `False` or whether no value was assigned at the node. We need to be able to distinguish, so that we can fallback to the toolchain value if a target does not set a value explicitly. Here, we change the type to an optional bool and create a utility function `cxx_attrs_get_allow_cache_upload()` which performs the fallback to `False`. `cxx_attrs_get_allow_cache_upload()` will be extended later in the stack to take another default value (e.g., from the toolchain). 
Reviewed By: rmaz Differential Revision: D54582113 fbshipit-source-id: 50684f9a457066054771caddbb0c7f1be73f690d --- prelude/cxx/anon_link.bzl | 3 ++- prelude/cxx/cxx.bzl | 3 ++- prelude/cxx/cxx_library.bzl | 5 +++-- prelude/cxx/cxx_utility.bzl | 10 ++++++++++ prelude/cxx/headers.bzl | 3 ++- prelude/cxx/symbols.bzl | 3 ++- prelude/decls/common.bzl | 5 +++-- prelude/python/python_binary.bzl | 3 ++- prelude/rust/rust_binary.bzl | 3 ++- 9 files changed, 28 insertions(+), 10 deletions(-) create mode 100644 prelude/cxx/cxx_utility.bzl diff --git a/prelude/cxx/anon_link.bzl b/prelude/cxx/anon_link.bzl index 354f4aba1..9da24b7eb 100644 --- a/prelude/cxx/anon_link.bzl +++ b/prelude/cxx/anon_link.bzl @@ -11,6 +11,7 @@ load( "make_artifact_tset", ) load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_info.bzl", @@ -168,7 +169,7 @@ def deserialize_anon_attrs( category_suffix = attrs.category_suffix, identifier = attrs.identifier, enable_distributed_thinlto = attrs.enable_distributed_thinlto, - allow_cache_upload = attrs.allow_cache_upload, + allow_cache_upload = cxx_attrs_get_allow_cache_upload(attrs), ) result_type = CxxLinkResultType(attrs.result_type) diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index 2fe804beb..0be9df6c8 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -16,6 +16,7 @@ load( "get_project_root_file", ) load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags") +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_groups.bzl", @@ -236,7 +237,7 @@ def cxx_binary_impl(ctx: AnalysisContext) -> list[Provider]: link_group_info = link_group_info, auto_link_group_specs = get_auto_link_group_specs(ctx, link_group_info), 
prefer_stripped_objects = ctx.attrs.prefer_stripped_objects, - exe_allow_cache_upload = ctx.attrs.allow_cache_upload, + exe_allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs), extra_link_roots = linkables(ctx.attrs.link_group_deps), ) output = cxx_executable(ctx, params) diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 184d31dde..31f1ffffa 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -35,6 +35,7 @@ load( "@prelude//apple/swift:swift_runtime.bzl", "create_swift_runtime_linkable", ) +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load( "@prelude//ide_integrations:xcode.bzl", "XCODE_DATA_SUB_TARGET", @@ -910,12 +911,12 @@ def cxx_compile_srcs( ) # Define object files. - pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = True, allow_cache_upload = ctx.attrs.allow_cache_upload) + pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = True, allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs)) pic = _get_library_compile_output(ctx, pic_cxx_outs, impl_params.extra_link_input) non_pic = None if preferred_linkage != Linkage("shared"): - non_pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = False, allow_cache_upload = ctx.attrs.allow_cache_upload) + non_pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = False, allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs)) non_pic = _get_library_compile_output(ctx, non_pic_cxx_outs, impl_params.extra_link_input) return _CxxCompiledSourcesOutput( diff --git a/prelude/cxx/cxx_utility.bzl b/prelude/cxx/cxx_utility.bzl new file mode 100644 index 000000000..3c239695a --- /dev/null +++ b/prelude/cxx/cxx_utility.bzl @@ -0,0 +1,10 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +def cxx_attrs_get_allow_cache_upload(attrs: struct) -> bool: + value = attrs.allow_cache_upload + return value if value != None else False diff --git a/prelude/cxx/headers.bzl b/prelude/cxx/headers.bzl index d41dc1f29..21faa8741 100644 --- a/prelude/cxx/headers.bzl +++ b/prelude/cxx/headers.bzl @@ -6,6 +6,7 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:lazy.bzl", "lazy") load("@prelude//utils:utils.bzl", "from_named_set", "map_val", "value_or") @@ -358,5 +359,5 @@ def _mk_hmap(ctx: AnalysisContext, name: str, headers: dict[str, (Artifact, str) cmd.add(["--mappings-file", hmap_args_file]).hidden(header_args) if project_root_file: cmd.add(["--project-root-file", project_root_file]) - ctx.actions.run(cmd, category = "generate_hmap", identifier = name, allow_cache_upload = ctx.attrs.allow_cache_upload) + ctx.actions.run(cmd, category = "generate_hmap", identifier = name, allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs)) return output diff --git a/prelude/cxx/symbols.bzl b/prelude/cxx/symbols.bzl index d2aa2abe1..d423b4450 100644 --- a/prelude/cxx/symbols.bzl +++ b/prelude/cxx/symbols.bzl @@ -7,6 +7,7 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//os_lookup:defs.bzl", "OsLookup") def _extract_symbol_names( @@ -118,7 +119,7 @@ def _anon_extract_symbol_names_impl(ctx): objects = ctx.attrs.objects, prefer_local = ctx.attrs.prefer_local, undefined_only = ctx.attrs.undefined_only, - allow_cache_upload 
= ctx.attrs.allow_cache_upload, + allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs), ) return [DefaultInfo(), _SymbolsInfo(artifact = output)] diff --git a/prelude/decls/common.bzl b/prelude/decls/common.bzl index 64b9552dc..e3e839bb0 100644 --- a/prelude/decls/common.bzl +++ b/prelude/decls/common.bzl @@ -205,8 +205,9 @@ def _exec_os_type_arg() -> Attr: def _allow_cache_upload_arg(): return { - "allow_cache_upload": attrs.bool( - default = False, + "allow_cache_upload": attrs.option( + attrs.bool(), + default = None, doc = """ Whether to allow uploading the output of this rule to be uploaded to cache when the action is executed locally if the configuration diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 850574b05..f8d86329c 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -23,6 +23,7 @@ load( "@prelude//cxx:cxx_types.bzl", "CxxRuleConstructorParams", ) +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load( "@prelude//cxx:groups.bzl", "Group", @@ -763,7 +764,7 @@ def python_binary_impl(ctx: AnalysisContext) -> list[Provider]: srcs, {}, compile = value_or(ctx.attrs.compile, False), - allow_cache_upload = ctx.attrs.allow_cache_upload, + allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs), ) return [ make_default_info(pex), diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 046d43d04..f4cabb9ac 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -19,6 +19,7 @@ load( "cxx_attr_deps", ) load("@prelude//cxx:cxx_link_utility.bzl", "executable_shared_lib_arguments") +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load( "@prelude//cxx:link_groups.bzl", "LINK_GROUP_MAPPINGS_FILENAME_SUFFIX", @@ -365,7 +366,7 @@ def rust_binary_impl(ctx: AnalysisContext) -> list[Provider]: compile_ctx = compile_ctx, default_roots = ["main.rs"], extra_flags = [], - 
allow_cache_upload = ctx.attrs.allow_cache_upload, + allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs), ) return providers + [RunInfo(args = args)] From 76041c55a3a3e7ecb5cdd4feb9c98baa71e66008 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Fri, 8 Mar 2024 02:10:57 -0800 Subject: [PATCH 0415/1133] Add `c_compiler_allow_cache_upload` and `cxx_compiler_allow_cache_upload` fields to `cxx_toolchain()` Summary: We need to have fields on `cxx_toolchain()` which controls whether compilation actions (i.e., those which produce object files) can be uploaded to the remote cache. This diff adds those fields which are currently inoperational. Reviewed By: rmaz Differential Revision: D54582546 fbshipit-source-id: ff15db6d631972e01ab10613689c3c1620b9f962 --- prelude/cxx/cxx_toolchain.bzl | 3 ++- prelude/cxx/cxx_utility.bzl | 19 +++++++++++++++++++ prelude/cxx/user/cxx_toolchain_override.bzl | 3 ++- 3 files changed, 23 insertions(+), 2 deletions(-) diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl index c256f623a..7d165bd1a 100644 --- a/prelude/cxx/cxx_toolchain.bzl +++ b/prelude/cxx/cxx_toolchain.bzl @@ -7,6 +7,7 @@ load("@prelude//:is_full_meta_repo.bzl", "is_full_meta_repo") load("@prelude//cxx:cxx_toolchain_types.bzl", "AsCompilerInfo", "AsmCompilerInfo", "BinaryUtilitiesInfo", "CCompilerInfo", "CudaCompilerInfo", "CxxCompilerInfo", "CxxObjectFormat", "DepTrackingMode", "DistLtoToolsInfo", "HipCompilerInfo", "LinkerInfo", "PicBehavior", "ShlibInterfacesMode", "StripFlagsInfo", "cxx_toolchain_infos") +load("@prelude//cxx:cxx_utility.bzl", "cxx_toolchain_allow_cache_upload_args") load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:headers.bzl", "HeaderMode", "HeadersAsRawHeadersMode") load("@prelude//cxx:linker.bzl", "LINKERS", "is_pdb_generated") @@ -225,7 +226,7 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): # FIXME: prelude// should be standalone (not refer to fbsource//) "_mk_hmap": 
attrs.default_only(dep_type(providers = [RunInfo], default = "prelude//cxx/tools:hmap_wrapper")), "_msvc_hermetic_exec": attrs.default_only(dep_type(providers = [RunInfo], default = "prelude//windows/tools:msvc_hermetic_exec")), - } + } | cxx_toolchain_allow_cache_upload_args() def _cxx_toolchain_inheriting_target_platform_attrs(): attrs = dict(cxx_rules.cxx_toolchain.attrs) diff --git a/prelude/cxx/cxx_utility.bzl b/prelude/cxx/cxx_utility.bzl index 3c239695a..f0310dfb7 100644 --- a/prelude/cxx/cxx_utility.bzl +++ b/prelude/cxx/cxx_utility.bzl @@ -8,3 +8,22 @@ def cxx_attrs_get_allow_cache_upload(attrs: struct) -> bool: value = attrs.allow_cache_upload return value if value != None else False + +def cxx_toolchain_allow_cache_upload_args(): + doc = """ + Whether to allow uploading of object files to cache when the compile + action is executed locally and the configuration allows uploads (i.e., + there is a cache configured and the client has permission to write to it). + """ + return { + "c_compiler_allow_cache_upload": attrs.option( + attrs.bool(), + default = None, + doc = doc, + ), + "cxx_compiler_allow_cache_upload": attrs.option( + attrs.bool(), + default = None, + doc = doc, + ), + } diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl index 680a24589..96e9d8f5d 100644 --- a/prelude/cxx/user/cxx_toolchain_override.bzl +++ b/prelude/cxx/user/cxx_toolchain_override.bzl @@ -6,6 +6,7 @@ # of this source tree. 
load("@prelude//cxx:cxx_toolchain_types.bzl", "AsCompilerInfo", "AsmCompilerInfo", "BinaryUtilitiesInfo", "CCompilerInfo", "CxxCompilerInfo", "CxxObjectFormat", "CxxPlatformInfo", "CxxToolchainInfo", "LinkerInfo", "LinkerType", "PicBehavior", "ShlibInterfacesMode", "StripFlagsInfo", "cxx_toolchain_infos") +load("@prelude//cxx:cxx_utility.bzl", "cxx_toolchain_allow_cache_upload_args") load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:headers.bzl", "HeaderMode") load("@prelude//cxx:linker.bzl", "is_pdb_generated") @@ -221,7 +222,7 @@ def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): "strip_debug_flags": attrs.option(attrs.list(attrs.arg()), default = None), "strip_non_global_flags": attrs.option(attrs.list(attrs.arg()), default = None), "use_archiver_flags": attrs.option(attrs.bool(), default = None), - } + } | cxx_toolchain_allow_cache_upload_args() cxx_toolchain_override_registration_spec = RuleRegistrationSpec( name = "cxx_toolchain_override", From f3c3884a68b7dc81534f6890484bffc17becdafa Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Fri, 8 Mar 2024 02:10:57 -0800 Subject: [PATCH 0416/1133] Propagate cache upload state from toolchain to compiler info providers Summary: Propagates the attributes from `cxx_toolchain()` into the compiler info structs. 
Reviewed By: rmaz Differential Revision: D54582719 fbshipit-source-id: 8dca2ddf117992b40df610aeb5bed130b5f582fa --- prelude/cxx/cxx_toolchain.bzl | 2 ++ prelude/cxx/cxx_toolchain_types.bzl | 2 ++ prelude/cxx/user/cxx_toolchain_override.bzl | 4 +++- prelude/utils/pick.bzl | 3 +++ 4 files changed, 10 insertions(+), 1 deletion(-) diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl index 7d165bd1a..f5fb2546d 100644 --- a/prelude/cxx/cxx_toolchain.bzl +++ b/prelude/cxx/cxx_toolchain.bzl @@ -34,6 +34,7 @@ def cxx_toolchain_impl(ctx): preprocessor = c_compiler, preprocessor_flags = cmd_args(ctx.attrs.c_preprocessor_flags), dep_files_processor = ctx.attrs._dep_files_processor[RunInfo], + allow_cache_upload = ctx.attrs.c_compiler_allow_cache_upload, ) cxx_compiler = _get_maybe_wrapped_msvc(ctx.attrs.cxx_compiler[RunInfo], ctx.attrs.cxx_compiler_type or ctx.attrs.compiler_type, ctx.attrs._msvc_hermetic_exec[RunInfo]) cxx_info = CxxCompilerInfo( @@ -43,6 +44,7 @@ def cxx_toolchain_impl(ctx): preprocessor = cxx_compiler, preprocessor_flags = cmd_args(ctx.attrs.cxx_preprocessor_flags), dep_files_processor = ctx.attrs._dep_files_processor[RunInfo], + allow_cache_upload = ctx.attrs.cxx_compiler_allow_cache_upload, ) asm_info = AsmCompilerInfo( compiler = ctx.attrs.asm_compiler[RunInfo], diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index 9582e4983..ee2972ebd 100644 --- a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -118,6 +118,8 @@ _compiler_fields = [ "preprocessor_type", "preprocessor_flags", "dep_files_processor", + # Controls cache upload for object files + "allow_cache_upload", ] HipCompilerInfo = provider(fields = _compiler_fields) diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl index 96e9d8f5d..82c62f8f9 100644 --- a/prelude/cxx/user/cxx_toolchain_override.bzl +++ b/prelude/cxx/user/cxx_toolchain_override.bzl @@ -16,7 
+16,7 @@ load( ) load("@prelude//linking:lto.bzl", "LtoMode") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -load("@prelude//utils:pick.bzl", _pick = "pick", _pick_and_add = "pick_and_add", _pick_bin = "pick_bin", _pick_dep = "pick_dep") +load("@prelude//utils:pick.bzl", _pick = "pick", _pick_and_add = "pick_and_add", _pick_bin = "pick_bin", _pick_dep = "pick_dep", _pick_raw = "pick_raw") load("@prelude//utils:utils.bzl", "flatten", "map_val", "value_or") def _cxx_toolchain_override(ctx): @@ -53,6 +53,7 @@ def _cxx_toolchain_override(ctx): preprocessor_type = base_c_info.preprocessor_type, preprocessor_flags = _pick(ctx.attrs.c_preprocessor_flags, base_c_info.preprocessor_flags), dep_files_processor = base_c_info.dep_files_processor, + allow_cache_upload = _pick_raw(ctx.attrs.c_compiler_allow_cache_upload, base_c_info.allow_cache_upload), ) base_cxx_info = base_toolchain.cxx_compiler_info cxx_info = CxxCompilerInfo( @@ -63,6 +64,7 @@ def _cxx_toolchain_override(ctx): preprocessor_type = base_cxx_info.preprocessor_type, preprocessor_flags = _pick(ctx.attrs.cxx_preprocessor_flags, base_cxx_info.preprocessor_flags), dep_files_processor = base_cxx_info.dep_files_processor, + allow_cache_upload = _pick_raw(ctx.attrs.cxx_compiler_allow_cache_upload, base_cxx_info.allow_cache_upload), ) base_linker_info = base_toolchain.linker_info linker_type = ctx.attrs.linker_type if ctx.attrs.linker_type != None else base_linker_info.type diff --git a/prelude/utils/pick.bzl b/prelude/utils/pick.bzl index ad1310eb4..e64a7513f 100644 --- a/prelude/utils/pick.bzl +++ b/prelude/utils/pick.bzl @@ -12,6 +12,9 @@ def pick_bin(override, underlying): return override[RunInfo] if override != None else underlying def pick_dep(override, underlying): + return pick_raw(override, underlying) + +def pick_raw(override, underlying): return override if override != None else underlying def pick_and_add(override, additional, underlying): From 0aa20d71f0670e1c99b12f522ad4c00012e73cd0 Mon Sep 
17 00:00:00 2001 From: Milen Dzhumerov Date: Fri, 8 Mar 2024 02:10:57 -0800 Subject: [PATCH 0417/1133] Upload object files if cxx_toolchain() allows it Summary: If a target (e.g., of type `cxx_library()`) specifies `allow_cache_upload`, use that to control cache upload. Otherwise, fall back on the value of `c_compiler_allow_cache_upload` or `cxx_compiler_allow_cache_upload` depending on the file being compiled. Reviewed By: rmaz Differential Revision: D54583043 fbshipit-source-id: 31a98b8edae38b1637b191ad394ed31a2a79287c --- prelude/cxx/compile.bzl | 9 ++++++--- prelude/cxx/cxx_library.bzl | 5 ++--- prelude/cxx/cxx_utility.bzl | 7 +++++-- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/prelude/cxx/compile.bzl b/prelude/cxx/compile.bzl index cadd02edb..785c93b4f 100644 --- a/prelude/cxx/compile.bzl +++ b/prelude/cxx/compile.bzl @@ -7,6 +7,7 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//linking:lto.bzl", "LtoMode") load("@prelude//utils:set.bzl", "set") load( @@ -105,6 +106,7 @@ _CxxCompileCommand = record( compiler_type = field(str), # The action category category = field(str), + allow_cache_upload = field(bool), ) # Information about how to compile a source file. 
@@ -323,12 +325,14 @@ def create_compile_cmds( argsfile_by_ext[ext.value] = _mk_argsfile(ctx, compiler_info, pre, ext, headers_tag, False) abs_argsfile_by_ext[ext.value] = _mk_argsfile(ctx, compiler_info, pre, ext, abs_headers_tag, True) + allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs, default = compiler_info.allow_cache_upload) cxx_compile_cmd_by_ext[ext] = _CxxCompileCommand( base_compile_cmd = base_compile_cmd, argsfile = argsfile_by_ext[ext.value], headers_dep_files = headers_dep_files, compiler_type = compiler_info.compiler_type, category = category, + allow_cache_upload = allow_cache_upload, ) cxx_compile_cmd = cxx_compile_cmd_by_ext[ext] @@ -361,8 +365,7 @@ def create_compile_cmds( def compile_cxx( ctx: AnalysisContext, src_compile_cmds: list[CxxSrcCompileCommand], - pic: bool = False, - allow_cache_upload: bool = False) -> list[CxxCompileOutput]: + pic: bool = False) -> list[CxxCompileOutput]: """ For a given list of src_compile_cmds, generate output artifacts. """ @@ -463,7 +466,7 @@ def compile_cxx( category = src_compile_cmd.cxx_compile_cmd.category, identifier = identifier, dep_files = action_dep_files, - allow_cache_upload = allow_cache_upload, + allow_cache_upload = src_compile_cmd.cxx_compile_cmd.allow_cache_upload, ) # If we're building with split debugging, where the debug info is in the diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 31f1ffffa..d63eca1ae 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -35,7 +35,6 @@ load( "@prelude//apple/swift:swift_runtime.bzl", "create_swift_runtime_linkable", ) -load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load( "@prelude//ide_integrations:xcode.bzl", "XCODE_DATA_SUB_TARGET", @@ -911,12 +910,12 @@ def cxx_compile_srcs( ) # Define object files. 
- pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = True, allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs)) + pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = True) pic = _get_library_compile_output(ctx, pic_cxx_outs, impl_params.extra_link_input) non_pic = None if preferred_linkage != Linkage("shared"): - non_pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = False, allow_cache_upload = cxx_attrs_get_allow_cache_upload(ctx.attrs)) + non_pic_cxx_outs = compile_cxx(ctx, compile_cmd_output.src_compile_cmds, pic = False) non_pic = _get_library_compile_output(ctx, non_pic_cxx_outs, impl_params.extra_link_input) return _CxxCompiledSourcesOutput( diff --git a/prelude/cxx/cxx_utility.bzl b/prelude/cxx/cxx_utility.bzl index f0310dfb7..a221419d8 100644 --- a/prelude/cxx/cxx_utility.bzl +++ b/prelude/cxx/cxx_utility.bzl @@ -5,9 +5,12 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-def cxx_attrs_get_allow_cache_upload(attrs: struct) -> bool: +def cxx_attrs_get_allow_cache_upload(attrs: struct, default: [None, bool] = None) -> bool: + default_value = default if default != None else False + if not hasattr(attrs, "allow_cache_upload"): + return default_value value = attrs.allow_cache_upload - return value if value != None else False + return value if value != None else default_value def cxx_toolchain_allow_cache_upload_args(): doc = """ From 0876d49c5268243375276c83269904e957e3732c Mon Sep 17 00:00:00 2001 From: Vladimir Makaev Date: Fri, 8 Mar 2024 02:28:23 -0800 Subject: [PATCH 0418/1133] Remove prelude//rules.bzl from target_type Summary: this is to match the same thing from non-bxl code Reviewed By: artempyanykh Differential Revision: D54663124 fbshipit-source-id: e284a0247a11bc42ca930436aea7868c510e7127 --- prelude/debugging/common.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/debugging/common.bzl b/prelude/debugging/common.bzl index e11531375..b484d780e 100644 --- a/prelude/debugging/common.bzl +++ b/prelude/debugging/common.bzl @@ -19,6 +19,6 @@ def create_target_info(target: bxl.ConfiguredTargetNode) -> TargetInfo: attrs = target.attrs_lazy() return TargetInfo( target = target_name(target), - target_type = rule_type(target), + target_type = rule_type(target).removeprefix("prelude//rules.bzl:"), labels = attrs.get("labels").value() if attrs.get("labels") != None else [], ) From c75610bbd8902fbe584898ed8b3583f9d9bd12c7 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Fri, 8 Mar 2024 07:07:48 -0800 Subject: [PATCH 0419/1133] Universal: fix Apple transition to filter out x86 constraints on arm64 configurations Summary: # Problem ``` $ buck2 build fbsource//arvr/mode/mac/opt fbsource//arvr/apps/xr/audio/editor_app:graph_editor_universal_bundle ``` fails because Pika-15 compiler doesn't seem recognise `-march=haswell` ``` clang++: error: unsupported argument 'haswell' to option '-march=' Action failed: 
fbsource//arvr/libraries/audio/xr/platform/default:platform (cxx_compile XraMemoryDefault.cpp (pic)) ``` Scoped repro ``` $ buck2 build fbsource//arvr/mode/mac/opt fbsource//arvr/libraries/audio/xr/dsp:dsp --target-universe fbsource//arvr/apps/xr/audio/editor_app:graph_editor_universal_bundle ``` We can see that `-march=haswell` comes from the toolchain (P1193905364) ``` $ buck2 cquery "kind('cxx_toolchain', deps(fbsource//arvr/libraries/audio/fbaudio/fba/plugins:reverb))" --target-universe fbsource//arvr/apps/xr/audio/editor_app:graph_editor_universal_bundle fbsource//arvr/mode/mac/opt -a compiler_flags ``` Specifically from here (added in D50429820): https://www.internalfb.com/code/fbsource/[ba40e6e9ed30d125d5b78e53ce10a35008147490][blame]/xplat/toolchains/apple/flags.bzl?lines=137 But the problem is that `-march=haswell` gets added to the arm64 compiler flags which is clearly wrong. If we look at the configurations ``` $ buck2 cquery "fbsource//arvr/libraries/audio/fbaudio/fba/plugins:reverb" --target-universe fbsource//arvr/apps/xr/audio/editor_app:graph_editor_universal_bundle fbsource//arvr/mode/mac/opt fbsource//arvr/libraries/audio/fbaudio/fba/plugins:reverb (arm64#91e2716b413f760e) fbsource//arvr/libraries/audio/fbaudio/fba/plugins:reverb (fbsource//arvr/apps/xr/audio/editor_app:UniversalMacPlatform#691c0c1f3340b6ac) fbsource//arvr/libraries/audio/fbaudio/fba/plugins:reverb (x86_64#36edf2bb4a5b1e72) $ buck2 audit configurations "arm64#91e2716b413f760e" arm64#91e2716b413f760e: ovr_config//build_mode/apple/constraints:universal-enabled (ovr_config//build_mode/apple/constraints:universal) ovr_config//build_mode/constraints:arvr_mode_enabled (ovr_config//build_mode/constraints:arvr_mode) ovr_config//build_mode/constraints:development (ovr_config//build_mode/constraints:build_type) ovr_config//build_mode/constraints:no-san (ovr_config//build_mode/constraints:san) ovr_config//build_mode/constraints:release (ovr_config//build_mode/constraints:build_mode) 
ovr_config//compiler/constraints:clang (ovr_config//compiler/constraints:toolchain) ovr_config//compiler/constraints:gcc-or-clang (ovr_config//compiler/constraints:compiler-flavor) ovr_config//constraints:any (ovr_config//constraints:_) ovr_config//cpu/constraints:arm64 (ovr_config//cpu/constraints:cpu) ovr_config//cpu/x86/constraints:avx2 (ovr_config//cpu/x86/constraints:has_avx2) ovr_config//cpu/x86/constraints:sse42 (ovr_config//cpu/x86/constraints:has_sse42) ovr_config//cuda/constraints:no_cuda (ovr_config//cuda/constraints:cuda) ovr_config//lang/cxx/constraints:std=c++20 (ovr_config//lang/cxx/constraints:std) ovr_config//os/constraints:general_purpose (ovr_config//os/constraints:kind) ovr_config//os/constraints:macos (ovr_config//os/constraints:os) ovr_config//os/sdk/apple/constraints:macosx (ovr_config//os/sdk/apple/constraints:_) ovr_config//toolchain/python/constraints:3.8 (ovr_config//toolchain/python/constraints:python-version) ovr_config//toolchain/xcode/constraints:app-toolchain-pika-fat (ovr_config//toolchain/xcode/constraints:app-toolchain) ``` Note the presence of x86 constraints ``` ovr_config//cpu/x86/constraints:avx2 (ovr_config//cpu/x86/constraints:has_avx2) ovr_config//cpu/x86/constraints:sse42 (ovr_config//cpu/x86/constraints:has_sse42) ``` We see the arm64 configuration contains x86 constraints which is why the select above gets triggered even though we're targeting arm64. # Solution We need to fix the CPU transition, so that it filters out any incompatible arch constraints. 
Reviewed By: blackm00n, cvanwinkle Differential Revision: D54678600 fbshipit-source-id: 352175dbd4109c06483a0802a72ea8e73ff1c894 --- prelude/apple/user/cpu_split_transition.bzl | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/prelude/apple/user/cpu_split_transition.bzl b/prelude/apple/user/cpu_split_transition.bzl index 027259d98..4beda5002 100644 --- a/prelude/apple/user/cpu_split_transition.bzl +++ b/prelude/apple/user/cpu_split_transition.bzl @@ -27,6 +27,15 @@ def _universal_constraint_value(platform: PlatformInfo, refs: struct) -> [None, universal = platform.configuration.constraints.get(refs.universal[ConstraintSettingInfo].label) return universal.label == refs.universal_enabled[ConstraintValueInfo].label if universal != None else False +def _filter_incompatible_constraints(platform_name: str, constraints: dict[TargetLabel, ConstraintValueInfo]) -> dict[TargetLabel, ConstraintValueInfo]: + filtered = dict() + for constraint_setting_label, constraint_value_info in constraints.items(): + incompatible_constraint_name = "//cpu/x86" if platform_name == "arm64" else "//cpu/arm" + if incompatible_constraint_name in str(constraint_value_info.label): + continue + filtered[constraint_setting_label] = constraint_value_info + return filtered + def _cpu_split_transition_impl( platform: PlatformInfo, refs: struct, @@ -68,8 +77,8 @@ def _cpu_split_transition_impl( cpu_constraint_name = refs.cpu[ConstraintSettingInfo].label base_constraints = { - constraint_setting_label: constraint_setting_value - for (constraint_setting_label, constraint_setting_value) in platform.configuration.constraints.items() + constraint_setting_label: constraint_value_info + for (constraint_setting_label, constraint_value_info) in platform.configuration.constraints.items() if constraint_setting_label != cpu_constraint_name } @@ -77,6 +86,8 @@ def _cpu_split_transition_impl( for platform_name, cpu_constraint in cpu_name_to_cpu_constraint.items(): updated_constraints 
= dict(base_constraints) updated_constraints[cpu_constraint_name] = cpu_constraint + updated_constraints = _filter_incompatible_constraints(platform_name, updated_constraints) + new_configs[platform_name] = PlatformInfo( label = platform_name, configuration = ConfigurationInfo( From a713031202cfda0b9e9f6c91b9c94e16318c84f5 Mon Sep 17 00:00:00 2001 From: Taras Tsugrii Date: Fri, 8 Mar 2024 08:09:28 -0800 Subject: [PATCH 0420/1133] Avoid unnecessary string allocs in breadth_first_traversal_by. Summary: this reduces `breadth_first_traversal_by`'s time from 14s to 9s when running ``` buck2 profile analysis --output heap_summary.prof --mode heap-summary-allocated @//mode/opt //minimal_viable_ai/fire:light ``` before: P1193135390 ``` "fbcode/buck2/prelude/utils/graph_utils.bzl.breadth_first_traversal_by",14.121,28.171,15382004,2 ``` after: P1193139027 ``` "fbcode/buck2/prelude/utils/graph_utils.bzl.breadth_first_traversal_by",9.033,21.918,7694802,2 ``` this is even more noticeable from flame graph before - https://pxl.cl/4svXD after - https://pxl.cl/4svXN Reviewed By: JakobDegen Differential Revision: D54601252 fbshipit-source-id: 585edde1d995d39085089e321c6a2496e75354c0 --- prelude/utils/graph_utils.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/utils/graph_utils.bzl b/prelude/utils/graph_utils.bzl index aa8c44fef..e7ff022b9 100644 --- a/prelude/utils/graph_utils.bzl +++ b/prelude/utils/graph_utils.bzl @@ -207,8 +207,8 @@ def breadth_first_traversal_by( if not queue: break node = queue.pop() - if graph_nodes: - expect(node in graph_nodes, "Expected node {} in graph nodes", node_formatter(node)) + if graph_nodes and node not in graph_nodes: + fail("Expected node {} in graph nodes".format(node_formatter(node))) nodes_to_visit = get_nodes_to_traverse_func(node) for node in nodes_to_visit: if node not in visited: From 5f6f23aa79e57fed63dc667d0f426ae8312bb8f5 Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 8 Mar 2024 10:54:28 
-0800 Subject: [PATCH 0421/1133] Set the absolute args MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: In Xcode, when compiling tests that depend on Swift mixed module targets, we error on not being able to find the `-Swift.h` file: {F1466320168} ``` /Users/chatatap/fbsource/fbobjc/Libraries/FBCameraRollGlobalControls/FBCameraRollGlobalControls/__tests__/FBCameraRollGlobalControlsTests.mm:16:9: fatal error: 'FBCameraRollGlobalControlsV2/FBCameraRollGlobalControlsV2-Swift.h' file not found #import  ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ ``` This is because for the absolute argsfiles utilized in Xcode, we were not propagating the `swift-extended_symlink_tree` include arg: https://www.internalfb.com/intern/diffing/?paste_number=1194009906 Reviewed By: rmaz Differential Revision: D54688707 fbshipit-source-id: d997669f74ad762d29c6d3252e41872477006b4e --- prelude/apple/swift/swift_compilation.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 7a6b5ba74..75213702c 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -277,6 +277,7 @@ def compile_swift( headers = [exported_swift_header], modular_args = modulemap_pp_info.modular_args, relative_args = CPreprocessorArgs(args = modulemap_pp_info.relative_args.args), + absolute_args = CPreprocessorArgs(args = modulemap_pp_info.absolute_args.args), modulemap_path = modulemap_pp_info.modulemap_path, ) From 0ed15bd04a68fb7042eee3f096adaa759ebb18ce Mon Sep 17 00:00:00 2001 From: Arsen Tumanyan Date: Fri, 8 Mar 2024 15:21:41 -0800 Subject: [PATCH 0422/1133] Read extension for python binaries from config Summary: The `pex_extension` config isn't respected otherwise. 
Differential Revision: D54687539 fbshipit-source-id: a2f01bf7cfa2e8a56d69a652428421a3b9bb6193 --- prelude/toolchains/python.bzl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prelude/toolchains/python.bzl b/prelude/toolchains/python.bzl index b4ac5611d..01a36f71f 100644 --- a/prelude/toolchains/python.bzl +++ b/prelude/toolchains/python.bzl @@ -66,6 +66,7 @@ def _system_python_toolchain_impl(ctx): make_py_package_inplace = ctx.attrs.make_py_package_inplace[RunInfo], compile = RunInfo(args = ["echo", "COMPILEINFO"]), package_style = "inplace", + pex_extension = ctx.attrs.pex_extension, native_link_strategy = "separate", runtime_library = ctx.attrs.runtime_library, ), @@ -81,6 +82,7 @@ system_python_toolchain = rule( "make_py_package_modules": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:make_py_package_modules")), "make_source_db": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:make_source_db")), "make_source_db_no_deps": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//python/tools:make_source_db_no_deps")), + "pex_extension": attrs.string(default = ".pex"), "runtime_library": attrs.default_only(attrs.dep(providers = [ArtifactGroupInfo], default = "prelude//python/runtime:bootstrap_files")), }, is_toolchain_rule = True, From 177a82111503e25c6813a6deeff9a70eeb1e6561 Mon Sep 17 00:00:00 2001 From: Taras Tsugrii Date: Fri, 8 Mar 2024 17:19:59 -0800 Subject: [PATCH 0423/1133] Use list/dict comprehensions instead of for loop. 
Summary: I was hoping this would improve perf, but while it doesn't seem to matter as much as in Python it's still more concise and idiomatic Reviewed By: JakobDegen Differential Revision: D54614492 fbshipit-source-id: dedb4373c3ce5235818e4396dad4ab0aaa98ed5f --- prelude/cxx/omnibus.bzl | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/prelude/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl index b26c06f28..1df12523f 100644 --- a/prelude/cxx/omnibus.bzl +++ b/prelude/cxx/omnibus.bzl @@ -654,9 +654,10 @@ def _ordered_roots( """ # Calculate all deps each root node needs to link against. - link_deps = {} - for label, root in spec.roots.items(): - link_deps[label] = _link_deps(spec.link_infos, root.deps, pic_behavior) + link_deps = { + label: _link_deps(spec.link_infos, root.deps, pic_behavior) + for label, root in spec.roots.items() + } # Used the link deps to create the graph of root nodes. root_graph = { @@ -664,14 +665,12 @@ def _ordered_roots( for node, deps in link_deps.items() } - ordered_roots = [] - # Emit the root link info in post-order, so that we generate root link rules # for dependencies before their dependents. - for label in post_order_traversal(root_graph): - root = spec.roots[label] - deps = link_deps[label] - ordered_roots.append((label, root, deps)) + ordered_roots = [ + (label, spec.roots[label], link_deps[label]) + for label in post_order_traversal(root_graph) + ] return ordered_roots From b5e6ae865dd5e4935b0d77e2ed2bee24cace539c Mon Sep 17 00:00:00 2001 From: Taras Tsugrii Date: Fri, 8 Mar 2024 17:19:59 -0800 Subject: [PATCH 0424/1133] Use list comprehensions instead of for loop. 
Summary: it's more concise and idiomatic Reviewed By: JakobDegen Differential Revision: D54616804 fbshipit-source-id: d5ab3694e533f991f39282ba3e5c74913f0b7a1f --- prelude/utils/graph_utils.bzl | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/prelude/utils/graph_utils.bzl b/prelude/utils/graph_utils.bzl index e7ff022b9..efe2737d9 100644 --- a/prelude/utils/graph_utils.bzl +++ b/prelude/utils/graph_utils.bzl @@ -58,11 +58,7 @@ def post_order_traversal( out_degrees[node] += 1 rdeps[dep].append(node) - queue = [] - - for node, out_degree in out_degrees.items(): - if out_degree == 0: - queue.append(node) + queue = [node for node, out_degree in out_degrees.items() if out_degree == 0] ordered = [] From c49aa7abad6e0d55df99cbb2d6335d2df38d0903 Mon Sep 17 00:00:00 2001 From: Taras Tsugrii Date: Fri, 8 Mar 2024 17:19:59 -0800 Subject: [PATCH 0425/1133] Compute out_degrees using single traversal. Summary: it's much more efficient to perform a single `out_degree` assignment instead of `O(|deps|)` + 1. Reviewed By: IanChilds Differential Revision: D54622807 fbshipit-source-id: f7af3be5294847e4dc6e189531363410be1efcf0 --- prelude/utils/graph_utils.bzl | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/prelude/utils/graph_utils.bzl b/prelude/utils/graph_utils.bzl index efe2737d9..3fadf06f6 100644 --- a/prelude/utils/graph_utils.bzl +++ b/prelude/utils/graph_utils.bzl @@ -51,11 +51,12 @@ def post_order_traversal( Performs a post-order traversal of `graph`. 
""" - out_degrees = {node: 0 for node in graph} + out_degrees = {} rdeps = {node: [] for node in graph} for node, deps in graph.items(): - for dep in dedupe(deps): - out_degrees[node] += 1 + deps = dedupe(deps) + out_degrees[node] = len(deps) + for dep in deps: rdeps[dep].append(node) queue = [node for node, out_degree in out_degrees.items() if out_degree == 0] From 7b4d7b2fa82538008fc978c9cbf93d39f850cd64 Mon Sep 17 00:00:00 2001 From: Ignacio Guridi Date: Fri, 8 Mar 2024 18:29:37 -0800 Subject: [PATCH 0426/1133] Back out "Change default output for exopackage builds" Summary: Original commit changeset: 33217935d2c7 Original Phabricator Diff: D54643778 --- bisect paste P1194124230 Reviewed By: lmvasquezg Differential Revision: D54701757 fbshipit-source-id: 34a0ae29627625ab9d27a0328f51d3a7f80f8e2d --- prelude/android/android_apk.bzl | 19 ++++++------------- 1 file changed, 6 insertions(+), 13 deletions(-) diff --git a/prelude/android/android_apk.bzl b/prelude/android/android_apk.bzl index b5ca6d406..6dd0662f0 100644 --- a/prelude/android/android_apk.bzl +++ b/prelude/android/android_apk.bzl @@ -35,18 +35,11 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: validation_deps_outputs = get_validation_deps_outputs(ctx), ) - if dex_files_info.secondary_dex_exopackage_info or native_library_info.exopackage_info or resources_info.exopackage_info: - exopackage_info = ExopackageInfo( - secondary_dex_info = dex_files_info.secondary_dex_exopackage_info, - native_library_info = native_library_info.exopackage_info, - resources_info = resources_info.exopackage_info, - ) - exopackage_outputs = _get_exopackage_outputs(exopackage_info) - default_output = ctx.actions.write("exopackage_apk_warning", "exopackage apks should not be used externally, try buck install or building with exopackage disabled\n") - else: - exopackage_info = None - exopackage_outputs = [] - default_output = output_apk + exopackage_info = ExopackageInfo( + secondary_dex_info = 
dex_files_info.secondary_dex_exopackage_info, + native_library_info = native_library_info.exopackage_info, + resources_info = resources_info.exopackage_info, + ) class_to_srcs, class_to_srcs_subtargets = get_class_to_source_map_info( ctx, @@ -70,7 +63,7 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: r_dot_java_packages = set([info.specified_r_dot_java_package for info in resources_info.unfiltered_resource_infos if info.specified_r_dot_java_package]), shared_libraries = set(native_library_info.shared_libraries), ), - DefaultInfo(default_output = default_output, other_outputs = exopackage_outputs + android_binary_info.materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets), + DefaultInfo(default_output = output_apk, other_outputs = _get_exopackage_outputs(exopackage_info) + android_binary_info.materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets), get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info, definitely_has_native_libs = definitely_has_native_libs), TemplatePlaceholderInfo( keyed_variables = { From ab9b64392b556b507c237d2f4f14dd0b49aafd0d Mon Sep 17 00:00:00 2001 From: zadig Date: Mon, 11 Mar 2024 02:46:49 -0700 Subject: [PATCH 0427/1133] Allow `None` in `LinkerInfo.linker_flags`. Summary: Allow `None` in `LinkerInfo.linker_flags`. [`LinkerInfo.linker_flags`] field is by default set to `None`, as well as other flag list fields (e.g. [`LinkerInfo.archiver_flags`]). However, [`LinkerInfo.linker_flags`] isn't consistenly used across `prelude`. 
For instance, in [`cxx/cxx_library.bzl`], [`LinkerInfo.linker_flags`] is treated as an optional list: https://github.com/facebook/buck2/blob/1e89af6622344c46543a9e3781a556e058471043/prelude/cxx/cxx_library.bzl#L774 On the other hand, in [`cxx/cxx_toolchain_types.bzl`], [`LinkerInfo.linker_flags`] is expected to contain a non-`None` value: https://github.com/facebook/buck2/blob/1e89af6622344c46543a9e3781a556e058471043/prelude/cxx/cxx_toolchain_types.bzl#L308 https://github.com/facebook/buck2/blob/1e89af6622344c46543a9e3781a556e058471043/prelude/cxx/cxx_toolchain_types.bzl#L327 This leads to an error when defining a custom CXX toolchain: ``` Caused by: 0: Error running analysis for `toolchains//:cxx (prelude//platforms:default#904931f735703749) (prelude//platforms:default#904931f735703749)` 1: Traceback (most recent call last): File , in * musl-toolchain/define_toolchain.bzl:53, in _define_cxx_toolchain_impl ] + cxx_toolchain_infos( * prelude/cxx/cxx_toolchain_types.bzl:308, in cxx_toolchain_infos "ldflags-shared": _shell_quote(linker_info.linker_flags), * prelude/cxx/cxx_toolchain_types.bzl:328, in _shell_quote return cmd_args(xs, quote = "shell") error: Expected `a value implementing CommandLineArgLike (str, Artifact, RunInfo, etc)`, but got `NoneType` --> prelude/cxx/cxx_toolchain_types.bzl:328:12 | 328 | return cmd_args(xs, quote = "shell") | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ``` This commit handles the [`LinkerInfo.linker_flags`] consistently across the project. 
[`LinkerInfo.linker_flags`]: https://github.com/facebook/buck2/blob/1e89af6622344c46543a9e3781a556e058471043/prelude/cxx/cxx_toolchain_types.bzl#L41 [`LinkerInfo.archiver_flags`]: https://github.com/facebook/buck2/blob/1e89af6622344c46543a9e3781a556e058471043/prelude/cxx/cxx_toolchain_types.bzl#L20 [`cxx/cxx_library.bzl`]: https://github.com/facebook/buck2/blob/1e89af6622344c46543a9e3781a556e058471043/prelude/cxx/cxx_library.bzl [`cxx/cxx_toolchain_types.bzl`]: https://github.com/facebook/buck2/blob/1e89af6622344c46543a9e3781a556e058471043/prelude/cxx/cxx_toolchain_types.bzl X-link: https://github.com/facebook/buck2/pull/593 Reviewed By: ndmitchell Differential Revision: D54732694 Pulled By: JakobDegen fbshipit-source-id: 4c0bd711cecdce45c1b4f1329962cebaf2cca9d4 --- prelude/cxx/cxx_toolchain_types.bzl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index ee2972ebd..037ca3bc4 100644 --- a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -307,9 +307,9 @@ def cxx_toolchain_infos( # NOTE(agallagher): The arg-less variants of the ldflags macro are # identical, and are just separate to match v1's behavior (ideally, # we just have a single `ldflags` macro for this case). - "ldflags-shared": _shell_quote(linker_info.linker_flags), - "ldflags-static": _shell_quote(linker_info.linker_flags), - "ldflags-static-pic": _shell_quote(linker_info.linker_flags), + "ldflags-shared": _shell_quote(linker_info.linker_flags or []), + "ldflags-static": _shell_quote(linker_info.linker_flags or []), + "ldflags-static-pic": _shell_quote(linker_info.linker_flags or []), "objcopy": binary_utilities_info.objcopy, # TODO(T110378148): $(platform-name) is almost unusued. Should we remove it? 
"platform-name": platform_name, From b42e95b69dcb0f2f5307eca0c085e4bc5ca13867 Mon Sep 17 00:00:00 2001 From: zadig Date: Mon, 11 Mar 2024 02:47:36 -0700 Subject: [PATCH 0428/1133] Make `CxxToolchainInfo.mk_comp_db` optional. Summary: Make `CxxToolchainInfo.mk_comp_db` optional. According to [the documentation], constructing a [`CxxToolchainInfo`] requires at least C/C++ compiler info along with linker info. It means that [`CxxToolchainInfo.mk_comp_db`] should be optional. By the way, its definition is set to `default = None`. A `None` value in [`CxxToolchainInfo.mk_comp_db`] leads to an error: ``` Caused by: Traceback (most recent call last): File , in * prelude/rules.bzl:101, in buck2_compatibility_shim return impl(ctx) * prelude/cxx/cxx.bzl:191, in cxx_library_impl output = cxx_library_parameterized(ctx, params) * prelude/cxx/cxx_library.bzl:425, in cxx_library_parameterized comp_db = create_compilation_database(ctx, compiled_srcs.compile_cmds.src_com... error: Operation `[]` not supported for types `NoneType` and `run_info_callable` --> prelude/cxx/comp_db.bzl:34:18 | 34 | mk_comp_db = get_cxx_toolchain_info(ctx).mk_comp_db[RunInfo] | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ``` This commit fixes this small bug by allowing `None` in [`CxxToolchainInfo.mk_comp_db`]. 
[the documentation]: https://github.com/facebook/buck2/blob/1e89af6622344c46543a9e3781a556e058471043/prelude/cxx/cxx_toolchain_types.bzl#L251-L255 [`CxxToolchainInfo`]: https://github.com/facebook/buck2/blob/1e89af6622344c46543a9e3781a556e058471043/prelude/cxx/cxx_toolchain_types.bzl#L167 [`CxxToolchainInfo.mk_comp_db`]: https://github.com/facebook/buck2/blob/1e89af6622344c46543a9e3781a556e058471043/prelude/cxx/cxx_toolchain_types.bzl#L183 X-link: https://github.com/facebook/buck2/pull/594 Reviewed By: ndmitchell Differential Revision: D54732663 Pulled By: JakobDegen fbshipit-source-id: 85a22a21194285919c6dd5643c4a0dedaf6099ac --- prelude/cxx/comp_db.bzl | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/prelude/cxx/comp_db.bzl b/prelude/cxx/comp_db.bzl index 630e0bbc3..449c80657 100644 --- a/prelude/cxx/comp_db.bzl +++ b/prelude/cxx/comp_db.bzl @@ -32,7 +32,10 @@ def create_compilation_database( ctx: AnalysisContext, src_compile_cmds: list[CxxSrcCompileCommand], identifier: str) -> DefaultInfo: - mk_comp_db = get_cxx_toolchain_info(ctx).mk_comp_db[RunInfo] + mk_comp_db = get_cxx_toolchain_info(ctx).mk_comp_db + if mk_comp_db == None: + return DefaultInfo() + mk_comp_db = mk_comp_db[RunInfo] # Generate the per-source compilation DB entries. entries = {} From ae2073634c88c720862ea9784f2a7267623f0335 Mon Sep 17 00:00:00 2001 From: zadig Date: Mon, 11 Mar 2024 03:09:33 -0700 Subject: [PATCH 0429/1133] Disable stripping stage if no strip binary was provided. Summary: Disable stripping stage if no strip binary was provided. [`CxxToolchainInfo`] has a field called [`binary_utilities_info`] which points to some binary utilities such as [`strip`] or [`objcopy`]. Similar to https://github.com/facebook/buck2/pull/594, according to [the documentation](https://github.com/facebook/buck2/blob/1e89af6622344c46543a9e3781a556e058471043/prelude/cxx/cxx_toolchain_types.bzl#L251-L255), [`CxxToolchainInfo.binary_utilities_info`] is optional. 
However, in the [`cxx_library` implementation], [`_strip_objects`] is called regardless of the [`CxxToolchainInfo.binary_utilities_info`] value. This leads to the following error: ``` Caused by: Traceback (most recent call last): File , in * prelude/rules.bzl:101, in buck2_compatibility_shim return impl(ctx) * prelude/cxx/cxx.bzl:191, in cxx_library_impl output = cxx_library_parameterized(ctx, params) * prelude/cxx/cxx_library.bzl:371, in cxx_library_parameterized compiled_srcs = cxx_compile_srcs( * prelude/cxx/cxx_library.bzl:914, in cxx_compile_srcs pic = _get_library_compile_output(ctx, pic_cxx_outs, impl_params.extra_link_i... * prelude/cxx/cxx_library.bzl:866, in _get_library_compile_output stripped_objects = _strip_objects(ctx, objects) * prelude/cxx/cxx_library.bzl:1108, in _strip_objects outs.append(strip_debug_info(ctx, base + ".stripped.o", obj)) * prelude/linking/strip.bzl:67, in strip_debug_info return _strip_debug_info( error: Object of type `NoneType` has no attribute `strip` --> prelude/linking/strip.bzl:16:13 | 16 | strip = cxx_toolchain.binary_utilities_info.strip | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ``` This commit prevents this bug from happening by making [`CxxToolchainInfo.binary_utilities_info`] truely optional. 
[`CxxToolchainInfo`]: https://github.com/facebook/buck2/blob/6b2b497907676c662baa2f39d7622241da6f0081/prelude/cxx/cxx_toolchain_types.bzl#L167 [`binary_utilities_info`]: https://github.com/facebook/buck2/blob/6b2b497907676c662baa2f39d7622241da6f0081/prelude/cxx/cxx_toolchain_types.bzl#L176 [`CxxToolchainInfo.binary_utilities_info`]: https://github.com/facebook/buck2/blob/6b2b497907676c662baa2f39d7622241da6f0081/prelude/cxx/cxx_toolchain_types.bzl#L176 [`strip`]: https://github.com/facebook/buck2/blob/6b2b497907676c662baa2f39d7622241da6f0081/prelude/cxx/cxx_toolchain_types.bzl#L77 [`objcopy`]: https://github.com/facebook/buck2/blob/6b2b497907676c662baa2f39d7622241da6f0081/prelude/cxx/cxx_toolchain_types.bzl#L75 [`cxx_library` implementation]: https://github.com/facebook/buck2/blob/6b2b497907676c662baa2f39d7622241da6f0081/prelude/cxx/cxx_library.bzl#L866 [`_strip_objects`]: https://github.com/facebook/buck2/blob/6b2b497907676c662baa2f39d7622241da6f0081/prelude/cxx/cxx_library.bzl#L1093 X-link: https://github.com/facebook/buck2/pull/595 Reviewed By: ndmitchell Differential Revision: D54732708 Pulled By: JakobDegen fbshipit-source-id: a89dc6b452658c8f3c1773b9b5ad1f4882d7b957 --- prelude/cxx/cxx_library.bzl | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index d63eca1ae..15663206c 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -1095,11 +1095,18 @@ def _strip_objects(ctx: AnalysisContext, objects: list[Artifact]) -> list[Artifa Return new objects with debug info stripped. """ + cxx_toolchain_info = get_cxx_toolchain_info(ctx) + # Stripping is not supported on Windows - linker_type = get_cxx_toolchain_info(ctx).linker_info.type + linker_type = cxx_toolchain_info.linker_info.type if linker_type == "windows": return objects + # Disable stripping if no `strip` binary was provided by the toolchain. 
+ if cxx_toolchain_info.binary_utilities_info == None or \ + cxx_toolchain_info.binary_utilities_info.strip == None: + return objects + outs = [] for obj in objects: From 53a35c98fde13dd43ef2e5da10e993a422c057fe Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Mon, 11 Mar 2024 06:05:55 -0700 Subject: [PATCH 0430/1133] Let go linker decide the linkmode if not explicetely specified Summary: This will help us to avoid the need to manually manage linkmode for each cpu architecture Inspired by mitigation of S399850 Reviewed By: leoleovich Differential Revision: D54647849 fbshipit-source-id: 1393c1236bcb31d81ee3032dd13b9ac42c87fd10 --- prelude/go/link.bzl | 22 +++++++--------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 3c3c4d93d..600c5d51f 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -58,14 +58,6 @@ def _build_mode_param(mode: GoBuildMode) -> str: def get_inherited_link_pkgs(deps: list[Dependency]) -> dict[str, GoPkg]: return merge_pkgs([d[GoPkgLinkInfo].pkgs for d in deps if GoPkgLinkInfo in d]) -def is_any_dep_cgo(deps: list[Dependency]) -> bool: - for d in deps: - if GoPkgLinkInfo in d: - for pkg in d[GoPkgLinkInfo].pkgs.values(): - if pkg.cgo: - return True - return False - # TODO(cjhopman): Is link_style a LibOutputStyle or a LinkStrategy here? 
Based # on returning an empty thing for link_style != shared, it seems likely its # intended to be LibOutputStyle, but it's called in places that are passing what @@ -149,15 +141,15 @@ def link( link_mode = "external" elif shared: link_mode = "external" - elif is_any_dep_cgo(deps): - link_mode = "external" - else: - link_mode = "internal" - cmd.add("-linkmode", link_mode) - if link_mode == "external": + if link_mode != None: + cmd.add("-linkmode", link_mode) + + cxx_toolchain = go_toolchain.cxx_toolchain_for_linking + if cxx_toolchain == None and link_mode == "external": + fail("cxx_toolchain required for link_mode='external'") + if cxx_toolchain != None: is_win = ctx.attrs._exec_os_type[OsLookup].platform == "windows" - cxx_toolchain = go_toolchain.cxx_toolchain_for_linking # Gather external link args from deps. ext_links = get_link_args_for_strategy(ctx, cxx_inherited_link_info(deps), to_link_strategy(link_style)) From ddfba3046ce93fe99028fb2c40f6e15e0f586c80 Mon Sep 17 00:00:00 2001 From: bumbu a Date: Mon, 11 Mar 2024 07:22:30 -0700 Subject: [PATCH 0431/1133] Force building msys and rsys targets locally Summary: See T175874740 for context Reviewed By: c-ryan747 Differential Revision: D54586983 fbshipit-source-id: dc76bcbcfce3527453b3acbf1b4cd3e3d45ec928 --- prelude/genrule_local_labels.bzl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/prelude/genrule_local_labels.bzl b/prelude/genrule_local_labels.bzl index 0fec9753b..bea36736f 100644 --- a/prelude/genrule_local_labels.bzl +++ b/prelude/genrule_local_labels.bzl @@ -206,6 +206,11 @@ _GENRULE_LOCAL_LABELS = {label: True for label in [ # Uses jf which is not on RE "uses_jf", + + # On Messenger Desktop few targets are massive and take much longer on RE than + # locally to build on Windows. 
This is a mitigation until we can break down these + # targets + "zeratul_windows_capacity_hog", ]} def genrule_labels_require_local(labels): From 6cbc4d87a8c5778af3ef24ba004f4f1197c1eb60 Mon Sep 17 00:00:00 2001 From: Overhatted <15021741+Overhatted@users.noreply.github.com> Date: Mon, 11 Mar 2024 07:52:36 -0700 Subject: [PATCH 0432/1133] Adding rule to build Windows .rc files Summary: Adds the ability to compile resource files: https://learn.microsoft.com/en-us/windows/win32/menurc/about-resource-files More details are in the commit messages. X-link: https://github.com/facebook/buck2/pull/581 Reviewed By: JakobDegen Differential Revision: D54603649 Pulled By: KapJI fbshipit-source-id: fcf56a919b8b6245000e71565894dc7c5e1c0c3f --- prelude/cxx/cxx_toolchain.bzl | 18 +++++- prelude/cxx/cxx_toolchain_types.bzl | 8 +++ prelude/cxx/windows_resource.bzl | 70 ++++++++++++++++++++++++ prelude/decls/cxx_rules.bzl | 54 ++++++++++++++++++ prelude/rules_impl.bzl | 5 ++ prelude/toolchains/cxx.bzl | 24 ++++++++ prelude/toolchains/msvc/run_msvc_tool.py | 8 ++- prelude/toolchains/msvc/tools.bzl | 34 ++++++++++++ prelude/toolchains/msvc/vswhere.py | 50 ++++++++++++----- 9 files changed, 254 insertions(+), 17 deletions(-) create mode 100644 prelude/cxx/windows_resource.bzl diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl index f5fb2546d..f1af710df 100644 --- a/prelude/cxx/cxx_toolchain.bzl +++ b/prelude/cxx/cxx_toolchain.bzl @@ -6,7 +6,7 @@ # of this source tree. 
load("@prelude//:is_full_meta_repo.bzl", "is_full_meta_repo") -load("@prelude//cxx:cxx_toolchain_types.bzl", "AsCompilerInfo", "AsmCompilerInfo", "BinaryUtilitiesInfo", "CCompilerInfo", "CudaCompilerInfo", "CxxCompilerInfo", "CxxObjectFormat", "DepTrackingMode", "DistLtoToolsInfo", "HipCompilerInfo", "LinkerInfo", "PicBehavior", "ShlibInterfacesMode", "StripFlagsInfo", "cxx_toolchain_infos") +load("@prelude//cxx:cxx_toolchain_types.bzl", "AsCompilerInfo", "AsmCompilerInfo", "BinaryUtilitiesInfo", "CCompilerInfo", "CudaCompilerInfo", "CvtresCompilerInfo", "CxxCompilerInfo", "CxxObjectFormat", "DepTrackingMode", "DistLtoToolsInfo", "HipCompilerInfo", "LinkerInfo", "PicBehavior", "RcCompilerInfo", "ShlibInterfacesMode", "StripFlagsInfo", "cxx_toolchain_infos") load("@prelude//cxx:cxx_utility.bzl", "cxx_toolchain_allow_cache_upload_args") load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:headers.bzl", "HeaderMode", "HeadersAsRawHeadersMode") @@ -73,6 +73,18 @@ def cxx_toolchain_impl(ctx): compiler_flags = cmd_args(ctx.attrs.hip_compiler_flags), preprocessor_flags = cmd_args(ctx.attrs.hip_preprocessor_flags), ) if ctx.attrs.hip_compiler else None + cvtres_info = CvtresCompilerInfo( + compiler = ctx.attrs.cvtres_compiler[RunInfo], + compiler_type = ctx.attrs.cvtres_compiler_type or ctx.attrs.compiler_type, + compiler_flags = cmd_args(ctx.attrs.cvtres_compiler_flags), + preprocessor_flags = cmd_args(ctx.attrs.cvtres_preprocessor_flags), + ) if ctx.attrs.cvtres_compiler else None + rc_info = RcCompilerInfo( + compiler = ctx.attrs.rc_compiler[RunInfo], + compiler_type = ctx.attrs.rc_compiler_type or ctx.attrs.compiler_type, + compiler_flags = cmd_args(ctx.attrs.rc_compiler_flags), + preprocessor_flags = cmd_args(ctx.attrs.rc_preprocessor_flags), + ) if ctx.attrs.rc_compiler else None linker_info = LinkerInfo( archiver = ctx.attrs.archiver[RunInfo], @@ -142,6 +154,8 @@ def cxx_toolchain_impl(ctx): as_compiler_info = as_info, cuda_compiler_info = 
cuda_info, hip_compiler_info = hip_info, + cvtres_compiler_info = cvtres_info, + rc_compiler_info = rc_info, header_mode = _get_header_mode(ctx), llvm_link = ctx.attrs.llvm_link[RunInfo] if ctx.attrs.llvm_link else None, object_format = CxxObjectFormat(object_format), @@ -177,6 +191,7 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): "cpp_dep_tracking_mode": attrs.enum(DepTrackingMode.values(), default = "makefile"), "cuda_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), "cuda_dep_tracking_mode": attrs.enum(DepTrackingMode.values(), default = "makefile"), + "cvtres_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), "cxx_compiler": dep_type(providers = [RunInfo]), "generate_linker_maps": attrs.bool(default = False), "hip_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), @@ -199,6 +214,7 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): "produce_interface_from_stub_shared_library": attrs.bool(default = False), "public_headers_symlinks_enabled": attrs.bool(default = True), "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), + "rc_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), "requires_objects": attrs.bool(default = False), "sanitizer_runtime_enabled": attrs.bool(default = False), "sanitizer_runtime_files": attrs.set(attrs.dep(), sorted = True, default = []), # Use `attrs.dep()` as it's not a tool, always propagate target platform diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index 037ca3bc4..f8fd32705 100644 --- a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -124,6 +124,8 @@ _compiler_fields = [ HipCompilerInfo = provider(fields = _compiler_fields) CudaCompilerInfo = provider(fields = _compiler_fields) +CvtresCompilerInfo = provider(fields = _compiler_fields) +RcCompilerInfo = provider(fields = _compiler_fields) CCompilerInfo = provider(fields = 
_compiler_fields) CxxCompilerInfo = provider(fields = _compiler_fields) AsmCompilerInfo = provider(fields = _compiler_fields) @@ -182,6 +184,8 @@ CxxToolchainInfo = provider( "as_compiler_info": provider_field(typing.Any, default = None), "hip_compiler_info": provider_field(typing.Any, default = None), "cuda_compiler_info": provider_field(typing.Any, default = None), + "cvtres_compiler_info": provider_field(typing.Any, default = None), + "rc_compiler_info": provider_field(typing.Any, default = None), "mk_comp_db": provider_field(typing.Any, default = None), "mk_hmap": provider_field(typing.Any, default = None), "llvm_link": provider_field(typing.Any, default = None), @@ -232,6 +236,8 @@ def cxx_toolchain_infos( as_compiler_info = None, hip_compiler_info = None, cuda_compiler_info = None, + cvtres_compiler_info = None, + rc_compiler_info = None, object_format = CxxObjectFormat("native"), mk_comp_db = None, mk_hmap = None, @@ -273,6 +279,8 @@ def cxx_toolchain_infos( as_compiler_info = as_compiler_info, hip_compiler_info = hip_compiler_info, cuda_compiler_info = cuda_compiler_info, + cvtres_compiler_info = cvtres_compiler_info, + rc_compiler_info = rc_compiler_info, mk_comp_db = mk_comp_db, mk_hmap = mk_hmap, object_format = object_format, diff --git a/prelude/cxx/windows_resource.bzl b/prelude/cxx/windows_resource.bzl new file mode 100644 index 000000000..b144c2988 --- /dev/null +++ b/prelude/cxx/windows_resource.bzl @@ -0,0 +1,70 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") +load("@prelude//linking:link_groups.bzl", "LinkGroupLibInfo") +load("@prelude//linking:link_info.bzl", "LibOutputStyle", "LinkInfo", "LinkInfos", "ObjectsLinkable", "create_merged_link_info") +load("@prelude//linking:linkable_graph.bzl", "create_linkable_graph") +load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo") + +def windows_resource_impl(ctx: AnalysisContext) -> list[Provider]: + objects = [] + + toolchain = get_cxx_toolchain_info(ctx) + for src in ctx.attrs.srcs: + rc_output = ctx.actions.declare_output( + "__objects__", + "{}.res".format(src.short_path), + ) + rc_cmd = cmd_args(toolchain.rc_compiler_info.compiler) + rc_cmd.add(toolchain.rc_compiler_info.compiler_flags) + rc_cmd.add(cmd_args(rc_output.as_output(), format = "/fo{}")) + rc_cmd.add(src) + + ctx.actions.run( + rc_cmd, + category = "rc_compile", + ) + + cvtres_output = ctx.actions.declare_output( + "__objects__", + "{}.obj".format(src.short_path), + ) + cvtres_cmd = cmd_args(toolchain.cvtres_compiler_info.compiler) + cvtres_cmd.add(toolchain.cvtres_compiler_info.compiler_flags) + cvtres_cmd.add(cmd_args(cvtres_output.as_output(), format = "/OUT:{}")) + cvtres_cmd.add(rc_output) + + ctx.actions.run( + cvtres_cmd, + category = "cvtres_compile", + ) + + objects.append(cvtres_output) + + link = LinkInfo( + name = ctx.attrs.name, + linkables = [ObjectsLinkable( + objects = objects, + linker_type = toolchain.linker_info.type, + link_whole = True, + )], + ) + + providers = [ + DefaultInfo(default_output = None), + SharedLibraryInfo(set = None), + LinkGroupLibInfo(libs = {}), + create_linkable_graph(ctx), + create_merged_link_info( + ctx, + toolchain.pic_behavior, + {output_style: LinkInfos(default = link) for output_style in LibOutputStyle}, + ), + ] + + return providers diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index d0c9771b2..5508da5ac 100644 --- a/prelude/decls/cxx_rules.bzl +++ 
b/prelude/decls/cxx_rules.bzl @@ -730,6 +730,51 @@ cxx_precompiled_header = prelude_rule( ), ) +windows_resource = prelude_rule( + name = "windows_resource", + docs = """ + A `windows_resource()` rule specifies a set of Window's Resource File (.rc) that + are compiled into object files. + + The files are compiled into .res files using rc.exe and then compiled into object files + using cvtres.exe. + They are not part of cxx_library because Microsoft's linker ignores the resources + unless they are specified as an object file, meaning including them in a possibly static + library is unintuitive. + """, + examples = """ + ``` + + # A rule that includes a single .rc file and compiles it into an object file. + windows_resource( + name = "resources", + srcs = [ + "resources.rc", + ], + ) + + # A rule that links against the above windows_resource rule. + cxx_binary( + name = "app", + srcs = [ + "main.cpp", + ], + deps = [ + ":resources" + ], + ) + + ``` + """, + further = None, + attrs = ( + cxx_common.srcs_arg() | + { + "labels": attrs.list(attrs.string(), default = []), + } + ), +) + cxx_test = prelude_rule( name = "cxx_test", docs = """ @@ -891,6 +936,10 @@ cxx_toolchain = prelude_rule( "cuda_compiler_flags": attrs.list(attrs.arg(), default = []), "cuda_compiler_type": attrs.option(attrs.enum(CxxToolProviderType), default = None), "cuda_preprocessor_flags": attrs.list(attrs.arg(), default = []), + "cvtres_compiler": attrs.option(attrs.source(), default = None), + "cvtres_compiler_flags": attrs.list(attrs.arg(), default = []), + "cvtres_compiler_type": attrs.option(attrs.enum(CxxToolProviderType), default = None), + "cvtres_preprocessor_flags": attrs.list(attrs.arg(), default = []), "cxx_compiler": attrs.source(), "cxx_compiler_flags": attrs.list(attrs.arg(), default = []), "cxx_compiler_type": attrs.option(attrs.enum(CxxToolProviderType), default = None), @@ -921,6 +970,10 @@ cxx_toolchain = prelude_rule( "public_headers_symlinks_enabled": attrs.bool(default = False), 
"ranlib": attrs.option(attrs.source(), default = None), "ranlib_flags": attrs.list(attrs.arg(), default = []), + "rc_compiler": attrs.option(attrs.source(), default = None), + "rc_compiler_flags": attrs.list(attrs.arg(), default = []), + "rc_compiler_type": attrs.option(attrs.enum(CxxToolProviderType), default = None), + "rc_preprocessor_flags": attrs.list(attrs.arg(), default = []), "requires_archives": attrs.bool(default = False), "shared_dep_runtime_ld_flags": attrs.list(attrs.arg(), default = []), "shared_library_extension": attrs.string(default = ""), @@ -1243,6 +1296,7 @@ cxx_rules = struct( cxx_genrule = cxx_genrule, cxx_library = cxx_library, cxx_precompiled_header = cxx_precompiled_header, + windows_resource = windows_resource, cxx_test = cxx_test, cxx_toolchain = cxx_toolchain, prebuilt_cxx_library = prebuilt_cxx_library, diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index d80865e85..174e09dcf 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -16,6 +16,7 @@ load("@prelude//cxx:cxx_toolchain.bzl", "cxx_toolchain_extra_attributes", "cxx_t load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainInfo") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo", "HeaderMode") load("@prelude//cxx:prebuilt_cxx_library_group.bzl", "prebuilt_cxx_library_group_impl") +load("@prelude//cxx:windows_resource.bzl", "windows_resource_impl") load("@prelude//cxx/user:link_group_map.bzl", "link_group_map_attr") load("@prelude//erlang:erlang.bzl", _erlang_implemented_rules = "implemented_rules") load("@prelude//git:git_fetch.bzl", "git_fetch_impl") @@ -159,6 +160,7 @@ extra_implemented_rules = struct( cxx_python_extension = cxx_python_extension_impl, prebuilt_cxx_library = prebuilt_cxx_library_impl, prebuilt_cxx_library_group = prebuilt_cxx_library_group_impl, + windows_resource = windows_resource_impl, # C++ / LLVM llvm_link_bitcode = llvm_link_bitcode_impl, @@ -585,6 +587,9 @@ inlined_extra_attributes = { }, 
"rust_test": {}, "sh_test": {}, + "windows_resource": { + "_cxx_toolchain": toolchains_common.cxx(), + }, } all_extra_attributes = _merge_dictionaries([ diff --git a/prelude/toolchains/cxx.bzl b/prelude/toolchains/cxx.bzl index 682ea8ca3..e8b3152a9 100644 --- a/prelude/toolchains/cxx.bzl +++ b/prelude/toolchains/cxx.bzl @@ -9,11 +9,13 @@ load( "@prelude//cxx:cxx_toolchain_types.bzl", "BinaryUtilitiesInfo", "CCompilerInfo", + "CvtresCompilerInfo", "CxxCompilerInfo", "CxxPlatformInfo", "CxxToolchainInfo", "LinkerInfo", "PicBehavior", + "RcCompilerInfo", "ShlibInterfacesMode", ) load("@prelude//cxx:headers.bzl", "HeaderMode") @@ -34,6 +36,8 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): asm_compiler_type = ctx.attrs.compiler_type compiler = ctx.attrs.compiler cxx_compiler = ctx.attrs.cxx_compiler + cvtres_compiler = ctx.attrs.cvtres_compiler + rc_compiler = ctx.attrs.rc_compiler linker = ctx.attrs.linker linker_type = "gnu" pic_behavior = PicBehavior("supported") @@ -57,6 +61,10 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): if compiler == "cl.exe": compiler = msvc_tools.cl_exe cxx_compiler = compiler + if cvtres_compiler == "cvtres.exe": + cvtres_compiler = msvc_tools.cvtres_exe + if rc_compiler == "rc.exe": + rc_compiler = msvc_tools.rc_exe if linker == "link.exe": linker = msvc_tools.link_exe linker = _windows_linker_wrapper(ctx, linker) @@ -141,6 +149,18 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): compiler = RunInfo(args = [asm_compiler]), compiler_type = asm_compiler_type, ), + cvtres_compiler_info = CvtresCompilerInfo( + compiler = RunInfo(args = [cvtres_compiler]), + preprocessor_flags = [], + compiler_flags = ctx.attrs.cvtres_flags, + compiler_type = ctx.attrs.compiler_type, + ), + rc_compiler_info = RcCompilerInfo( + compiler = RunInfo(args = [rc_compiler]), + preprocessor_flags = [], + compiler_flags = ctx.attrs.rc_flags, + compiler_type = ctx.attrs.compiler_type, + ), header_mode = HeaderMode("symlink_tree_only"), 
cpp_dep_tracking_mode = ctx.attrs.cpp_dep_tracking_mode, pic_behavior = pic_behavior, @@ -179,6 +199,8 @@ system_cxx_toolchain = rule( "compiler": attrs.string(default = "cl.exe" if host_info().os.is_windows else "clang"), "compiler_type": attrs.string(default = "windows" if host_info().os.is_windows else "clang"), # one of CxxToolProviderType "cpp_dep_tracking_mode": attrs.string(default = "makefile"), + "cvtres_compiler": attrs.string(default = "cvtres.exe"), + "cvtres_flags": attrs.list(attrs.string(), default = []), "cxx_compiler": attrs.string(default = "cl.exe" if host_info().os.is_windows else "clang++"), "cxx_flags": attrs.list(attrs.string(), default = []), "link_flags": attrs.list(attrs.string(), default = []), @@ -188,6 +210,8 @@ system_cxx_toolchain = rule( "linker_wrapper": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//cxx/tools:linker_wrapper")), "make_comp_db": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//cxx/tools:make_comp_db")), "msvc_tools": attrs.default_only(attrs.exec_dep(providers = [VisualStudio], default = "prelude//toolchains/msvc:msvc_tools")), + "rc_compiler": attrs.string(default = "rc.exe"), + "rc_flags": attrs.list(attrs.string(), default = []), }, is_toolchain_rule = True, ) diff --git a/prelude/toolchains/msvc/run_msvc_tool.py b/prelude/toolchains/msvc/run_msvc_tool.py index 6d2f8fc73..2fa9a6019 100644 --- a/prelude/toolchains/msvc/run_msvc_tool.py +++ b/prelude/toolchains/msvc/run_msvc_tool.py @@ -42,8 +42,12 @@ def main(): prepend_env(env, "PATH", tool.PATH) prepend_env(env, "INCLUDE", tool.INCLUDE) - completed_process = subprocess.run([tool.exe, *arguments], env=env) - sys.exit(completed_process.returncode) + if tool.exe is None: + print("Tool not found", file=sys.stderr) + sys.exit(1) + else: + completed_process = subprocess.run([tool.exe, *arguments], env=env) + sys.exit(completed_process.returncode) if __name__ == "__main__": diff --git 
a/prelude/toolchains/msvc/tools.bzl b/prelude/toolchains/msvc/tools.bzl index 72d294eba..199065fb9 100644 --- a/prelude/toolchains/msvc/tools.bzl +++ b/prelude/toolchains/msvc/tools.bzl @@ -12,27 +12,35 @@ VisualStudio = provider( fields = { # cl.exe "cl_exe": provider_field(typing.Any, default = None), + # cvtres.exe + "cvtres_exe": provider_field(typing.Any, default = None), # lib.exe "lib_exe": provider_field(typing.Any, default = None), # ml64.exe "ml64_exe": provider_field(typing.Any, default = None), # link.exe "link_exe": provider_field(typing.Any, default = None), + # rc.exe + "rc_exe": provider_field(typing.Any, default = None), }, ) def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: cl_exe_json = ctx.actions.declare_output("cl.exe.json") + cvtres_exe_json = ctx.actions.declare_output("cvtres.exe.json") lib_exe_json = ctx.actions.declare_output("lib.exe.json") ml64_exe_json = ctx.actions.declare_output("ml64.exe.json") link_exe_json = ctx.actions.declare_output("link.exe.json") + rc_exe_json = ctx.actions.declare_output("rc.exe.json") cmd = [ ctx.attrs.vswhere[RunInfo], cmd_args("--cl=", cl_exe_json.as_output(), delimiter = ""), + cmd_args("--cvtres=", cvtres_exe_json.as_output(), delimiter = ""), cmd_args("--lib=", lib_exe_json.as_output(), delimiter = ""), cmd_args("--ml64=", ml64_exe_json.as_output(), delimiter = ""), cmd_args("--link=", link_exe_json.as_output(), delimiter = ""), + cmd_args("--rc=", rc_exe_json.as_output(), delimiter = ""), ] ctx.actions.run( @@ -48,6 +56,12 @@ def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: cmd = cmd_args(run_msvc_tool, cl_exe_json), os = ScriptOs("windows"), ) + cvtres_exe_script = cmd_script( + ctx = ctx, + name = "cvtres", + cmd = cmd_args(run_msvc_tool, cvtres_exe_json), + os = ScriptOs("windows"), + ) lib_exe_script = cmd_script( ctx = ctx, name = "lib", @@ -66,6 +80,12 @@ def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: cmd = cmd_args(run_msvc_tool, 
link_exe_json), os = ScriptOs("windows"), ) + rc_exe_script = cmd_script( + ctx = ctx, + name = "rc", + cmd = cmd_args(run_msvc_tool, rc_exe_json), + os = ScriptOs("windows"), + ) return [ # Supports `buck2 run prelude//toolchains/msvc:msvc_tools[cl.exe]` @@ -77,6 +97,12 @@ def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: "json": [DefaultInfo(default_output = cl_exe_json)], }), ], + "cvtres.exe": [ + RunInfo(args = [cvtres_exe_script]), + DefaultInfo(sub_targets = { + "json": [DefaultInfo(default_output = cvtres_exe_json)], + }), + ], "lib.exe": [ RunInfo(args = [lib_exe_script]), DefaultInfo(sub_targets = { @@ -95,12 +121,20 @@ def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: "json": [DefaultInfo(default_output = ml64_exe_json)], }), ], + "rc.exe": [ + RunInfo(args = [rc_exe_script]), + DefaultInfo(sub_targets = { + "json": [DefaultInfo(default_output = rc_exe_json)], + }), + ], }), VisualStudio( cl_exe = cl_exe_script, + cvtres_exe = cvtres_exe_script, lib_exe = lib_exe_script, ml64_exe = ml64_exe_script, link_exe = link_exe_script, + rc_exe = rc_exe_script, ), ] diff --git a/prelude/toolchains/msvc/vswhere.py b/prelude/toolchains/msvc/vswhere.py index 0d98a6165..9c9f6a541 100644 --- a/prelude/toolchains/msvc/vswhere.py +++ b/prelude/toolchains/msvc/vswhere.py @@ -19,13 +19,18 @@ from pathlib import Path from typing import IO, List, NamedTuple +VC_EXE_NAMES = ["cl.exe", "cvtres.exe", "lib.exe", "ml64.exe", "link.exe"] +UCRT_EXE_NAMES = ["rc.exe"] + class OutputJsonFiles(NamedTuple): # We write a Tool instance as JSON into each of these files. 
cl: IO[str] + cvtres: IO[str] lib: IO[str] ml64: IO[str] link: IO[str] + rc: IO[str] class Tool(NamedTuple): @@ -35,11 +40,14 @@ class Tool(NamedTuple): INCLUDE: List[Path] = [] -def find_in_path(executable): +def find_in_path(executable, is_optional=False): which = shutil.which(executable) if which is None: - print(f"{executable} not found in $PATH", file=sys.stderr) - sys.exit(1) + if is_optional: + return None + else: + print(f"{executable} not found in $PATH", file=sys.stderr) + sys.exit(1) return Tool(which) @@ -99,8 +107,9 @@ def find_with_vswhere_exe(): lib_path = tools_path / "lib" / "x64" include_path = tools_path / "include" - exe_names = "cl.exe", "lib.exe", "ml64.exe", "link.exe" - if not all(bin_path.joinpath(exe).exists() for exe in exe_names): + vc_exe_paths = [bin_path / exe for exe in VC_EXE_NAMES] + + if not all(exe.exists() for exe in vc_exe_paths): continue PATH = [bin_path] @@ -109,10 +118,16 @@ def find_with_vswhere_exe(): ucrt, ucrt_version = get_ucrt_dir() if ucrt and ucrt_version: - PATH.append(ucrt / "bin" / ucrt_version / "x64") + ucrt_bin_path = ucrt / "bin" / ucrt_version / "x64" + PATH.append(ucrt_bin_path) LIB.append(ucrt / "lib" / ucrt_version / "ucrt" / "x64") INCLUDE.append(ucrt / "include" / ucrt_version / "ucrt") + ucrt_exe_paths = [ucrt_bin_path / exe for exe in UCRT_EXE_NAMES] + ucrt_exe_paths = [exe if exe.exists() else None for exe in ucrt_exe_paths] + else: + ucrt_exe_paths = [None for exe in UCRT_EXE_NAMES] + sdk, sdk_version = get_sdk10_dir() if sdk and sdk_version: PATH.append(sdk / "bin" / "x64") @@ -123,12 +138,13 @@ def find_with_vswhere_exe(): INCLUDE.append(sdk / "include" / sdk_version / "shared") return [ - Tool(exe=bin_path / exe, LIB=LIB, PATH=PATH, INCLUDE=INCLUDE) - for exe in exe_names + Tool(exe=exe, LIB=LIB, PATH=PATH, INCLUDE=INCLUDE) + for exe in vc_exe_paths + ucrt_exe_paths ] print( - "vswhere.exe did not find a suitable MSVC toolchain containing cl.exe, lib.exe, ml64.exe", + "vswhere.exe did not find a 
suitable MSVC toolchain containing " + ", ".join(VC_EXE_NAMES), file=sys.stderr, ) sys.exit(1) @@ -205,24 +221,30 @@ def write_tool_json(out, tool): def main(): parser = argparse.ArgumentParser() parser.add_argument("--cl", type=argparse.FileType("w"), required=True) + parser.add_argument("--cvtres", type=argparse.FileType("w"), required=True) parser.add_argument("--lib", type=argparse.FileType("w"), required=True) parser.add_argument("--ml64", type=argparse.FileType("w"), required=True) parser.add_argument("--link", type=argparse.FileType("w"), required=True) + parser.add_argument("--rc", type=argparse.FileType("w"), required=True) output = OutputJsonFiles(**vars(parser.parse_args())) # If vcvars has been run, it puts these tools onto $PATH. if "VCINSTALLDIR" in os.environ: - cl_exe = find_in_path("cl.exe") - lib_exe = find_in_path("lib.exe") - ml64_exe = find_in_path("ml64.exe") - link_exe = find_in_path("link.exe") + cl_exe, cvtres_exe, lib_exe, ml64_exe, link_exe = ( + find_in_path(exe) for exe in VC_EXE_NAMES + ) + rc_exe = find_in_path("rc.exe", is_optional=True) else: - cl_exe, lib_exe, ml64_exe, link_exe = find_with_vswhere_exe() + cl_exe, cvtres_exe, lib_exe, ml64_exe, link_exe, rc_exe = ( + find_with_vswhere_exe() + ) write_tool_json(output.cl, cl_exe) + write_tool_json(output.cvtres, cvtres_exe) write_tool_json(output.lib, lib_exe) write_tool_json(output.ml64, ml64_exe) write_tool_json(output.link, link_exe) + write_tool_json(output.rc, rc_exe) if __name__ == "__main__": From 53c005b238f67029eb162765e9f9dabd60334d2c Mon Sep 17 00:00:00 2001 From: generatedunixname89002005287564 Date: Mon, 11 Mar 2024 09:57:39 -0700 Subject: [PATCH 0433/1133] Pyre Configurationless migration for] [batch:22/244] Reviewed By: connernilsen Differential Revision: D54729819 fbshipit-source-id: a3169732e2325236a3d78bc1d10048faf880d76f --- prelude/apple/tools/code_signing/codesign_bundle.py | 2 ++ .../apple/tools/code_signing/provisioning_profile_metadata.py | 2 ++ 2 files 
changed, 4 insertions(+) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 7c7ceb1ad..a4145761c 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + import asyncio import logging import os diff --git a/prelude/apple/tools/code_signing/provisioning_profile_metadata.py b/prelude/apple/tools/code_signing/provisioning_profile_metadata.py index 436855404..733b32e6a 100644 --- a/prelude/apple/tools/code_signing/provisioning_profile_metadata.py +++ b/prelude/apple/tools/code_signing/provisioning_profile_metadata.py @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# pyre-strict + from __future__ import annotations import hashlib From daef4362b3a058a049f6435d14a2bd9aded74620 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Mon, 11 Mar 2024 10:06:09 -0700 Subject: [PATCH 0434/1133] command alias: Add trampoline as `DefaultInfo` Summary: Command aliases currently don't return a `DefaultInfo`. This diff introduces one, specifically populating it with the trampoline script which is now unconditionally created. The `RunInfo` still uses the more efficient construction. This way running the script outputted by the default info will match `buck2 run` behavior, which seems right. 
cc cjhopman whose todo this is fixing Reviewed By: cjhopman Differential Revision: D54698402 fbshipit-source-id: 10b3bddfa589fc2c9ee6376a329cbac33a0de8fd --- prelude/command_alias.bzl | 142 ++++++++++++++++++-------------------- 1 file changed, 68 insertions(+), 74 deletions(-) diff --git a/prelude/command_alias.bzl b/prelude/command_alias.bzl index 3b7a4db58..1dcc7ea9b 100644 --- a/prelude/command_alias.bzl +++ b/prelude/command_alias.bzl @@ -24,56 +24,52 @@ def _command_alias_impl_target_unix(ctx, exec_is_windows: bool): else: base = _get_run_info_from_exe(ctx.attrs.exe) - run_info_args = cmd_args() + trampoline_args = cmd_args() + trampoline_args.add("#!/usr/bin/env bash") + trampoline_args.add("set -euo pipefail") + trampoline_args.add('BUCK_COMMAND_ALIAS_ABSOLUTE=$(cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P)') + + for (k, v) in ctx.attrs.env.items(): + # TODO(akozhevnikov): maybe check environment variable is not conflicting with pre-existing one + trampoline_args.add(cmd_args(["export ", k, "=", cmd_args(v, quote = "shell")], delimiter = "")) + + if len(ctx.attrs.platform_exe.items()) > 0: + trampoline_args.add('case "$(uname)" in') + for platform, exe in ctx.attrs.platform_exe.items(): + # Only linux and macos are supported. 
+ if platform == "linux": + _add_platform_case_to_trampoline_args(trampoline_args, "Linux", _get_run_info_from_exe(exe), ctx.attrs.args) + elif platform == "macos": + _add_platform_case_to_trampoline_args(trampoline_args, "Darwin", _get_run_info_from_exe(exe), ctx.attrs.args) + + # Default case + _add_platform_case_to_trampoline_args(trampoline_args, "*", base, ctx.attrs.args) + trampoline_args.add("esac") + else: + _add_args_declaration_to_trampoline_args(trampoline_args, base, ctx.attrs.args) - if len(ctx.attrs.env) > 0 or len(ctx.attrs.platform_exe.items()) > 0: - trampoline_args = cmd_args() - trampoline_args.add("#!/usr/bin/env bash") - trampoline_args.add("set -euo pipefail") - trampoline_args.add('BUCK_COMMAND_ALIAS_ABSOLUTE=$(cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P)') - - for (k, v) in ctx.attrs.env.items(): - # TODO(akozhevnikov): maybe check environment variable is not conflicting with pre-existing one - trampoline_args.add(cmd_args(["export ", k, "=", cmd_args(v, quote = "shell")], delimiter = "")) - - if len(ctx.attrs.platform_exe.items()) > 0: - trampoline_args.add('case "$(uname)" in') - for platform, exe in ctx.attrs.platform_exe.items(): - # Only linux and macos are supported. 
- if platform == "linux": - _add_platform_case_to_trampoline_args(trampoline_args, "Linux", _get_run_info_from_exe(exe), ctx.attrs.args) - elif platform == "macos": - _add_platform_case_to_trampoline_args(trampoline_args, "Darwin", _get_run_info_from_exe(exe), ctx.attrs.args) - - # Default case - _add_platform_case_to_trampoline_args(trampoline_args, "*", base, ctx.attrs.args) - trampoline_args.add("esac") - else: - _add_args_declaration_to_trampoline_args(trampoline_args, base, ctx.attrs.args) - - trampoline_args.add('exec "${ARGS[@]}"') - - trampoline = _relativize_path( - ctx, - trampoline_args, - "sh", - "$BUCK_COMMAND_ALIAS_ABSOLUTE", - exec_is_windows, - ) + trampoline_args.add('exec "${ARGS[@]}"') + trampoline = _relativize_path( + ctx, + trampoline_args, + "sh", + "$BUCK_COMMAND_ALIAS_ABSOLUTE", + exec_is_windows, + ) + + run_info_args = cmd_args() + if len(ctx.attrs.env) > 0 or len(ctx.attrs.platform_exe.items()) > 0: run_info_args.add(trampoline) - run_info_args.hidden([trampoline_args]) + run_info_args.hidden(trampoline_args) else: run_info_args.add(base.args) run_info_args.add(ctx.attrs.args) run_info_args.hidden(ctx.attrs.resources) - # TODO(cjhopman): Consider what this should have for default outputs. Using - # the base's default outputs may not really be correct (it makes more sense to - # be the outputs required by the args). 
return [ - DefaultInfo(), + DefaultInfo(default_output = trampoline, other_outputs = [trampoline_args] + ctx.attrs.resources), RunInfo(args = run_info_args), ] @@ -87,50 +83,48 @@ def _command_alias_impl_target_windows(ctx, exec_is_windows: bool): else: base = RunInfo() + trampoline_args = cmd_args() + trampoline_args.add("@echo off") + + # Set BUCK_COMMAND_ALIAS_ABSOLUTE to the drive and full path of the script being created here + # We use this below to prefix any artifacts being referenced in the script + trampoline_args.add("set BUCK_COMMAND_ALIAS_ABSOLUTE=%~dp0") + + # Handle envs + for (k, v) in ctx.attrs.env.items(): + # TODO(akozhevnikov): maybe check environment variable is not conflicting with pre-existing one + trampoline_args.add(cmd_args(["set ", k, "=", v], delimiter = "")) + + # Handle args + # We shell quote the args but not the base. This is due to the same limitation detailed below with T111687922 + cmd = cmd_args([base.args], delimiter = " ") + for arg in ctx.attrs.args: + cmd.add(cmd_args(arg, quote = "shell")) + + # Add on %* to handle any other args passed through the command + cmd.add("%*") + trampoline_args.add(cmd) + + trampoline = _relativize_path( + ctx, + trampoline_args, + "bat", + "%BUCK_COMMAND_ALIAS_ABSOLUTE%", + exec_is_windows, + ) + run_info_args = cmd_args() if len(ctx.attrs.env) > 0: - trampoline_args = cmd_args() - trampoline_args.add("@echo off") - - # Set BUCK_COMMAND_ALIAS_ABSOLUTE to the drive and full path of the script being created here - # We use this below to prefix any artifacts being referenced in the script - trampoline_args.add("set BUCK_COMMAND_ALIAS_ABSOLUTE=%~dp0") - - # Handle envs - for (k, v) in ctx.attrs.env.items(): - # TODO(akozhevnikov): maybe check environment variable is not conflicting with pre-existing one - trampoline_args.add(cmd_args(["set ", k, "=", v], delimiter = "")) - - # Handle args - # We shell quote the args but not the base. 
This is due to the same limitation detailed below with T111687922 - cmd = cmd_args([base.args], delimiter = " ") - for arg in ctx.attrs.args: - cmd.add(cmd_args(arg, quote = "shell")) - - # Add on %* to handle any other args passed through the command - cmd.add("%*") - trampoline_args.add(cmd) - - trampoline = _relativize_path( - ctx, - trampoline_args, - "bat", - "%BUCK_COMMAND_ALIAS_ABSOLUTE%", - exec_is_windows, - ) run_info_args.add(trampoline) - run_info_args.hidden([trampoline_args]) + run_info_args.hidden(trampoline_args) else: run_info_args.add(base.args) run_info_args.add(ctx.attrs.args) run_info_args.hidden(ctx.attrs.resources) - # TODO(cjhopman): Consider what this should have for default outputs. Using - # the base's default outputs may not really be correct (it makes more sense to - # be the outputs required by the args). return [ - DefaultInfo(), + DefaultInfo(default_output = trampoline, other_outputs = [trampoline_args] + ctx.attrs.resources), RunInfo(args = run_info_args), ] From 01ce533b8ba1b87c16f2e99107d4574924561989 Mon Sep 17 00:00:00 2001 From: Omri Dover Date: Mon, 11 Mar 2024 10:28:42 -0700 Subject: [PATCH 0435/1133] Exclude rust libs from android somerge Summary: Fixing the automation_fblite_fallback_release build failure introduced in D54551438 Reviewed By: capickett Differential Revision: D54748362 fbshipit-source-id: d66faa01e6651f696620c7305c567fa45384f934 --- prelude/rust/rust_library.bzl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index f7dea97fa..77ab368d4 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -663,6 +663,7 @@ def _rust_link_providers( # if this target actually requested that. Opt ourselves out # if it didn't. 
ignore_force_static_follows_dependents = preferred_linkage != Linkage("static"), + include_in_android_mergemap = False, ), label = new_label, ), @@ -884,6 +885,7 @@ def _native_providers( link_infos = link_infos, shared_libs = solibs, default_soname = shlib_name, + include_in_android_mergemap = False, ), ), deps = inherited_link_graphs + inherited_exported_deps, From faaa4e8a77696bf8af819f39258eb39dbbb6360b Mon Sep 17 00:00:00 2001 From: Jia Chen Date: Mon, 11 Mar 2024 10:47:04 -0700 Subject: [PATCH 0436/1133] Support arbitrary target expressions in `batch.bxl` Summary: The BXL that performs per-target Pyre check, `batch.bxl` (D53874679), seems to take different kind of cli args from the [pre-existing classic Pyre BXL in production](https://www.internalfb.com/code/fbsource/[174d10113644]/fbcode/buck2/prelude/python/sourcedb/classic.bxl), for no good reason. The classic BXL can take arbitrary query expressions as target, whereas the per-target BXL can't handle complicated ones. Ex: ``` # This is OK > buck2 bxl prelude//python/sourcedb/classic.bxl:build -- --target "fbcode//foo/... - fbcode//foo/bar/..." # This is not OK buck2 bxl prelude//python/typecheck/batch.bxl:run -- --target "fbcode//foo/... - fbcode//foo/bar/..." ``` This makes it annoying to migrate some of the pre-existing local pyre configurations to per-target, where the [specified targets](https://www.internalfb.com/code/fbsource/[e1220167dd95]/fbcode/fblearner/flow/projects/search/util/recurring_training/.pyre_configuration.local?lines=4) were in a form that per-target BXL cannot support today. Let's change the per-target Pyre BXL and have it process its cli argument in the same way as classic Pyre BXL (where each cli arg first gets passed into `uquery` to resolve the query expression), to make life easier for us when migrating. 
Reviewed By: kinto0 Differential Revision: D54703094 fbshipit-source-id: 30a993cd3fea41484d9d7f32134a468ee1b898e6 --- prelude/python/typecheck/batch.bxl | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/prelude/python/typecheck/batch.bxl b/prelude/python/typecheck/batch.bxl index 93733857a..2521ddf1f 100644 --- a/prelude/python/typecheck/batch.bxl +++ b/prelude/python/typecheck/batch.bxl @@ -5,12 +5,13 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//utils:utils.bzl", "flatten") load("@prelude//python/sourcedb/filter.bxl", "do_filter") def _run_entry_point(ctx: bxl.Context) -> None: - targets = flatten(ctx.cli_args.target) - checked_targets = ctx.configured_targets(do_filter(ctx.uquery(), targets)) + uquery = ctx.uquery() + checked_targets = ctx.configured_targets( + do_filter(uquery, [uquery.eval(target) for target in ctx.cli_args.target]), + ) build_result = ctx.build([ target.label.with_sub_target("typecheck") @@ -31,7 +32,7 @@ run = bxl_main( impl = _run_entry_point, cli_args = { "target": cli_args.list( - cli_args.target_expr( + cli_args.string( doc = "Target pattern to run type checking on", ), ), From d6adb62cac94b385b9584c7232279706750e07c6 Mon Sep 17 00:00:00 2001 From: Edward Pastuszenski Date: Mon, 11 Mar 2024 21:49:10 -0700 Subject: [PATCH 0437/1133] Restore native_library_merge_sequence group entry support Summary: [Group entries](https://www.internalfb.com/code/fbsource/[4c02570d84f108dc3d76799d027c81940dfe94d7]/xplat/build_infra/buck_client/docs/rule/android_binary.soy?lines=408-409%2C426-440) introduce alternative entry structure for `native_library_merge_sequence` arguments - they can now be lists of tuples, not just tuples. 
Reviewed By: christolliday Differential Revision: D54783496 fbshipit-source-id: 69fd2a32982df54ec319ee5d77dc1718f1a3fac7 --- prelude/android/android_binary_native_library_rules.bzl | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index f246fdcdd..aaa384b05 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -421,7 +421,12 @@ def _declare_library_subtargets( if native_library_merge_map: sonames.update(native_library_merge_map.keys()) elif native_library_merge_sequence: - sonames.update([soname for (soname, _) in native_library_merge_sequence]) + for entry in native_library_merge_sequence: + if type(entry) == "list": + sonames.update([soname for (soname, _) in entry]) + else: + (soname, _) = entry + sonames.add(soname) lib_outputs = {} for soname in sonames.list(): From dd9bc01dec4cabe075183f94008ba1cfe5a7a9d1 Mon Sep 17 00:00:00 2001 From: Omri Dover Date: Tue, 12 Mar 2024 09:24:15 -0700 Subject: [PATCH 0438/1133] add TODO following D54748362 Summary: as title Reviewed By: dtolnay Differential Revision: D54794497 fbshipit-source-id: faa5d305808cd0ffff25fff273ce145c231b6e45 --- prelude/rust/rust_library.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 77ab368d4..9d6849624 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -663,7 +663,7 @@ def _rust_link_providers( # if this target actually requested that. Opt ourselves out # if it didn't. 
ignore_force_static_follows_dependents = preferred_linkage != Linkage("static"), - include_in_android_mergemap = False, + include_in_android_mergemap = False, # TODO(pickett): Plumb D54748362 to the macro layer ), label = new_label, ), From b03a15c6a85be6c8cdb7a38d734db5fafc3c24c1 Mon Sep 17 00:00:00 2001 From: Balaji S Date: Tue, 12 Mar 2024 09:44:06 -0700 Subject: [PATCH 0439/1133] Add e2e test for test:list Summary: * We setup a template for adding e2e tests to test shell ## Test setup * We add the test file we want to test to suite_data * We add this in buck2 as a fixture target * We add the fixture as a dep of the e2e test * This ensures that test shell will pick up that suite * Change the implementation to return the output so we can check against that in tests Reviewed By: acw224 Differential Revision: D54541220 fbshipit-source-id: 4307f68159f159cbb88760222ebe39bee2bd8f49 --- .../erlang/common_test/test_cli_lib/BUCK.v2 | 18 +++++++++ .../common_test/test_cli_lib/src/test.erl | 39 ++++++++++++------- .../test_cli_lib/test/test_cli_e2e_SUITE.erl | 33 ++++++++++++++++ .../test_list_SUITE.erl | 28 +++++++++++++ prelude/erlang/common_test/test_exec/BUCK.v2 | 6 +-- 5 files changed, 107 insertions(+), 17 deletions(-) create mode 100644 prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl create mode 100644 prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE_data/test_list_SUITE.erl diff --git a/prelude/erlang/common_test/test_cli_lib/BUCK.v2 b/prelude/erlang/common_test/test_cli_lib/BUCK.v2 index 439cc1c4d..9cb3223fc 100644 --- a/prelude/erlang/common_test/test_cli_lib/BUCK.v2 +++ b/prelude/erlang/common_test/test_cli_lib/BUCK.v2 @@ -13,3 +13,21 @@ erlang_application( ], visibility = ["PUBLIC"], ) + +erlang_tests( + contacts = ["whatsapp_testing_infra"], + labels = ["e2e"], + suites = ["test/test_cli_e2e_SUITE.erl"], + deps = [ + "stdlib", + ":test_cli_e2e_SUITE_fixtures", + ":test_cli_lib", + ], +) + +erlang_application( + name = 
"test_cli_e2e_SUITE_fixtures", + srcs = glob(["test/test_cli_e2e_SUITE_data/*.erl"]), + applications = [], + labels = ["test_application"], +) diff --git a/prelude/erlang/common_test/test_cli_lib/src/test.erl b/prelude/erlang/common_test/test_cli_lib/src/test.erl index 52420ece2..28f069f94 100644 --- a/prelude/erlang/common_test/test_cli_lib/src/test.erl +++ b/prelude/erlang/common_test/test_cli_lib/src/test.erl @@ -34,6 +34,11 @@ start_shell/0 ]). +%% Test functions +-export([ + list_impl/1 +]). + -type test_id() :: string() | non_neg_integer(). -type test_info() :: #{name := string(), suite := atom()}. -type run_spec() :: test_id() | [test_info()]. @@ -71,7 +76,7 @@ help() -> [ print_help(F, A) || {F, A} <- ?MODULE:module_info(exports), - not lists:member(F, [module_info, ensure_initialized, start, start_shell]) + not lists:member(F, [module_info, ensure_initialized, start, start_shell, list_impl]) ], io:format("~n"), io:format("For more information, use the built in help, e.g. h(test, help)~n"), @@ -143,10 +148,9 @@ list() -> %% tests from that module instead -spec list(RegExOrModule :: module() | string()) -> ok | {error, term()}. list(RegEx) when is_list(RegEx) -> - ensure_initialized(), - case ct_daemon:list(RegEx) of - {invalid_regex, _} = Err -> {error, Err}; - Tests -> print_tests(Tests) + case list_impl(RegEx) of + {ok, TestsString} -> io:format("~s", [TestsString]); + Error -> Error end. %% @doc Run a test given by either the test id from the last list() command, or @@ -210,6 +214,14 @@ reset() -> end. %% internal +-spec list_impl(RegEx :: string()) -> {ok, string()} | {error, term()}. +list_impl(RegEx) -> + ensure_initialized(), + case ct_daemon:list(RegEx) of + {invalid_regex, _} = Err -> {error, Err}; + Tests -> {ok, print_tests(Tests)} + end. + ensure_initialized() -> PrintInit = lists:foldl( fun(Fun, Acc) -> Fun() orelse Acc end, @@ -294,19 +306,20 @@ init_group_leader() -> ct_daemon:set_gl(), false. 
--spec print_tests([{module(), [{non_neg_integer(), string()}]}]) -> ok. +-spec print_tests([{module(), [{non_neg_integer(), string()}]}]) -> string(). print_tests([]) -> - io:format("no tests found~n"); + lists:flatten(io_lib:format("no tests found~n")); print_tests(Tests) -> - print_tests_impl(lists:reverse(Tests)). + lists:flatten(print_tests_impl(lists:reverse(Tests))). --spec print_tests_impl([{module(), [{non_neg_integer(), string()}]}]) -> ok. +-spec print_tests_impl([{module(), [{non_neg_integer(), string()}]}]) -> io_lib:chars(). print_tests_impl([]) -> - ok; + ""; print_tests_impl([{Suite, SuiteTests} | Rest]) -> - io:format("~s:~n", [Suite]), - [io:format("\t~b - ~s~n", [Id, Test]) || {Id, Test} <- SuiteTests], - print_tests_impl(Rest). + SuiteString = io_lib:format("~s:~n", [Suite]), + TestsString = [io_lib:format("\t~b - ~s~n", [Id, Test]) || {Id, Test} <- SuiteTests], + RestString = print_tests_impl(Rest), + SuiteString ++ TestsString ++ RestString. -spec is_debug_session() -> boolean(). is_debug_session() -> diff --git a/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl b/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl new file mode 100644 index 000000000..9cb1778f6 --- /dev/null +++ b/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl @@ -0,0 +1,33 @@ +%% Copyright (c) Meta Platforms, Inc. and affiliates. +%% This source code is licensed under both the MIT license found in the +%% LICENSE-MIT file in the root directory of this source tree and the Apache +%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%% of this source tree. +%%% % @format +-module(test_cli_e2e_SUITE). + +-include_lib("stdlib/include/assert.hrl"). + +-export([all/0]). + +-export([ + test_list/1 +]). + +all() -> + [test_list]. 
+ +test_list(_Config) -> + Expected = + "test_cli_e2e_SUITE:\n" + "test_list_SUITE:\n" + "\t1 - test_list_SUITE - .test_pass\n" + "\t2 - test_list_SUITE - default.test_fail\n", + ?assertEqual({ok, Expected}, test:list_impl("test_list_SUITE")), + + ?assertMatch({error, {invalid_regex, _}}, test:list_impl("^[a")), + + EmptyExpected = + "test_cli_e2e_SUITE:\n" + "test_list_SUITE:\n", + ?assertEqual({ok, EmptyExpected}, test:list_impl("does_not_exist_SUITE")). diff --git a/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE_data/test_list_SUITE.erl b/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE_data/test_list_SUITE.erl new file mode 100644 index 000000000..c28e4b0e9 --- /dev/null +++ b/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE_data/test_list_SUITE.erl @@ -0,0 +1,28 @@ +%% Copyright (c) Meta Platforms, Inc. and affiliates. +%% This source code is licensed under both the MIT license found in the +%% LICENSE-MIT file in the root directory of this source tree and the Apache +%% License, Version 2.0 found in the LICENSE-APACHE file in the root directory +%% of this source tree. +%%% % @format +-module(test_list_SUITE). + +-include_lib("stdlib/include/assert.hrl"). + +-export([all/0, groups/0]). + +-export([ + test_pass/1, + test_fail/1 +]). + +all() -> + [test_pass, {group, default}]. + +groups() -> + [{default, [], [test_fail]}]. + +test_pass(_Config) -> + ?assert(true). + +test_fail(_Config) -> + ?assert(false). 
diff --git a/prelude/erlang/common_test/test_exec/BUCK.v2 b/prelude/erlang/common_test/test_exec/BUCK.v2 index af2feb4f4..1db95d1a6 100644 --- a/prelude/erlang/common_test/test_exec/BUCK.v2 +++ b/prelude/erlang/common_test/test_exec/BUCK.v2 @@ -25,11 +25,9 @@ erlang_application( ) erlang_tests( - contacts = ["whatsapp_erlclient"], + contacts = ["whatsapp_testing_infra"], labels = ["unit"], - suites = glob( - ["test/*_SUITE.erl"], - ), + suites = ["test/ct_executor_SUITE.erl"], deps = [ "stdlib", ":test_exec", From 3abdec7c4641529d1fc2ce8bce61e327fb86795b Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Tue, 12 Mar 2024 09:47:33 -0700 Subject: [PATCH 0440/1133] thin the apple_xcuitest test runner binary Summary: The XCUITest runner binary needs to be thinned to launch successfully on devices. Otherwise we hit issues opening the test bundle like: ``` (mach-o file, but is an incompatible architecture (have 'arm64', need 'arm64e')) ``` Reviewed By: chatura-atapattu Differential Revision: D54806185 fbshipit-source-id: f0d5a4ce19130167a0c32d5dfac8e5c7e6285643 --- prelude/apple/apple_xcuitest.bzl | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/prelude/apple/apple_xcuitest.bzl b/prelude/apple/apple_xcuitest.bzl index 265a49d0c..6aa5052ed 100644 --- a/prelude/apple/apple_xcuitest.bzl +++ b/prelude/apple/apple_xcuitest.bzl @@ -11,6 +11,7 @@ load(":apple_bundle_destination.bzl", "AppleBundleDestination") load(":apple_bundle_part.bzl", "AppleBundlePart", "assemble_bundle") load(":apple_bundle_types.bzl", "AppleBundleInfo", "AppleBundleType") load(":apple_info_plist.bzl", "process_info_plist") +load(":apple_utility.bzl", "get_apple_architecture") def apple_xcuitest_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: # The XCUITest runner app bundle copies the application from the platform @@ -47,12 +48,22 @@ def _get_uitest_bundle(ctx: AnalysisContext) -> AppleBundlePart: ) def _get_xctrunner_binary(ctx: AnalysisContext) -> 
AppleBundlePart: + arch = get_apple_architecture(ctx) + lipo = ctx.attrs._apple_toolchain[AppleToolchainInfo].lipo platform_path = ctx.attrs._apple_toolchain[AppleToolchainInfo].platform_path - copied_binary = ctx.actions.declare_output(ctx.attrs.name) + thin_binary = ctx.actions.declare_output(ctx.attrs.name) xctrunner_path = cmd_args(platform_path, "Developer/Library/Xcode/Agents/XCTRunner.app/XCTRunner", delimiter = "/") - ctx.actions.run(["cp", "-PR", xctrunner_path, copied_binary.as_output()], category = "copy_xctrunner") + ctx.actions.run([ + lipo, + xctrunner_path, + "-extract", + arch, + "-output", + thin_binary.as_output(), + ], category = "copy_xctrunner") + return AppleBundlePart( - source = copied_binary, + source = thin_binary, destination = AppleBundleDestination("executables"), ) From 2f2de7f0ed377ad43f18e947f05b32a5616f6143 Mon Sep 17 00:00:00 2001 From: Jeremy Braun Date: Tue, 12 Mar 2024 22:03:06 -0700 Subject: [PATCH 0441/1133] propagate `zip_safe` down to `make_par` Summary: `python_binary` and friends have a `zip_safe` parameter that are supposed to set the `PEX-INFO`-carried property of a similar name. This wasn't getting propagated down to the call to `pex` by the buck2 rules. 
Reviewed By: cxxxs Differential Revision: D54130316 fbshipit-source-id: 423731b0de0329f0b00061ffa6597fd90bdd66d0 --- prelude/python/make_py_package.bzl | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 659929f10..2b4726477 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -264,6 +264,7 @@ def _make_py_package_impl( preload_libraries, symlink_tree_path, package_style, + True if ctx.attrs.zip_safe == None else ctx.attrs.zip_safe, ) bootstrap_args.add(build_args) if standalone: @@ -353,7 +354,8 @@ def _pex_bootstrap_args( shared_libraries: dict[str, (LinkedObject, bool)], preload_libraries: cmd_args, symlink_tree_path: Artifact | None, - package_style: PackageStyle) -> cmd_args: + package_style: PackageStyle, + zip_safe: bool) -> cmd_args: cmd = cmd_args() cmd.add(preload_libraries) cmd.add([ @@ -377,6 +379,9 @@ def _pex_bootstrap_args( cmd.add("--use-lite") cmd.add(output.as_output()) + if package_style == PackageStyle("standalone") and not zip_safe: + cmd.add("--no-zip-safe") + return cmd def _pex_modules_common_args( From db3b367fa8cf542cfd4e5a2fc32a087832dcd607 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Wed, 13 Mar 2024 04:33:55 -0700 Subject: [PATCH 0442/1133] Link Groups: refactor computation of `[info]` subtarget Summary: Remove dependency on `LinkGroupInfo` as we want to re-use the code for `resource_group_map()` later Reviewed By: rmaz Differential Revision: D54682853 fbshipit-source-id: 225529322526ab6d55f87ffa875407e3784e2e06 --- prelude/cxx/user/link_group_map.bzl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/prelude/cxx/user/link_group_map.bzl b/prelude/cxx/user/link_group_map.bzl index 93ee74c70..20038f7aa 100644 --- a/prelude/cxx/user/link_group_map.bzl +++ b/prelude/cxx/user/link_group_map.bzl @@ -139,10 +139,10 @@ def _make_json_info_for_group(group: Group) -> dict[str, 
typing.Any]: "name": group.name, } -def _make_info_subtarget_providers(ctx: AnalysisContext, link_group_info: LinkGroupInfo) -> list[Provider]: +def _make_info_subtarget_providers(ctx: AnalysisContext, groups: list[Group], mappings: dict[Label, str]) -> list[Provider]: info_json = { - "groups": {name: _make_json_info_for_group(group) for name, group in link_group_info.groups.items()}, - "mappings": link_group_info.mappings, + "groups": {group.name: _make_json_info_for_group(group) for group in groups}, + "mappings": mappings, } json_output = ctx.actions.write_json("link_group_map_info.json", info_json) return [DefaultInfo(default_output = json_output)] @@ -162,7 +162,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: link_group_info = build_link_group_info(linkable_graph, link_groups) return [ DefaultInfo(sub_targets = { - "info": _make_info_subtarget_providers(ctx, link_group_info), + "info": _make_info_subtarget_providers(ctx, link_group_info.groups.values(), link_group_info.mappings), }), link_group_info, ] From 78e360a2406da58f588f94d5ecda6b433170fd6d Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Wed, 13 Mar 2024 04:33:55 -0700 Subject: [PATCH 0443/1133] Extract link group info code Summary: Moves code to be re-used for `resource_group_map()` up the stack. 
Reviewed By: rmaz Differential Revision: D54683426 fbshipit-source-id: 579432a004e328550dc6a2cc661eacfa83b4aa43 --- prelude/cxx/groups.bzl | 44 +++++++++++++++++++++++ prelude/cxx/user/link_group_map.bzl | 56 ++--------------------------- 2 files changed, 46 insertions(+), 54 deletions(-) diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index 1237e783a..72185a694 100644 --- a/prelude/cxx/groups.bzl +++ b/prelude/cxx/groups.bzl @@ -380,3 +380,47 @@ def _hash_group_name(prefix: str, name: str) -> str: Hash algorithm is stable in starlark: https://fburl.com/code/ptegkov6 """ return "{}_{}".format(prefix, str(hash(name))) + +def _make_json_info_for_build_target_pattern(build_target_pattern: BuildTargetPattern) -> dict[str, typing.Any]: + # `BuildTargetPattern` contains lambdas which are not serializable, so + # have to generate the JSON representation + return { + "cell": build_target_pattern.cell, + "kind": build_target_pattern.kind, + "name": build_target_pattern.name, + "path": build_target_pattern.path, + } + +def _make_json_info_for_group_mapping_filters(filters: list[[BuildTargetFilter, LabelFilter]]) -> list[dict[str, typing.Any]]: + json_filters = [] + for filter in filters: + if filter._type == FilterType("label"): + json_filters += [{"regex": str(filter.regex)}] + elif filter._type == FilterType("pattern"): + json_filters += [_make_json_info_for_build_target_pattern(filter.pattern)] + else: + fail("Unknown filter type: " + filter) + return json_filters + +def _make_json_info_for_group_mapping(group_mapping: GroupMapping) -> dict[str, typing.Any]: + return { + "filters": _make_json_info_for_group_mapping_filters(group_mapping.filters), + "preferred_linkage": group_mapping.preferred_linkage, + "root": group_mapping.root, + "traversal": group_mapping.traversal, + } + +def _make_json_info_for_group(group: Group) -> dict[str, typing.Any]: + return { + "attrs": group.attrs, + "mappings": [_make_json_info_for_group_mapping(mapping) for mapping in 
group.mappings], + "name": group.name, + } + +def make_info_subtarget_providers(ctx: AnalysisContext, groups: list[Group], mappings: dict[Label, str]) -> list[Provider]: + info_json = { + "groups": {group.name: _make_json_info_for_group(group) for group in groups}, + "mappings": mappings, + } + json_output = ctx.actions.write_json("link_group_map_info.json", info_json) + return [DefaultInfo(default_output = json_output)] diff --git a/prelude/cxx/user/link_group_map.bzl b/prelude/cxx/user/link_group_map.bzl index 20038f7aa..9e7fcd17f 100644 --- a/prelude/cxx/user/link_group_map.bzl +++ b/prelude/cxx/user/link_group_map.bzl @@ -7,11 +7,7 @@ load( "@prelude//cxx:groups.bzl", - "BuildTargetFilter", # @unused Used as a type - "FilterType", - "Group", # @unused Used as a type - "GroupMapping", # @unused Used as a type - "LabelFilter", # @unused Used as a type + "make_info_subtarget_providers", "parse_groups_definitions", ) load( @@ -37,10 +33,6 @@ load( "SharedLibraryInfo", ) load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -load( - "@prelude//utils:build_target_pattern.bzl", - "BuildTargetPattern", # @unused Used as a type -) load("@prelude//decls/common.bzl", "Linkage", "Traversal") def _v1_attrs( @@ -103,50 +95,6 @@ def link_group_map_attr(): default = None, ) -def _make_json_info_for_build_target_pattern(build_target_pattern: BuildTargetPattern) -> dict[str, typing.Any]: - # `BuildTargetPattern` contains lambdas which are not serializable, so - # have to generate the JSON representation - return { - "cell": build_target_pattern.cell, - "kind": build_target_pattern.kind, - "name": build_target_pattern.name, - "path": build_target_pattern.path, - } - -def _make_json_info_for_group_mapping_filters(filters: list[[BuildTargetFilter, LabelFilter]]) -> list[dict[str, typing.Any]]: - json_filters = [] - for filter in filters: - if filter._type == FilterType("label"): - json_filters += [{"regex": str(filter.regex)}] - elif filter._type == FilterType("pattern"): - 
json_filters += [_make_json_info_for_build_target_pattern(filter.pattern)] - else: - fail("Unknown filter type: " + filter) - return json_filters - -def _make_json_info_for_group_mapping(group_mapping: GroupMapping) -> dict[str, typing.Any]: - return { - "filters": _make_json_info_for_group_mapping_filters(group_mapping.filters), - "preferred_linkage": group_mapping.preferred_linkage, - "root": group_mapping.root, - "traversal": group_mapping.traversal, - } - -def _make_json_info_for_group(group: Group) -> dict[str, typing.Any]: - return { - "attrs": group.attrs, - "mappings": [_make_json_info_for_group_mapping(mapping) for mapping in group.mappings], - "name": group.name, - } - -def _make_info_subtarget_providers(ctx: AnalysisContext, groups: list[Group], mappings: dict[Label, str]) -> list[Provider]: - info_json = { - "groups": {group.name: _make_json_info_for_group(group) for group in groups}, - "mappings": mappings, - } - json_output = ctx.actions.write_json("link_group_map_info.json", info_json) - return [DefaultInfo(default_output = json_output)] - def _impl(ctx: AnalysisContext) -> list[Provider]: # Extract graphs from the roots via the raw attrs, as `parse_groups_definitions` # parses them as labels. @@ -162,7 +110,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: link_group_info = build_link_group_info(linkable_graph, link_groups) return [ DefaultInfo(sub_targets = { - "info": _make_info_subtarget_providers(ctx, link_group_info.groups.values(), link_group_info.mappings), + "info": make_info_subtarget_providers(ctx, link_group_info.groups.values(), link_group_info.mappings), }), link_group_info, ] From 6cb694cdb15ceb213587e4b13b1d6c146444dac4 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Wed, 13 Mar 2024 04:33:55 -0700 Subject: [PATCH 0444/1133] Link Groups: add support for `[info]` subtarget to `resource_group_map()` Summary: Adds `[info]` subtarget to `resource_group_map()` for debugging purposes. 
Reviewed By: fabiomassimo Differential Revision: D54683686 fbshipit-source-id: 666e940b6ef865a0443f695f8b2ee2fe6ced8957 --- prelude/apple/user/resource_group_map.bzl | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/prelude/apple/user/resource_group_map.bzl b/prelude/apple/user/resource_group_map.bzl index 4b46e4f51..850f17fa3 100644 --- a/prelude/apple/user/resource_group_map.bzl +++ b/prelude/apple/user/resource_group_map.bzl @@ -15,6 +15,7 @@ load( "@prelude//cxx:groups.bzl", "compute_mappings", "create_group", + "make_info_subtarget_providers", "parse_groups_definitions", ) load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") @@ -59,7 +60,9 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: graph_map = resource_graph_node_map, ) return [ - DefaultInfo(), + DefaultInfo(sub_targets = { + "info": make_info_subtarget_providers(ctx, resource_groups, mappings), + }), ResourceGroupInfo( groups = resource_groups, groups_hash = hash(str(resource_groups)), From a69d1dfb23267bf96693cc3ac06d2ae1f62cec2a Mon Sep 17 00:00:00 2001 From: mo Date: Wed, 13 Mar 2024 05:39:28 -0700 Subject: [PATCH 0445/1133] Add support for EWDK to the prelude / msvc toolchain Summary: Microsoft releases an Enterprise WDK (EWDK) bundle for building drivers and applications without needing a Visual Studio installation. This is very useful for CI builds, where unpacking a zip or mounting an iso file is all that is needed to spin up a build VM, instead of needing to download, install, validate, install WDK, install SDK, repair WDK, etc. Also, there is no license requirement to use the EWDK. This updates the `vswhere.py` file to extract the necessary binaries, include dirs, and library dirs from the environment produced by calling the `SetupBuildEnv.bat` utility in the EWDK. Once this is done, the rest of a buck2 build operates normally. To use this, download and "install" the EWDK to the filesystem. 
``` > set EWDKDIR=C:\ewdk11 > buck2 build //path/to/target ``` X-link: https://github.com/facebook/buck2/pull/591 Reviewed By: JakobDegen Differential Revision: D54803497 Pulled By: KapJI fbshipit-source-id: 98c4a6813fdef7efb86e0ae787d601867947a6bc --- prelude/toolchains/msvc/vswhere.py | 80 ++++++++++++++++++++++++++++++ 1 file changed, 80 insertions(+) diff --git a/prelude/toolchains/msvc/vswhere.py b/prelude/toolchains/msvc/vswhere.py index 9c9f6a541..f007d7415 100644 --- a/prelude/toolchains/msvc/vswhere.py +++ b/prelude/toolchains/msvc/vswhere.py @@ -15,6 +15,7 @@ import shutil import subprocess import sys +import tempfile import winreg from pathlib import Path from typing import IO, List, NamedTuple @@ -218,6 +219,81 @@ def write_tool_json(out, tool): out.write(j) +# for use with the ewdk to grab the environment strings +def get_ewdk_env(ewdkdir: Path): + """ + Inspiration taken from the following: + http://pythonwise.blogspot.fr/2010/04/sourcing-shell-script.html (Miki Tebeka) + http://stackoverflow.com/questions/3503719/#comment28061110_3505826 (ahal) + """ + + # We need to write the script that will make the important variables available + with tempfile.NamedTemporaryFile( + prefix="VcVarsExtract", suffix=".bat", mode="w", delete=False + ) as tmp: + print("@echo off", file=tmp) + print("call %* > NUL", file=tmp) + print("set", file=tmp) + + env_script = ewdkdir / "BuildEnv" / "SetupBuildEnv.cmd" + cmd = [tmp.name, env_script, "amd64"] + output = subprocess.check_output(cmd).decode("utf-8") + + env = {} + for line in output.split("\r\n"): + if line and "=" in line: + first, second = line.split("=", 1) + env[first] = second + + return env + + +def find_with_ewdk(ewdkdir: Path): + env = get_ewdk_env(ewdkdir) + + installation_path = Path(env["VSINSTALLDIR"]) + vc_tools_version = env["VCToolsVersion"] + tools_path = installation_path / "VC" / "Tools" / "MSVC" / vc_tools_version + bin_path = tools_path / "bin" / "HostX64" / "x64" + lib_path = tools_path / "lib" 
/ "x64" + include_path = tools_path / "include" + + PATH = [bin_path] + LIB = [lib_path] + INCLUDE = [include_path] + + ucrt = Path(env["UCRTContentRoot"]) + ucrt_version = env.get("Version_Number") + + vc_exe_paths = [bin_path / exe for exe in VC_EXE_NAMES] + + if ucrt_version: + ucrt_bin_path = ucrt / "bin" / ucrt_version / "x64" + PATH.append(ucrt_bin_path) + LIB.append(ucrt / "lib" / ucrt_version / "ucrt" / "x64") + INCLUDE.append(ucrt / "include" / ucrt_version / "ucrt") + + ucrt_exe_paths = [ucrt_bin_path / exe for exe in UCRT_EXE_NAMES] + ucrt_exe_paths = [exe if exe.exists() else None for exe in ucrt_exe_paths] + else: + ucrt_exe_paths = [None for exe in UCRT_EXE_NAMES] + + sdk = Path(env["WindowsSdkDir"]) + sdk_version = ucrt_version + if sdk_version: + PATH.append(sdk / "bin" / "x64") + LIB.append(sdk / "lib" / sdk_version / "um" / "x64") + INCLUDE.append(sdk / "include" / sdk_version / "um") + INCLUDE.append(sdk / "include" / sdk_version / "cppwinrt") + INCLUDE.append(sdk / "include" / sdk_version / "winrt") + INCLUDE.append(sdk / "include" / sdk_version / "shared") + + return [ + Tool(exe=bin_path / exe, LIB=LIB, PATH=PATH, INCLUDE=INCLUDE) + for exe in vc_exe_paths + ucrt_exe_paths + ] + + def main(): parser = argparse.ArgumentParser() parser.add_argument("--cl", type=argparse.FileType("w"), required=True) @@ -234,6 +310,10 @@ def main(): find_in_path(exe) for exe in VC_EXE_NAMES ) rc_exe = find_in_path("rc.exe", optional=True) + elif "EWDKDIR" in os.environ: + cl_exe, cvtres_exe, lib_exe, ml64_exe, link_exe, rc_exe = find_with_ewdk( + Path(os.environ["EWDKDIR"]) + ) else: cl_exe, cvtres_exe, lib_exe, ml64_exe, link_exe, rc_exe = ( find_with_vswhere_exe() From f9ca0a699353ac613c642b835c121f0212885b04 Mon Sep 17 00:00:00 2001 From: Jeremy Braun Date: Wed, 13 Mar 2024 06:06:21 -0700 Subject: [PATCH 0446/1133] Allow cxx toolchains to provide post_ldflags/post_linker_flags Summary: Cxx toolchains can define `post_ldflags`, which are added to the end of the 
linker command line (as the `post_linker_flags`) at the end. This allows the toolchain to override other flags that came from other sources, etc. Reviewed By: scottcao Differential Revision: D54221096 fbshipit-source-id: c131506ab2d489c30e7b8c19799eb378d96923c6 --- prelude/cxx/cxx_link_utility.bzl | 31 +++++++++++++++------ prelude/cxx/cxx_toolchain.bzl | 1 + prelude/cxx/cxx_toolchain_types.bzl | 1 + prelude/cxx/dist_lto/dist_lto.bzl | 17 +++++++---- prelude/cxx/link.bzl | 8 ++++-- prelude/cxx/user/cxx_toolchain_override.bzl | 2 ++ prelude/decls/cxx_rules.bzl | 1 + prelude/rules_impl.bzl | 1 + prelude/toolchains/cxx.bzl | 2 ++ 9 files changed, 48 insertions(+), 16 deletions(-) diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index 99c6d3f6d..87b0de947 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -256,20 +256,35 @@ def executable_shared_lib_arguments( shared_libs_symlink_tree = shared_libs_symlink_tree, ) -def cxx_link_cmd_parts(toolchain: CxxToolchainInfo) -> ((RunInfo | cmd_args), cmd_args): +LinkCmdParts = record( + linker = [RunInfo, cmd_args], + linker_flags = cmd_args, + post_linker_flags = cmd_args, + # linker + linker_flags, for convenience + link_cmd = cmd_args, +) + +def cxx_link_cmd_parts(toolchain: CxxToolchainInfo) -> LinkCmdParts: # `toolchain_linker_flags` can either be a list of strings, `cmd_args` or `None`, # so we need to do a bit more work to satisfy the type checker toolchain_linker_flags = toolchain.linker_info.linker_flags + toolchain_post_linker_flags = toolchain.linker_info.post_linker_flags if toolchain_linker_flags == None: toolchain_linker_flags = cmd_args() elif not type(toolchain_linker_flags) == "cmd_args": toolchain_linker_flags = cmd_args(toolchain_linker_flags) - return toolchain.linker_info.linker, toolchain_linker_flags + if toolchain_post_linker_flags == None: + toolchain_post_linker_flags = cmd_args() + elif not type(toolchain_post_linker_flags) == 
"cmd_args": + toolchain_post_linker_flags = cmd_args(toolchain_post_linker_flags) -# The command line for linking with C++ -def cxx_link_cmd(toolchain: CxxToolchainInfo) -> cmd_args: - linker, toolchain_linker_flags = cxx_link_cmd_parts(toolchain) - command = cmd_args(linker) - command.add(toolchain_linker_flags) - return command + link_cmd = cmd_args(toolchain.linker_info.linker) + link_cmd.add(toolchain_linker_flags) + + return LinkCmdParts( + linker = toolchain.linker_info.linker, + linker_flags = toolchain_linker_flags, + post_linker_flags = toolchain_post_linker_flags, + link_cmd = link_cmd, + ) diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl index f1af710df..8eca13ea8 100644 --- a/prelude/cxx/cxx_toolchain.bzl +++ b/prelude/cxx/cxx_toolchain.bzl @@ -103,6 +103,7 @@ def cxx_toolchain_impl(ctx): link_ordering = ctx.attrs.link_ordering, linker = ctx.attrs.linker[RunInfo], linker_flags = cmd_args(ctx.attrs.linker_flags).add(c_lto_flags), + post_linker_flags = cmd_args(ctx.attrs.post_linker_flags), lto_mode = lto_mode, mk_shlib_intf = ctx.attrs.shared_library_interface_producer, object_file_extension = ctx.attrs.object_file_extension or "o", diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index f8fd32705..ffd05a17e 100644 --- a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -39,6 +39,7 @@ LinkerInfo = provider( "link_ordering": provider_field(typing.Any, default = None), # LinkOrdering "linker": provider_field(typing.Any, default = None), "linker_flags": provider_field(typing.Any, default = None), + "post_linker_flags": provider_field(typing.Any, default = None), "lto_mode": provider_field(typing.Any, default = None), "mk_shlib_intf": provider_field(typing.Any, default = None), # "o" on Unix, "obj" on Windows diff --git a/prelude/cxx/dist_lto/dist_lto.bzl b/prelude/cxx/dist_lto/dist_lto.bzl index c2a945031..1c49f435a 100644 --- a/prelude/cxx/dist_lto/dist_lto.bzl 
+++ b/prelude/cxx/dist_lto/dist_lto.bzl @@ -18,7 +18,7 @@ load( ) load( "@prelude//cxx:cxx_link_utility.bzl", - "cxx_link_cmd", + "cxx_link_cmd_parts", "linker_map_args", ) load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") @@ -361,7 +361,9 @@ def cxx_dist_link( index_file_out = ctx.actions.declare_output(make_id(index_cat) + "/index") index_out_dir = cmd_args(index_file_out.as_output()).parent() - index_cmd = cxx_link_cmd(cxx_toolchain) + index_cmd_parts = cxx_link_cmd_parts(cxx_toolchain) + + index_cmd = index_cmd_parts.link_cmd index_cmd.add(cmd_args(index_argfile, format = "@{}")) output_as_string = cmd_args(output) @@ -371,6 +373,7 @@ def cxx_dist_link( index_cmd.add("-Wl,--thinlto-emit-imports-files") index_cmd.add("-Wl,--thinlto-full-index") index_cmd.add(cmd_args(index_out_dir, format = "-Wl,--thinlto-prefix-replace=;{}/")) + index_cmd.add(index_cmd_parts.post_linker_flags) # Terminate the index file with a newline. index_meta.add("") @@ -403,13 +406,15 @@ def cxx_dist_link( dynamic_plan(link_plan = link_plan_out, index_argsfile_out = index_argsfile_out, final_link_index = final_link_index) def prepare_opt_flags(link_infos: list[LinkInfo]) -> cmd_args: - opt_args = cmd_args() - opt_args.add(cxx_link_cmd(cxx_toolchain)) + opt_cmd_parts = cxx_link_cmd_parts(cxx_toolchain) + opt_args = opt_cmd_parts.link_cmd # buildifier: disable=uninitialized for link in link_infos: for raw_flag in link.pre_flags + link.post_flags: opt_args.add(raw_flag) + + opt_args.add(opt_cmd_parts.post_linker_flags) return opt_args opt_common_flags = prepare_opt_flags(link_infos) @@ -576,7 +581,8 @@ def cxx_dist_link( current_index += 1 link_args.add(link.post_flags) - link_cmd = cxx_link_cmd(cxx_toolchain) + link_cmd_parts = cxx_link_cmd_parts(cxx_toolchain) + link_cmd = link_cmd_parts.link_cmd final_link_argfile, final_link_inputs = ctx.actions.write( outputs[linker_argsfile_out].as_output(), link_args, @@ -598,6 +604,7 @@ def cxx_dist_link( link_cmd.hidden(link_args) 
link_cmd.hidden(opt_objects) link_cmd.hidden(archives) + link_cmd.add(link_cmd_parts.post_linker_flags) ctx.actions.run(link_cmd, category = make_cat("thin_lto_link"), identifier = identifier, local_only = True) diff --git a/prelude/cxx/link.bzl b/prelude/cxx/link.bzl index 3e3f0757d..624662886 100644 --- a/prelude/cxx/link.bzl +++ b/prelude/cxx/link.bzl @@ -162,8 +162,8 @@ def cxx_link_into( else: links_with_linker_map = opts.links - linker, toolchain_linker_flags = cxx_link_cmd_parts(cxx_toolchain_info) - all_link_args = cmd_args(toolchain_linker_flags) + link_cmd_parts = cxx_link_cmd_parts(cxx_toolchain_info) + all_link_args = cmd_args(link_cmd_parts.linker_flags) all_link_args.add(get_output_flags(linker_info.type, output)) sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain_info, output) @@ -222,6 +222,8 @@ def cxx_link_into( pdb = link_args_output.pdb_artifact, ) + all_link_args.add(link_cmd_parts.post_linker_flags) + if linker_info.type == "windows": shell_quoted_args = cmd_args(all_link_args) else: @@ -233,7 +235,7 @@ def cxx_link_into( allow_args = True, ) - command = cmd_args(linker) + command = cmd_args(link_cmd_parts.linker) command.add(cmd_args(argfile, format = "@{}")) command.hidden(link_args_output.hidden) command.hidden(shell_quoted_args) diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl index 82c62f8f9..b7f6111a1 100644 --- a/prelude/cxx/user/cxx_toolchain_override.bzl +++ b/prelude/cxx/user/cxx_toolchain_override.bzl @@ -94,6 +94,7 @@ def _cxx_toolchain_override(ctx): link_ordering = base_linker_info.link_ordering, linker = _pick_bin(ctx.attrs.linker, base_linker_info.linker), linker_flags = _pick(ctx.attrs.linker_flags, base_linker_info.linker_flags), + post_linker_flags = _pick(ctx.attrs.post_linker_flags, base_linker_info.post_linker_flags), lto_mode = value_or(map_val(LtoMode, ctx.attrs.lto_mode), base_linker_info.lto_mode), object_file_extension = 
base_linker_info.object_file_extension, shlib_interfaces = value_or(shlib_interfaces, base_linker_info.shlib_interfaces), @@ -210,6 +211,7 @@ def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): "pic_behavior": attrs.enum(PicBehavior.values(), default = "supported"), "platform_deps_aliases": attrs.option(attrs.list(attrs.string()), default = None), "platform_name": attrs.option(attrs.string(), default = None), + "post_linker_flags": attrs.option(attrs.list(attrs.arg()), default = None), "produce_interface_from_stub_shared_library": attrs.option(attrs.bool(), default = None), "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), "sanitizer_runtime_enabled": attrs.bool(default = False), diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index 5508da5ac..8db1779ee 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -966,6 +966,7 @@ cxx_toolchain = prelude_rule( "objcopy_recalculates_layout": attrs.bool(default = False), "object_file_extension": attrs.string(default = ""), "pic_type_for_shared_linking": attrs.enum(PicType, default = "pic"), + "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "private_headers_symlinks_enabled": attrs.bool(default = False), "public_headers_symlinks_enabled": attrs.bool(default = False), "ranlib": attrs.option(attrs.source(), default = None), diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 174e09dcf..9ab78432f 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -525,6 +525,7 @@ inlined_extra_attributes = { "header_dirs": attrs.option(attrs.list(attrs.source(allow_directory = True)), default = None), "linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "platform_header_dirs": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.source(allow_directory = True)))), default = None), + "post_linker_flags": 
attrs.list(attrs.arg(anon_target_compatible = True), default = []), "preferred_linkage": attrs.enum(Linkage, default = "any"), "public_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "public_system_include_directories": attrs.set(attrs.string(), sorted = True, default = []), diff --git a/prelude/toolchains/cxx.bzl b/prelude/toolchains/cxx.bzl index e8b3152a9..d1b1dc159 100644 --- a/prelude/toolchains/cxx.bzl +++ b/prelude/toolchains/cxx.bzl @@ -93,6 +93,7 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): linker_info = LinkerInfo( linker = RunInfo(args = linker), linker_flags = additional_linker_flags + ctx.attrs.link_flags, + post_linker_flags = ctx.attrs.post_link_flags, archiver = RunInfo(args = archiver_args), archiver_type = archiver_type, archiver_supports_argfiles = archiver_supports_argfiles, @@ -210,6 +211,7 @@ system_cxx_toolchain = rule( "linker_wrapper": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//cxx/tools:linker_wrapper")), "make_comp_db": attrs.default_only(attrs.exec_dep(providers = [RunInfo], default = "prelude//cxx/tools:make_comp_db")), "msvc_tools": attrs.default_only(attrs.exec_dep(providers = [VisualStudio], default = "prelude//toolchains/msvc:msvc_tools")), + "post_link_flags": attrs.list(attrs.string(), default = []), "rc_compiler": attrs.string(default = "rc.exe"), "rc_flags": attrs.list(attrs.string(), default = []), }, From 057dfc2aa00e30e60c348a96a768e9e8096ffab4 Mon Sep 17 00:00:00 2001 From: Tien-Che Tsai Date: Wed, 13 Mar 2024 08:54:30 -0700 Subject: [PATCH 0447/1133] Add debuginfo to apple_universal_executable Summary: We need this for some debugger setup that doesn't use dSYM bundles Reviewed By: blackm00n Differential Revision: D54643599 fbshipit-source-id: 34e519b8b4d240092383e6515ea0d9091f35de05 --- prelude/apple/apple_universal_executable.bzl | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git 
a/prelude/apple/apple_universal_executable.bzl b/prelude/apple/apple_universal_executable.bzl index 87cac4d55..a8ca1e605 100644 --- a/prelude/apple/apple_universal_executable.bzl +++ b/prelude/apple/apple_universal_executable.bzl @@ -14,7 +14,7 @@ load(":apple_bundle_utility.bzl", "get_default_binary_dep", "get_flattened_binar load(":apple_code_signing_types.bzl", "AppleEntitlementsInfo") load(":apple_dsym.bzl", "DSYM_SUBTARGET", "get_apple_dsym_ext") load(":apple_universal_binaries.bzl", "create_universal_binary") -load(":debug.bzl", "AppleDebuggableInfo") +load(":debug.bzl", "AppleDebuggableInfo", "DEBUGINFO_SUBTARGET") load(":resource_groups.bzl", "ResourceGraphInfo") _FORWARDED_PROVIDER_TYPES = [ @@ -51,6 +51,11 @@ def apple_universal_executable_impl(ctx: AnalysisContext) -> list[Provider]: split_arch_dsym = ctx.attrs.split_arch_dsym, ) + debug_info = project_artifacts( + actions = ctx.actions, + tsets = [binary_outputs.debuggable_info.debug_info_tset], + ) + sub_targets = {} if ctx.attrs.split_arch_dsym: dsyms = binary_outputs.debuggable_info.dsyms @@ -58,15 +63,19 @@ def apple_universal_executable_impl(ctx: AnalysisContext) -> list[Provider]: dsyms = [get_apple_dsym_ext( ctx = ctx, executable = binary_outputs.binary, - debug_info = project_artifacts( - actions = ctx.actions, - tsets = [binary_outputs.debuggable_info.debug_info_tset], - ), + debug_info = debug_info, action_identifier = ctx.attrs.name + "_dsym", output_path = dsym_name, )] sub_targets[DSYM_SUBTARGET] = [DefaultInfo(default_outputs = dsyms)] + debug_info_artifacts_manifest = ctx.actions.write( + "debuginfo.artifacts", + debug_info, + with_inputs = True, + ) + sub_targets[DEBUGINFO_SUBTARGET] = [DefaultInfo(default_output = debug_info_artifacts_manifest)] + default_binary = get_default_binary_dep(ctx.attrs.executable) forwarded_providers = [] for forward_provider_type in _FORWARDED_PROVIDER_TYPES: From 61ea2fe222982f0042aef87a51a9e71b2d0e1b63 Mon Sep 17 00:00:00 2001 From: Tien-Che Tsai Date: 
Wed, 13 Mar 2024 08:54:30 -0700 Subject: [PATCH 0448/1133] Make `label` of `apple_universal_executable` optional Summary: Labels are usually optional with an empty list as default Reviewed By: blackm00n Differential Revision: D54751468 fbshipit-source-id: 29285bb416747ca091a6345fec00737bb1986a1a --- prelude/apple/apple_rules_impl.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 77c776220..bf22e48b2 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -134,7 +134,7 @@ def _apple_universal_executable_extra_attrs(): attribs = { "executable": attrs.split_transition_dep(cfg = cpu_split_transition), "executable_name": attrs.option(attrs.string(), default = None), - "labels": attrs.list(attrs.string()), + "labels": attrs.list(attrs.string(), default = []), "split_arch_dsym": attrs.bool(default = False), "universal": attrs.option(attrs.bool(), default = None), "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, From 477929ecb1c1b69a7f8634de9977e200f0634762 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Wed, 13 Mar 2024 12:57:48 -0700 Subject: [PATCH 0449/1133] Improve typing Summary: Now that `provider_field()` can take proper types, lets update these... half of them were wrong anyways! 
Reviewed By: chatura-atapattu Differential Revision: D54809728 fbshipit-source-id: 4f24edb45331e4f7de9d98ed95a239e74db24f47 --- prelude/cxx/link_groups.bzl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 8bd273b64..d86bcdf62 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -110,16 +110,16 @@ LINK_GROUP_MAPPINGS_FILENAME_SUFFIX = ".link_group_map.json" LinkGroupInfo = provider( # @unsorted-dict-items fields = { - "groups": provider_field(typing.Any, default = None), # dict[str, Group] - "groups_hash": provider_field(typing.Any, default = None), # str - "mappings": provider_field(typing.Any, default = None), # dict[ConfiguredProvidersLabel, str] + "groups": provider_field(dict[str, Group]), + "groups_hash": provider_field(int), + "mappings": provider_field(dict[Label, str]), # Additional graphs needed to cover labels referenced by the groups above. # This is useful in cases where the consumer of this provider won't already # have deps covering these. # NOTE(agallagher): We do this to maintain existing behavior w/ the # standalone `link_group_map()` rule, but it's not clear if it's actually # desirable behavior. - "graph": provider_field(typing.Any, default = None), # LinkableGraph + "graph": provider_field(LinkableGraph), }, ) From ccf610a765521d80b6610a40ce05c35b0c345c5e Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Wed, 13 Mar 2024 13:56:21 -0700 Subject: [PATCH 0450/1133] Add manifest_entries support for min_sdk_version Summary: `aapt` in `buck1` supports `manifest_entries`. `aapt2` does not, but some places that want to migrate to `buck2` are still using `aapt` and this feels like the easiest way to unblock them (along with the next diff). 
Meta: https://fb.workplace.com/groups/2222954841208728/posts/1357353841588539/ Reviewed By: navidqar Differential Revision: D54803390 fbshipit-source-id: bb7fccd315eca6dc18bc0832a4e53880d15394fa --- prelude/android/aapt2_link.bzl | 9 ++++++--- prelude/android/android_binary_resources_rules.bzl | 3 +-- prelude/android/robolectric_test.bzl | 1 - 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/prelude/android/aapt2_link.bzl b/prelude/android/aapt2_link.bzl index 4126c11c6..7919a168d 100644 --- a/prelude/android/aapt2_link.bzl +++ b/prelude/android/aapt2_link.bzl @@ -15,6 +15,7 @@ def get_aapt2_link( android_toolchain: AndroidToolchainInfo, resource_infos: list[AndroidResourceInfo], android_manifest: Artifact, + manifest_entries: dict, includes_vector_drawables: bool, no_auto_version: bool, no_version_transitions: bool, @@ -24,7 +25,6 @@ def get_aapt2_link( package_id_offset: int, resource_stable_ids: [Artifact, None], preferred_density: [str, None], - min_sdk: [str, None], filter_locales: bool, locales: list[str], compiled_resource_apks: list[Artifact], @@ -81,8 +81,11 @@ def get_aapt2_link( aapt2_command.add(["--stable-ids", resource_stable_ids]) if preferred_density != None: aapt2_command.add(["--preferred-density", preferred_density]) - if min_sdk != None: - aapt2_command.add(["--min-sdk-version", min_sdk]) + + manifest_entries_min_sdk = manifest_entries.get("min_sdk_version", None) + if manifest_entries_min_sdk != None: + aapt2_command.add(["--min-sdk-version", str(manifest_entries_min_sdk)]) + if filter_locales and len(locales) > 0: aapt2_command.add("-c") diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl index c15944d48..297d7defa 100644 --- a/prelude/android/android_binary_resources_rules.bzl +++ b/prelude/android/android_binary_resources_rules.bzl @@ -41,7 +41,6 @@ def get_android_binary_resources_info( resource_infos_to_exclude: [set_type, None] = None, 
r_dot_java_packages_to_exclude: [list[str], None] = [], generate_strings_and_ids_separately: [bool, None] = True, - aapt2_min_sdk: [str, None] = None, aapt2_preferred_density: [str, None] = None) -> AndroidBinaryResourcesInfo: android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo] @@ -65,6 +64,7 @@ def get_android_binary_resources_info( ctx.attrs._android_toolchain[AndroidToolchainInfo], resource_infos, android_manifest, + manifest_entries = getattr(ctx.attrs, "manifest_entries", {}), includes_vector_drawables = getattr(ctx.attrs, "includes_vector_drawables", False), no_auto_version = getattr(ctx.attrs, "no_auto_version_resources", False), no_version_transitions = getattr(ctx.attrs, "no_version_transitions_resources", False), @@ -78,7 +78,6 @@ def get_android_binary_resources_info( extra_filtered_resources = getattr(ctx.attrs, "extra_filtered_resources", []), locales = getattr(ctx.attrs, "locales", []) or getattr(ctx.attrs, "locales_for_binary_resources", []), filter_locales = getattr(ctx.attrs, "aapt2_locale_filtering", False) or bool(getattr(ctx.attrs, "locales_for_binary_resources", [])), - min_sdk = aapt2_min_sdk, preferred_density = aapt2_preferred_density, ) diff --git a/prelude/android/robolectric_test.bzl b/prelude/android/robolectric_test.bzl index 4ef558f5d..475a2c847 100644 --- a/prelude/android/robolectric_test.bzl +++ b/prelude/android/robolectric_test.bzl @@ -45,7 +45,6 @@ def robolectric_test_impl(ctx: AnalysisContext) -> list[Provider]: use_proto_format = False, referenced_resources_lists = [], generate_strings_and_ids_separately = False, - aapt2_min_sdk = ctx.attrs.manifest_entries.get("min_sdk_version", None), aapt2_preferred_density = ctx.attrs.preferred_density_for_binary_resources, ) From c837ee27f6f38456d47d84acbfefe7591df3a91b Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Wed, 13 Mar 2024 13:56:21 -0700 Subject: [PATCH 0451/1133] Support other possible manifest_entries Summary: This adds all the other `manifest_entries` that 
can be used with `aapt` in `buck1`. Reviewed By: navidqar Differential Revision: D54803285 fbshipit-source-id: fda58f51c73d9deb5708e821645628ea2948e15e --- prelude/android/aapt2_link.bzl | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/prelude/android/aapt2_link.bzl b/prelude/android/aapt2_link.bzl index 7919a168d..a21c592f0 100644 --- a/prelude/android/aapt2_link.bzl +++ b/prelude/android/aapt2_link.bzl @@ -85,6 +85,18 @@ def get_aapt2_link( manifest_entries_min_sdk = manifest_entries.get("min_sdk_version", None) if manifest_entries_min_sdk != None: aapt2_command.add(["--min-sdk-version", str(manifest_entries_min_sdk)]) + manifest_entries_target_sdk = manifest_entries.get("target_sdk_version", None) + if manifest_entries_target_sdk != None: + aapt2_command.add(["--target-sdk-version", str(manifest_entries_target_sdk)]) + manifest_entries_version_code = manifest_entries.get("version_code", None) + if manifest_entries_version_code != None: + aapt2_command.add(["--version-code", manifest_entries_version_code]) + manifest_entries_version_name = manifest_entries.get("version_name", None) + if manifest_entries_version_name != None: + aapt2_command.add(["--version-name", manifest_entries_version_name]) + manifest_entries_debug_mode = str(manifest_entries.get("debug_mode", "False")).lower() == "true" + if manifest_entries_debug_mode: + aapt2_command.add(["--debug-mode"]) if filter_locales and len(locales) > 0: aapt2_command.add("-c") From 442da17d22b1a54ebf13d2627653ca3117d49925 Mon Sep 17 00:00:00 2001 From: Navid Qaragozlou Date: Wed, 13 Mar 2024 16:47:32 -0700 Subject: [PATCH 0452/1133] Provide jvm args to java test Summary: This will enable users to profile their test execution. See test plan. 
Reviewed By: hick209 Differential Revision: D54859769 fbshipit-source-id: aa2b33509a04c78037eb9a260aff8fc52c2f2a48 --- prelude/java/java_test.bzl | 3 +++ prelude/java/java_toolchain.bzl | 1 + 2 files changed, 4 insertions(+) diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index 75a0d2d33..ba6718916 100644 --- a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -50,6 +50,9 @@ def build_junit_test( java = ctx.attrs.java[RunInfo] if ctx.attrs.java else ctx.attrs._java_toolchain[JavaToolchainInfo].java_for_tests cmd = [java] + extra_cmds + ctx.attrs.vm_args + ["-XX:-MaxFDLimit"] + if len(java_test_toolchain.jvm_args) > 0: + cmd.extend(java_test_toolchain.jvm_args) + classpath = [] if java_test_toolchain.use_java_custom_class_loader: diff --git a/prelude/java/java_toolchain.bzl b/prelude/java/java_toolchain.bzl index 51bbe0637..82ab7517f 100644 --- a/prelude/java/java_toolchain.bzl +++ b/prelude/java/java_toolchain.bzl @@ -69,6 +69,7 @@ JavaTestToolchainInfo = provider( "java_custom_class_loader_vm_args": provider_field(typing.Any, default = None), "junit5_test_runner_main_class_args": provider_field(typing.Any, default = None), "junit_test_runner_main_class_args": provider_field(typing.Any, default = None), + "jvm_args": provider_field(typing.Any, default = None), "list_class_names": provider_field(typing.Any, default = None), "merge_class_to_source_maps": provider_field(typing.Any, default = None), "test_runner_library_jar": provider_field(typing.Any, default = None), From 1ef4bbe3cf2fe4574d5a9dff899238e5fa6d9d1b Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Thu, 14 Mar 2024 12:00:12 -0700 Subject: [PATCH 0453/1133] Change default output for exopackage builds, again Reviewed By: IanChilds Differential Revision: D54883408 fbshipit-source-id: 2d4094cd48feaf520b751baad35ff392d9fa2ebe --- prelude/android/android_apk.bzl | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git 
a/prelude/android/android_apk.bzl b/prelude/android/android_apk.bzl index 6dd0662f0..94ab2a5c4 100644 --- a/prelude/android/android_apk.bzl +++ b/prelude/android/android_apk.bzl @@ -35,11 +35,19 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: validation_deps_outputs = get_validation_deps_outputs(ctx), ) - exopackage_info = ExopackageInfo( - secondary_dex_info = dex_files_info.secondary_dex_exopackage_info, - native_library_info = native_library_info.exopackage_info, - resources_info = resources_info.exopackage_info, - ) + if dex_files_info.secondary_dex_exopackage_info or native_library_info.exopackage_info or resources_info.exopackage_info: + exopackage_info = ExopackageInfo( + secondary_dex_info = dex_files_info.secondary_dex_exopackage_info, + native_library_info = native_library_info.exopackage_info, + resources_info = resources_info.exopackage_info, + ) + exopackage_outputs = _get_exopackage_outputs(exopackage_info) + default_output = ctx.actions.write("exopackage_apk_warning", "exopackage apks should not be used externally, try buck install or building with exopackage disabled\n") + sub_targets["exo_apk"] = [DefaultInfo(default_output = output_apk)] # Used by tests + else: + exopackage_info = None + exopackage_outputs = [] + default_output = output_apk class_to_srcs, class_to_srcs_subtargets = get_class_to_source_map_info( ctx, @@ -63,7 +71,7 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: r_dot_java_packages = set([info.specified_r_dot_java_package for info in resources_info.unfiltered_resource_infos if info.specified_r_dot_java_package]), shared_libraries = set(native_library_info.shared_libraries), ), - DefaultInfo(default_output = output_apk, other_outputs = _get_exopackage_outputs(exopackage_info) + android_binary_info.materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets), + DefaultInfo(default_output = default_output, other_outputs = exopackage_outputs + 
android_binary_info.materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets), get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info, definitely_has_native_libs = definitely_has_native_libs), TemplatePlaceholderInfo( keyed_variables = { From 453e41c30c9ddee01295565ef179b59ddad88ad7 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Thu, 14 Mar 2024 21:42:33 -0700 Subject: [PATCH 0454/1133] link_group_map_attr() -> LINK_GROUP_MAP_ATTR Summary: 1. `link_group_map_attr()` doesn't need to be a function -> change it to a constant. 2. Convert `_v1_attrs()` into a function which takes a parameter so you can customize the "root node" type and update accordingly. Reviewed By: rmaz Differential Revision: D54909697 fbshipit-source-id: 9e7a7c3c294595668e17b53517c189927503672c --- prelude/apple/apple_rules_impl.bzl | 6 ++-- prelude/cxx/user/link_group_map.bzl | 56 ++++++++++++----------------- prelude/decls/rust_rules.bzl | 4 +-- prelude/rules_impl.bzl | 10 +++--- 4 files changed, 33 insertions(+), 43 deletions(-) diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index bf22e48b2..953ac783b 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -15,7 +15,7 @@ load("@prelude//apple/swift:swift_toolchain.bzl", "swift_toolchain_impl") load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftObjectFormat") load("@prelude//apple/user:cpu_split_transition.bzl", "cpu_split_transition") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo", "HeaderMode") -load("@prelude//cxx/user:link_group_map.bzl", "link_group_map_attr") +load("@prelude//cxx/user:link_group_map.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:execution_preference.bzl", "link_execution_preference_attr") load("@prelude//linking:link_info.bzl", "LinkOrdering") load("@prelude//decls/common.bzl", "Linkage") @@ -84,7 +84,7 @@ def 
_apple_binary_extra_attrs(): "enable_distributed_thinlto": attrs.bool(default = False), "extra_xcode_sources": attrs.list(attrs.source(allow_directory = True), default = []), "link_execution_preference": link_execution_preference_attr(), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), "prefer_stripped_objects": attrs.bool(default = False), @@ -107,7 +107,7 @@ def _apple_library_extra_attrs(): "extra_xcode_sources": attrs.list(attrs.source(allow_directory = True), default = []), "header_mode": attrs.option(attrs.enum(HeaderMode.values()), default = None), "link_execution_preference": link_execution_preference_attr(), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), "preferred_linkage": attrs.enum(Linkage, default = "any"), diff --git a/prelude/cxx/user/link_group_map.bzl b/prelude/cxx/user/link_group_map.bzl index 9e7fcd17f..18440fe5b 100644 --- a/prelude/cxx/user/link_group_map.bzl +++ b/prelude/cxx/user/link_group_map.bzl @@ -35,23 +35,7 @@ load( load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") load("@prelude//decls/common.bzl", "Linkage", "Traversal") -def _v1_attrs( - optional_root: bool = False, - # Whether we should parse `root` fields as a `dependency`, instead of a `label`. 
- root_is_dep: bool = True): - if root_is_dep: - attrs_root = attrs.dep(providers = [ - LinkGroupLibInfo, - LinkableGraph, - MergedLinkInfo, - SharedLibraryInfo, - ]) - else: - attrs_root = attrs.label() - - if optional_root: - attrs_root = attrs.option(attrs_root) - +def _v1_attrs(attrs_root): return attrs.list( attrs.tuple( # name @@ -78,22 +62,19 @@ def _v1_attrs( ), ) -def link_group_map_attr(): - v2_attrs = attrs.dep(providers = [LinkGroupInfo]) - return attrs.option( - attrs.one_of( - v2_attrs, - _v1_attrs( - optional_root = True, - # Inlined `link_group_map` will parse roots as `label`s, to avoid - # bloating deps w/ unrelated mappings (e.g. it's common to use - # a default mapping for all rules, which would otherwise add - # unrelated deps to them). - root_is_dep = False, - ), +LINK_GROUP_MAP_ATTR = attrs.option( + attrs.one_of( + attrs.dep(providers = [LinkGroupInfo]), + _v1_attrs( + # Inlined `link_group_map` will parse roots as `label`s, to avoid + # bloating deps w/ unrelated mappings (e.g. it's common to use + # a default mapping for all rules, which would otherwise add + # unrelated deps to them). + attrs_root = attrs.option(attrs.label()), ), - default = None, - ) + ), + default = None, +) def _impl(ctx: AnalysisContext) -> list[Provider]: # Extract graphs from the roots via the raw attrs, as `parse_groups_definitions` @@ -119,6 +100,15 @@ registration_spec = RuleRegistrationSpec( name = "link_group_map", impl = _impl, attrs = { - "map": _v1_attrs(), + "map": _v1_attrs( + attrs_root = attrs.dep( + providers = [ + LinkGroupLibInfo, + LinkableGraph, + MergedLinkInfo, + SharedLibraryInfo, + ], + ), + ), }, ) diff --git a/prelude/decls/rust_rules.bzl b/prelude/decls/rust_rules.bzl index 703436c81..599dcc020 100644 --- a/prelude/decls/rust_rules.bzl +++ b/prelude/decls/rust_rules.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//cxx/user:link_group_map.bzl", "link_group_map_attr") +load("@prelude//cxx/user:link_group_map.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//rust:link_info.bzl", "RustProcMacroPlugin") load("@prelude//rust:rust_binary.bzl", "rust_binary_impl", "rust_test_impl") load("@prelude//rust:rust_library.bzl", "prebuilt_rust_library_impl", "rust_library_impl") @@ -102,7 +102,7 @@ _RUST_EXECUTABLE_ATTRIBUTES = { "enable_distributed_thinlto": attrs.bool(default = False), # Required by the rules but not supported, since Rust is auto-link groups only "link_group": attrs.default_only(attrs.option(attrs.string(), default = None)), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_group_min_binary_node_count": attrs.option(attrs.int(), default = None), "rpath": attrs.bool(default = False, doc = """ Set the "rpath" in the executable when using a shared link style. diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 9ab78432f..be09d27a9 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -17,7 +17,7 @@ load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainIn load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo", "HeaderMode") load("@prelude//cxx:prebuilt_cxx_library_group.bzl", "prebuilt_cxx_library_group_impl") load("@prelude//cxx:windows_resource.bzl", "windows_resource_impl") -load("@prelude//cxx/user:link_group_map.bzl", "link_group_map_attr") +load("@prelude//cxx/user:link_group_map.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//erlang:erlang.bzl", _erlang_implemented_rules = "implemented_rules") load("@prelude//git:git_fetch.bzl", "git_fetch_impl") load("@prelude//go:cgo_library.bzl", "cgo_library_impl") @@ -278,7 +278,7 @@ def _python_executable_attrs(): "executable_name": attrs.option(attrs.string(), default = None), "inplace_build_args": attrs.list(attrs.arg(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": 
link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_group_min_binary_node_count": attrs.option(attrs.int(), default = None), "link_style": attrs.enum(LinkableDepType, default = "static"), "main_function": attrs.option( @@ -349,7 +349,7 @@ def _cxx_binary_and_test_attrs(): "distributed_thinlto_partial_split_dwarf": attrs.bool(default = False), "enable_distributed_thinlto": attrs.bool(default = False), "link_execution_preference": link_execution_preference_attr(), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_group_min_binary_node_count": attrs.option(attrs.int(), default = None), "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "link_whole": attrs.default_only(attrs.bool(default = False)), @@ -412,7 +412,7 @@ inlined_extra_attributes = { "header_mode": attrs.option(attrs.enum(HeaderMode.values()), default = None), "link_deps_query_whole": attrs.bool(default = False), "link_execution_preference": link_execution_preference_attr(), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), "prefer_stripped_objects": attrs.bool(default = False), @@ -490,7 +490,7 @@ inlined_extra_attributes = { }, "haskell_binary": { "auto_link_groups": attrs.bool(default = False), - "link_group_map": link_group_map_attr(), + "link_group_map": LINK_GROUP_MAP_ATTR, "template_deps": attrs.list(attrs.exec_dep(providers = [HaskellLibraryProvider]), default = []), "_cxx_toolchain": toolchains_common.cxx(), "_haskell_toolchain": toolchains_common.haskell(), From b2f34676d81c8fbd24e75a5e25e0f6a0cf917eb9 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Thu, 14 Mar 2024 21:42:33 -0700 Subject: [PATCH 0455/1133] Dedupe Traversal enum. 
Summary: ^^^ Reviewed By: rmaz Differential Revision: D54816765 fbshipit-source-id: 71e966dcf4505d132be7b8bc0efcdb892c924043 --- prelude/apple/user/apple_watchos_bundle.bzl | 2 +- prelude/apple/user/resource_group_map.bzl | 2 +- prelude/cxx/groups.bzl | 11 +---------- prelude/cxx/user/link_group_map.bzl | 2 +- prelude/decls/common.bzl | 9 ++++++++- prelude/decls/cxx_rules.bzl | 6 +++--- prelude/decls/go_rules.bzl | 2 +- prelude/decls/halide_rules.bzl | 2 +- prelude/decls/ios_rules.bzl | 8 ++++---- prelude/python/python_binary.bzl | 2 +- 10 files changed, 22 insertions(+), 24 deletions(-) diff --git a/prelude/apple/user/apple_watchos_bundle.bzl b/prelude/apple/user/apple_watchos_bundle.bzl index 261f68b63..1462e6616 100644 --- a/prelude/apple/user/apple_watchos_bundle.bzl +++ b/prelude/apple/user/apple_watchos_bundle.bzl @@ -34,7 +34,7 @@ def _apple_bundle_base_attrs(): "platform_binary": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.dep())), default = None), "product_name": attrs.option(attrs.string(), default = None), "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "resource_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), "skip_copying_swift_stdlib": attrs.option(attrs.bool(), default = None), "try_skip_code_signing": attrs.option(attrs.bool(), default = None), "xcode_product_type": attrs.option(attrs.string(), default = None), diff --git a/prelude/apple/user/resource_group_map.bzl b/prelude/apple/user/resource_group_map.bzl index 850f17fa3..802416c66 100644 --- a/prelude/apple/user/resource_group_map.bzl +++ b/prelude/apple/user/resource_group_map.bzl @@ -79,6 +79,6 @@ registration_spec = RuleRegistrationSpec( name = 
"resource_group_map", impl = _impl, attrs = { - "map": attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), + "map": attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), }, ) diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index 72185a694..15e0cc65e 100644 --- a/prelude/cxx/groups.bzl +++ b/prelude/cxx/groups.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//decls:common.bzl", "Traversal") load( "@prelude//linking:link_info.bzl", "Linkage", @@ -28,16 +29,6 @@ load( "value_or", ) -# Types of group traversal -Traversal = enum( - # Includes the target and all of it's transitive dependencies in the group. - "tree", - # Includes only the target in the group. - "node", - # Uses pattern and separates all targets by full folder path. - "subfolders", -) - # Optional type of filtering FilterType = enum( # Filters for targets with labels matching the regex pattern defined after `label:`. diff --git a/prelude/cxx/user/link_group_map.bzl b/prelude/cxx/user/link_group_map.bzl index 18440fe5b..46e634520 100644 --- a/prelude/cxx/user/link_group_map.bzl +++ b/prelude/cxx/user/link_group_map.bzl @@ -47,7 +47,7 @@ def _v1_attrs(attrs_root): # root node attrs_root, # traversal - attrs.enum(Traversal), + attrs.enum(Traversal.values()), # filters, either `None`, a single filter, or a list of filters # (which must all match). 
attrs.option(attrs.one_of(attrs.list(attrs.string()), attrs.string())), diff --git a/prelude/decls/common.bzl b/prelude/decls/common.bzl index e3e839bb0..5f85ba5a7 100644 --- a/prelude/decls/common.bzl +++ b/prelude/decls/common.bzl @@ -49,7 +49,14 @@ SourceAbiVerificationMode = ["off", "log", "fail"] TestType = ["junit", "junit5", "testng"] -Traversal = ["tree", "node", "subfolders"] +Traversal = enum( + # Includes the target and all of it's transitive dependencies in the group. + "tree", + # Includes only the target in the group. + "node", + # Uses pattern and separates all targets by full folder path. + "subfolders", +) UnusedDependenciesAction = ["unknown", "fail", "warn", "ignore", "unrecognized"] diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index 8db1779ee..6a17ea055 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -117,7 +117,7 @@ cxx_binary = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg(anon_target_compatible = True))), default = []), @@ -570,7 +570,7 @@ cxx_library = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": 
attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), "module_name": attrs.option(attrs.string(), default = None), "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), @@ -878,7 +878,7 @@ cxx_test = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), "link_style": attrs.option(attrs.enum(LinkableDepType), default = None), "linker_extra_outputs": attrs.list(attrs.string(), default = []), "platform_compiler_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index 3f56acaf0..f69b6baff 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -114,7 +114,7 @@ cgo_library = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), 
attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "platform_headers": attrs.list(attrs.tuple(attrs.regex(), attrs.named_set(attrs.source(), sorted = True)), default = []), "platform_srcs": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.one_of(attrs.source(), attrs.tuple(attrs.source(), attrs.list(attrs.arg()))), sorted = True)), default = []), diff --git a/prelude/decls/halide_rules.bzl b/prelude/decls/halide_rules.bzl index 52db23f10..6e9f29f62 100644 --- a/prelude/decls/halide_rules.bzl +++ b/prelude/decls/halide_rules.bzl @@ -98,7 +98,7 @@ halide_library = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), "link_style": attrs.option(attrs.enum(LinkableDepType), default = None), "linker_extra_outputs": attrs.list(attrs.string(), default = []), "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index 44fee26c4..aba1d5237 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -193,7 +193,7 @@ apple_binary = prelude_rule( "libraries": attrs.list(attrs.string(), 
default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), "link_whole": attrs.option(attrs.bool(), default = None), "modular": attrs.bool(default = False), "module_name": attrs.option(attrs.string(), default = None), @@ -370,7 +370,7 @@ apple_bundle = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "platform_binary": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.dep())), default = None), "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "resource_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), "skip_copying_swift_stdlib": attrs.option(attrs.bool(), default = None), "try_skip_code_signing": attrs.option(attrs.bool(), default = None), "xcode_product_type": attrs.option(attrs.string(), default = None), @@ -486,7 +486,7 @@ apple_library = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), 
attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), "modular": attrs.bool(default = False), "module_name": attrs.option(attrs.string(), default = None), "module_requires_cxx": attrs.bool(default = False), @@ -743,7 +743,7 @@ apple_test = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal), attrs.option(attrs.string()))))), default = None), + "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), "link_whole": attrs.option(attrs.bool(), default = None), "linker_extra_outputs": attrs.list(attrs.string(), default = []), "modular": attrs.bool(default = False), diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index f8d86329c..57d01f265 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -29,7 +29,6 @@ load( "Group", "GroupAttrs", "GroupMapping", - "Traversal", ) load("@prelude//cxx:headers.bzl", "cxx_get_regular_cxx_headers_layout") load( @@ -53,6 +52,7 @@ load( "CPreprocessorArgs", "cxx_inherited_preprocessor_infos", ) +load("@prelude//decls:common.bzl", "Traversal") load( "@prelude//linking:link_info.bzl", "Linkage", From 2ecbe4848992926f50cc6105f96650f3b6dea8af Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Thu, 14 Mar 2024 21:42:33 -0700 Subject: [PATCH 0456/1133] Dedupe 'Linkage'. Summary: Similar to "Traversal", we had two versions of these, one in `prelude/decls/common.bzl` and one *somewhere else*. This unifies them. 
Reviewed By: rmaz Differential Revision: D54919955 fbshipit-source-id: 04843249e009f96cac56e15d64d304ee0204282c --- .../android/android_binary_native_library_rules.bzl | 2 +- prelude/apple/apple_rules_impl.bzl | 8 ++++---- prelude/apple/apple_rules_impl_utility.bzl | 2 +- prelude/apple/prebuilt_apple_framework.bzl | 2 +- prelude/cxx/cxx.bzl | 2 +- prelude/cxx/cxx_library.bzl | 2 +- prelude/cxx/cxx_library_utility.bzl | 2 +- prelude/cxx/groups.bzl | 6 +----- prelude/cxx/link_groups.bzl | 2 +- prelude/cxx/omnibus.bzl | 2 +- prelude/cxx/prebuilt_cxx_library_group.bzl | 2 +- prelude/cxx/user/link_group_map.bzl | 2 +- prelude/decls/common.bzl | 10 +++++++++- prelude/decls/cxx_rules.bzl | 4 ++-- prelude/decls/haskell_rules.bzl | 2 +- prelude/decls/ios_rules.bzl | 10 +++++----- prelude/decls/rust_rules.bzl | 4 ++-- prelude/go/cgo_library.bzl | 2 +- prelude/haskell/haskell.bzl | 2 +- prelude/haskell/haskell_ghci.bzl | 2 +- prelude/linking/link_info.bzl | 11 +---------- prelude/linking/linkable_graph.bzl | 2 +- prelude/python/cxx_python_extension.bzl | 2 +- prelude/python/python_binary.bzl | 8 ++------ prelude/rules_impl.bzl | 6 +++--- prelude/rust/link_info.bzl | 5 ++++- prelude/rust/rust_library.bzl | 2 +- 27 files changed, 50 insertions(+), 56 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index aaa384b05..6eb1a928c 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -28,6 +28,7 @@ load( "extract_global_syms", "extract_undefined_syms", ) +load("@prelude//decls:common.bzl", "Linkage") load("@prelude//java:java_library.bzl", "compile_to_jar") # @unused load("@prelude//java:java_providers.bzl", "JavaClasspathEntry", "JavaLibraryInfo", "derive_compiling_deps") # @unused load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") @@ -37,7 +38,6 @@ load( "LinkArgs", "LinkInfo", 
"LinkOrdering", - "Linkage", "SharedLibLinkable", "get_lib_output_style", "set_link_info_link_whole", diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 953ac783b..958bb1815 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -88,7 +88,7 @@ def _apple_binary_extra_attrs(): "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), "prefer_stripped_objects": attrs.bool(default = False), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), "sanitizer_runtime_enabled": attrs.option(attrs.bool(), default = None), "stripped": attrs.option(attrs.bool(), default = None), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), @@ -110,7 +110,7 @@ def _apple_library_extra_attrs(): "link_group_map": LINK_GROUP_MAP_ATTR, "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), "serialize_debugging_options": attrs.bool(default = True), # Mach-O file type for binary when the target is built as a shared library. 
"shared_library_macho_file_type": attrs.enum(AppleSharedLibraryMachOFileType.values(), default = "dylib"), @@ -227,14 +227,14 @@ extra_attributes = { }, "prebuilt_apple_framework": { "framework": attrs.option(attrs.source(allow_directory = True), default = None), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, }, "scene_kit_assets": { "path": attrs.source(allow_directory = True), }, "swift_library": { - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), }, "swift_toolchain": { "architecture": attrs.option(attrs.string(), default = None), # TODO(T115173356): Make field non-optional diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 57fe14c1b..5a96205fa 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -96,7 +96,7 @@ def apple_test_extra_attrs(): "link_style": attrs.enum(LinkableDepType, default = "static"), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), # The test source code and lib dependencies should be built into a shared library. - "preferred_linkage": attrs.enum(Linkage, default = "shared"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "shared"), # Expected by `apple_bundle`, for `apple_test` this field is always None. "resource_group": attrs.option(attrs.string(), default = None), # Expected by `apple_bundle`, for `apple_test` this field is always None. 
diff --git a/prelude/apple/prebuilt_apple_framework.bzl b/prelude/apple/prebuilt_apple_framework.bzl index e717eb40b..c48e96b11 100644 --- a/prelude/apple/prebuilt_apple_framework.bzl +++ b/prelude/apple/prebuilt_apple_framework.bzl @@ -18,6 +18,7 @@ load( "cxx_inherited_preprocessor_infos", "cxx_merge_cpreprocessors", ) +load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//linking:link_groups.bzl", "merge_link_group_lib_info", @@ -27,7 +28,6 @@ load( "LibOutputStyle", "LinkInfo", "LinkInfos", - "Linkage", "create_merged_link_info", ) load( diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index 0be9df6c8..fbfa80637 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -17,6 +17,7 @@ load( ) load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags") load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") +load("@prelude//decls:common.bzl", "Linkage") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_groups.bzl", @@ -33,7 +34,6 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", "SharedLibLinkable", "create_merged_link_info", diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 15663206c..0b964ae2f 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -35,6 +35,7 @@ load( "@prelude//apple/swift:swift_runtime.bzl", "create_swift_runtime_linkable", ) +load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//ide_integrations:xcode.bzl", "XCODE_DATA_SUB_TARGET", @@ -64,7 +65,6 @@ load( "LinkInfos", "LinkOrdering", "LinkStrategy", - "Linkage", "LinkedObject", # @unused Used as a type "ObjectsLinkable", "SharedLibLinkable", diff --git a/prelude/cxx/cxx_library_utility.bzl b/prelude/cxx/cxx_library_utility.bzl index c3dd30820..e630407f8 100644 --- a/prelude/cxx/cxx_library_utility.bzl +++ b/prelude/cxx/cxx_library_utility.bzl @@ -11,11 +11,11 @@ load( "single_artifact", ) 
load("@prelude//:paths.bzl", "paths") +load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//linking:link_info.bzl", "LinkStrategy", "LinkStyle", - "Linkage", "LinkerFlags", "MergedLinkInfo", ) diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index 15e0cc65e..a945887c2 100644 --- a/prelude/cxx/groups.bzl +++ b/prelude/cxx/groups.bzl @@ -5,11 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//decls:common.bzl", "Traversal") -load( - "@prelude//linking:link_info.bzl", - "Linkage", -) +load("@prelude//decls:common.bzl", "Linkage", "Traversal") load( "@prelude//utils:build_target_pattern.bzl", "BuildTargetPattern", diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index d86bcdf62..df8016215 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -6,6 +6,7 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") +load("@prelude//decls:common.bzl", "Linkage") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_groups.bzl", @@ -18,7 +19,6 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", # @unused Used as a type "SharedLibLinkable", "get_lib_output_style", diff --git a/prelude/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl index 1df12523f..0cd38cebe 100644 --- a/prelude/cxx/omnibus.bzl +++ b/prelude/cxx/omnibus.bzl @@ -12,6 +12,7 @@ load( "CxxLinkResult", # @unused Used as a type "cxx_link_shared_library", ) +load("@prelude//decls:common.bzl", "Linkage") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_info.bzl", @@ -20,7 +21,6 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", "SharedLibLinkable", "get_lib_output_style", diff --git a/prelude/cxx/prebuilt_cxx_library_group.bzl b/prelude/cxx/prebuilt_cxx_library_group.bzl index fa51631ef..5b77b5fd1 100644 --- 
a/prelude/cxx/prebuilt_cxx_library_group.bzl +++ b/prelude/cxx/prebuilt_cxx_library_group.bzl @@ -13,6 +13,7 @@ load( "cxx_inherited_preprocessor_infos", "cxx_merge_cpreprocessors", ) +load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//linking:link_groups.bzl", "merge_link_group_lib_info", @@ -25,7 +26,6 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", "SharedLibLinkable", "create_merged_link_info", diff --git a/prelude/cxx/user/link_group_map.bzl b/prelude/cxx/user/link_group_map.bzl index 46e634520..fd1b1b35a 100644 --- a/prelude/cxx/user/link_group_map.bzl +++ b/prelude/cxx/user/link_group_map.bzl @@ -52,7 +52,7 @@ def _v1_attrs(attrs_root): # (which must all match). attrs.option(attrs.one_of(attrs.list(attrs.string()), attrs.string())), # linkage - attrs.option(attrs.enum(Linkage)), + attrs.option(attrs.enum(Linkage.values())), ), ), # attributes diff --git a/prelude/decls/common.bzl b/prelude/decls/common.bzl index 5f85ba5a7..1975474ef 100644 --- a/prelude/decls/common.bzl +++ b/prelude/decls/common.bzl @@ -39,7 +39,15 @@ IncludeType = ["local", "system", "raw"] LinkableDepType = ["static", "static_pic", "shared"] -Linkage = ["any", "static", "shared"] +# Ways a library can request to be linked (e.g. usually specific via a rule +# param like `preferred_linkage`). The actual link style used for a library is +# usually determined by a combination of this and the link style being exported +# via a provider. 
+Linkage = enum( + "any", + "static", + "shared", +) LogLevel = ["off", "severe", "warning", "info", "config", "fine", "finer", "finest", "all"] diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index 6a17ea055..7f78f4248 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -542,7 +542,7 @@ cxx_library = prelude_rule( } | cxx_common.supported_platforms_regex_arg() | cxx_common.force_static(force_static_type = attrs.option(attrs.bool(), default = None)) | - native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage), default = None)) | + native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage.values()), default = None)) | cxx_common.reexport_all_header_dependencies_arg() | cxx_common.exported_deps_arg() | cxx_common.exported_platform_deps_arg() | @@ -1112,7 +1112,7 @@ prebuilt_cxx_library = prelude_rule( cxx_common.exported_platform_preprocessor_flags_arg() | cxx_common.exported_linker_flags_arg() | cxx_common.force_static(force_static_type = attrs.bool(default = False)) | - native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage), default = None)) | + native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage.values()), default = None)) | cxx_common.exported_deps_arg() | cxx_common.exported_platform_deps_arg() | cxx_common.supports_merged_linking() | diff --git a/prelude/decls/haskell_rules.bzl b/prelude/decls/haskell_rules.bzl index eee463374..e9f877d63 100644 --- a/prelude/decls/haskell_rules.bzl +++ b/prelude/decls/haskell_rules.bzl @@ -168,7 +168,7 @@ haskell_library = prelude_rule( haskell_common.scripts_arg() | buck.platform_deps_arg() | native_common.link_whole(link_whole_type = attrs.bool(default = False)) | - native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage)) | + native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage.values())) | { 
"contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index aba1d5237..d311452bf 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -204,7 +204,7 @@ apple_binary = prelude_rule( "post_linker_flags": attrs.list(attrs.arg(), default = []), "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), "precompiled_header": attrs.option(attrs.source(), default = None), - "preferred_linkage": attrs.option(attrs.enum(Linkage), default = None), + "preferred_linkage": attrs.option(attrs.enum(Linkage.values()), default = None), "prefix_header": attrs.option(attrs.source(), default = None), "public_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "public_system_include_directories": attrs.set(attrs.string(), sorted = True, default = []), @@ -441,7 +441,7 @@ apple_library = prelude_rule( cxx_common.exported_linker_flags_arg() | cxx_common.exported_platform_linker_flags_arg() | apple_common.target_sdk_version() | - native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage), default = None)) | + native_common.preferred_linkage(preferred_linkage_type = attrs.option(attrs.enum(Linkage.values()), default = None)) | native_common.link_style() | native_common.link_whole(link_whole_type = attrs.option(attrs.bool(), default = None)) | cxx_common.reexport_all_header_dependencies_arg() | @@ -756,7 +756,7 @@ apple_test = prelude_rule( "post_linker_flags": attrs.list(attrs.arg(), default = []), "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), "precompiled_header": attrs.option(attrs.source(), default = None), - "preferred_linkage": attrs.option(attrs.enum(Linkage), default = None), + "preferred_linkage": attrs.option(attrs.enum(Linkage.values()), 
default = None), "prefix_header": attrs.option(attrs.source(), default = None), "public_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "public_system_include_directories": attrs.set(attrs.string(), sorted = True, default = []), @@ -905,7 +905,7 @@ prebuilt_apple_framework = prelude_rule( attrs = ( # @unsorted-dict-items { - "preferred_linkage": attrs.enum(Linkage, doc = """ + "preferred_linkage": attrs.enum(Linkage.values(), doc = """ How to link to a binary: use `dynamic` for a dynamic framework, and `static` for old universal static frameworks manually lipo-ed together. `dynamic` will @@ -967,7 +967,7 @@ swift_library = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "module_name": attrs.option(attrs.string(), default = None), - "preferred_linkage": attrs.option(attrs.enum(Linkage), default = None), + "preferred_linkage": attrs.option(attrs.enum(Linkage.values()), default = None), "sdk_modules": attrs.list(attrs.string(), default = []), "serialize_debugging_options": attrs.bool(default = False), "soname": attrs.option(attrs.string(), default = None), diff --git a/prelude/decls/rust_rules.bzl b/prelude/decls/rust_rules.bzl index 599dcc020..142652a67 100644 --- a/prelude/decls/rust_rules.bzl +++ b/prelude/decls/rust_rules.bzl @@ -51,7 +51,7 @@ prebuilt_rust_library = prelude_rule( 'libfoo-abc123def456.rlib' if it has symbol versioning metadata. 
"""), } | - native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage, default = "any")) | + native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage.values(), default = "any")) | rust_common.crate(crate_type = attrs.string(default = "")) | rust_common.deps_arg(is_binary = False) | { @@ -239,7 +239,7 @@ rust_library = prelude_rule( rust_common.env_arg() | rust_common.crate(crate_type = attrs.option(attrs.string(), default = None)) | rust_common.crate_root() | - native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage, default = "any")) | + native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage.values(), default = "any")) | _rust_common_attributes(is_binary = False) | { "crate_dynamic": attrs.option(attrs.dep(), default = None), diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 0b2e3fc51..33d4017de 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -30,10 +30,10 @@ load( "cxx_merge_cpreprocessors", "cxx_private_preprocessor_info", ) +load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//linking:link_info.bzl", "LinkStyle", - "Linkage", "MergedLinkInfo", "create_merged_link_info_for_propagation", ) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 7e84f453b..0f4812d96 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -54,6 +54,7 @@ load( "cxx_inherited_preprocessor_infos", "cxx_merge_cpreprocessors", ) +load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//haskell:compile.bzl", "CompileResultInfo", @@ -101,7 +102,6 @@ load( "LinkInfo", "LinkInfos", "LinkStyle", - "Linkage", "LinkedObject", "MergedLinkInfo", "SharedLibLinkable", diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index 0842bdc2f..5295bec28 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -16,6 +16,7 @@ load( 
"@prelude//cxx:link_types.bzl", "link_options", ) +load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//haskell:compile.bzl", "HaskellLibraryInfo", @@ -34,7 +35,6 @@ load( "LinkArgs", "LinkInfo", "LinkStyle", - "Linkage", "get_lib_output_style", "set_linkable_link_whole", "to_link_strategy", diff --git a/prelude/linking/link_info.bzl b/prelude/linking/link_info.bzl index 856e4f811..6fd0d726d 100644 --- a/prelude/linking/link_info.bzl +++ b/prelude/linking/link_info.bzl @@ -17,6 +17,7 @@ load( "get_no_as_needed_shared_libs_flags", "get_objects_as_library_args", ) +load("@prelude//decls:common.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") load( "@prelude//utils:utils.bzl", @@ -74,16 +75,6 @@ def default_output_style_for_link_strategy(link_strategy: LinkStrategy) -> LibOu return LibOutputStyle("pic_archive") return LibOutputStyle("shared_lib") -# Ways a library can request to be linked (e.g. usually specific via a rule -# param like `preferred_linkage`. The actual link style used for a library is -# usually determined by a combination of this and the link style being exported -# via a provider. -Linkage = enum( - "static", - "shared", - "any", -) - # An archive. 
ArchiveLinkable = record( # Artifact in the .a format from ar diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index 1f73160ed..a0849a5de 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -7,6 +7,7 @@ load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo") +load("@prelude//decls:common.bzl", "Linkage") load("@prelude//python:python.bzl", "PythonLibraryInfo") load("@prelude//utils:expect.bzl", "expect") load( @@ -19,7 +20,6 @@ load( "LinkInfo", # @unused Used as a type "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", "LinkerFlags", "MergedLinkInfo", diff --git a/prelude/python/cxx_python_extension.bzl b/prelude/python/cxx_python_extension.bzl index 618d04374..eacfd9ac8 100644 --- a/prelude/python/cxx_python_extension.bzl +++ b/prelude/python/cxx_python_extension.bzl @@ -32,6 +32,7 @@ load( "create_linkable_root", "get_roots", ) +load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//linking:link_groups.bzl", "merge_link_group_lib_info", @@ -41,7 +42,6 @@ load( "LibOutputStyle", "LinkInfo", "LinkInfos", - "Linkage", "create_merged_link_info", "wrap_link_infos", ) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 57d01f265..3f5a68172 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -52,12 +52,8 @@ load( "CPreprocessorArgs", "cxx_inherited_preprocessor_infos", ) -load("@prelude//decls:common.bzl", "Traversal") -load( - "@prelude//linking:link_info.bzl", - "Linkage", - "LinkedObject", -) +load("@prelude//decls:common.bzl", "Linkage", "Traversal") +load("@prelude//linking:link_info.bzl", "LinkedObject") load( "@prelude//linking:linkable_graph.bzl", "LinkableGraph", diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index be09d27a9..4b59bdb4a 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -416,7 +416,7 @@ 
inlined_extra_attributes = { "link_ordering": attrs.option(attrs.enum(LinkOrdering.values()), default = None), "precompiled_header": attrs.option(attrs.dep(providers = [CPrecompiledHeaderInfo]), default = None), "prefer_stripped_objects": attrs.bool(default = False), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), "resources": attrs.named_set(attrs.one_of(attrs.dep(), attrs.source(allow_directory = True)), sorted = True, default = []), "supports_header_symlink_subtarget": attrs.bool(default = False), "supports_python_dlopen": attrs.option(attrs.bool(), default = None), @@ -509,7 +509,7 @@ inlined_extra_attributes = { "_haskell_toolchain": toolchains_common.haskell(), }, "haskell_library": { - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), "template_deps": attrs.list(attrs.exec_dep(providers = [HaskellLibraryProvider]), default = []), "_cxx_toolchain": toolchains_common.cxx(), "_haskell_toolchain": toolchains_common.haskell(), @@ -526,7 +526,7 @@ inlined_extra_attributes = { "linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "platform_header_dirs": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.source(allow_directory = True)))), default = None), "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), - "preferred_linkage": attrs.enum(Linkage, default = "any"), + "preferred_linkage": attrs.enum(Linkage.values(), default = "any"), "public_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "public_system_include_directories": attrs.set(attrs.string(), sorted = True, default = []), "raw_headers": attrs.set(attrs.source(), sorted = True, default = []), diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index e392a7405..e682db462 100644 --- a/prelude/rust/link_info.bzl +++ 
b/prelude/rust/link_info.bzl @@ -33,6 +33,10 @@ load( "get_link_group_info", "get_link_group_preferred_linkage", ) +load( + "@prelude//decls:common.bzl", + "Linkage", # @unused Used as a type +) load( "@prelude//linking:link_groups.bzl", "LinkGroupLib", # @unused Used as a type @@ -42,7 +46,6 @@ load( "@prelude//linking:link_info.bzl", "LinkInfo", "LinkStrategy", - "Linkage", # @unused Used as a type "MergedLinkInfo", "get_link_args_for_strategy", "unpack_external_debug_info", diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 9d6849624..5fcbe1bd9 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -30,6 +30,7 @@ load( "@prelude//cxx:omnibus.bzl", "create_linkable_root", ) +load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//linking:link_groups.bzl", "merge_link_group_lib_info", @@ -42,7 +43,6 @@ load( "LinkInfo", "LinkInfos", "LinkStrategy", - "Linkage", "LinkedObject", "MergedLinkInfo", "SharedLibLinkable", From ae4967c399e2623baf47de54be283674f2ce2b22 Mon Sep 17 00:00:00 2001 From: Tianyu Li Date: Fri, 15 Mar 2024 07:38:08 -0700 Subject: [PATCH 0457/1133] Check nullability of jvm_args before calling len Summary: WhatsApp Android is failing with this error P1196332522. This is because jvm args is default to None for wa-android. So this diff checks nullability before invoking len(...) 
Reviewed By: jiawei-lyu Differential Revision: D54916079 fbshipit-source-id: ace3b9231454363ba51e7ca86b41e32e42c45e9f --- prelude/java/java_test.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index ba6718916..86ed15509 100644 --- a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -50,7 +50,7 @@ def build_junit_test( java = ctx.attrs.java[RunInfo] if ctx.attrs.java else ctx.attrs._java_toolchain[JavaToolchainInfo].java_for_tests cmd = [java] + extra_cmds + ctx.attrs.vm_args + ["-XX:-MaxFDLimit"] - if len(java_test_toolchain.jvm_args) > 0: + if java_test_toolchain.jvm_args != None and len(java_test_toolchain.jvm_args) > 0: cmd.extend(java_test_toolchain.jvm_args) classpath = [] From 9ab84d2fb715ed2f7bc8558b3ee5136ca817f68a Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Fri, 15 Mar 2024 14:15:21 -0700 Subject: [PATCH 0458/1133] Move Traversal into link_groups_types.bzl Summary: Context: https://www.internalfb.com/diff/D54816765?dst_version_fbid=396432509662376&transaction_fbid=2035286753523230 Reviewed By: chatura-atapattu Differential Revision: D54953923 fbshipit-source-id: 45d0682e8b2a92ada5e232637dce8891216e338d --- prelude/apple/user/apple_watchos_bundle.bzl | 2 +- prelude/apple/user/resource_group_map.bzl | 2 +- prelude/cxx/groups.bzl | 3 ++- prelude/cxx/link_groups_types.bzl | 15 +++++++++++++++ prelude/cxx/user/link_group_map.bzl | 3 ++- prelude/decls/common.bzl | 9 --------- prelude/decls/cxx_rules.bzl | 3 ++- prelude/decls/go_rules.bzl | 3 ++- prelude/decls/halide_rules.bzl | 3 ++- prelude/decls/ios_rules.bzl | 3 ++- prelude/python/python_binary.bzl | 3 ++- 11 files changed, 31 insertions(+), 18 deletions(-) create mode 100644 prelude/cxx/link_groups_types.bzl diff --git a/prelude/apple/user/apple_watchos_bundle.bzl b/prelude/apple/user/apple_watchos_bundle.bzl index 1462e6616..7ab77fe43 100644 --- a/prelude/apple/user/apple_watchos_bundle.bzl +++ 
b/prelude/apple/user/apple_watchos_bundle.bzl @@ -7,8 +7,8 @@ load("@prelude//apple:apple_bundle.bzl", "apple_bundle_impl") load("@prelude//apple:apple_rules_impl_utility.bzl", "apple_bundle_extra_attrs") +load("@prelude//cxx:link_groups_types.bzl", "Traversal") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -load("@prelude//decls/common.bzl", "Traversal") load("@prelude//decls/ios_rules.bzl", "AppleBundleExtension") load(":watch_transition.bzl", "watch_transition") diff --git a/prelude/apple/user/resource_group_map.bzl b/prelude/apple/user/resource_group_map.bzl index 802416c66..50f37e70d 100644 --- a/prelude/apple/user/resource_group_map.bzl +++ b/prelude/apple/user/resource_group_map.bzl @@ -18,8 +18,8 @@ load( "make_info_subtarget_providers", "parse_groups_definitions", ) +load("@prelude//cxx:link_groups_types.bzl", "Traversal") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -load("@prelude//decls/common.bzl", "Traversal") def resource_group_map_attr(): return attrs.option(attrs.dep(providers = [ResourceGroupInfo]), default = None) diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index a945887c2..ff9f72cce 100644 --- a/prelude/cxx/groups.bzl +++ b/prelude/cxx/groups.bzl @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//decls:common.bzl", "Linkage", "Traversal") +load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//utils:build_target_pattern.bzl", "BuildTargetPattern", diff --git a/prelude/cxx/link_groups_types.bzl b/prelude/cxx/link_groups_types.bzl new file mode 100644 index 000000000..f9f01e7bb --- /dev/null +++ b/prelude/cxx/link_groups_types.bzl @@ -0,0 +1,15 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. 
+# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +Traversal = enum( + # Includes the target and all of it's transitive dependencies in the group. + "tree", + # Includes only the target in the group. + "node", + # Uses pattern and separates all targets by full folder path. + "subfolders", +) diff --git a/prelude/cxx/user/link_group_map.bzl b/prelude/cxx/user/link_group_map.bzl index fd1b1b35a..c9611ada7 100644 --- a/prelude/cxx/user/link_group_map.bzl +++ b/prelude/cxx/user/link_group_map.bzl @@ -15,6 +15,7 @@ load( "LinkGroupInfo", "build_link_group_info", ) +load("@prelude//cxx:link_groups_types.bzl", "Traversal") load( "@prelude//linking:link_groups.bzl", "LinkGroupLibInfo", @@ -33,7 +34,7 @@ load( "SharedLibraryInfo", ) load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -load("@prelude//decls/common.bzl", "Linkage", "Traversal") +load("@prelude//decls/common.bzl", "Linkage") def _v1_attrs(attrs_root): return attrs.list( diff --git a/prelude/decls/common.bzl b/prelude/decls/common.bzl index 1975474ef..10f98c677 100644 --- a/prelude/decls/common.bzl +++ b/prelude/decls/common.bzl @@ -57,15 +57,6 @@ SourceAbiVerificationMode = ["off", "log", "fail"] TestType = ["junit", "junit5", "testng"] -Traversal = enum( - # Includes the target and all of it's transitive dependencies in the group. - "tree", - # Includes only the target in the group. - "node", - # Uses pattern and separates all targets by full folder path. 
- "subfolders", -) - UnusedDependenciesAction = ["unknown", "fail", "warn", "ignore", "unrecognized"] def _name_arg(name_type): diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index 7f78f4248..78df1eeaf 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -10,8 +10,9 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) +load("@prelude//cxx:link_groups_types.bzl", "Traversal") load(":apple_common.bzl", "apple_common") -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "Linkage", "Traversal", "buck", "prelude_rule") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "Linkage", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":genrule_common.bzl", "genrule_common") load(":native_common.bzl", "native_common") diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index f69b6baff..9ffa6f504 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -10,7 +10,8 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "Traversal", "buck", "prelude_rule") +load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":go_common.bzl", "go_common") load(":native_common.bzl", "native_common") diff --git a/prelude/decls/halide_rules.bzl b/prelude/decls/halide_rules.bzl index 6e9f29f62..4376e53e7 100644 --- a/prelude/decls/halide_rules.bzl +++ b/prelude/decls/halide_rules.bzl @@ -10,7 +10,8 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) 
-load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "Traversal", "prelude_rule") +load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "prelude_rule") load(":cxx_common.bzl", "cxx_common") halide_library = prelude_rule( diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index d311452bf..c04486a58 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -10,8 +10,9 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) +load("@prelude//cxx:link_groups_types.bzl", "Traversal") load(":apple_common.bzl", "apple_common") -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "IncludeType", "Linkage", "Traversal", "buck", "prelude_rule") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "IncludeType", "Linkage", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":native_common.bzl", "native_common") diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 3f5a68172..7dbd4350d 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -38,6 +38,7 @@ load( "build_link_group_info", "get_link_group_info", ) +load("@prelude//cxx:link_groups_types.bzl", "Traversal") load("@prelude//cxx:linker.bzl", "get_rpath_origin") load( "@prelude//cxx:omnibus.bzl", @@ -52,7 +53,7 @@ load( "CPreprocessorArgs", "cxx_inherited_preprocessor_infos", ) -load("@prelude//decls:common.bzl", "Linkage", "Traversal") +load("@prelude//decls:common.bzl", "Linkage") load("@prelude//linking:link_info.bzl", "LinkedObject") load( "@prelude//linking:linkable_graph.bzl", From 3a09797e8fe2d31da83b48edc8b134bdda922ac9 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Fri, 15 Mar 2024 14:15:21 -0700 Subject: 
[PATCH 0459/1133] Move Linkage out of decls/common and into prelude/linking/types Summary: Context: https://www.internalfb.com/diff/D54816765?dst_version_fbid=396432509662376&transaction_fbid=2035286753523230 Reviewed By: chatura-atapattu Differential Revision: D54953922 fbshipit-source-id: bd745228c8f60094331f6859d538bfdfa7635b0c --- .../android_binary_native_library_rules.bzl | 2 +- prelude/apple/apple_rules_impl.bzl | 2 +- prelude/apple/apple_rules_impl_utility.bzl | 3 ++- prelude/apple/prebuilt_apple_framework.bzl | 2 +- prelude/cxx/cxx.bzl | 2 +- prelude/cxx/cxx_library.bzl | 2 +- prelude/cxx/cxx_library_utility.bzl | 2 +- prelude/cxx/groups.bzl | 2 +- prelude/cxx/link_groups.bzl | 2 +- prelude/cxx/omnibus.bzl | 2 +- prelude/cxx/prebuilt_cxx_library_group.bzl | 2 +- prelude/cxx/user/link_group_map.bzl | 2 +- prelude/decls/common.bzl | 10 ---------- prelude/decls/cxx_rules.bzl | 3 ++- prelude/decls/haskell_rules.bzl | 3 ++- prelude/decls/ios_rules.bzl | 3 ++- prelude/decls/rust_rules.bzl | 3 ++- prelude/go/cgo_library.bzl | 2 +- prelude/haskell/haskell.bzl | 2 +- prelude/haskell/haskell_ghci.bzl | 2 +- prelude/linking/link_info.bzl | 2 +- prelude/linking/linkable_graph.bzl | 2 +- prelude/linking/types.bzl | 16 ++++++++++++++++ prelude/python/cxx_python_extension.bzl | 2 +- prelude/python/python_binary.bzl | 2 +- prelude/rules_impl.bzl | 3 ++- prelude/rust/link_info.bzl | 8 ++++---- prelude/rust/rust_library.bzl | 2 +- 28 files changed, 51 insertions(+), 39 deletions(-) create mode 100644 prelude/linking/types.bzl diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 6eb1a928c..e7a07bf5d 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -28,7 +28,6 @@ load( "extract_global_syms", "extract_undefined_syms", ) -load("@prelude//decls:common.bzl", "Linkage") load("@prelude//java:java_library.bzl", 
"compile_to_jar") # @unused load("@prelude//java:java_providers.bzl", "JavaClasspathEntry", "JavaLibraryInfo", "derive_compiling_deps") # @unused load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") @@ -60,6 +59,7 @@ load( "traverse_shared_library_info", ) load("@prelude//linking:strip.bzl", "strip_object") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", "post_order_traversal", "pre_order_traversal") load("@prelude//utils:set.bzl", "set", "set_type") # @unused Used as a type diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 958bb1815..5dc530ef2 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -18,7 +18,7 @@ load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo", "HeaderMode") load("@prelude//cxx/user:link_group_map.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:execution_preference.bzl", "link_execution_preference_attr") load("@prelude//linking:link_info.bzl", "LinkOrdering") -load("@prelude//decls/common.bzl", "Linkage") +load("@prelude//linking:types.bzl", "Linkage") load(":apple_asset_catalog.bzl", "apple_asset_catalog_impl") load(":apple_binary.bzl", "apple_binary_impl") load(":apple_bundle.bzl", "apple_bundle_impl") diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 5a96205fa..79013c250 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -17,7 +17,8 @@ load("@prelude//apple/user:resource_group_map.bzl", "resource_group_map_attr") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo") load("@prelude//linking:execution_preference.bzl", "link_execution_preference_attr") load("@prelude//linking:link_info.bzl", "LinkOrdering") -load("@prelude//decls/common.bzl", "LinkableDepType", "Linkage") 
+load("@prelude//linking:types.bzl", "Linkage") +load("@prelude//decls/common.bzl", "LinkableDepType") def get_apple_toolchain_attr(): # FIXME: prelude// should be standalone (not refer to fbcode//) diff --git a/prelude/apple/prebuilt_apple_framework.bzl b/prelude/apple/prebuilt_apple_framework.bzl index c48e96b11..41d7fc42a 100644 --- a/prelude/apple/prebuilt_apple_framework.bzl +++ b/prelude/apple/prebuilt_apple_framework.bzl @@ -18,7 +18,6 @@ load( "cxx_inherited_preprocessor_infos", "cxx_merge_cpreprocessors", ) -load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//linking:link_groups.bzl", "merge_link_group_lib_info", @@ -41,6 +40,7 @@ load( "SharedLibraryInfo", "merge_shared_libraries", ) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:utils.bzl", "filter_and_map_idx") load(":apple_bundle_types.bzl", "AppleBundleInfo", "AppleBundleTypeDefault") load(":apple_frameworks.bzl", "to_framework_name") diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index fbfa80637..905ae0539 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -17,7 +17,6 @@ load( ) load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags") load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") -load("@prelude//decls:common.bzl", "Linkage") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_groups.bzl", @@ -58,6 +57,7 @@ load( ) load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "create_shared_libraries", "merge_shared_libraries") load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//os_lookup:defs.bzl", "OsLookup") load( "@prelude//tests:re_utils.bzl", diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 0b964ae2f..926472621 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -35,7 +35,6 @@ load( "@prelude//apple/swift:swift_runtime.bzl", 
"create_swift_runtime_linkable", ) -load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//ide_integrations:xcode.bzl", "XCODE_DATA_SUB_TARGET", @@ -95,6 +94,7 @@ load( ) load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "create_shared_libraries", "merge_shared_libraries") load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:lazy.bzl", "lazy") diff --git a/prelude/cxx/cxx_library_utility.bzl b/prelude/cxx/cxx_library_utility.bzl index e630407f8..4d81d5c70 100644 --- a/prelude/cxx/cxx_library_utility.bzl +++ b/prelude/cxx/cxx_library_utility.bzl @@ -11,7 +11,6 @@ load( "single_artifact", ) load("@prelude//:paths.bzl", "paths") -load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//linking:link_info.bzl", "LinkStrategy", @@ -19,6 +18,7 @@ load( "LinkerFlags", "MergedLinkInfo", ) +load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//utils:utils.bzl", "flatten", diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index ff9f72cce..d35363dd5 100644 --- a/prelude/cxx/groups.bzl +++ b/prelude/cxx/groups.bzl @@ -6,7 +6,7 @@ # of this source tree. load("@prelude//cxx:link_groups_types.bzl", "Traversal") -load("@prelude//decls:common.bzl", "Linkage") +load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//utils:build_target_pattern.bzl", "BuildTargetPattern", diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index df8016215..f5c2c0ef1 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -6,7 +6,6 @@ # of this source tree. 
load("@prelude//:paths.bzl", "paths") -load("@prelude//decls:common.bzl", "Linkage") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_groups.bzl", @@ -38,6 +37,7 @@ load( "get_linkable_graph_node_map_func", "get_transitive_deps", ) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") load( diff --git a/prelude/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl index 0cd38cebe..15e2f52f2 100644 --- a/prelude/cxx/omnibus.bzl +++ b/prelude/cxx/omnibus.bzl @@ -12,7 +12,6 @@ load( "CxxLinkResult", # @unused Used as a type "cxx_link_shared_library", ) -load("@prelude//decls:common.bzl", "Linkage") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_info.bzl", @@ -38,6 +37,7 @@ load( "linkable_deps", "linkable_graph", ) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:expect.bzl", "expect") load( "@prelude//utils:graph_utils.bzl", diff --git a/prelude/cxx/prebuilt_cxx_library_group.bzl b/prelude/cxx/prebuilt_cxx_library_group.bzl index 5b77b5fd1..2cfbdae5f 100644 --- a/prelude/cxx/prebuilt_cxx_library_group.bzl +++ b/prelude/cxx/prebuilt_cxx_library_group.bzl @@ -13,7 +13,6 @@ load( "cxx_inherited_preprocessor_infos", "cxx_merge_cpreprocessors", ) -load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//linking:link_groups.bzl", "merge_link_group_lib_info", @@ -45,6 +44,7 @@ load( "merge_shared_libraries", ) load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:utils.bzl", "flatten_dict") load(":cxx_context.bzl", "get_cxx_toolchain_info") diff --git a/prelude/cxx/user/link_group_map.bzl b/prelude/cxx/user/link_group_map.bzl index c9611ada7..002f24885 100644 --- a/prelude/cxx/user/link_group_map.bzl +++ b/prelude/cxx/user/link_group_map.bzl 
@@ -33,8 +33,8 @@ load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", ) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -load("@prelude//decls/common.bzl", "Linkage") def _v1_attrs(attrs_root): return attrs.list( diff --git a/prelude/decls/common.bzl b/prelude/decls/common.bzl index 10f98c677..4a6e58138 100644 --- a/prelude/decls/common.bzl +++ b/prelude/decls/common.bzl @@ -39,16 +39,6 @@ IncludeType = ["local", "system", "raw"] LinkableDepType = ["static", "static_pic", "shared"] -# Ways a library can request to be linked (e.g. usually specific via a rule -# param like `preferred_linkage`). The actual link style used for a library is -# usually determined by a combination of this and the link style being exported -# via a provider. -Linkage = enum( - "any", - "static", - "shared", -) - LogLevel = ["off", "severe", "warning", "info", "config", "fine", "finer", "finest", "all"] OnDuplicateEntry = ["fail", "overwrite", "append"] diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index 78df1eeaf..2dd197fa7 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -11,8 +11,9 @@ # well-formatted (and then delete this TODO) load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load("@prelude//linking:types.bzl", "Linkage") load(":apple_common.bzl", "apple_common") -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "Linkage", "buck", "prelude_rule") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":genrule_common.bzl", "genrule_common") load(":native_common.bzl", "native_common") diff --git a/prelude/decls/haskell_rules.bzl b/prelude/decls/haskell_rules.bzl index e9f877d63..c11ab6040 100644 --- a/prelude/decls/haskell_rules.bzl +++ b/prelude/decls/haskell_rules.bzl @@ -10,7 +10,8 
@@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load(":common.bzl", "LinkableDepType", "Linkage", "buck", "prelude_rule") +load("@prelude//linking:types.bzl", "Linkage") +load(":common.bzl", "LinkableDepType", "buck", "prelude_rule") load(":haskell_common.bzl", "haskell_common") load(":native_common.bzl", "native_common") diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index c04486a58..2878b92bc 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -11,8 +11,9 @@ # well-formatted (and then delete this TODO) load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load("@prelude//linking:types.bzl", "Linkage") load(":apple_common.bzl", "apple_common") -load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "IncludeType", "Linkage", "buck", "prelude_rule") +load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "IncludeType", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":native_common.bzl", "native_common") diff --git a/prelude/decls/rust_rules.bzl b/prelude/decls/rust_rules.bzl index 142652a67..3d7456045 100644 --- a/prelude/decls/rust_rules.bzl +++ b/prelude/decls/rust_rules.bzl @@ -6,10 +6,11 @@ # of this source tree. 
load("@prelude//cxx/user:link_group_map.bzl", "LINK_GROUP_MAP_ATTR") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//rust:link_info.bzl", "RustProcMacroPlugin") load("@prelude//rust:rust_binary.bzl", "rust_binary_impl", "rust_test_impl") load("@prelude//rust:rust_library.bzl", "prebuilt_rust_library_impl", "rust_library_impl") -load(":common.bzl", "Linkage", "buck", "prelude_rule") +load(":common.bzl", "buck", "prelude_rule") load(":native_common.bzl", "native_common") load(":re_test_common.bzl", "re_test_common") load(":rust_common.bzl", "rust_common", "rust_target_dep") diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 33d4017de..d49667391 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -30,7 +30,6 @@ load( "cxx_merge_cpreprocessors", "cxx_private_preprocessor_info", ) -load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//linking:link_info.bzl", "LinkStyle", @@ -42,6 +41,7 @@ load( "SharedLibraryInfo", "merge_shared_libraries", ) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//utils:expect.bzl", "expect") load( diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 0f4812d96..391299fe2 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -54,7 +54,6 @@ load( "cxx_inherited_preprocessor_infos", "cxx_merge_cpreprocessors", ) -load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//haskell:compile.bzl", "CompileResultInfo", @@ -133,6 +132,7 @@ load( "merge_shared_libraries", "traverse_shared_library_info", ) +load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//python:python.bzl", "PythonLibraryInfo", diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index 5295bec28..faea3eb2f 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -16,7 +16,6 @@ load( "@prelude//cxx:link_types.bzl", "link_options", 
) -load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//haskell:compile.bzl", "HaskellLibraryInfo", @@ -51,6 +50,7 @@ load( "SharedLibraryInfo", "traverse_shared_library_info", ) +load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//cxx:linker.bzl", "get_rpath_origin", diff --git a/prelude/linking/link_info.bzl b/prelude/linking/link_info.bzl index 6fd0d726d..948878962 100644 --- a/prelude/linking/link_info.bzl +++ b/prelude/linking/link_info.bzl @@ -17,7 +17,7 @@ load( "get_no_as_needed_shared_libs_flags", "get_objects_as_library_args", ) -load("@prelude//decls:common.bzl", "Linkage") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") load( "@prelude//utils:utils.bzl", diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index a0849a5de..c47b5d035 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -7,7 +7,7 @@ load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo") -load("@prelude//decls:common.bzl", "Linkage") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//python:python.bzl", "PythonLibraryInfo") load("@prelude//utils:expect.bzl", "expect") load( diff --git a/prelude/linking/types.bzl b/prelude/linking/types.bzl new file mode 100644 index 000000000..486318bed --- /dev/null +++ b/prelude/linking/types.bzl @@ -0,0 +1,16 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# Ways a library can request to be linked (e.g. usually specific via a rule +# param like `preferred_linkage`). 
The actual link style used for a library is +# usually determined by a combination of this and the link style being exported +# via a provider. +Linkage = enum( + "any", + "static", + "shared", +) diff --git a/prelude/python/cxx_python_extension.bzl b/prelude/python/cxx_python_extension.bzl index eacfd9ac8..d41228b66 100644 --- a/prelude/python/cxx_python_extension.bzl +++ b/prelude/python/cxx_python_extension.bzl @@ -32,7 +32,6 @@ load( "create_linkable_root", "get_roots", ) -load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//linking:link_groups.bzl", "merge_link_group_lib_info", @@ -60,6 +59,7 @@ load( "@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", ) +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//python:toolchain.bzl", "PythonPlatformInfo", "get_platform_attr") load("@prelude//utils:expect.bzl", "expect") diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 7dbd4350d..692eebd68 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -53,7 +53,6 @@ load( "CPreprocessorArgs", "cxx_inherited_preprocessor_infos", ) -load("@prelude//decls:common.bzl", "Linkage") load("@prelude//linking:link_info.bzl", "LinkedObject") load( "@prelude//linking:linkable_graph.bzl", @@ -68,6 +67,7 @@ load( ) load("@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info") load("@prelude//linking:strip.bzl", "strip_debug_with_gnu_debuglink") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:utils.bzl", "flatten", "value_or") load("@prelude//paths.bzl", "paths") load("@prelude//resources.bzl", "gather_resources") diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 4b59bdb4a..62746aa67 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -40,6 +40,7 @@ load("@prelude//julia:julia.bzl", _julia_extra_attributes = "extra_attributes", 
load("@prelude//kotlin:kotlin.bzl", _kotlin_extra_attributes = "extra_attributes", _kotlin_implemented_rules = "implemented_rules") load("@prelude//linking:execution_preference.bzl", "link_execution_preference_attr") load("@prelude//linking:link_info.bzl", "LinkOrdering") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//lua:cxx_lua_extension.bzl", "cxx_lua_extension_impl") load("@prelude//lua:lua_binary.bzl", "lua_binary_impl") load("@prelude//lua:lua_library.bzl", "lua_library_impl") @@ -56,7 +57,7 @@ load("@prelude//python_bootstrap:python_bootstrap.bzl", "PythonBootstrapSources" load("@prelude//zip_file:zip_file.bzl", _zip_file_extra_attributes = "extra_attributes", _zip_file_implemented_rules = "implemented_rules") load("@prelude//apple/user/apple_resource_transition.bzl", "apple_resource_transition") load("@prelude//decls/android_rules.bzl", "android_rules") -load("@prelude//decls/common.bzl", "IncludeType", "LinkableDepType", "Linkage", "buck") +load("@prelude//decls/common.bzl", "IncludeType", "LinkableDepType", "buck") load("@prelude//decls/core_rules.bzl", "core_rules") load("@prelude//decls/cxx_rules.bzl", "cxx_rules") load("@prelude//decls/d_rules.bzl", "d_rules") diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index e682db462..d15631af5 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -33,10 +33,6 @@ load( "get_link_group_info", "get_link_group_preferred_linkage", ) -load( - "@prelude//decls:common.bzl", - "Linkage", # @unused Used as a type -) load( "@prelude//linking:link_groups.bzl", "LinkGroupLib", # @unused Used as a type @@ -60,6 +56,10 @@ load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", ) +load( + "@prelude//linking:types.bzl", + "Linkage", # @unused Used as a type +) load( ":context.bzl", "CrateName", # @unused Used as a type diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 5fcbe1bd9..cf18650a9 100644 --- 
a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -30,7 +30,6 @@ load( "@prelude//cxx:omnibus.bzl", "create_linkable_root", ) -load("@prelude//decls:common.bzl", "Linkage") load( "@prelude//linking:link_groups.bzl", "merge_link_group_lib_info", @@ -66,6 +65,7 @@ load( "merge_shared_libraries", ) load("@prelude//linking:strip.bzl", "strip_debug_info") +load("@prelude//linking:types.bzl", "Linkage") load("@prelude//os_lookup:defs.bzl", "OsLookup") load( ":build.bzl", From 39b3365c7604350e41eca5901ebe6bb20cd6acf1 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Fri, 15 Mar 2024 14:15:21 -0700 Subject: [PATCH 0460/1133] Move core types for LinkGroupInfo provide into prelude/decls. Summary: We have ~10 redeclations of a complex type in our codebase that is also incomplete. This migrates all the types + attr definition to `prelude/decls` so we can unify it in the next diff. Reviewed By: chatura-atapattu Differential Revision: D54909698 fbshipit-source-id: d721ed8b89a4f2ee7020ec6b3b50b3c9d05416a9 --- prelude/apple/apple_rules_impl.bzl | 2 +- prelude/apple/resource_groups.bzl | 7 +- prelude/apple/user/apple_watchos_bundle.bzl | 2 +- prelude/apple/user/resource_group_map.bzl | 2 +- prelude/cxx/cxx.bzl | 10 ++- prelude/cxx/cxx_types.bzl | 5 +- prelude/cxx/groups.bzl | 87 +++--------------- prelude/cxx/groups_types.bzl | 97 +++++++++++++++++++++ prelude/cxx/link_groups.bzl | 25 ++---- prelude/cxx/link_groups_types.bzl | 64 ++++++++++++-- prelude/cxx/user/link_group_map.bzl | 12 ++- prelude/decls/cxx_rules.bzl | 2 +- prelude/decls/go_rules.bzl | 2 +- prelude/decls/halide_rules.bzl | 2 +- prelude/decls/ios_rules.bzl | 2 +- prelude/decls/rust_rules.bzl | 2 +- prelude/python/python_binary.bzl | 14 ++- prelude/rules_impl.bzl | 2 +- prelude/rust/link_info.bzl | 5 +- 19 files changed, 218 insertions(+), 126 deletions(-) create mode 100644 prelude/cxx/groups_types.bzl diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl 
index 5dc530ef2..e7d634f6c 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -15,7 +15,7 @@ load("@prelude//apple/swift:swift_toolchain.bzl", "swift_toolchain_impl") load("@prelude//apple/swift:swift_toolchain_types.bzl", "SwiftObjectFormat") load("@prelude//apple/user:cpu_split_transition.bzl", "cpu_split_transition") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo", "HeaderMode") -load("@prelude//cxx/user:link_group_map.bzl", "LINK_GROUP_MAP_ATTR") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:execution_preference.bzl", "link_execution_preference_attr") load("@prelude//linking:link_info.bzl", "LinkOrdering") load("@prelude//linking:types.bzl", "Linkage") diff --git a/prelude/apple/resource_groups.bzl b/prelude/apple/resource_groups.bzl index 8b5f63ba2..069d679a9 100644 --- a/prelude/apple/resource_groups.bzl +++ b/prelude/apple/resource_groups.bzl @@ -5,11 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load( - "@prelude//cxx:groups.bzl", - "Group", - "MATCH_ALL_LABEL", -) +load("@prelude//cxx:groups.bzl", "MATCH_ALL_LABEL") +load("@prelude//cxx:groups_types.bzl", "Group") load( "@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", diff --git a/prelude/apple/user/apple_watchos_bundle.bzl b/prelude/apple/user/apple_watchos_bundle.bzl index 7ab77fe43..dc251d876 100644 --- a/prelude/apple/user/apple_watchos_bundle.bzl +++ b/prelude/apple/user/apple_watchos_bundle.bzl @@ -7,7 +7,7 @@ load("@prelude//apple:apple_bundle.bzl", "apple_bundle_impl") load("@prelude//apple:apple_rules_impl_utility.bzl", "apple_bundle_extra_attrs") -load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load("@prelude//cxx:groups_types.bzl", "Traversal") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") load("@prelude//decls/ios_rules.bzl", "AppleBundleExtension") load(":watch_transition.bzl", "watch_transition") diff --git a/prelude/apple/user/resource_group_map.bzl b/prelude/apple/user/resource_group_map.bzl index 50f37e70d..b99597be6 100644 --- a/prelude/apple/user/resource_group_map.bzl +++ b/prelude/apple/user/resource_group_map.bzl @@ -18,7 +18,7 @@ load( "make_info_subtarget_providers", "parse_groups_definitions", ) -load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load("@prelude//cxx:groups_types.bzl", "Traversal") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") def resource_group_map_attr(): diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index 905ae0539..c848ea778 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -17,6 +17,14 @@ load( ) load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags") load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") +load( + "@prelude//cxx:groups_types.bzl", + "Group", +) +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type +) load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( 
"@prelude//linking:link_groups.bzl", @@ -95,7 +103,6 @@ load( ) load( ":groups.bzl", - "Group", # @unused Used as a type "MATCH_ALL_LABEL", "NO_MATCH_LABEL", ) @@ -110,7 +117,6 @@ load( ) load( ":link_groups.bzl", - "LinkGroupInfo", # @unused Used as a type "LinkGroupLibSpec", "get_link_group_info", ) diff --git a/prelude/cxx/cxx_types.bzl b/prelude/cxx/cxx_types.bzl index 62cd70bb3..e6ba9f3a5 100644 --- a/prelude/cxx/cxx_types.bzl +++ b/prelude/cxx/cxx_types.bzl @@ -6,6 +6,10 @@ # of this source tree. load("@prelude//:artifact_tset.bzl", "ArtifactTSet") # @unused Used as a type +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type +) load( "@prelude//linking:link_info.bzl", "LinkArgs", @@ -30,7 +34,6 @@ load( ) load( ":link_groups.bzl", - "LinkGroupInfo", # @unused Used as a type "LinkGroupLibSpec", # @unused Used as a type ) load( diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index d35363dd5..88dc349bd 100644 --- a/prelude/cxx/groups.bzl +++ b/prelude/cxx/groups.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load("@prelude//cxx:groups_types.bzl", "Traversal") load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//utils:build_target_pattern.bzl", @@ -25,30 +25,16 @@ load( "map_val", "value_or", ) - -# Optional type of filtering -FilterType = enum( - # Filters for targets with labels matching the regex pattern defined after `label:`. - "label", - # Filters for targets for the build target pattern defined after "pattern:". - "pattern", - # Filters for targets matching the regex pattern defined after "target_regex:". 
- "target_regex", -) - -BuildTargetFilter = record( - pattern = field(BuildTargetPattern), - _type = field(FilterType, FilterType("pattern")), -) - -LabelFilter = record( - regex = regex, - _type = field(FilterType, FilterType("label")), -) - -TargetRegexFilter = record( - regex = regex, - _type = field(FilterType, FilterType("target_regex")), +load( + ":groups_types.bzl", + "BuildTargetFilter", + "FilterType", + "Group", + "GroupAttrs", + "GroupDefinition", + "GroupMapping", + "LabelFilter", + "TargetRegexFilter", ) # Label for special group mapping which makes every target associated with it to be included in all groups @@ -58,18 +44,6 @@ MATCH_ALL_LABEL = "MATCH_ALL" # against the final binary NO_MATCH_LABEL = "NO_MATCH" -# Representation of a parsed group mapping -GroupMapping = record( - # The root to apply this mapping to. - root = field([Label, None], None), - # The type of traversal to use. - traversal = field(Traversal, Traversal("tree")), - # Optional filter type to apply to the traversal. - filters = field(list[[BuildTargetFilter, LabelFilter, TargetRegexFilter]], []), - # Preferred linkage for this target when added to a link group. - preferred_linkage = field([Linkage, None], None), -) - _VALID_ATTRS = [ "enable_distributed_thinlto", "enable_if_node_count_exceeds", @@ -79,45 +53,6 @@ _VALID_ATTRS = [ "requires_root_node_exists", ] -# Representation of group attributes -GroupAttrs = record( - # Use distributed thinlto to build the link group shared library. - enable_distributed_thinlto = field(bool, False), - # Enable this link group if the binary's node count exceeds the given threshold - enable_if_node_count_exceeds = field([int, None], None), - # Discard all dependencies in the link group, useful for dropping unused dependencies - # from the build graph. - discard_group = field(bool, False), - # Adds additional linker flags used to link the link group shared object. 
- linker_flags = field(list, []), - # Adds additional linker flags to apply to dependents that link against the - # link group's shared object. - exported_linker_flags = field(list, []), - # Requires root nodes in specs to always exist in dependency graph. - # Otherwise fails. - requires_root_node_exists = field(bool, True), -) - -# Types of group traversal -GroupDefinition = enum( - # Group is explicitly defined in mapping provided by user. - # That is the default behavior. - "explicit", - # Group is implicitly created during mapping computations. - # For example, group can be created for "subfolders" traversal. - "implicit", -) - -# Representation of a parsed group -Group = record( - # The name for this group. - name = str, - # The mappings that are part of this group. - mappings = list[GroupMapping], - attrs = GroupAttrs, - definition_type = field(GroupDefinition, GroupDefinition("explicit")), -) - # Creates a group from an existing group, overwriting any properties provided def create_group( group: Group, diff --git a/prelude/cxx/groups_types.bzl b/prelude/cxx/groups_types.bzl new file mode 100644 index 000000000..0a8159c58 --- /dev/null +++ b/prelude/cxx/groups_types.bzl @@ -0,0 +1,97 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//linking:types.bzl", "Linkage") +load( + "@prelude//utils:build_target_pattern.bzl", + "BuildTargetPattern", +) + +Traversal = enum( + # Includes the target and all of it's transitive dependencies in the group. + "tree", + # Includes only the target in the group. + "node", + # Uses pattern and separates all targets by full folder path. 
+ "subfolders", +) + +# Optional type of filtering +FilterType = enum( + # Filters for targets with labels matching the regex pattern defined after `label:`. + "label", + # Filters for targets for the build target pattern defined after "pattern:". + "pattern", + # Filters for targets matching the regex pattern defined after "target_regex:". + "target_regex", +) + +BuildTargetFilter = record( + pattern = field(BuildTargetPattern), + _type = field(FilterType, FilterType("pattern")), +) + +LabelFilter = record( + regex = regex, + _type = field(FilterType, FilterType("label")), +) + +TargetRegexFilter = record( + regex = regex, + _type = field(FilterType, FilterType("target_regex")), +) + +# Representation of a parsed group mapping +GroupMapping = record( + # The root to apply this mapping to. + root = field([Label, None], None), + # The type of traversal to use. + traversal = field(Traversal, Traversal("tree")), + # Optional filter type to apply to the traversal. + filters = field(list[[BuildTargetFilter, LabelFilter, TargetRegexFilter]], []), + # Preferred linkage for this target when added to a link group. + preferred_linkage = field([Linkage, None], None), +) + +# Representation of group attributes +GroupAttrs = record( + # Use distributed thinlto to build the link group shared library. + enable_distributed_thinlto = field(bool, False), + # Enable this link group if the binary's node count exceeds the given threshold + enable_if_node_count_exceeds = field([int, None], None), + # Discard all dependencies in the link group, useful for dropping unused dependencies + # from the build graph. + discard_group = field(bool, False), + # Adds additional linker flags used to link the link group shared object. + linker_flags = field(list, []), + # Adds additional linker flags to apply to dependents that link against the + # link group's shared object. + exported_linker_flags = field(list, []), + # Requires root nodes in specs to always exist in dependency graph. 
+ # Otherwise fails. + requires_root_node_exists = field(bool, True), +) + +# Types of group traversal +GroupDefinition = enum( + # Group is explicitly defined in mapping provided by user. + # That is the default behavior. + "explicit", + # Group is implicitly created during mapping computations. + # For example, group can be created for "subfolders" traversal. + "implicit", +) + +# Representation of a parsed group +Group = record( + # The name for this group. + name = str, + # The mappings that are part of this group. + mappings = list[GroupMapping], + attrs = GroupAttrs, + definition_type = field(GroupDefinition, GroupDefinition("explicit")), +) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index f5c2c0ef1..3ac8ba425 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -6,6 +6,14 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") +load( + "@prelude//cxx:groups_types.bzl", + "Group", # @unused Used as a type +) +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", +) load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( "@prelude//linking:link_groups.bzl", @@ -61,7 +69,6 @@ load( load(":cxx_toolchain_types.bzl", "PicBehavior") load( ":groups.bzl", - "Group", # @unused Used as a type "MATCH_ALL_LABEL", "NO_MATCH_LABEL", "compute_mappings", @@ -107,22 +114,6 @@ LINK_GROUP_MAP_DATABASE_FILENAME = "link_group_map_database.json" LINK_GROUP_MAPPINGS_SUB_TARGET = "link-group-mappings" LINK_GROUP_MAPPINGS_FILENAME_SUFFIX = ".link_group_map.json" -LinkGroupInfo = provider( - # @unsorted-dict-items - fields = { - "groups": provider_field(dict[str, Group]), - "groups_hash": provider_field(int), - "mappings": provider_field(dict[Label, str]), - # Additional graphs needed to cover labels referenced by the groups above. - # This is useful in cases where the consumer of this provider won't already - # have deps covering these. 
- # NOTE(agallagher): We do this to maintain existing behavior w/ the - # standalone `link_group_map()` rule, but it's not clear if it's actually - # desirable behavior. - "graph": provider_field(LinkableGraph), - }, -) - LinkGroupLinkInfo = record( link_info = field(LinkInfo), output_style = field(LibOutputStyle), diff --git a/prelude/cxx/link_groups_types.bzl b/prelude/cxx/link_groups_types.bzl index f9f01e7bb..2ecf90729 100644 --- a/prelude/cxx/link_groups_types.bzl +++ b/prelude/cxx/link_groups_types.bzl @@ -5,11 +5,61 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -Traversal = enum( - # Includes the target and all of it's transitive dependencies in the group. - "tree", - # Includes only the target in the group. - "node", - # Uses pattern and separates all targets by full folder path. - "subfolders", +load("@prelude//linking:types.bzl", "Linkage") +load(":groups_types.bzl", "Group", "Traversal") + +LinkGroupInfo = provider( + fields = { + # Additional graphs needed to cover labels referenced by the groups above. + # This is useful in cases where the consumer of this provider won't already + # have deps covering these. + # NOTE(agallagher): We do this to maintain existing behavior w/ the + # standalone `link_group_map()` rule, but it's not clear if it's actually + # desirable behavior. + "graph": provider_field(typing.Any, default = None), # LinkableGraph + "groups": provider_field(dict[str, Group]), + "groups_hash": provider_field(int), + "mappings": provider_field(dict[Label, str]), + }, +) + +def link_group_inlined_map_attr(root_attr): + return attrs.list( + attrs.tuple( + # name + attrs.string(), + # list of mappings + attrs.list( + # a single mapping + attrs.tuple( + # root node + root_attr, + # traversal + attrs.enum(Traversal.values()), + # filters, either `None`, a single filter, or a list of filters + # (which must all match). 
+ attrs.option(attrs.one_of(attrs.list(attrs.string()), attrs.string())), + # linkage + attrs.option(attrs.enum(Linkage.values())), + ), + ), + # attributes + attrs.option( + attrs.dict(key = attrs.string(), value = attrs.any(), sorted = False), + ), + ), + ) + +LINK_GROUP_MAP_ATTR = attrs.option( + attrs.one_of( + attrs.dep(providers = [LinkGroupInfo]), + link_group_inlined_map_attr( + # Inlined `link_group_map` will parse roots as `label`s, to avoid + # bloating deps w/ unrelated mappings (e.g. it's common to use + # a default mapping for all rules, which would otherwise add + # unrelated deps to them). + root_attr = attrs.option(attrs.label()), + ), + ), + default = None, ) diff --git a/prelude/cxx/user/link_group_map.bzl b/prelude/cxx/user/link_group_map.bzl index 002f24885..20597750c 100644 --- a/prelude/cxx/user/link_group_map.bzl +++ b/prelude/cxx/user/link_group_map.bzl @@ -10,12 +10,16 @@ load( "make_info_subtarget_providers", "parse_groups_definitions", ) +load("@prelude//cxx:groups_types.bzl", "Traversal") load( "@prelude//cxx:link_groups.bzl", - "LinkGroupInfo", "build_link_group_info", ) -load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type + "link_group_inlined_map_attr", +) load( "@prelude//linking:link_groups.bzl", "LinkGroupLibInfo", @@ -101,8 +105,8 @@ registration_spec = RuleRegistrationSpec( name = "link_group_map", impl = _impl, attrs = { - "map": _v1_attrs( - attrs_root = attrs.dep( + "map": link_group_inlined_map_attr( + root_attr = attrs.dep( providers = [ LinkGroupLibInfo, LinkableGraph, diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index 2dd197fa7..10c90a4b8 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -10,7 +10,7 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load("@prelude//cxx:link_groups_types.bzl", 
"Traversal") +load("@prelude//cxx:groups_types.bzl", "Traversal") load("@prelude//linking:types.bzl", "Linkage") load(":apple_common.bzl", "apple_common") load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "buck", "prelude_rule") diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index 9ffa6f504..33b9903dd 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -10,7 +10,7 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load("@prelude//cxx:groups_types.bzl", "Traversal") load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":go_common.bzl", "go_common") diff --git a/prelude/decls/halide_rules.bzl b/prelude/decls/halide_rules.bzl index 4376e53e7..5af8b47c7 100644 --- a/prelude/decls/halide_rules.bzl +++ b/prelude/decls/halide_rules.bzl @@ -10,7 +10,7 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load("@prelude//cxx:groups_types.bzl", "Traversal") load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "prelude_rule") load(":cxx_common.bzl", "cxx_common") diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index 2878b92bc..2a8282d08 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -10,7 +10,7 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load("@prelude//cxx:groups_types.bzl", "Traversal") load("@prelude//linking:types.bzl", "Linkage") load(":apple_common.bzl", "apple_common") load(":common.bzl", 
"CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "IncludeType", "buck", "prelude_rule") diff --git a/prelude/decls/rust_rules.bzl b/prelude/decls/rust_rules.bzl index 3d7456045..32854fb27 100644 --- a/prelude/decls/rust_rules.bzl +++ b/prelude/decls/rust_rules.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//cxx/user:link_group_map.bzl", "LINK_GROUP_MAP_ATTR") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:types.bzl", "Linkage") load("@prelude//rust:link_info.bzl", "RustProcMacroPlugin") load("@prelude//rust:rust_binary.bzl", "rust_binary_impl", "rust_test_impl") diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 692eebd68..57cda3537 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -25,20 +25,23 @@ load( ) load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load( - "@prelude//cxx:groups.bzl", + "@prelude//cxx:groups_types.bzl", "Group", "GroupAttrs", "GroupMapping", + "Traversal", ) load("@prelude//cxx:headers.bzl", "cxx_get_regular_cxx_headers_layout") load( "@prelude//cxx:link_groups.bzl", - "LinkGroupInfo", # @unused Used as a type "LinkGroupLibSpec", "build_link_group_info", "get_link_group_info", ) -load("@prelude//cxx:link_groups_types.bzl", "Traversal") +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type +) load("@prelude//cxx:linker.bzl", "get_rpath_origin") load( "@prelude//cxx:omnibus.bzl", @@ -53,7 +56,10 @@ load( "CPreprocessorArgs", "cxx_inherited_preprocessor_infos", ) -load("@prelude//linking:link_info.bzl", "LinkedObject") +load( + "@prelude//linking:link_info.bzl", + "LinkedObject", +) load( "@prelude//linking:linkable_graph.bzl", "LinkableGraph", diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 62746aa67..99e39a7b3 100644 --- a/prelude/rules_impl.bzl 
+++ b/prelude/rules_impl.bzl @@ -15,9 +15,9 @@ load("@prelude//cxx:cxx.bzl", "cxx_binary_impl", "cxx_library_impl", "cxx_precom load("@prelude//cxx:cxx_toolchain.bzl", "cxx_toolchain_extra_attributes", "cxx_toolchain_impl") load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", "CxxToolchainInfo") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo", "HeaderMode") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//cxx:prebuilt_cxx_library_group.bzl", "prebuilt_cxx_library_group_impl") load("@prelude//cxx:windows_resource.bzl", "windows_resource_impl") -load("@prelude//cxx/user:link_group_map.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//erlang:erlang.bzl", _erlang_implemented_rules = "implemented_rules") load("@prelude//git:git_fetch.bzl", "git_fetch_impl") load("@prelude//go:cgo_library.bzl", "cgo_library_impl") diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index d15631af5..85fafffa1 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -23,7 +23,6 @@ load( load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load( "@prelude//cxx:link_groups.bzl", - "LinkGroupInfo", # @unused Used as a type "LinkGroupLinkInfo", # @unused Used as a type "create_link_groups", "get_filtered_labels_to_links_map", @@ -33,6 +32,10 @@ load( "get_link_group_info", "get_link_group_preferred_linkage", ) +load( + "@prelude//cxx:link_groups_types.bzl", + "LinkGroupInfo", # @unused Used as a type +) load( "@prelude//linking:link_groups.bzl", "LinkGroupLib", # @unused Used as a type From 25848a1c1429c16321e2df0bd4391444d37e2cec Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Fri, 15 Mar 2024 14:15:21 -0700 Subject: [PATCH 0461/1133] Unify link_group_map attr across all rules. Summary: Right now it's being redeclared N times (and I have to change it), which makes it difficult. This unifies it. 
Reviewed By: rmaz Differential Revision: D54909696 fbshipit-source-id: 43d0d69449344f2a96112f77352df7810e29de7b --- prelude/decls/cxx_rules.bzl | 8 ++++---- prelude/decls/go_rules.bzl | 4 ++-- prelude/decls/halide_rules.bzl | 4 ++-- prelude/decls/ios_rules.bzl | 7 ++++--- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index 10c90a4b8..225980ffb 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -10,7 +10,7 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load("@prelude//cxx:groups_types.bzl", "Traversal") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:types.bzl", "Linkage") load(":apple_common.bzl", "apple_common") load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "buck", "prelude_rule") @@ -119,7 +119,7 @@ cxx_binary = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), "post_platform_linker_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg(anon_target_compatible = True))), default = []), @@ -572,7 +572,7 @@ cxx_library = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": 
attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "module_name": attrs.option(attrs.string(), default = None), "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "post_linker_flags": attrs.list(attrs.arg(anon_target_compatible = True), default = []), @@ -880,7 +880,7 @@ cxx_test = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_style": attrs.option(attrs.enum(LinkableDepType), default = None), "linker_extra_outputs": attrs.list(attrs.string(), default = []), "platform_compiler_flags": attrs.list(attrs.tuple(attrs.regex(), attrs.list(attrs.arg())), default = []), diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index 33b9903dd..c128410c0 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -10,7 +10,7 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load("@prelude//cxx:groups_types.bzl", "Traversal") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":go_common.bzl", "go_common") @@ -115,7 +115,7 @@ cgo_library = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - 
"link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), "platform_headers": attrs.list(attrs.tuple(attrs.regex(), attrs.named_set(attrs.source(), sorted = True)), default = []), "platform_srcs": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.one_of(attrs.source(), attrs.tuple(attrs.source(), attrs.list(attrs.arg()))), sorted = True)), default = []), diff --git a/prelude/decls/halide_rules.bzl b/prelude/decls/halide_rules.bzl index 5af8b47c7..7ddda0ec7 100644 --- a/prelude/decls/halide_rules.bzl +++ b/prelude/decls/halide_rules.bzl @@ -10,7 +10,7 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load("@prelude//cxx:groups_types.bzl", "Traversal") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "prelude_rule") load(":cxx_common.bzl", "cxx_common") @@ -99,7 +99,7 @@ halide_library = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "link_deps_query_whole": attrs.bool(default = False), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_style": attrs.option(attrs.enum(LinkableDepType), default = None), "linker_extra_outputs": attrs.list(attrs.string(), default = []), "platform_deps": attrs.list(attrs.tuple(attrs.regex(), attrs.set(attrs.dep(), sorted = True)), default = []), diff --git a/prelude/decls/ios_rules.bzl 
b/prelude/decls/ios_rules.bzl index 2a8282d08..71bca86fd 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -11,6 +11,7 @@ # well-formatted (and then delete this TODO) load("@prelude//cxx:groups_types.bzl", "Traversal") +load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:types.bzl", "Linkage") load(":apple_common.bzl", "apple_common") load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "IncludeType", "buck", "prelude_rule") @@ -195,7 +196,7 @@ apple_binary = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_whole": attrs.option(attrs.bool(), default = None), "modular": attrs.bool(default = False), "module_name": attrs.option(attrs.string(), default = None), @@ -488,7 +489,7 @@ apple_library = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "modular": attrs.bool(default = False), "module_name": attrs.option(attrs.string(), default = None), "module_requires_cxx": attrs.bool(default = False), @@ -745,7 +746,7 @@ apple_test = prelude_rule( "libraries": attrs.list(attrs.string(), default = []), "licenses": attrs.list(attrs.source(), default = []), "link_group": attrs.option(attrs.string(), default = None), - "link_group_map": 
attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), + "link_group_map": LINK_GROUP_MAP_ATTR, "link_whole": attrs.option(attrs.bool(), default = None), "linker_extra_outputs": attrs.list(attrs.string(), default = []), "modular": attrs.bool(default = False), From 33b910e2de47a35416edba2ea32945f405667502 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Fri, 15 Mar 2024 15:24:39 -0700 Subject: [PATCH 0462/1133] Change 'root' -> 'roots'. Summary: In the next diff, we are going to add support for this syntax: ``` ( "some_link_group", [ ([":A", ":B"], "intersect", None), ... ``` this is a departure from the current link_group syntax which assumes that the first item in the `tuple()` is a single item. This item is stored in `GroupMapping` as `root: provider_field(Label | None)`, but it turns-out that many of the places it is passed to can take a list of `Label`. As such, this updates `root` to become `roots: provider_field(list[Label] | None)` so that we can support the new syntax in the next diff. 
Reviewed By: stepancheg Differential Revision: D54809112 fbshipit-source-id: 329147f545800b07e1e3145ffa6be9a17ce23142 --- prelude/apple/user/resource_group_map.bzl | 35 +++++++++++++++++++---- prelude/cxx/cxx_executable.bzl | 8 ++---- prelude/cxx/groups.bzl | 24 ++++++++++++---- prelude/cxx/groups_types.bzl | 2 +- prelude/cxx/link_groups.bzl | 30 +++++++++++++------ prelude/haskell/haskell.bzl | 8 ++---- prelude/python/python_binary.bzl | 6 ++-- 7 files changed, 76 insertions(+), 37 deletions(-) diff --git a/prelude/apple/user/resource_group_map.bzl b/prelude/apple/user/resource_group_map.bzl index b99597be6..3240805d6 100644 --- a/prelude/apple/user/resource_group_map.bzl +++ b/prelude/apple/user/resource_group_map.bzl @@ -7,6 +7,7 @@ load( "@prelude//apple:resource_groups.bzl", + "ResourceGraphNode", # @unused Used as a type "ResourceGroupInfo", "create_resource_graph", "get_resource_graph_node_map_func", @@ -18,7 +19,11 @@ load( "make_info_subtarget_providers", "parse_groups_definitions", ) -load("@prelude//cxx:groups_types.bzl", "Traversal") +load( + "@prelude//cxx:groups_types.bzl", + "GroupMapping", # @unused Used as a type + "Traversal", +) load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") def resource_group_map_attr(): @@ -49,11 +54,10 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: # ResourceGraphInfo, which `create_resource_graph` removes above. # So make sure we remove them from the mappings too, otherwise # `compute_mappings` crashes on the inconsistency. 
- mappings = [ - mapping - for mapping in group.mappings - if mapping.root == None or mapping.root in resource_graph_node_map - ], + mappings = filter( + None, + [_fixup_mapping_to_only_include_roots_in_the_map(m, resource_graph_node_map) for m in group.mappings], + ), ) for group in resource_groups }, @@ -75,6 +79,25 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: ), ] +def _fixup_mapping_to_only_include_roots_in_the_map(mapping: GroupMapping, node_map: dict[Label, ResourceGraphNode]) -> GroupMapping | None: + if not mapping.roots: + return mapping + + filtered_roots = [ + root + for root in mapping.roots + if root in node_map + ] + if not filtered_roots: + return None + + return GroupMapping( + roots = filtered_roots, + traversal = mapping.traversal, + filters = mapping.filters, + preferred_linkage = mapping.preferred_linkage, + ) + registration_spec = RuleRegistrationSpec( name = "resource_group_map", impl = _impl, diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 078f3fa51..ee9c4184b 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -115,6 +115,7 @@ load( ":cxx_types.bzl", "CxxRuleConstructorParams", # @unused Used as a type ) +load(":groups.bzl", "get_dedupped_roots_from_groups") load( ":link.bzl", "CxxLinkerMapData", @@ -398,12 +399,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, public_nodes = get_transitive_deps_matching_labels( linkable_graph_node_map = linkable_graph_node_map, label = ctx.attrs.link_group_public_deps_label, - roots = [ - mapping.root - for group in link_group_info.groups.values() - for mapping in group.mappings - if mapping.root != None - ], + roots = get_dedupped_roots_from_groups(link_group_info.groups.values()), ) filtered_links = get_filtered_links(labels_to_links_map, set(public_nodes)) diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index 88dc349bd..235373864 100644 --- a/prelude/cxx/groups.bzl +++ 
b/prelude/cxx/groups.bzl @@ -94,7 +94,7 @@ def parse_groups_definitions( for entry in mappings: traversal = _parse_traversal_from_mapping(entry[1]) mapping = GroupMapping( - root = map_val(parse_root, entry[0]), + roots = filter(None, [map_val(parse_root, entry[0])]), traversal = traversal, filters = _parse_filter_from_mapping(entry[2]), preferred_linkage = Linkage(entry[3]) if len(entry) > 3 and entry[3] else None, @@ -173,14 +173,26 @@ def compute_mappings(groups_map: dict[str, Group], graph_map: dict[Label, typing return target_to_group_map +def get_dedupped_roots_from_groups(groups: list[Group]) -> list[Label]: + roots = {} + for group in groups: + for mapping in group.mappings: + if not mapping.roots: + continue + + for root in mapping.roots: + roots[root] = True + + return list(roots.keys()) + def _find_targets_in_mapping( graph_map: dict[Label, typing.Any], mapping: GroupMapping) -> list[Label]: # If we have no filtering, we don't need to do any traversal to find targets to include. if not mapping.filters: - if mapping.root == None: + if not mapping.roots: fail("no filter or explicit root given: {}", mapping) - return [mapping.root] + return mapping.roots # Else find all dependencies that match the filter. 
matching_targets = {} @@ -218,11 +230,11 @@ def _find_targets_in_mapping( return [] return graph_node.deps + graph_node.exported_deps - if mapping.root == None: + if not mapping.roots: for node in graph_map: find_matching_targets(node) else: - breadth_first_traversal_by(graph_map, [mapping.root], find_matching_targets) + breadth_first_traversal_by(graph_map, mapping.roots, find_matching_targets) return matching_targets.keys() @@ -329,7 +341,7 @@ def _make_json_info_for_group_mapping(group_mapping: GroupMapping) -> dict[str, return { "filters": _make_json_info_for_group_mapping_filters(group_mapping.filters), "preferred_linkage": group_mapping.preferred_linkage, - "root": group_mapping.root, + "roots": group_mapping.roots, "traversal": group_mapping.traversal, } diff --git a/prelude/cxx/groups_types.bzl b/prelude/cxx/groups_types.bzl index 0a8159c58..1fdffc919 100644 --- a/prelude/cxx/groups_types.bzl +++ b/prelude/cxx/groups_types.bzl @@ -48,7 +48,7 @@ TargetRegexFilter = record( # Representation of a parsed group mapping GroupMapping = record( # The root to apply this mapping to. - root = field([Label, None], None), + roots = field(list[Label], []), # The type of traversal to use. traversal = field(Traversal, Traversal("tree")), # Optional filter type to apply to the traversal. 
diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 3ac8ba425..f8a8be07c 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -211,12 +211,22 @@ def get_link_group_info( ) def get_link_group_preferred_linkage(link_groups: list[Group]) -> dict[Label, Linkage]: - return { - mapping.root: mapping.preferred_linkage - for group in link_groups - for mapping in group.mappings - if mapping.root != None and mapping.preferred_linkage != None - } + root_to_linkage = {} + for group in link_groups: + for mapping in group.mappings: + if not mapping.roots: + continue + + if not mapping.preferred_linkage: + continue + + for root in mapping.roots: + # TODO: There might be a bug here - if the same root is listed in + # two different link_group_map entries, we'll only use the preferred_linkage + # of the last style passed. + root_to_linkage[root] = mapping.preferred_linkage + + return root_to_linkage LinkGroupContext = record( link_group_mappings = field([dict[Label, str], None]), @@ -604,12 +614,14 @@ def _create_link_group( for mapping in spec.group.mappings: # If there's no explicit root, this means we need to search the entire # graph to find candidate nodes. - if mapping.root == None: + if not mapping.roots: has_empty_root = True - elif spec.group.attrs.requires_root_node_exists or mapping.root in linkable_graph_node_map: + elif spec.group.attrs.requires_root_node_exists: # If spec requires root to always exist (default True), always include to traversal to fail hard if it is not in deps. # Otherwise add to traversal only if we sure it is in deps graph. 
- roots.append(mapping.root) + roots.extend(mapping.roots) + else: + roots.extend([root for root in mapping.roots if root in linkable_graph_node_map]) # If this link group has an empty mapping, we need to search everything # -- even the additional roots -- to find potential nodes in the link diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 391299fe2..2a105b645 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -29,6 +29,7 @@ load( "get_rpath_origin", "get_shared_library_flags", ) +load("@prelude//cxx:groups.bzl", "get_dedupped_roots_from_groups") load( "@prelude//cxx:link_groups.bzl", "LinkGroupContext", @@ -1059,12 +1060,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link_group_relevant_roots = find_relevant_roots( linkable_graph_node_map = linkable_graph_node_map, link_group_mappings = link_group_info.mappings, - roots = [ - mapping.root - for group in link_group_info.groups.values() - for mapping in group.mappings - if mapping.root != None - ], + roots = get_dedupped_roots_from_groups(link_group_info.groups.values()), ) labels_to_links_map = get_filtered_labels_to_links_map( diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 57cda3537..3e3250ee3 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -168,7 +168,7 @@ def _get_root_link_group_specs( name = dep.linkable_root_info.name, mappings = [ GroupMapping( - root = dep.linkable_graph.nodes.value.label, + roots = [dep.linkable_graph.nodes.value.label], traversal = Traversal("node"), ), ], @@ -191,7 +191,7 @@ def _get_root_link_group_specs( name = name, mappings = [ GroupMapping( - root = extension.linkable_graph.nodes.value.label, + roots = [extension.linkable_graph.nodes.value.label], traversal = Traversal("node"), ), ], @@ -231,7 +231,7 @@ def _get_shared_only_groups(shared_only_libs: list[LinkableProviders]) -> list[G name = 
str(dep.linkable_graph.nodes.value.label.raw_target()), mappings = [ GroupMapping( - root = dep.linkable_graph.nodes.value.label, + roots = [dep.linkable_graph.nodes.value.label], traversal = Traversal("node"), preferred_linkage = Linkage("shared"), ), From dc70b0ae000a186c197b5ccc9eec458d044bfe7c Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Fri, 15 Mar 2024 15:24:39 -0700 Subject: [PATCH 0463/1133] Unify attrs() for resource_group_map Summary: The type is redeclared everywhere, lets unify. Reviewed By: chatura-atapattu Differential Revision: D54923012 fbshipit-source-id: 8f935122a0cc7fc7a07dfe351be192f37baecd55 --- prelude/apple/apple_rules_impl_utility.bzl | 4 ++-- prelude/apple/resource_groups.bzl | 18 ++++++++++++++++-- prelude/apple/user/apple_resource_bundle.bzl | 4 ++-- prelude/apple/user/apple_watchos_bundle.bzl | 4 ++-- prelude/apple/user/resource_group_map.bzl | 4 ++-- prelude/cxx/cxx.bzl | 7 ++----- prelude/cxx/groups.bzl | 7 ------- prelude/cxx/groups_types.bzl | 7 +++++++ prelude/cxx/link_groups.bzl | 7 +++++-- prelude/decls/ios_rules.bzl | 4 ++-- 10 files changed, 40 insertions(+), 26 deletions(-) diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 79013c250..285575bd6 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -9,11 +9,11 @@ load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_syste load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo", "AppleBundleTypeAttributeType") load("@prelude//apple:apple_code_signing_types.bzl", "CodeSignType") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") +load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") load("@prelude//apple/swift:swift_incremental_support.bzl", "SwiftCompilationMode") load("@prelude//apple/user:apple_selective_debugging.bzl", "AppleSelectiveDebuggingInfo") 
load("@prelude//apple/user:apple_simulators.bzl", "apple_simulators_transition") load("@prelude//apple/user:cpu_split_transition.bzl", "cpu_split_transition") -load("@prelude//apple/user:resource_group_map.bzl", "resource_group_map_attr") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo") load("@prelude//linking:execution_preference.bzl", "link_execution_preference_attr") load("@prelude//linking:link_info.bzl", "LinkOrdering") @@ -138,7 +138,7 @@ def apple_bundle_extra_attrs(): attribs = { "binary": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), "bundle_type": attrs.option(attrs.enum(AppleBundleTypeAttributeType.values()), default = None), - "resource_group_map": resource_group_map_attr(), + "resource_group_map": RESOURCE_GROUP_MAP_ATTR, "selective_debugging": attrs.option(attrs.dep(providers = [AppleSelectiveDebuggingInfo]), default = None), "split_arch_dsym": attrs.bool(default = False), "universal": attrs.option(attrs.bool(), default = None), diff --git a/prelude/apple/resource_groups.bzl b/prelude/apple/resource_groups.bzl index 069d679a9..beb4fd62d 100644 --- a/prelude/apple/resource_groups.bzl +++ b/prelude/apple/resource_groups.bzl @@ -5,8 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//cxx:groups.bzl", "MATCH_ALL_LABEL") -load("@prelude//cxx:groups_types.bzl", "Group") +load("@prelude//cxx:groups_types.bzl", "Group", "MATCH_ALL_LABEL", "Traversal") load( "@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", @@ -32,6 +31,21 @@ ResourceGroupInfo = provider( }, ) +RESOURCE_GROUP_MAP_ATTR = attrs.option(attrs.dep(providers = [ResourceGroupInfo]), default = None) + +INLINED_RESOURCE_GROUP_MAP_ATTR = attrs.list( + attrs.tuple( + attrs.string(), + attrs.list( + attrs.tuple( + attrs.dep(), + attrs.enum(Traversal.values()), + attrs.option(attrs.string()), + ), + ), + ), +) + ResourceGraphNode = record( label = field(Label), # Attribute labels on the target. diff --git a/prelude/apple/user/apple_resource_bundle.bzl b/prelude/apple/user/apple_resource_bundle.bzl index 66c902dfb..49309f99b 100644 --- a/prelude/apple/user/apple_resource_bundle.bzl +++ b/prelude/apple/user/apple_resource_bundle.bzl @@ -9,9 +9,9 @@ load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_syste load("@prelude//apple:apple_bundle_resources.bzl", "get_apple_bundle_resource_part_list") load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") +load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") load("@prelude//decls/ios_rules.bzl", "AppleBundleExtension") -load(":resource_group_map.bzl", "resource_group_map_attr") def _get_apple_resources_toolchain_attr(): # FIXME: prelude// should be standalone (not refer to fbcode//) @@ -40,7 +40,7 @@ def _apple_resource_bundle_attrs(): "privacy_manifest": attrs.option(attrs.source(), default = None), "product_name": attrs.option(attrs.string(), default = None), "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": resource_group_map_attr(), + "resource_group_map": 
RESOURCE_GROUP_MAP_ATTR, # Only include macOS hosted toolchains, so we compile resources directly on Mac RE "_apple_toolchain": _get_apple_resources_toolchain_attr(), "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), diff --git a/prelude/apple/user/apple_watchos_bundle.bzl b/prelude/apple/user/apple_watchos_bundle.bzl index dc251d876..ce0bd9ddb 100644 --- a/prelude/apple/user/apple_watchos_bundle.bzl +++ b/prelude/apple/user/apple_watchos_bundle.bzl @@ -7,7 +7,7 @@ load("@prelude//apple:apple_bundle.bzl", "apple_bundle_impl") load("@prelude//apple:apple_rules_impl_utility.bzl", "apple_bundle_extra_attrs") -load("@prelude//cxx:groups_types.bzl", "Traversal") +load("@prelude//apple:resource_groups.bzl", "INLINED_RESOURCE_GROUP_MAP_ATTR") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") load("@prelude//decls/ios_rules.bzl", "AppleBundleExtension") load(":watch_transition.bzl", "watch_transition") @@ -34,7 +34,7 @@ def _apple_bundle_base_attrs(): "platform_binary": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.dep())), default = None), "product_name": attrs.option(attrs.string(), default = None), "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), default = None), + "resource_group_map": attrs.option(INLINED_RESOURCE_GROUP_MAP_ATTR, default = None), "skip_copying_swift_stdlib": attrs.option(attrs.bool(), default = None), "try_skip_code_signing": attrs.option(attrs.bool(), default = None), "xcode_product_type": attrs.option(attrs.string(), default = None), diff --git a/prelude/apple/user/resource_group_map.bzl b/prelude/apple/user/resource_group_map.bzl index 3240805d6..fd60d3175 100644 --- a/prelude/apple/user/resource_group_map.bzl +++ b/prelude/apple/user/resource_group_map.bzl @@ -7,6 +7,7 @@ load( 
"@prelude//apple:resource_groups.bzl", + "INLINED_RESOURCE_GROUP_MAP_ATTR", "ResourceGraphNode", # @unused Used as a type "ResourceGroupInfo", "create_resource_graph", @@ -22,7 +23,6 @@ load( load( "@prelude//cxx:groups_types.bzl", "GroupMapping", # @unused Used as a type - "Traversal", ) load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") @@ -102,6 +102,6 @@ registration_spec = RuleRegistrationSpec( name = "resource_group_map", impl = _impl, attrs = { - "map": attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), attrs.option(attrs.string()))))), + "map": INLINED_RESOURCE_GROUP_MAP_ATTR, }, ) diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index c848ea778..4783ab695 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -17,10 +17,6 @@ load( ) load("@prelude//cxx:cxx_sources.bzl", "get_srcs_with_flags") load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") -load( - "@prelude//cxx:groups_types.bzl", - "Group", -) load( "@prelude//cxx:link_groups_types.bzl", "LinkGroupInfo", # @unused Used as a type @@ -102,7 +98,8 @@ load( "CxxRuleSubTargetParams", ) load( - ":groups.bzl", + ":groups_types.bzl", + "Group", "MATCH_ALL_LABEL", "NO_MATCH_LABEL", ) diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index 235373864..616a1a449 100644 --- a/prelude/cxx/groups.bzl +++ b/prelude/cxx/groups.bzl @@ -37,13 +37,6 @@ load( "TargetRegexFilter", ) -# Label for special group mapping which makes every target associated with it to be included in all groups -MATCH_ALL_LABEL = "MATCH_ALL" - -# Label for special group mapping which makes every target associated with it to be linked directly -# against the final binary -NO_MATCH_LABEL = "NO_MATCH" - _VALID_ATTRS = [ "enable_distributed_thinlto", "enable_if_node_count_exceeds", diff --git a/prelude/cxx/groups_types.bzl b/prelude/cxx/groups_types.bzl index 1fdffc919..a9fafd8ce 100644 --- a/prelude/cxx/groups_types.bzl +++ 
b/prelude/cxx/groups_types.bzl @@ -11,6 +11,13 @@ load( "BuildTargetPattern", ) +# Label for special group mapping which makes every target associated with it to be included in all groups +MATCH_ALL_LABEL = "MATCH_ALL" + +# Label for special group mapping which makes every target associated with it to be linked directly +# against the final binary +NO_MATCH_LABEL = "NO_MATCH" + Traversal = enum( # Includes the target and all of it's transitive dependencies in the group. "tree", diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index f8a8be07c..9081c44c3 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -69,11 +69,14 @@ load( load(":cxx_toolchain_types.bzl", "PicBehavior") load( ":groups.bzl", - "MATCH_ALL_LABEL", - "NO_MATCH_LABEL", "compute_mappings", "parse_groups_definitions", ) +load( + ":groups_types.bzl", + "MATCH_ALL_LABEL", + "NO_MATCH_LABEL", +) load( ":link.bzl", "cxx_link_shared_library", diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index 71bca86fd..42d9267fa 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -10,7 +10,7 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) -load("@prelude//cxx:groups_types.bzl", "Traversal") +load("@prelude//apple:resource_groups.bzl", "INLINED_RESOURCE_GROUP_MAP_ATTR") load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:types.bzl", "Linkage") load(":apple_common.bzl", "apple_common") @@ -373,7 +373,7 @@ apple_bundle = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "platform_binary": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.dep())), default = None), "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": attrs.option(attrs.list(attrs.tuple(attrs.string(), attrs.list(attrs.tuple(attrs.dep(), attrs.enum(Traversal.values()), 
attrs.option(attrs.string()))))), default = None), + "resource_group_map": attrs.option(INLINED_RESOURCE_GROUP_MAP_ATTR, default = None), "skip_copying_swift_stdlib": attrs.option(attrs.bool(), default = None), "try_skip_code_signing": attrs.option(attrs.bool(), default = None), "xcode_product_type": attrs.option(attrs.string(), default = None), From 5a8b50c43cb90bba7168de6e97f6229142689345 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Sat, 16 Mar 2024 01:11:41 -0700 Subject: [PATCH 0464/1133] More private cargo_package.bzl Summary: Do not export what is not needed. Private makes code easier to read. Reviewed By: JakobDegen Differential Revision: D54981621 fbshipit-source-id: 94cf4002051021772a93e79a9aff36d78458e5e0 --- prelude/rust/cargo_package.bzl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/prelude/rust/cargo_package.bzl b/prelude/rust/cargo_package.bzl index d9b505107..b83eff1ae 100644 --- a/prelude/rust/cargo_package.bzl +++ b/prelude/rust/cargo_package.bzl @@ -8,7 +8,7 @@ load("@prelude//:prelude.bzl", "native") load("@prelude//utils:selects.bzl", "selects") -DEFAULT_PLATFORM_TEMPLATES = { +_DEFAULT_PLATFORM_TEMPLATES = { "linux-arm64": select({ "DEFAULT": False, "config//os:linux": select({ @@ -68,10 +68,10 @@ DEFAULT_PLATFORM_TEMPLATES = { }), } -def apply_platform_attrs( +def _apply_platform_attrs( platform_attrs, universal_attrs = {}, - templates = DEFAULT_PLATFORM_TEMPLATES): + templates = _DEFAULT_PLATFORM_TEMPLATES): combined_attrs = dict(universal_attrs) for platform, attrs in platform_attrs.items(): @@ -88,7 +88,7 @@ def apply_platform_attrs( return combined_attrs def _cargo_rust_binary(name, platform = {}, **kwargs): - kwargs = apply_platform_attrs(platform, kwargs) + kwargs = _apply_platform_attrs(platform, kwargs) rustc_flags = kwargs.get("rustc_flags", []) kwargs["rustc_flags"] = ["--cap-lints=allow"] + rustc_flags @@ -96,7 +96,7 @@ def _cargo_rust_binary(name, platform = {}, **kwargs): 
native.rust_binary(name = name, **kwargs) def _cargo_rust_library(name, platform = {}, **kwargs): - kwargs = apply_platform_attrs(platform, kwargs) + kwargs = _apply_platform_attrs(platform, kwargs) rustc_flags = kwargs.get("rustc_flags", []) kwargs["rustc_flags"] = ["--cap-lints=allow"] + rustc_flags From b75b0cb88822f92bd380808031082f004cd6c8d0 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sat, 16 Mar 2024 01:37:36 -0700 Subject: [PATCH 0465/1133] toolchain: `extra_rustc_flags` after target rustc flags Summary: So that if you pass something on the command line, it takes precedence over any package/target flags Reviewed By: dtolnay Differential Revision: D54976353 fbshipit-source-id: de0c21b48e4512f9efd270280310c9ccb514cd43 --- prelude/rust/build.bzl | 1 + prelude/rust/rust_toolchain.bzl | 3 +++ 2 files changed, 4 insertions(+) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index b903495bc..6763520db 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -1015,6 +1015,7 @@ def _compute_common_args( _rustc_flags(toolchain_info.rustc_check_flags) if is_check else [], _rustc_flags(toolchain_info.rustc_coverage_flags) if ctx.attrs.coverage else [], _rustc_flags(ctx.attrs.rustc_flags), + _rustc_flags(toolchain_info.extra_rustc_flags), cmd_args(ctx.attrs.features, format = '--cfg=feature="{}"'), dep_args, ) diff --git a/prelude/rust/rust_toolchain.bzl b/prelude/rust/rust_toolchain.bzl index 9e2a41955..e2d226e25 100644 --- a/prelude/rust/rust_toolchain.bzl +++ b/prelude/rust/rust_toolchain.bzl @@ -40,6 +40,9 @@ rust_toolchain_attrs = { "rustc_target_triple": provider_field(str | None, default = None), # Baseline compiler config "rustc_flags": provider_field(list[typing.Any], default = []), + # Rustc flags, except that they are applied on the command line after the + # target's rustc flags + "extra_rustc_flags": provider_field(list[typing.Any], default = []), # Extra flags when building binaries "rustc_binary_flags": 
provider_field(list[typing.Any], default = []), # Extra flags for doing check builds From c78611430ad6a130c37f86f343d99c859aa9654b Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Sat, 16 Mar 2024 12:16:09 -0700 Subject: [PATCH 0466/1133] Revert D54981621: More private cargo_package.bzl Differential Revision: D54981621 Original commit changeset: 94cf40020510 Original Phabricator Diff: D54981621 fbshipit-source-id: f84b70a543e60bd204cbfeeffe81a8c6ca14b677 --- prelude/rust/cargo_package.bzl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/prelude/rust/cargo_package.bzl b/prelude/rust/cargo_package.bzl index b83eff1ae..d9b505107 100644 --- a/prelude/rust/cargo_package.bzl +++ b/prelude/rust/cargo_package.bzl @@ -8,7 +8,7 @@ load("@prelude//:prelude.bzl", "native") load("@prelude//utils:selects.bzl", "selects") -_DEFAULT_PLATFORM_TEMPLATES = { +DEFAULT_PLATFORM_TEMPLATES = { "linux-arm64": select({ "DEFAULT": False, "config//os:linux": select({ @@ -68,10 +68,10 @@ _DEFAULT_PLATFORM_TEMPLATES = { }), } -def _apply_platform_attrs( +def apply_platform_attrs( platform_attrs, universal_attrs = {}, - templates = _DEFAULT_PLATFORM_TEMPLATES): + templates = DEFAULT_PLATFORM_TEMPLATES): combined_attrs = dict(universal_attrs) for platform, attrs in platform_attrs.items(): @@ -88,7 +88,7 @@ def _apply_platform_attrs( return combined_attrs def _cargo_rust_binary(name, platform = {}, **kwargs): - kwargs = _apply_platform_attrs(platform, kwargs) + kwargs = apply_platform_attrs(platform, kwargs) rustc_flags = kwargs.get("rustc_flags", []) kwargs["rustc_flags"] = ["--cap-lints=allow"] + rustc_flags @@ -96,7 +96,7 @@ def _cargo_rust_binary(name, platform = {}, **kwargs): native.rust_binary(name = name, **kwargs) def _cargo_rust_library(name, platform = {}, **kwargs): - kwargs = _apply_platform_attrs(platform, kwargs) + kwargs = apply_platform_attrs(platform, kwargs) rustc_flags = kwargs.get("rustc_flags", []) kwargs["rustc_flags"] = ["--cap-lints=allow"] 
+ rustc_flags From a7bc7b20eac1c11ce657916a86cc7c3c11d39640 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Sun, 17 Mar 2024 00:54:09 -0700 Subject: [PATCH 0467/1133] Explain what is cargo_package.bzl Reviewed By: JakobDegen Differential Revision: D54981687 fbshipit-source-id: 18795aaafe9ed7931fd0abaa708af40f936ec5db --- prelude/rust/cargo_package.bzl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/prelude/rust/cargo_package.bzl b/prelude/rust/cargo_package.bzl index d9b505107..b80b51f90 100644 --- a/prelude/rust/cargo_package.bzl +++ b/prelude/rust/cargo_package.bzl @@ -5,6 +5,9 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +# This file exports utilities for use with with reindeer. +# These are not used anywhere else in prelude and are not exported as prelude globals. + load("@prelude//:prelude.bzl", "native") load("@prelude//utils:selects.bzl", "selects") From ac196a8a10f5bae3225aa8bd6c1da4c43d91e6e8 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sun, 17 Mar 2024 17:34:36 -0700 Subject: [PATCH 0468/1133] rules: Pipelined metadata as a separate `Emit` Summary: Acknowledge in the `Emit` definition that there's two kinds of metadata. 
I want this for next diff reasons, but a good cleanup anyway Reviewed By: dtolnay Differential Revision: D54976705 fbshipit-source-id: 2cf786c45f0948395ba5747b94758e7c3933c92b --- prelude/rust/build.bzl | 35 +++++++++++++---------------------- prelude/rust/build_params.bzl | 11 +++++++++-- prelude/rust/rust_binary.bzl | 4 +++- prelude/rust/rust_library.bzl | 2 +- 4 files changed, 26 insertions(+), 26 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 6763520db..4bc08cb28 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -176,7 +176,7 @@ def generate_rustdoc( dep_ctx = compile_ctx.dep_ctx, # to make sure we get the rmeta's generated for the crate dependencies, # rather than full .rlibs - emit = Emit("metadata"), + emit = Emit("metadata-full"), params = params, default_roots = default_roots, is_rustdoc_test = False, @@ -255,7 +255,7 @@ def generate_rustdoc_coverage( dep_ctx = compile_ctx.dep_ctx, # to make sure we get the rmeta's generated for the crate dependencies, # rather than full .rlibs - emit = Emit("metadata"), + emit = Emit("metadata-full"), params = params, default_roots = default_roots, is_rustdoc_test = False, @@ -1151,17 +1151,6 @@ def _rustc_emit( simple_crate = attr_simple_crate_for_filenames(ctx) crate_type = params.crate_type - # Metadata for pipelining needs has enough info to be used as an input - # for dependents. To do this reliably, we actually emit "link" but - # suppress actual codegen with -Zno-codegen. - # - # We don't bother to do this with "codegen" crates - ie, ones which are - # linked into an artifact like binaries and dylib, since they're not - # used as a pipelined dependency input. 
- pipeline_meta = emit == Emit("metadata") and \ - toolchain_info.pipelined and \ - not crate_type_codegen(crate_type) - emit_args = cmd_args() emit_env = {} extra_out = None @@ -1171,11 +1160,7 @@ def _rustc_emit( else: extra_hash = "-" + _metadata(ctx.label, False)[1] emit_args.add("-Cextra-filename={}".format(extra_hash)) - if pipeline_meta: - # Make sure hollow rlibs are distinct from real ones - filename = subdir + "/hollow/" + output_filename(simple_crate, Emit("link"), params, extra_hash) - else: - filename = subdir + "/" + output_filename(simple_crate, emit, params, extra_hash) + filename = subdir + "/" + output_filename(simple_crate, emit, params, extra_hash) emit_output = ctx.actions.declare_output(filename) @@ -1192,7 +1177,11 @@ def _rustc_emit( # command or else there are "found possibly newer version of crate" errors. emit_env["RUSTC_BOOTSTRAP"] = "1" - if pipeline_meta: + # We don't ever have metadata-only deps on codegen crates, so no need to do + # the slower thing + if emit == Emit("metadata-full") and \ + not crate_type_codegen(crate_type) and \ + toolchain_info.pipelined: # If we're doing a pipelined build, instead of emitting an actual rmeta # we emit a "hollow" .rlib - ie, it only contains lib.rmeta and no object # code. It should contain full information needed by any dependent @@ -1202,11 +1191,13 @@ def _rustc_emit( # Emit("link") operations are allowed to diverge without causing them to # get different crate hashes. emit_args.add("-Zno-codegen") - effective_emit = Emit("link") + effective_emit = "link" + elif emit == Emit("metadata-full") or emit == Emit("metadata-fast"): + effective_emit = "metadata" else: - effective_emit = emit + effective_emit = emit.value - emit_args.add(cmd_args("--emit=", effective_emit.value, "=", emit_output.as_output(), delimiter = "")) + emit_args.add(cmd_args("--emit=", effective_emit, "=", emit_output.as_output(), delimiter = "")) # Strip file extension from directory name. 
base, _ext = paths.split_extension(output_filename(simple_crate, emit, params)) diff --git a/prelude/rust/build_params.bzl b/prelude/rust/build_params.bzl index d0cdc8f36..a9120a87c 100644 --- a/prelude/rust/build_params.bzl +++ b/prelude/rust/build_params.bzl @@ -64,11 +64,17 @@ Emit = enum( "llvm-bc", "llvm-ir", "obj", - "metadata", "link", "dep-info", "mir", "expand", # pseudo emit alias for -Zunpretty=expanded + # Rustc actually has two different forms of metadata: + # - The full flavor, which is what's outputted when passing + # `--emit link,metadata` and can be used as a part of pipelined builds + # - The fast flavor, which is emitted from `--emit metadata`, is faster to + # build, but cannot be used in pipelined builds. + "metadata-full", + "metadata-fast", ) # Emitting this artifact generates code @@ -100,7 +106,8 @@ _EMIT_PREFIX_SUFFIX = { Emit("llvm-bc"): ("", ".bc"), Emit("llvm-ir"): ("", ".ll"), Emit("obj"): ("", ".o"), - Emit("metadata"): ("lib", ".rmeta"), # even binaries get called 'libfoo.rmeta' + Emit("metadata-fast"): ("lib", ".rmeta"), # even binaries get called 'libfoo.rmeta' + Emit("metadata-full"): (None, None), # Hollow rlibs, so they get the same name Emit("link"): (None, None), # crate type and reloc model dependent Emit("dep-info"): ("", ".d"), Emit("mir"): (None, ".mir"), diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index f4cabb9ac..5fb9f9c9e 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -196,7 +196,9 @@ def _rust_binary_common( link, meta = rust_compile_multi( ctx = ctx, compile_ctx = compile_ctx, - emits = [Emit("link"), Emit("metadata")], + # Use metadata-full to ensure that we share dependencies with the + # link variant + emits = [Emit("link"), Emit("metadata-full")], params = params, default_roots = default_roots, extra_link_args = executable_args.extra_link_args, diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index cf18650a9..db74a67ac 100644 
--- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -467,7 +467,7 @@ def _build_library_artifacts( link, meta = rust_compile_multi( ctx = ctx, compile_ctx = compile_ctx, - emits = [Emit("link"), Emit("metadata")], + emits = [Emit("link"), Emit("metadata-full")], params = params, default_roots = ["lib.rs"], ) From 50efb57a68eceb51e9486ec3154c41dc542f5e0d Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sun, 17 Mar 2024 17:34:36 -0700 Subject: [PATCH 0469/1133] rules: Remove a doctest check Summary: Doctest crate types are always either `"binary"` or (in some cases) `"proc macro"`. Both fail the previous check in the chain Reviewed By: dtolnay Differential Revision: D54976704 fbshipit-source-id: 657e51a23f1086e9f5c044059aa3bd1058441be5 --- prelude/rust/build.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 4bc08cb28..1fab856c1 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -756,7 +756,7 @@ def dependency_args( # The benefit of doing this is that there's no requirment that the # dependency's generated code be provided to the linker via an rlib. It # could be provided by other means, say, a link group - use_rmeta = is_check or compile_ctx.dep_ctx.advanced_unstable_linking or (compile_ctx.toolchain_info.pipelined and not crate_type_codegen(crate_type) and not is_rustdoc_test) + use_rmeta = is_check or compile_ctx.dep_ctx.advanced_unstable_linking or (compile_ctx.toolchain_info.pipelined and not crate_type_codegen(crate_type)) # Use rmeta dependencies whenever possible because they # should be cheaper to produce. 
From cae0f348e6488eb794dadb85626834501148eae0 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sun, 17 Mar 2024 17:34:36 -0700 Subject: [PATCH 0470/1133] rules: Always use proc macro rlibs Summary: These are always the same anyway: https://www.internalfb.com/code/fbsource/[fbf356081319]/fbcode/buck2/prelude/rust/rust_library.bzl?lines=464 So might as well Reviewed By: dtolnay Differential Revision: D54976702 fbshipit-source-id: cdf55b34127024de74ae4b020c96c64ca63f5ab5 --- prelude/rust/build.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 1fab856c1..1529d2dd6 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -770,7 +770,7 @@ def dependency_args( for marker in strategy.transitive_proc_macro_deps.keys(): info = available_proc_macros[marker.label][RustLinkInfo] strategy = strategy_info(info, dep_link_strategy) - transitive_deps[strategy.rmeta if use_rmeta else strategy.rlib] = info.crate + transitive_deps[strategy.rlib] = info.crate args.add(extern_arg(dep.flags, crate, artifact)) crate_targets.append((crate, dep.label)) From 863c0534f22265aada5724ae2e8ba388138c73bb Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sun, 17 Mar 2024 17:34:36 -0700 Subject: [PATCH 0471/1133] rules: Introduce `MetadataKind` Summary: Need to add a variant here later this stack, but also we'll want even more in the future (`metadata-miri`) Reviewed By: dtolnay Differential Revision: D54976703 fbshipit-source-id: c7461d533cbffb121ff6c86dfb4c5f40a32c45db --- prelude/rust/build.bzl | 47 +++++++++++++++--------------- prelude/rust/build_params.bzl | 15 ++++++++-- prelude/rust/cargo_buildscript.bzl | 5 ++-- 3 files changed, 39 insertions(+), 28 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 1529d2dd6..95ab687c9 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -49,9 +49,10 @@ load( "BuildParams", # @unused Used as a type "CrateType", "Emit", + 
"MetadataKind", "crate_type_codegen", "crate_type_linked", - "emit_needs_codegen", + "dep_metadata_of_emit", "output_filename", ) load( @@ -724,9 +725,8 @@ def dependency_args( compile_ctx: CompileContext | None, deps: list[RustDependency], subdir: str, - crate_type: CrateType, dep_link_strategy: LinkStrategy, - is_check: bool, + dep_metadata_kind: MetadataKind, is_rustdoc_test: bool) -> (cmd_args, list[(CrateName, Label)]): args = cmd_args() transitive_deps = {} @@ -743,24 +743,9 @@ def dependency_args( strategy = strategy_info(dep.info, dep_link_strategy) - # With `advanced_unstable_linking`, we unconditionally pass the metadata - # artifacts. There are two things that work together to make this possible - # in the case of binaries: - # - # 1. The actual rlibs appear in the link providers, so they'll still be - # available for the linker to link in - # 2. The metadata artifacts aren't rmetas, but rather rlibs that just - # don't contain any generated code. Rustc can't distinguish these - # from real rlibs, and so doesn't throw an error - # - # The benefit of doing this is that there's no requirment that the - # dependency's generated code be provided to the linker via an rlib. It - # could be provided by other means, say, a link group - use_rmeta = is_check or compile_ctx.dep_ctx.advanced_unstable_linking or (compile_ctx.toolchain_info.pipelined and not crate_type_codegen(crate_type)) - # Use rmeta dependencies whenever possible because they # should be cheaper to produce. 
- if use_rmeta: + if dep_metadata_kind == MetadataKind("full"): artifact = strategy.rmeta transitive_artifacts = strategy.transitive_rmeta_deps else: @@ -791,7 +776,7 @@ def dependency_args( else: simple_artifacts[artifact] = None - prefix = "{}-deps{}".format(subdir, "-check" if is_check else "") + prefix = "{}-deps{}".format(subdir, dep_metadata_kind.value) if simple_artifacts: args.add(simple_symlinked_dirs(ctx, prefix, simple_artifacts)) if dynamic_artifacts: @@ -921,16 +906,32 @@ def _compute_common_args( if exec_is_windows: crate_root = crate_root.replace("/", "\\") - is_check = not emit_needs_codegen(emit) + # With `advanced_unstable_linking`, we unconditionally pass the metadata + # artifacts. There are two things that work together to make this possible + # in the case of binaries: + # + # 1. The actual rlibs appear in the link providers, so they'll still be + # available for the linker to link in + # 2. The metadata artifacts aren't rmetas, but rather rlibs that just + # don't contain any generated code. Rustc can't distinguish these + # from real rlibs, and so doesn't throw an error + # + # The benefit of doing this is that there's no requirment that the + # dependency's generated code be provided to the linker via an rlib. 
It + # could be provided by other means, say, a link group + dep_metadata_kind = dep_metadata_of_emit(emit) + is_check = dep_metadata_kind != MetadataKind("link") + if compile_ctx.dep_ctx.advanced_unstable_linking or (compile_ctx.toolchain_info.pipelined and not crate_type_codegen(crate_type)): + if dep_metadata_kind == MetadataKind("link"): + dep_metadata_kind = MetadataKind("full") dep_args, crate_map = dependency_args( ctx = ctx, compile_ctx = compile_ctx, deps = resolve_rust_deps(ctx, dep_ctx), subdir = subdir, - crate_type = crate_type, dep_link_strategy = params.dep_link_strategy, - is_check = is_check, + dep_metadata_kind = dep_metadata_kind, is_rustdoc_test = is_rustdoc_test, ) diff --git a/prelude/rust/build_params.bzl b/prelude/rust/build_params.bzl index a9120a87c..f647b9554 100644 --- a/prelude/rust/build_params.bzl +++ b/prelude/rust/build_params.bzl @@ -77,9 +77,20 @@ Emit = enum( "metadata-fast", ) +# The different quantities of Rust metadata that can be requested from +# dependencies. Each one corresponds to an `Emit` variant, but not all `Emit` +# variants output metadata +MetadataKind = enum( + "full", + "link", +) + # Emitting this artifact generates code -def emit_needs_codegen(emit: Emit) -> bool: - return emit.value in ("asm", "llvm-bc", "llvm-ir", "obj", "link", "mir") +def dep_metadata_of_emit(emit: Emit) -> MetadataKind: + if emit.value in ("asm", "llvm-bc", "llvm-ir", "obj", "link", "mir"): + return MetadataKind("link") + else: + return MetadataKind("full") # Represents a way of invoking rustc to produce an artifact. These values are computed from # information such as the rule type, linkstyle, crate type, etc. 
diff --git a/prelude/rust/cargo_buildscript.bzl b/prelude/rust/cargo_buildscript.bzl index 3602bab0a..6136d7a6d 100644 --- a/prelude/rust/cargo_buildscript.bzl +++ b/prelude/rust/cargo_buildscript.bzl @@ -26,7 +26,7 @@ load("@prelude//rust:rust_toolchain.bzl", "RustToolchainInfo") load("@prelude//rust:targets.bzl", "targets") load("@prelude//decls/toolchains_common.bzl", "toolchains_common") load(":build.bzl", "dependency_args") -load(":build_params.bzl", "CrateType") +load(":build_params.bzl", "MetadataKind") load(":context.bzl", "DepCollectionContext") load(":link_info.bzl", "RustProcMacroPlugin", "gather_explicit_sysroot_deps", "resolve_rust_deps_inner") load(":rust_toolchain.bzl", "PanicRuntime") @@ -52,9 +52,8 @@ def _make_rustc_shim(ctx: AnalysisContext, cwd: Artifact) -> cmd_args: None, # compile_ctx deps, "any", # subdir - CrateType("rlib"), LinkStrategy("static_pic"), - True, # is_check + MetadataKind("full"), False, # is_rustdoc_test ) From b76f0071bf69322b969d1f140a66325dfcec0596 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sun, 17 Mar 2024 17:34:36 -0700 Subject: [PATCH 0472/1133] rules: `MetadataKind`-keyed dicts Summary: There's a bunch of places where we currently have rlib + meta versions of the code, this diff replaces a lot of those with dicts Reviewed By: dtolnay Differential Revision: D54976700 fbshipit-source-id: cdd80625f44e2b8b6595e189644ec38079fd9612 --- prelude/rust/build.bzl | 12 ++---- prelude/rust/link_info.bzl | 16 ++++---- prelude/rust/rust_library.bzl | 73 ++++++++++++++++------------------- 3 files changed, 43 insertions(+), 58 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 95ab687c9..5ef590976 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -743,19 +743,13 @@ def dependency_args( strategy = strategy_info(dep.info, dep_link_strategy) - # Use rmeta dependencies whenever possible because they - # should be cheaper to produce. 
- if dep_metadata_kind == MetadataKind("full"): - artifact = strategy.rmeta - transitive_artifacts = strategy.transitive_rmeta_deps - else: - artifact = strategy.rlib - transitive_artifacts = strategy.transitive_deps + artifact = strategy.outputs[dep_metadata_kind] + transitive_artifacts = strategy.transitive_deps[dep_metadata_kind] for marker in strategy.transitive_proc_macro_deps.keys(): info = available_proc_macros[marker.label][RustLinkInfo] strategy = strategy_info(info, dep_link_strategy) - transitive_deps[strategy.rlib] = info.crate + transitive_deps[strategy.outputs[MetadataKind("link")]] = info.crate args.add(extern_arg(dep.flags, crate, artifact)) crate_targets.append((crate, dep.label)) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 85fafffa1..8c7a10f92 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -63,6 +63,10 @@ load( "@prelude//linking:types.bzl", "Linkage", # @unused Used as a type ) +load( + ":build_params.bzl", + "MetadataKind", # @unused Used as a type +) load( ":context.bzl", "CrateName", # @unused Used as a type @@ -92,18 +96,12 @@ RustProcMacroMarker = provider(fields = { # Information which is keyed on link_style RustLinkStrategyInfo = record( - # Path to library or binary - rlib = field(Artifact), + # Path to the rlib, rmeta, dylib, etc. + outputs = field(dict[MetadataKind, Artifact]), # Transitive dependencies which are relevant to the consumer. For crate types which do not # propagate their deps (specifically proc macros), this set is empty # This does not include the proc macros, which are passed separately in `RustLinkInfo` - transitive_deps = field(dict[Artifact, CrateName]), - - # Path for library metadata (used for check or pipelining) - rmeta = field(Artifact), - # Transitive rmeta deps. 
This is the same dict as `transitive_deps`, except that it has the - # rmeta and not the rlib artifact - transitive_rmeta_deps = field(dict[Artifact, CrateName]), + transitive_deps = field(dict[MetadataKind, dict[Artifact, CrateName]]), transitive_proc_macro_deps = field(dict[RustProcMacroMarker, ()]), # Path to PDB file with Windows debug data. diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index db74a67ac..555e732b4 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -83,6 +83,7 @@ load( "CrateType", "Emit", "LinkageLang", + "MetadataKind", "RuleType", "build_params", ) @@ -165,16 +166,14 @@ def prebuilt_rust_library_impl(ctx: AnalysisContext) -> list[Provider]: crate = attr_crate(ctx) strategies = {} for link_strategy in LinkStrategy: - tdeps, tmetadeps, external_debug_info, tprocmacrodeps = _compute_transitive_deps(ctx, dep_ctx, link_strategy) + tdeps, external_debug_info, tprocmacrodeps = _compute_transitive_deps(ctx, dep_ctx, link_strategy) external_debug_info = make_artifact_tset( actions = ctx.actions, children = external_debug_info, ) strategies[link_strategy] = RustLinkStrategyInfo( - rlib = ctx.attrs.rlib, + outputs = {m: ctx.attrs.rlib for m in MetadataKind}, transitive_deps = tdeps, - rmeta = ctx.attrs.rlib, - transitive_rmeta_deps = tmetadeps, transitive_proc_macro_deps = tprocmacrodeps, pdb = None, external_debug_info = external_debug_info, @@ -261,18 +260,18 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: native_param_artifact = {} check_artifacts = None - for params, (link, meta) in artifacts.items(): + for params, outputs in artifacts.items(): if LinkageLang("rust") in param_lang[params]: # Grab the check output for all kinds of builds to use # in the check subtarget. The link style doesn't matter # so pick the first. 
if check_artifacts == None: - check_artifacts = {"check": meta.output} - check_artifacts.update(meta.diag) + check_artifacts = {"check": outputs[MetadataKind("full")].output} + check_artifacts.update(outputs[MetadataKind("full")].diag) - rust_param_artifact[params] = (link, meta) + rust_param_artifact[params] = outputs if LinkageLang("native") in param_lang[params] or LinkageLang("native-unbundled") in param_lang[params]: - native_param_artifact[params] = link + native_param_artifact[params] = outputs[MetadataKind("link")] # For doctests, we need to know two things to know how to link them. The # first is that we need a link strategy, which affects how deps of this @@ -350,7 +349,7 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx = ctx, compile_ctx = compile_ctx, link_strategy = rustdoc_test_params.dep_link_strategy, - rlib = rust_param_artifact[static_library_params][0].output, + rlib = rust_param_artifact[static_library_params][MetadataKind("link")].output, params = rustdoc_test_params, default_roots = default_roots, ) @@ -453,7 +452,7 @@ def _build_params_for_styles( def _build_library_artifacts( ctx: AnalysisContext, compile_ctx: CompileContext, - params: list[BuildParams]) -> dict[BuildParams, (RustcOutput, RustcOutput)]: + params: list[BuildParams]) -> dict[BuildParams, dict[MetadataKind, RustcOutput]]: """ Generate the actual actions to build various output artifacts. Given the set parameters we need, return a mapping to the linkable and metadata artifacts. 
@@ -472,7 +471,10 @@ def _build_library_artifacts( default_roots = ["lib.rs"], ) - param_artifact[params] = (link, meta) + param_artifact[params] = { + MetadataKind("link"): link, + MetadataKind("full"): meta, + } return param_artifact @@ -481,8 +483,7 @@ def _handle_rust_artifact( dep_ctx: DepCollectionContext, crate_type: CrateType, link_strategy: LinkStrategy, - link: RustcOutput, - meta: RustcOutput) -> RustLinkStrategyInfo: + outputs: dict[MetadataKind, RustcOutput]) -> RustLinkStrategyInfo: """ Return the RustLinkInfo for a given set of artifacts. The main consideration is computing the right set of dependencies. @@ -491,41 +492,38 @@ def _handle_rust_artifact( # If we're a crate where our consumers should care about transitive deps, # then compute them (specifically, not proc-macro). if crate_type != CrateType("proc-macro"): - tdeps, tmetadeps, external_debug_info, tprocmacrodeps = _compute_transitive_deps(ctx, dep_ctx, link_strategy) + tdeps, external_debug_info, tprocmacrodeps = _compute_transitive_deps(ctx, dep_ctx, link_strategy) else: - tdeps, tmetadeps, external_debug_info, tprocmacrodeps = {}, {}, [], {} + tdeps, external_debug_info, tprocmacrodeps = {e: {} for e in MetadataKind}, [], {} + link_output = outputs[MetadataKind("link")] if not ctx.attrs.proc_macro: external_debug_info = make_artifact_tset( actions = ctx.actions, label = ctx.label, - artifacts = filter(None, [link.dwo_output_directory]), + artifacts = filter(None, [link_output.dwo_output_directory]), children = external_debug_info, ) return RustLinkStrategyInfo( - rlib = link.output, + outputs = {m: x.output for m, x in outputs.items()}, transitive_deps = tdeps, - rmeta = meta.output, - transitive_rmeta_deps = tmetadeps, transitive_proc_macro_deps = tprocmacrodeps, - pdb = link.pdb, + pdb = link_output.pdb, external_debug_info = external_debug_info, ) else: # Proc macro deps are always the real thing return RustLinkStrategyInfo( - rlib = link.output, + outputs = {m: link_output.output for 
m in MetadataKind}, transitive_deps = tdeps, - rmeta = link.output, - transitive_rmeta_deps = tdeps, transitive_proc_macro_deps = tprocmacrodeps, - pdb = link.pdb, + pdb = link_output.pdb, external_debug_info = ArtifactTSet(), ) def _default_providers( lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], - param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)], + param_artifact: dict[BuildParams, dict[MetadataKind, RustcOutput]], rustdoc: Artifact, rustdoc_test: cmd_args, doctests_enabled: bool, @@ -549,7 +547,7 @@ def _default_providers( # determined by `get_output_styles_for_linkage` in `linking/link_info.bzl`. # Do we want to do the same? for output_style in LibOutputStyle: - link, _ = param_artifact[lang_style_param[(LinkageLang("rust"), output_style)]] + link = param_artifact[lang_style_param[(LinkageLang("rust"), output_style)]][MetadataKind("link")] nested_sub_targets = {} if link.pdb: nested_sub_targets[PDB_SUB_TARGET] = get_pdb_providers(pdb = link.pdb, binary = link.output) @@ -690,7 +688,7 @@ def _rust_providers( ctx: AnalysisContext, compile_ctx: CompileContext, lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], - param_artifact: dict[BuildParams, (RustcOutput, RustcOutput)], + param_artifact: dict[BuildParams, dict[MetadataKind, RustcOutput]], link_infos: dict[LibOutputStyle, LinkInfos]) -> RustLinkInfo: """ Return the set of providers for Rust linkage. 
@@ -703,8 +701,7 @@ def _rust_providers( strategy_info = {} for link_strategy in LinkStrategy: params = lang_style_param[(LinkageLang("rust"), get_lib_output_style(link_strategy, preferred_linkage, pic_behavior))] - link, meta = param_artifact[params] - strategy_info[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, params.crate_type, link_strategy, link, meta) + strategy_info[link_strategy] = _handle_rust_artifact(ctx, compile_ctx.dep_ctx, params.crate_type, link_strategy, param_artifact[params]) merged_link_info, shared_libs, inherited_graphs, inherited_link_deps = _rust_link_providers(ctx, compile_ctx.dep_ctx, compile_ctx.cxx_toolchain_info, link_infos, Linkage(ctx.attrs.preferred_linkage)) @@ -905,13 +902,11 @@ def _compute_transitive_deps( ctx: AnalysisContext, dep_ctx: DepCollectionContext, dep_link_strategy: LinkStrategy) -> ( - dict[Artifact, CrateName], - dict[Artifact, CrateName], + dict[MetadataKind, dict[Artifact, CrateName]], list[ArtifactTSet], dict[RustProcMacroMarker, ()], ): - transitive_deps = {} - transitive_rmeta_deps = {} + transitive_deps = {m: {} for m in MetadataKind} external_debug_info = [] transitive_proc_macro_deps = {} @@ -922,17 +917,15 @@ def _compute_transitive_deps( # We don't want to propagate proc macros directly, and they have no transitive deps continue strategy = strategy_info(dep.info, dep_link_strategy) - transitive_deps[strategy.rlib] = dep.info.crate - transitive_deps.update(strategy.transitive_deps) - - transitive_rmeta_deps[strategy.rmeta] = dep.info.crate - transitive_rmeta_deps.update(strategy.transitive_rmeta_deps) + for m in MetadataKind: + transitive_deps[m][strategy.outputs[m]] = dep.info.crate + transitive_deps[m].update(strategy.transitive_deps[m]) external_debug_info.append(strategy.external_debug_info) transitive_proc_macro_deps.update(strategy.transitive_proc_macro_deps) - return transitive_deps, transitive_rmeta_deps, external_debug_info, transitive_proc_macro_deps + return transitive_deps, 
external_debug_info, transitive_proc_macro_deps def rust_library_macro_wrapper(rust_library: typing.Callable) -> typing.Callable: def wrapper(**kwargs): From 7aa092443c1fb1c47186f55a50139174499dc877 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sun, 17 Mar 2024 17:34:36 -0700 Subject: [PATCH 0473/1133] rules: Separate fast metadata graph Summary: Before this diff, fast metadata generation still used full metadata from its dependencies. This diff changes that, and effectively puts fast metadata onto a completely disjoint artifact graph from the remaining builds. For most purposes this is undesirable because it prevents sharing. However, in some cases that doesn't matter. Specifically, I have in mind rust-analyzer check builds, which are anyway on a separate isolation dir that will never see full builds. I'm not planning to expose this as a subtarget. We can add that later if we want to, but the problem is that we probably wouldn't just need `check-fast`, but also `diag.json-fast`, `clippy.json-fast`, etc. and that gets very messy very quickly. The plan for now is to just access this thing via BXL, more details in coming diffs Reviewed By: dtolnay Differential Revision: D54977133 fbshipit-source-id: 81f6f40e489c214fbce08c2fe1cbced3b65008fe --- prelude/rust/build_params.bzl | 3 +++ prelude/rust/rust_library.bzl | 7 ++++--- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/prelude/rust/build_params.bzl b/prelude/rust/build_params.bzl index f647b9554..4db3a6dd3 100644 --- a/prelude/rust/build_params.bzl +++ b/prelude/rust/build_params.bzl @@ -81,6 +81,7 @@ Emit = enum( # dependencies. 
Each one corresponds to an `Emit` variant, but not all `Emit` # variants output metadata MetadataKind = enum( + "fast", "full", "link", ) @@ -89,6 +90,8 @@ MetadataKind = enum( def dep_metadata_of_emit(emit: Emit) -> MetadataKind: if emit.value in ("asm", "llvm-bc", "llvm-ir", "obj", "link", "mir"): return MetadataKind("link") + elif emit.value == "metadata-fast": + return MetadataKind("fast") else: return MetadataKind("full") diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 555e732b4..5cd885cc2 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -463,17 +463,18 @@ def _build_library_artifacts( # Separate actions for each emit type # # In principle we don't really need metadata for C++-only artifacts, but I don't think it hurts - link, meta = rust_compile_multi( + link, meta_full, meta_fast = rust_compile_multi( ctx = ctx, compile_ctx = compile_ctx, - emits = [Emit("link"), Emit("metadata-full")], + emits = [Emit("link"), Emit("metadata-full"), Emit("metadata-fast")], params = params, default_roots = ["lib.rs"], ) param_artifact[params] = { MetadataKind("link"): link, - MetadataKind("full"): meta, + MetadataKind("full"): meta_full, + MetadataKind("fast"): meta_fast, } return param_artifact From 5311301df0fe3ba5dd2f911a8f98d27350c997d4 Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sun, 17 Mar 2024 17:34:36 -0700 Subject: [PATCH 0474/1133] rules: Expand `diag` dict in `RustcOutput` Summary: Also puts `RustcOutput` in its own file. 
In the next diff I want to expose these outputs to BXL Reviewed By: dtolnay Differential Revision: D54977132 fbshipit-source-id: f00662ad57d8e9c7279a74d0d26df28dddd0cec4 --- prelude/rust/build.bzl | 35 ++++++++++++++-------------------- prelude/rust/outputs.bzl | 36 +++++++++++++++++++++++++++++++++++ prelude/rust/rust_binary.bzl | 3 ++- prelude/rust/rust_library.bzl | 9 ++++++--- 4 files changed, 58 insertions(+), 25 deletions(-) create mode 100644 prelude/rust/outputs.bzl diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 5ef590976..ff3d3480f 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -7,7 +7,6 @@ load( "@prelude//:artifact_tset.bzl", - "ArtifactTSet", # @unused Used as a type "project_artifacts", ) load("@prelude//:local_only.bzl", "link_cxx_binary_locally") @@ -84,21 +83,10 @@ load( "resolve_rust_deps", "strategy_info", ) +load(":outputs.bzl", "RustcOutput") load(":resources.bzl", "rust_attr_resources") load(":rust_toolchain.bzl", "PanicRuntime", "RustToolchainInfo") -RustcOutput = record( - output = field(Artifact), - stripped_output = field(Artifact), - diag = field(dict[str, Artifact]), - pdb = field([Artifact, None]), - dwp_output = field([Artifact, None]), - # Zero or more Split DWARF debug info files are emitted into this directory - # with unpredictable filenames. 
- dwo_output_directory = field([Artifact, None]), - extra_external_debug_info = field(list[ArtifactTSet]), -) - def compile_context(ctx: AnalysisContext) -> CompileContext: toolchain_info = ctx.attrs._rust_toolchain[RustToolchainInfo] cxx_toolchain_info = get_cxx_toolchain_info(ctx) @@ -573,7 +561,7 @@ def rust_compile( rustc_cmd.add(cmd_args(linker_argsfile, format = "-Clink-arg=@{}")) rustc_cmd.hidden(link_args_output.hidden) - (diag, build_status) = _rustc_invoke( + diag_txt, diag_json, build_status = _rustc_invoke( ctx = ctx, compile_ctx = compile_ctx, prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), @@ -610,7 +598,7 @@ def rust_compile( {"clippy.toml": toolchain_info.clippy_toml}, ) clippy_env["CLIPPY_CONF_DIR"] = clippy_conf_dir - (clippy_diag, _) = _rustc_invoke( + clippy_txt, clippy_json, _ = _rustc_invoke( ctx = ctx, compile_ctx = compile_ctx, prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), @@ -624,17 +612,18 @@ def rust_compile( allow_cache_upload = False, crate_map = common_args.crate_map, ) - diag.update(clippy_diag) + else: + clippy_txt = None + clippy_json = None if toolchain_info.failure_filter: # This is only needed when this action's output is being used as an # input, so we only need standard diagnostics (clippy is always # asked for explicitly). 
- stderr = diag["diag.txt"] filter_prov = RustFailureFilter( buildstatus = build_status, required = emit_op.output, - stderr = stderr, + stderr = diag_txt, ) filtered_output = failure_filter( @@ -706,7 +695,11 @@ def rust_compile( return RustcOutput( output = filtered_output, stripped_output = stripped_output, - diag = diag, + diag_txt = diag_txt, + diag_json = diag_json, + # Only available on metadata-like emits + clippy_txt = clippy_txt, + clippy_json = clippy_json, pdb = pdb_artifact, dwp_output = dwp_output, dwo_output_directory = dwo_output_directory, @@ -1225,7 +1218,7 @@ def _rustc_invoke( is_binary: bool, allow_cache_upload: bool, crate_map: list[(CrateName, Label)], - env: dict[str, str | ResolvedStringWithMacros | Artifact]) -> (dict[str, Artifact], [Artifact, None]): + env: dict[str, str | ResolvedStringWithMacros | Artifact]) -> (Artifact, Artifact, [Artifact, None]): exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" toolchain_info = compile_ctx.toolchain_info @@ -1291,7 +1284,7 @@ def _rustc_invoke( allow_cache_upload = allow_cache_upload, ) - return ({diag + ".json": json_diag, diag + ".txt": txt_diag}, build_status) + return (txt_diag, json_diag, build_status) # Our rustc and rustdoc commands can have arbitrarily large number of `--extern` # flags, so write to file to avoid hitting the platform's limit on command line diff --git a/prelude/rust/outputs.bzl b/prelude/rust/outputs.bzl new file mode 100644 index 000000000..cc7423e64 --- /dev/null +++ b/prelude/rust/outputs.bzl @@ -0,0 +1,36 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load( + "@prelude//:artifact_tset.bzl", + "ArtifactTSet", # @unused Used as a type +) + +RustcOutput = record( + output = field(Artifact), + stripped_output = field(Artifact), + diag_txt = field(Artifact), + diag_json = field(Artifact), + # Only available on metadata-like emits + clippy_txt = field(Artifact | None), + clippy_json = field(Artifact | None), + pdb = field([Artifact, None]), + dwp_output = field([Artifact, None]), + # Zero or more Split DWARF debug info files are emitted into this directory + # with unpredictable filenames. + dwo_output_directory = field([Artifact, None]), + extra_external_debug_info = field(list[ArtifactTSet]), +) + +def output_as_diag_subtargets(o: RustcOutput) -> dict[str, Artifact]: + return { + "check": o.output, + "clippy.json": o.clippy_json, + "clippy.txt": o.clippy_txt, + "diag.json": o.diag_json, + "diag.txt": o.diag_txt, + } diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 5fb9f9c9e..566a1e544 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -76,6 +76,7 @@ load( "inherited_rust_cxx_link_group_info", "inherited_shared_libs", ) +load(":outputs.bzl", "output_as_diag_subtargets") load(":resources.bzl", "rust_attr_resources") _CompileOutputs = record( @@ -210,7 +211,7 @@ def _rust_binary_common( ) args = cmd_args(link.output).hidden(executable_args.runtime_files) - extra_targets = [("check", meta.output)] + meta.diag.items() + extra_targets = output_as_diag_subtargets(meta).items() external_debug_info = project_artifacts( actions = ctx.actions, tsets = [inherited_external_debug_info( diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 5cd885cc2..ca43da26f 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -69,7 +69,6 @@ load("@prelude//linking:types.bzl", "Linkage") load("@prelude//os_lookup:defs.bzl", "OsLookup") load( ":build.bzl", - "RustcOutput", # @unused Used as a type "compile_context", 
"generate_rustdoc", "generate_rustdoc_coverage", @@ -109,6 +108,11 @@ load( "resolve_rust_deps", "strategy_info", ) +load( + ":outputs.bzl", + "RustcOutput", # @unused Used as a type + "output_as_diag_subtargets", +) load(":proc_macro_alias.bzl", "rust_proc_macro_alias") load(":resources.bzl", "rust_attr_resources") load(":rust_toolchain.bzl", "RustToolchainInfo") @@ -266,8 +270,7 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: # in the check subtarget. The link style doesn't matter # so pick the first. if check_artifacts == None: - check_artifacts = {"check": outputs[MetadataKind("full")].output} - check_artifacts.update(outputs[MetadataKind("full")].diag) + check_artifacts = output_as_diag_subtargets(outputs[MetadataKind("full")]) rust_param_artifact[params] = outputs if LinkageLang("native") in param_lang[params] or LinkageLang("native-unbundled") in param_lang[params]: From ac668c6f3df050f52e28838384c9429be856e67a Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sun, 17 Mar 2024 17:34:36 -0700 Subject: [PATCH 0475/1133] rules: Simplify check artifacts creation Summary: This makes the situation around the lib output styles/link strategies a bit more clear, but also, is just a cleanup too. 
Reviewed By: dtolnay Differential Revision: D54977134 fbshipit-source-id: f0fd147f4cd97bd0c35515207aef6d179bb49b36 --- prelude/rust/rust_binary.bzl | 32 +++++++++++++++++++++----------- prelude/rust/rust_library.bzl | 13 ++++++------- 2 files changed, 27 insertions(+), 18 deletions(-) diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 566a1e544..603dfc678 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -56,7 +56,6 @@ load( "compile_context", "generate_rustdoc", "rust_compile", - "rust_compile_multi", ) load( ":build_params.bzl", @@ -82,7 +81,6 @@ load(":resources.bzl", "rust_attr_resources") _CompileOutputs = record( link = field(Artifact), args = field(ArgLike), - extra_targets = field(list[(str, Artifact)]), runtime_files = field(list[ArgLike]), external_debug_info = field(list[TransitiveSetArgsProjection]), sub_targets = field(dict[str, list[DefaultInfo]]), @@ -116,6 +114,8 @@ def _rust_binary_common( deps = cxx_attr_deps(ctx), ).values()) + extra_flags = toolchain_info.rustc_binary_flags + (extra_flags or []) + for link_strategy in LinkStrategy: # Unlike for libraries, there's no possibility of different link styles # resulting in the same build params, so no need to deduplicate. @@ -191,15 +191,11 @@ def _rust_binary_common( shared_libs, ) - extra_flags = toolchain_info.rustc_binary_flags + (extra_flags or []) - # Compile rust binary. 
- link, meta = rust_compile_multi( + link = rust_compile( ctx = ctx, compile_ctx = compile_ctx, - # Use metadata-full to ensure that we share dependencies with the - # link variant - emits = [Emit("link"), Emit("metadata-full")], + emit = Emit("link"), params = params, default_roots = default_roots, extra_link_args = executable_args.extra_link_args, @@ -211,7 +207,6 @@ def _rust_binary_common( ) args = cmd_args(link.output).hidden(executable_args.runtime_files) - extra_targets = output_as_diag_subtargets(meta).items() external_debug_info = project_artifacts( actions = ctx.actions, tsets = [inherited_external_debug_info( @@ -287,7 +282,6 @@ def _rust_binary_common( styles[link_strategy] = _CompileOutputs( link = link.output, args = args, - extra_targets = extra_targets, runtime_files = runtime_files, external_debug_info = executable_args.external_debug_info + external_debug_info, sub_targets = sub_targets_for_link_strategy, @@ -302,6 +296,22 @@ def _rust_binary_common( if link_strategy == specified_link_strategy and link.pdb: pdb = link.pdb + # FIXME(JakobDegen): It's a bit weird that this uses the specified link + # strategy but rustdoc and expand use the default link strategy. Figure out + # what's going on there. 
+ meta_full = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + # Use metadata-full to ensure that we share dependencies with the link + # variant + emit = Emit("metadata-full"), + params = strategy_param[specified_link_strategy], + default_roots = default_roots, + extra_flags = extra_flags, + ) + + extra_meta_targets = output_as_diag_subtargets(meta_full).items() + expand = rust_compile( ctx = ctx, compile_ctx = compile_ctx, @@ -312,7 +322,7 @@ def _rust_binary_common( ) compiled_outputs = styles[specified_link_strategy] - extra_compiled_targets = (compiled_outputs.extra_targets + [ + extra_compiled_targets = (extra_meta_targets + [ ("doc", generate_rustdoc( ctx = ctx, compile_ctx = compile_ctx, diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index ca43da26f..e05c92dff 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -262,20 +262,19 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: rust_param_artifact = {} native_param_artifact = {} - check_artifacts = None for params, outputs in artifacts.items(): if LinkageLang("rust") in param_lang[params]: - # Grab the check output for all kinds of builds to use - # in the check subtarget. The link style doesn't matter - # so pick the first. - if check_artifacts == None: - check_artifacts = output_as_diag_subtargets(outputs[MetadataKind("full")]) - rust_param_artifact[params] = outputs if LinkageLang("native") in param_lang[params] or LinkageLang("native-unbundled") in param_lang[params]: native_param_artifact[params] = outputs[MetadataKind("link")] + # Grab the artifacts to use for the check subtargets. 
Picking a good + # `LibOutputStyle` ensures that the subtarget shares work with the main + # build if possible + check_params = lang_style_param[(LinkageLang("rust"), LibOutputStyle("archive"))] + check_artifacts = output_as_diag_subtargets(artifacts[check_params][MetadataKind("full")]) + # For doctests, we need to know two things to know how to link them. The # first is that we need a link strategy, which affects how deps of this # target are handled From cc9fbd59a735b44d2f8f48cc0fa9cf1344a7f4ad Mon Sep 17 00:00:00 2001 From: Jakob Degen Date: Sun, 17 Mar 2024 17:34:36 -0700 Subject: [PATCH 0476/1133] rules: Introduce `RustcExtraOutputsInfo` Summary: I would like to be able to access the `metadata-fast` output even on pipelined toolchains; however, polluting the subtargets with that information doesn't seem great. So instead, let's add a provider on which we can attach additional outputs from Rust targets Reviewed By: dtolnay Differential Revision: D54990522 fbshipit-source-id: 9c94435da5cb9b1b7cbd1765827ba0e3d6310d8f --- prelude/rust/outputs.bzl | 12 ++++++++++++ prelude/rust/rust_binary.bzl | 18 ++++++++++++------ prelude/rust/rust_library.bzl | 19 ++++++++++++++----- 3 files changed, 38 insertions(+), 11 deletions(-) diff --git a/prelude/rust/outputs.bzl b/prelude/rust/outputs.bzl index cc7423e64..dd3cca3c3 100644 --- a/prelude/rust/outputs.bzl +++ b/prelude/rust/outputs.bzl @@ -34,3 +34,15 @@ def output_as_diag_subtargets(o: RustcOutput) -> dict[str, Artifact]: "diag.json": o.diag_json, "diag.txt": o.diag_txt, } + +# Access to additional outputs from Rust compilation. +# +# This provider is intended to be available from all rules that compile Rust +# code. As a result, it must be different from `RustLinkInfo`, since it should +# not exist on a prebuilt Rust library, but should exist on a binary. 
+RustcExtraOutputsInfo = provider( + fields = { + "metadata_fast": RustcOutput, + "metadata_full": RustcOutput, + }, +) diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 603dfc678..00a3e2b3e 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -56,6 +56,7 @@ load( "compile_context", "generate_rustdoc", "rust_compile", + "rust_compile_multi", ) load( ":build_params.bzl", @@ -75,7 +76,7 @@ load( "inherited_rust_cxx_link_group_info", "inherited_shared_libs", ) -load(":outputs.bzl", "output_as_diag_subtargets") +load(":outputs.bzl", "RustcExtraOutputsInfo", "output_as_diag_subtargets") load(":resources.bzl", "rust_attr_resources") _CompileOutputs = record( @@ -299,17 +300,22 @@ def _rust_binary_common( # FIXME(JakobDegen): It's a bit weird that this uses the specified link # strategy but rustdoc and expand use the default link strategy. Figure out # what's going on there. - meta_full = rust_compile( + meta_full, meta_fast = rust_compile_multi( ctx = ctx, compile_ctx = compile_ctx, - # Use metadata-full to ensure that we share dependencies with the link - # variant - emit = Emit("metadata-full"), + emits = [Emit("metadata-full"), Emit("metadata-fast")], params = strategy_param[specified_link_strategy], default_roots = default_roots, extra_flags = extra_flags, ) + providers = [RustcExtraOutputsInfo( + metadata_full = meta_full, + metadata_fast = meta_fast, + )] + + # Use metadata-full to ensure that we share dependencies with the link + # variant extra_meta_targets = output_as_diag_subtargets(meta_full).items() expand = rust_compile( @@ -361,7 +367,7 @@ def _rust_binary_common( if dupmbin_toolchain: sub_targets[DUMPBIN_SUB_TARGET] = get_dumpbin_providers(ctx, compiled_outputs.link, dupmbin_toolchain) - providers = [ + providers += [ DefaultInfo( default_output = compiled_outputs.link, other_outputs = compiled_outputs.runtime_files + compiled_outputs.external_debug_info, diff --git a/prelude/rust/rust_library.bzl 
b/prelude/rust/rust_library.bzl index e05c92dff..85f498d3e 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -110,6 +110,7 @@ load( ) load( ":outputs.bzl", + "RustcExtraOutputsInfo", "RustcOutput", # @unused Used as a type "output_as_diag_subtargets", ) @@ -273,7 +274,7 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: # `LibOutputStyle` ensures that the subtarget shares work with the main # build if possible check_params = lang_style_param[(LinkageLang("rust"), LibOutputStyle("archive"))] - check_artifacts = output_as_diag_subtargets(artifacts[check_params][MetadataKind("full")]) + check_artifacts = artifacts[check_params] # For doctests, we need to know two things to know how to link them. The # first is that we need a link strategy, which affects how deps of this @@ -371,19 +372,21 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: rustdoc = rustdoc, rustdoc_test = rustdoc_test, doctests_enabled = doctests_enabled, - check_artifacts = check_artifacts, + check_artifacts = output_as_diag_subtargets(check_artifacts[MetadataKind("full")]), expand = expand.output, sources = compile_ctx.symlinked_srcs, rustdoc_coverage = rustdoc_coverage, ) - rust_link_info = _rust_providers( + rust_link_info, rust_extra_outputs_info = _rust_providers( ctx = ctx, compile_ctx = compile_ctx, lang_style_param = lang_style_param, param_artifact = rust_param_artifact, + check_artifacts = check_artifacts, link_infos = link_infos, ) providers.append(rust_link_info) + providers.append(rust_extra_outputs_info) providers += _native_providers( ctx = ctx, compile_ctx = compile_ctx, @@ -692,7 +695,8 @@ def _rust_providers( compile_ctx: CompileContext, lang_style_param: dict[(LinkageLang, LibOutputStyle), BuildParams], param_artifact: dict[BuildParams, dict[MetadataKind, RustcOutput]], - link_infos: dict[LibOutputStyle, LinkInfos]) -> RustLinkInfo: + check_artifacts: dict[MetadataKind, RustcOutput], + link_infos: dict[LibOutputStyle, 
LinkInfos]) -> (RustLinkInfo, RustcExtraOutputsInfo): """ Return the set of providers for Rust linkage. """ @@ -718,7 +722,12 @@ def _rust_providers( linkable_graphs = inherited_graphs, ) - return rust_link_info + rust_extra_outputs = RustcExtraOutputsInfo( + metadata_full = check_artifacts[MetadataKind("full")], + metadata_fast = check_artifacts[MetadataKind("fast")], + ) + + return (rust_link_info, rust_extra_outputs) def _link_infos( ctx: AnalysisContext, From 8c6e635cd0aa15e85580b7f42303f1cfd797e653 Mon Sep 17 00:00:00 2001 From: Emerson Ford Date: Mon, 18 Mar 2024 03:05:05 -0700 Subject: [PATCH 0477/1133] fix rust-project to use MetadataKind Summary: D54976700 eliminated the `rlib` field in favor of a `MetadataKind` keyed dict, which rust-project relies on. this fixes rust-project by moving it to also use the `MetadataKind` Reviewed By: macisamuele Differential Revision: D55007076 fbshipit-source-id: 5b293b3b6448f12c3f043239046a41e98e832665 --- prelude/rust/rust-analyzer/resolve_deps.bxl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/rust/rust-analyzer/resolve_deps.bxl b/prelude/rust/rust-analyzer/resolve_deps.bxl index 61dd48b89..5bddc146a 100644 --- a/prelude/rust/rust-analyzer/resolve_deps.bxl +++ b/prelude/rust/rust-analyzer/resolve_deps.bxl @@ -6,6 +6,7 @@ # of this source tree. 
load("@prelude//linking:link_info.bzl", "LinkStrategy") +load("@prelude//rust:build_params.bzl", "MetadataKind") load("@prelude//rust:link_info.bzl", "RustLinkInfo") def materialize(ctx, target): @@ -171,7 +172,7 @@ def expand_proc_macros(ctx, targets): proc_macro = getattr(attrs, "proc_macro", False) if proc_macro: analysis = ctx.analysis(target) - rlib = analysis.providers()[RustLinkInfo].strategies[LinkStrategy("shared")].rlib + rlib = analysis.providers()[RustLinkInfo].strategies[LinkStrategy("shared")].outputs[MetadataKind("link")] label = target.label.raw_target() out[label] = {"actual": label, "dylib": ctx.output.ensure(rlib).abs_path()} return out From 428b6c807933f5a20395f8745fecbf8a763884e1 Mon Sep 17 00:00:00 2001 From: Max Ovtsin Date: Mon, 18 Mar 2024 05:52:11 -0700 Subject: [PATCH 0478/1133] Move compiled pcm modules into swift_module_map Summary: Moving all PCM compiled Clang modules in Swift modulemap too along with swiftmodules to reduce number of arguments in argsfile. 
Reviewed By: rmaz Differential Revision: D54898842 fbshipit-source-id: bbcd737f0680ce41d911320bf05d611904854710 --- prelude/apple/swift/swift_compilation.bzl | 70 ++++++++++--------- prelude/apple/swift/swift_module_map.bzl | 16 +---- prelude/apple/swift/swift_pcm_compilation.bzl | 20 +++--- .../apple/swift/swift_sdk_pcm_compilation.bzl | 15 ++-- .../swift_sdk_swiftinterface_compilation.bzl | 6 +- prelude/apple/swift/swift_toolchain_types.bzl | 35 +++++++--- 6 files changed, 88 insertions(+), 74 deletions(-) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 75213702c..2c405e346 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -52,7 +52,7 @@ load( "get_incremental_swiftmodule_compilation_flags", "should_build_swift_incrementally", ) -load(":swift_module_map.bzl", "write_swift_module_map_with_swift_deps") +load(":swift_module_map.bzl", "write_swift_module_map_with_deps") load(":swift_pcm_compilation.bzl", "compile_underlying_pcm", "get_compiled_pcm_deps_tset", "get_swift_pcm_anon_targets") load( ":swift_pcm_compilation_types.bzl", @@ -560,9 +560,16 @@ def _get_shared_flags( sdk_clang_deps_tset = get_compiled_sdk_clang_deps_tset(ctx, deps_providers) sdk_swift_deps_tset = get_compiled_sdk_swift_deps_tset(ctx, deps_providers) - # Add flags required to import ObjC module dependencies - _add_clang_deps_flags(ctx, pcm_deps_tset, sdk_clang_deps_tset, cmd) - _add_swift_deps_flags(ctx, sdk_swift_deps_tset, cmd) + # If Swift Explicit Modules are enabled, a few things must be provided to a compilation job: + # 1. Direct and transitive SDK deps from `sdk_modules` attribute. + # 2. Direct and transitive user-defined deps. + # 3. Transitive SDK deps of user-defined deps. 
+ # (This is the case, when a user-defined dep exports a type from SDK module, + # thus such SDK module should be implicitly visible to consumers of that custom dep) + _add_swift_module_map_args(ctx, sdk_swift_deps_tset, pcm_deps_tset, sdk_clang_deps_tset, cmd) + + _add_clang_deps_flags(ctx, pcm_deps_tset, cmd) + _add_swift_deps_flags(ctx, cmd) # Add flags for importing the ObjC part of this library _add_mixed_library_flags_to_cmd(ctx, cmd, underlying_module, objc_headers, objc_modulemap_pp_info) @@ -574,28 +581,34 @@ def _get_shared_flags( return cmd -def _add_swift_deps_flags( +def _add_swift_module_map_args( ctx: AnalysisContext, + sdk_swiftmodule_deps_tset: SwiftCompiledModuleTset, + pcm_deps_tset: SwiftCompiledModuleTset, sdk_deps_tset: SwiftCompiledModuleTset, cmd: cmd_args): - # If Explicit Modules are enabled, a few things must be provided to a compilation job: - # 1. Direct and transitive SDK deps from `sdk_modules` attribute. - # 2. Direct and transitive user-defined deps. - # 3. Transitive SDK deps of user-defined deps. 
- # (This is the case, when a user-defined dep exports a type from SDK module, - # thus such SDK module should be implicitly visible to consumers of that custom dep) + module_name = get_module_name(ctx) + sdk_swiftmodule_deps_tset = [sdk_swiftmodule_deps_tset] if sdk_swiftmodule_deps_tset else [] + all_deps_tset = ctx.actions.tset( + SwiftCompiledModuleTset, + children = _get_swift_paths_tsets(ctx.attrs.deps + ctx.attrs.exported_deps) + [pcm_deps_tset, sdk_deps_tset] + sdk_swiftmodule_deps_tset, + ) + swift_module_map_artifact = write_swift_module_map_with_deps( + ctx, + module_name, + all_deps_tset, + ) + cmd.add([ + "-Xfrontend", + "-explicit-swift-module-map-file", + "-Xfrontend", + swift_module_map_artifact, + ]) + +def _add_swift_deps_flags( + ctx: AnalysisContext, + cmd: cmd_args): if uses_explicit_modules(ctx): - module_name = get_module_name(ctx) - swift_deps_tset = ctx.actions.tset( - SwiftCompiledModuleTset, - children = _get_swift_paths_tsets(ctx.attrs.deps + ctx.attrs.exported_deps), - ) - swift_module_map_artifact = write_swift_module_map_with_swift_deps( - ctx, - module_name, - sdk_deps_tset, - swift_deps_tset, - ) cmd.add([ "-Xcc", "-fno-implicit-modules", @@ -603,10 +616,6 @@ def _add_swift_deps_flags( "-fno-implicit-module-maps", "-Xfrontend", "-disable-implicit-swift-modules", - "-Xfrontend", - "-explicit-swift-module-map-file", - "-Xfrontend", - swift_module_map_artifact, ]) else: depset = ctx.actions.tset(SwiftCompiledModuleTset, children = _get_swift_paths_tsets(ctx.attrs.deps + ctx.attrs.exported_deps)) @@ -615,15 +624,9 @@ def _add_swift_deps_flags( def _add_clang_deps_flags( ctx: AnalysisContext, pcm_deps_tset: SwiftCompiledModuleTset, - sdk_deps_tset: SwiftCompiledModuleTset, cmd: cmd_args) -> None: - # If a module uses Explicit Modules, all direct and - # transitive Clang deps have to be explicitly added. 
if uses_explicit_modules(ctx): - cmd.add(pcm_deps_tset.project_as_args("clang_deps")) - - # Add Clang sdk modules which do not go to swift modulemap - cmd.add(sdk_deps_tset.project_as_args("clang_deps")) + cmd.add(pcm_deps_tset.project_as_args("clang_importer_flags")) else: inherited_preprocessor_infos = cxx_inherited_preprocessor_infos(ctx.attrs.deps + ctx.attrs.exported_deps) preprocessors = cxx_merge_cpreprocessors(ctx, [], inherited_preprocessor_infos) @@ -640,6 +643,7 @@ def _add_mixed_library_flags_to_cmd( if uses_explicit_modules(ctx): if underlying_module: cmd.add(underlying_module.clang_importer_args) + cmd.add(underlying_module.clang_module_file_args) cmd.add("-import-underlying-module") return diff --git a/prelude/apple/swift/swift_module_map.bzl b/prelude/apple/swift/swift_module_map.bzl index 5547232f0..dd60747cf 100644 --- a/prelude/apple/swift/swift_module_map.bzl +++ b/prelude/apple/swift/swift_module_map.bzl @@ -8,22 +8,10 @@ load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type load(":swift_toolchain_types.bzl", "SwiftCompiledModuleTset") -def write_swift_module_map( +def write_swift_module_map_with_deps( ctx: AnalysisContext, module_name: str, - sdk_deps: SwiftCompiledModuleTset) -> ArgLike: - return write_swift_module_map_with_swift_deps(ctx, module_name, sdk_deps, None) - -def write_swift_module_map_with_swift_deps( - ctx: AnalysisContext, - module_name: str, - sdk_swift_deps: SwiftCompiledModuleTset, - swift_deps: [SwiftCompiledModuleTset, None]) -> ArgLike: - if swift_deps: - all_deps = ctx.actions.tset(SwiftCompiledModuleTset, children = [sdk_swift_deps, swift_deps]) - else: - all_deps = sdk_swift_deps - + all_deps: SwiftCompiledModuleTset) -> ArgLike: return ctx.actions.write_json( module_name + ".swift_module_map.json", all_deps.project_as_json("swift_module_map"), diff --git a/prelude/apple/swift/swift_pcm_compilation.bzl b/prelude/apple/swift/swift_pcm_compilation.bzl index 01626fce7..e244afef6 100644 --- 
a/prelude/apple/swift/swift_pcm_compilation.bzl +++ b/prelude/apple/swift/swift_pcm_compilation.bzl @@ -77,9 +77,9 @@ def _compiled_module_info( module_name: str, pcm_output: Artifact, pcm_info: SwiftPCMUncompiledInfo) -> SwiftCompiledModuleInfo: - clang_importer_args = cmd_args() - clang_importer_args.add("-Xcc") - clang_importer_args.add( + clang_deps_args = cmd_args() + clang_deps_args.add("-Xcc") + clang_deps_args.add( cmd_args( [ "-fmodule-file=", @@ -90,8 +90,8 @@ def _compiled_module_info( delimiter = "", ), ) - clang_importer_args.add("-Xcc") - clang_importer_args.add( + clang_deps_args.add("-Xcc") + clang_deps_args.add( cmd_args( [ "-fmodule-map-file=", @@ -100,16 +100,19 @@ def _compiled_module_info( delimiter = "", ), ) - clang_importer_args.add("-Xcc") + + clang_importer_args = cmd_args("-Xcc") clang_importer_args.add(pcm_info.exported_preprocessor.relative_args.args) clang_importer_args.hidden(pcm_info.exported_preprocessor.modular_args) return SwiftCompiledModuleInfo( + clang_module_file_args = clang_deps_args, clang_importer_args = clang_importer_args, is_framework = False, is_swiftmodule = False, module_name = module_name, output_artifact = pcm_output, + clang_modulemap = pcm_info.exported_preprocessor.modulemap_path, ) def _swift_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: @@ -273,8 +276,9 @@ def _get_base_pcm_flags( swift_toolchain.resource_dir, ]) - cmd.add(sdk_deps_tset.project_as_args("clang_deps")) - cmd.add(pcm_deps_tset.project_as_args("clang_deps")) + cmd.add(sdk_deps_tset.project_as_args("clang_module_file_flags")) + cmd.add(pcm_deps_tset.project_as_args("clang_module_file_flags")) + cmd.add(pcm_deps_tset.project_as_args("clang_importer_flags")) modulemap_path = uncompiled_pcm_info.exported_preprocessor.modulemap_path pcm_output = ctx.actions.declare_output(module_name + ".pcm") diff --git a/prelude/apple/swift/swift_sdk_pcm_compilation.bzl b/prelude/apple/swift/swift_sdk_pcm_compilation.bzl index 
c96606c89..c6cb328c9 100644 --- a/prelude/apple/swift/swift_sdk_pcm_compilation.bzl +++ b/prelude/apple/swift/swift_sdk_pcm_compilation.bzl @@ -143,7 +143,7 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov "-I.", ]) - cmd.add(sdk_deps_tset.project_as_args("clang_deps")) + cmd.add(sdk_deps_tset.project_as_args("clang_module_file_flags")) expanded_modulemap_path_cmd = expand_relative_prefixed_sdk_path( cmd_args(swift_toolchain.sdk_path), @@ -183,9 +183,9 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov ) # Construct the args needed to be passed to the clang importer - clang_importer_args = cmd_args() - clang_importer_args.add("-Xcc") - clang_importer_args.add( + clang_deps_args = cmd_args() + clang_deps_args.add("-Xcc") + clang_deps_args.add( cmd_args( [ "-fmodule-file=", @@ -196,8 +196,8 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov delimiter = "", ), ) - clang_importer_args.add("-Xcc") - clang_importer_args.add( + clang_deps_args.add("-Xcc") + clang_deps_args.add( cmd_args( [ "-fmodule-map-file=", @@ -208,11 +208,12 @@ def _swift_sdk_pcm_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Prov ) compiled_sdk = SwiftCompiledModuleInfo( - clang_importer_args = clang_importer_args, + clang_module_file_args = clang_deps_args, is_framework = uncompiled_sdk_module_info.is_framework, is_swiftmodule = False, module_name = module_name, output_artifact = pcm_output, + clang_modulemap = expanded_modulemap_path_cmd, ) return [ diff --git a/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl b/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl index 20667ddaf..909bb8e33 100644 --- a/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl +++ b/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl @@ -14,7 +14,7 @@ load( "extract_and_merge_clang_debug_infos", "extract_and_merge_swift_debug_infos", ) -load(":swift_module_map.bzl", 
"write_swift_module_map") +load(":swift_module_map.bzl", "write_swift_module_map_with_deps") load(":swift_sdk_pcm_compilation.bzl", "get_swift_sdk_pcm_anon_targets") load(":swift_toolchain_types.bzl", "SdkUncompiledModuleInfo", "SwiftCompiledModuleInfo", "SwiftCompiledModuleTset", "WrappedSdkCompiledModuleInfo") @@ -52,12 +52,12 @@ def _swift_interface_compilation_impl(ctx: AnalysisContext) -> [Promise, list[Pr clang_deps_tset = get_compiled_sdk_clang_deps_tset(ctx, sdk_deps_providers) swift_deps_tset = get_compiled_sdk_swift_deps_tset(ctx, sdk_deps_providers) - swift_module_map_artifact = write_swift_module_map(ctx, uncompiled_module_info_name, swift_deps_tset) + swift_module_map_artifact = write_swift_module_map_with_deps(ctx, uncompiled_module_info_name, swift_deps_tset) cmd.add([ "-explicit-swift-module-map-file", swift_module_map_artifact, ]) - cmd.add(clang_deps_tset.project_as_args("clang_deps")) + cmd.add(clang_deps_tset.project_as_args("clang_module_file_flags")) swiftmodule_output = ctx.actions.declare_output(uncompiled_module_info_name + SWIFTMODULE_EXTENSION) expanded_swiftinterface_cmd = expand_relative_prefixed_sdk_path( diff --git a/prelude/apple/swift/swift_toolchain_types.bzl b/prelude/apple/swift/swift_toolchain_types.bzl index 10e6941c3..b9b2b35fb 100644 --- a/prelude/apple/swift/swift_toolchain_types.bzl +++ b/prelude/apple/swift/swift_toolchain_types.bzl @@ -66,7 +66,9 @@ SdkSwiftOverlayInfo = provider(fields = { }) SwiftCompiledModuleInfo = provider(fields = { - "clang_importer_args": provider_field(typing.Any, default = None), # cmd_args of include flags for the clang importer. + "clang_importer_args": provider_field(typing.Any, default = None), # cmd_args of additional flags for the clang importer. + "clang_module_file_args": provider_field(typing.Any, default = None), # cmd_args of include flags for the clang importer. 
+ "clang_modulemap": provider_field(typing.Any, default = None), # Clang modulemap file which is required for generation of swift_module_map. "is_framework": provider_field(typing.Any, default = None), "is_swiftmodule": provider_field(typing.Any, default = None), # If True then contains a compiled swiftmodule, otherwise Clang's pcm. "module_name": provider_field(typing.Any, default = None), # A real name of a module, without distinguishing suffixes. @@ -77,22 +79,37 @@ def _add_swiftmodule_search_path(module_info: SwiftCompiledModuleInfo): # We need to import the containing folder, not the file itself. return ["-I", cmd_args(module_info.output_artifact).parent()] if module_info.is_swiftmodule else [] -def _add_clang_import_flags(module_info: SwiftCompiledModuleInfo): +def _add_clang_module_file_flags(module_info: SwiftCompiledModuleInfo): if module_info.is_swiftmodule: return [] else: - return [module_info.clang_importer_args] + return [module_info.clang_module_file_args] + +def _add_clang_importer_flags(module_info: SwiftCompiledModuleInfo): + if module_info.is_swiftmodule: + return [] + else: + return [module_info.clang_importer_args] if module_info.clang_importer_args else [] def _swift_module_map_struct(module_info: SwiftCompiledModuleInfo): - return struct( - isFramework = module_info.is_framework, - moduleName = module_info.module_name, - modulePath = module_info.output_artifact, - ) + if module_info.is_swiftmodule: + return struct( + isFramework = module_info.is_framework, + moduleName = module_info.module_name, + modulePath = module_info.output_artifact, + ) + else: + return struct( + isFramework = module_info.is_framework, + moduleName = module_info.module_name, + clangModulePath = module_info.output_artifact, + clangModuleMapPath = cmd_args([module_info.clang_modulemap], delimiter = ""), + ) SwiftCompiledModuleTset = transitive_set( args_projections = { - "clang_deps": _add_clang_import_flags, + "clang_importer_flags": _add_clang_importer_flags, # 
Additional clang flags required for compilation. + "clang_module_file_flags": _add_clang_module_file_flags, # Projects pcm modules as cli flags. "module_search_path": _add_swiftmodule_search_path, }, json_projections = { From bd0aa45ce121702f8670c22af97288b8224d18cf Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Mon, 18 Mar 2024 08:27:37 -0700 Subject: [PATCH 0479/1133] fix python omnibus linking with lld Summary: lld does not handle `-flat_namespace` in the same way as ld64, which is causing issues when linking Python test targets. Modify the flags to use `dynamic_lookup` instead, and use two-level namespaces as normal. #build_platform[mac] Reviewed By: drodriguez Differential Revision: D54903679 fbshipit-source-id: 90053872184c76b32133482e64e6b8896bb54a54 --- prelude/cxx/linker.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/cxx/linker.bzl b/prelude/cxx/linker.bzl index 3bf86d3be..4e6180646 100644 --- a/prelude/cxx/linker.bzl +++ b/prelude/cxx/linker.bzl @@ -216,7 +216,7 @@ def get_ignore_undefined_symbols_flags(linker_type: str) -> list[str]: args.append("-Wl,--allow-shlib-undefined") args.append("-Wl,--unresolved-symbols=ignore-all") elif linker_type == "darwin": - args.append("-Wl,-flat_namespace,-undefined,suppress") + args.append("-Wl,-undefined,dynamic_lookup") else: fail("Linker type {} not supported".format(linker_type)) From e4a268a8bede06dfe52fbf58c9333677dbf9fe57 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Mon, 18 Mar 2024 08:27:37 -0700 Subject: [PATCH 0480/1133] fbshipit-source-id: cc805b594e4d80038318ba5ea17d7f61f1c94d6a --- prelude/cxx/linker.bzl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prelude/cxx/linker.bzl b/prelude/cxx/linker.bzl index 4e6180646..17e7951fd 100644 --- a/prelude/cxx/linker.bzl +++ b/prelude/cxx/linker.bzl @@ -216,7 +216,9 @@ def get_ignore_undefined_symbols_flags(linker_type: str) -> list[str]: args.append("-Wl,--allow-shlib-undefined") 
args.append("-Wl,--unresolved-symbols=ignore-all") elif linker_type == "darwin": + # dynamic_lookup is incompatible with fixup chains args.append("-Wl,-undefined,dynamic_lookup") + args.append("-Wl,-no_fixup_chains") else: fail("Linker type {} not supported".format(linker_type)) From 8762a330cd5a7bdab1bb1f7a9e0d60ac1f11b838 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Mon, 18 Mar 2024 08:57:59 -0700 Subject: [PATCH 0481/1133] Require all .go files of a package to be in the same directory Summary: - Require all .go files of a package to be in the same directory - Add package_root attribute to for packages with BUCK file outside of package directory - Guess `package_root` automatically when we can do do it, i.e. when all .go files are within the same directory `srcs=["foo/a.go", "foo/b.go"]` Reviewed By: awalterschulze Differential Revision: D54775609 fbshipit-source-id: accc4fad457c479f94bb2ed3cada8200f8c0c9ef --- prelude/decls/go_common.bzl | 11 ++++ prelude/decls/go_rules.bzl | 5 ++ prelude/go/cgo_library.bzl | 2 +- prelude/go/compile.bzl | 19 +++++- prelude/go/go_binary.bzl | 2 +- prelude/go/go_exported_library.bzl | 2 +- prelude/go/go_library.bzl | 2 +- prelude/go/go_test.bzl | 2 +- prelude/go/tools/filter_srcs.py | 102 +++++++++++++---------------- 9 files changed, 85 insertions(+), 62 deletions(-) diff --git a/prelude/decls/go_common.bzl b/prelude/decls/go_common.bzl index dc7042a3e..a3a58a4e0 100644 --- a/prelude/decls/go_common.bzl +++ b/prelude/decls/go_common.bzl @@ -30,6 +30,16 @@ def _srcs_arg(): """), } +def _package_root_arg(): + return { + "package_root": attrs.option(attrs.string(), default = None, doc = """ + Sets Go package direactory (relative to BUCK file). + By default (or if None passes) package_root is being detected automatically. + Empty string of Go package is on the same level as BUCK file otherwise the subdirectory name. 
+ Example for srcs = ["foo/bar.go"], package_root = "foo" +"""), + } + def _link_style_arg(): return { "link_style": attrs.option(attrs.enum(LinkableDepType), default = None, doc = """ @@ -149,6 +159,7 @@ def _tags_arg(): go_common = struct( deps_arg = _deps_arg, srcs_arg = _srcs_arg, + package_root_arg = _package_root_arg, link_style_arg = _link_style_arg, link_mode_arg = _link_mode_arg, cgo_compiler_flags_arg = _cgo_compiler_flags_arg, diff --git a/prelude/decls/go_rules.bzl b/prelude/decls/go_rules.bzl index c128410c0..2938b0a00 100644 --- a/prelude/decls/go_rules.bzl +++ b/prelude/decls/go_rules.bzl @@ -76,6 +76,7 @@ cgo_library = prelude_rule( cxx_common.platform_preprocessor_flags_arg() | go_common.cgo_compiler_flags_arg() | go_common.embedcfg_arg() | + go_common.package_root_arg() | cxx_common.compiler_flags_arg() | cxx_common.platform_compiler_flags_arg() | cxx_common.linker_extra_outputs_arg() | @@ -185,6 +186,7 @@ go_binary = prelude_rule( go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | go_common.embedcfg_arg() | + go_common.package_root_arg() | go_common.cgo_enabled_arg() | go_common.race_arg() | go_common.tags_arg() | @@ -272,6 +274,7 @@ go_exported_library = prelude_rule( go_common.assembler_flags_arg() | go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | + go_common.package_root_arg() | go_common.cgo_enabled_arg() | go_common.race_arg() | go_common.tags_arg() | @@ -324,6 +327,7 @@ go_library = prelude_rule( go_common.compiler_flags_arg() | go_common.assembler_flags_arg() | go_common.embedcfg_arg() | + go_common.package_root_arg() | { "contacts": attrs.list(attrs.string(), default = []), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), @@ -424,6 +428,7 @@ go_test = prelude_rule( go_common.linker_flags_arg() | go_common.external_linker_flags_arg() | go_common.embedcfg_arg() | + go_common.package_root_arg() | go_common.cgo_enabled_arg() | go_common.race_arg() | 
go_common.tags_arg() | diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index d49667391..1499821c7 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -221,7 +221,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: # Merge all sources together to pass to the Go compile step. all_srcs = cmd_args(go_srcs + compiled_objects) if ctx.attrs.go_srcs: - all_srcs.add(get_filtered_srcs(ctx, ctx.attrs.go_srcs)) + all_srcs.add(get_filtered_srcs(ctx, ctx.attrs.go_srcs, ctx.attrs.package_root)) shared = ctx.attrs._compile_shared race = ctx.attrs._race diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index 86f7f7133..cd56bec3d 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -39,19 +39,21 @@ GoTestInfo = provider( def get_inherited_compile_pkgs(deps: list[Dependency]) -> dict[str, GoPkg]: return merge_pkgs([d[GoPkgCompileInfo].pkgs for d in deps if GoPkgCompileInfo in d]) -def get_filtered_srcs(ctx: AnalysisContext, srcs: list[Artifact], tests: bool = False, force_disable_cgo: bool = False) -> cmd_args: +def get_filtered_srcs(ctx: AnalysisContext, srcs: list[Artifact], package_root: str | None, tests: bool = False, force_disable_cgo: bool = False) -> cmd_args: """ Filter the input sources based on build pragma """ go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + package_root = package_root if package_root != None else infer_package_root(srcs) + # Delegate to `go list` to filter out srcs with incompatible `// +build` # pragmas. 
filtered_srcs = ctx.actions.declare_output("__filtered_srcs__.txt") srcs_dir = ctx.actions.symlinked_dir( "__srcs__", - {src.short_path: src for src in srcs}, + {paths.relativize(src.short_path, package_root): src for src in srcs}, ) filter_cmd = get_toolchain_cmd_args(go_toolchain, go_root = True, force_disable_cgo = force_disable_cgo) filter_cmd.add(go_toolchain.filter_srcs) @@ -66,6 +68,19 @@ def get_filtered_srcs(ctx: AnalysisContext, srcs: list[Artifact], tests: bool = # Add filtered srcs to compile command. return cmd_args(filtered_srcs, format = "@{}").hidden(srcs).hidden(srcs_dir) +def infer_package_root(srcs: list[Artifact]) -> str: + go_sources = [s for s in srcs if s.extension == ".go"] + if len(go_sources) == 0: + return "" + dir_set = {paths.dirname(s.short_path): None for s in go_sources} + if len(dir_set) > 1: + fail("Provide `package_root` target attribute. Can't infer it when there are multiple directories containing .go files: {}. Sources: {}".format( + dir_set.keys(), + [s.short_path for s in go_sources], + )) + + return dir_set.keys()[0] + def _assemble_cmd( ctx: AnalysisContext, pkg_name: str, diff --git a/prelude/go/go_binary.bzl b/prelude/go/go_binary.bzl index f5ee6c7ac..ca87cc90c 100644 --- a/prelude/go/go_binary.bzl +++ b/prelude/go/go_binary.bzl @@ -23,7 +23,7 @@ def go_binary_impl(ctx: AnalysisContext) -> list[Provider]: lib = compile( ctx, "main", - get_filtered_srcs(ctx, ctx.attrs.srcs), + get_filtered_srcs(ctx, ctx.attrs.srcs, ctx.attrs.package_root), deps = ctx.attrs.deps, compile_flags = ctx.attrs.compiler_flags, race = ctx.attrs._race, diff --git a/prelude/go/go_exported_library.bzl b/prelude/go/go_exported_library.bzl index 5210f1742..0142b5777 100644 --- a/prelude/go/go_exported_library.bzl +++ b/prelude/go/go_exported_library.bzl @@ -21,7 +21,7 @@ def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: lib = compile( ctx, "main", - get_filtered_srcs(ctx, ctx.attrs.srcs), + get_filtered_srcs(ctx, ctx.attrs.srcs, 
ctx.attrs.package_root), deps = ctx.attrs.deps, compile_flags = ctx.attrs.compiler_flags, shared = True, diff --git a/prelude/go/go_library.bzl b/prelude/go/go_library.bzl index 00309adaf..62040c7da 100644 --- a/prelude/go/go_library.bzl +++ b/prelude/go/go_library.bzl @@ -36,7 +36,7 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: pkg_name = go_attr_pkg_name(ctx) # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. - srcs = get_filtered_srcs(ctx, ctx.attrs.srcs, force_disable_cgo = True) + srcs = get_filtered_srcs(ctx, ctx.attrs.srcs, ctx.attrs.package_root, force_disable_cgo = True) shared = ctx.attrs._compile_shared race = ctx.attrs._race coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index 2c8759d28..a01a985d0 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -67,7 +67,7 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: # TODO: should we assert that pkg_name != None here? pkg_name = lib.pkg_name - srcs = get_filtered_srcs(ctx, srcs, tests = True) + srcs = get_filtered_srcs(ctx, srcs, ctx.attrs.package_root, tests = True) # If coverage is enabled for this test, we need to preprocess the sources # with the Go cover tool. diff --git a/prelude/go/tools/filter_srcs.py b/prelude/go/tools/filter_srcs.py index a242e981d..68fc68bdb 100755 --- a/prelude/go/tools/filter_srcs.py +++ b/prelude/go/tools/filter_srcs.py @@ -36,66 +36,58 @@ def main(argv): parser.add_argument("srcdir", type=Path) args = parser.parse_args(argv[1:]) - # Find all source sub-dirs, which we'll need to run `go list` from. 
- roots = set() - for root, _dirs, _files in os.walk(args.srcdir): - roots.add(root) - # Compute absolute paths for GOROOT, to enable `go list` to use `compile/asm/etc` goroot = os.environ.get("GOROOT", "") if goroot: goroot = os.path.realpath(goroot) - # Run `go list` on all source dirs to filter input sources by build pragmas. - for root in roots: - with tempfile.TemporaryDirectory() as go_cache_dir: - out = subprocess.check_output( - [ - "env", - "-i", - "GOROOT={}".format(goroot), - "GOARCH={}".format(os.environ.get("GOARCH", "")), - "GOOS={}".format(os.environ.get("GOOS", "")), - "CGO_ENABLED={}".format(os.environ.get("CGO_ENABLED", "0")), - "GO111MODULE=off", - "GOCACHE=" + go_cache_dir, - args.go.resolve(), - "list", - "-e", - "-json", - "-tags", - args.tags, - ".", - ], - cwd=root, - ).decode("utf-8") - - # Parse JSON output and print out sources. - idx = 0 - decoder = json.JSONDecoder() - while idx < len(out) - 1: - # The raw_decode method fails if there are any leading spaces, e.g. " {}" fails - # so manually trim the prefix of the string - if out[idx].isspace(): - idx += 1 - continue - - obj, idx = decoder.raw_decode(out, idx) - types = ["GoFiles", "EmbedFiles"] - if args.tests: - types.extend(["TestGoFiles", "XTestGoFiles"]) - else: - types.extend(["SFiles"]) - for typ in types: - for src in obj.get(typ, []): - src = Path(obj["Dir"]) / src - # Resolve the symlink - src = Path( - os.path.normpath(str(src.parent / os.readlink(str(src)))) - ) - # Relativize to the CWD. - src = src.relative_to(os.getcwd()) - print(src, file=args.output) + # Run `go list` to filter input sources by build pragmas. 
+ with tempfile.TemporaryDirectory() as go_cache_dir: + out = subprocess.check_output( + [ + "env", + "-i", + "GOROOT={}".format(goroot), + "GOARCH={}".format(os.environ.get("GOARCH", "")), + "GOOS={}".format(os.environ.get("GOOS", "")), + "CGO_ENABLED={}".format(os.environ.get("CGO_ENABLED", "0")), + "GO111MODULE=off", + "GOCACHE=" + go_cache_dir, + args.go.resolve(), + "list", + "-e", + "-json", + "-tags", + args.tags, + ".", + ], + cwd=args.srcdir, + ).decode("utf-8") + + # Parse JSON output and print out sources. + idx = 0 + decoder = json.JSONDecoder() + while idx < len(out) - 1: + # The raw_decode method fails if there are any leading spaces, e.g. " {}" fails + # so manually trim the prefix of the string + if out[idx].isspace(): + idx += 1 + continue + + obj, idx = decoder.raw_decode(out, idx) + types = ["GoFiles", "EmbedFiles"] + if args.tests: + types.extend(["TestGoFiles", "XTestGoFiles"]) + else: + types.extend(["SFiles"]) + for typ in types: + for src in obj.get(typ, []): + src = Path(obj["Dir"]) / src + # Resolve the symlink + src = Path(os.path.normpath(str(src.parent / os.readlink(str(src))))) + # Relativize to the CWD. 
+ src = src.relative_to(os.getcwd()) + print(src, file=args.output) args.output.close() From 2820ee85245d6d50dffd4f1619efd3fa526a430c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Muska=C5=82a?= Date: Mon, 18 Mar 2024 09:57:47 -0700 Subject: [PATCH 0482/1133] Fix spec Summary: The function doesn't return an ok tuple Created from CodeHub with https://fburl.com/edit-in-codehub Reviewed By: alanz Differential Revision: D55011446 fbshipit-source-id: f3abbb08a5ca5900ac576d1d691b2e1e514e0cb4 --- prelude/erlang/common_test/test_binary/src/test_binary.erl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/erlang/common_test/test_binary/src/test_binary.erl b/prelude/erlang/common_test/test_binary/src/test_binary.erl index 2403db9f2..d5dd4ed53 100644 --- a/prelude/erlang/common_test/test_binary/src/test_binary.erl +++ b/prelude/erlang/common_test/test_binary/src/test_binary.erl @@ -109,7 +109,7 @@ load_test_info(TestInfoFile) -> common_app_env = CommonAppEnv }. --spec parse_mfa(string()) -> {ok, artifact_annotations:annotation_function()} | {error, term()}. +-spec parse_mfa(string()) -> artifact_annotations:annotation_function() | {error, term()}. parse_mfa(MFA) -> case erl_scan:string(MFA) of {ok, From 88736a7d59fb7f573543212e090489f1ee87d0ba Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Mon, 18 Mar 2024 10:28:26 -0700 Subject: [PATCH 0483/1133] Add simple `python_wheel` support Summary: Simple rule to package non-transitive Python libs from `libraries` into a `.whl`. Currently, native extensions are statically linked, but we might want to make this more intelligent (e.g. aware of a coarser- grained wheel-level dep tree). This provides an alternative to the `getdeps.py` + `cmake` approach (e.g. https://www.internalfb.com/code/fbsource/[291f44ac0559]/fbcode/ai_infra/rsc/build/airstore_client_cmake_build.sh) to export fbcode Python libraries for use outside of fbcode. 
Context: https://fb.prod.workplace.com/groups/metaconda/posts/257145424045728/?comment_id=257769783983292&reply_comment_id=257779243982346 Reviewed By: manav-a Differential Revision: D52058351 fbshipit-source-id: bb04b3b56625b4e0f7998c2e397ddf60893ef025 --- prelude/python/python_wheel.bzl | 183 ++++++++++++++++++++++++++++++++ prelude/python/tools/BUCK.v2 | 6 ++ prelude/python/tools/wheel.py | 101 ++++++++++++++++++ 3 files changed, 290 insertions(+) create mode 100644 prelude/python/python_wheel.bzl create mode 100644 prelude/python/tools/wheel.py diff --git a/prelude/python/python_wheel.bzl b/prelude/python/python_wheel.bzl new file mode 100644 index 000000000..b3cdea59b --- /dev/null +++ b/prelude/python/python_wheel.bzl @@ -0,0 +1,183 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") +load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") +load( + "@prelude//cxx:link.bzl", + "cxx_link_shared_library", +) +load( + "@prelude//cxx:link_types.bzl", + "link_options", +) +load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") +load( + "@prelude//linking:link_info.bzl", + "LinkArgs", + "LinkStrategy", + "get_lib_output_style", + "get_link_info", +) +load( + "@prelude//linking:linkable_graph.bzl", + "LinkableGraph", + "LinkableNode", # @unused Used as a type + "LinkableRootInfo", + "get_deps_for_link", + "get_linkable_graph_node_map_func", + get_link_info_for_node = "get_link_info", +) +load("@prelude//python:manifest.bzl", "create_manifest_for_entries") +load("@prelude//python:python.bzl", "PythonLibraryInfo") +load("@prelude//utils:expect.bzl", "expect") +load( + "@prelude//utils:graph_utils.bzl", + "breadth_first_traversal_by", +) +load("@prelude//decls/toolchains_common.bzl", "toolchains_common") +load("@prelude//transitions/constraint_overrides.bzl", "constraint_overrides_transition") + +def _link_deps( + link_infos: dict[Label, LinkableNode], + deps: list[Label], + link_strategy: LinkStrategy, + pic_behavior: PicBehavior) -> list[Label]: + """ + Return transitive deps required to link dynamically against the given deps. + This will following through deps of statically linked inputs and exported + deps of everything else (see https://fburl.com/diffusion/rartsbkw from v1). 
+ """ + + def find_deps(node: Label): + return get_deps_for_link(link_infos[node], link_strategy, pic_behavior) + + return breadth_first_traversal_by(link_infos, deps, find_deps) + +def _impl(ctx: AnalysisContext) -> list[Provider]: + providers = [] + + cmd = cmd_args(ctx.attrs._wheel[RunInfo]) + + name_parts = [ + ctx.attrs.dist or ctx.attrs.name, + ctx.attrs.version, + ctx.attrs.python, + ctx.attrs.abi, + ctx.attrs.platform, + ] + wheel = ctx.actions.declare_output("{}.whl".format("-".join(name_parts))) + cmd.add(cmd_args(wheel.as_output(), format = "--output={}")) + + cmd.add("--name={}".format(ctx.attrs.dist or ctx.attrs.name)) + cmd.add("--version={}".format(ctx.attrs.version)) + + srcs = [] + extensions = {} + for dep in ctx.attrs.libraries: + manifests = dep[PythonLibraryInfo].manifests.value + if manifests.srcs != None: + srcs.append(manifests.srcs) + if manifests.resources != None: + srcs.append(manifests.resources) + if manifests.extensions != None: + toolchain_info = get_cxx_toolchain_info(ctx) + items = manifests.extensions.items() + expect(len(items) == 1) + extension = items[0][0] + root = dep[LinkableRootInfo] + + # Add link inputs for the linkable root and any deps. 
+ inputs = [] + inputs.append(get_link_info( + infos = root.link_infos, + prefer_stripped = ctx.attrs.prefer_stripped_objects, + )) + link_infos = get_linkable_graph_node_map_func(dep[LinkableGraph])() + for dep in _link_deps( + link_infos, + root.deps, + LinkStrategy("static_pic"), + toolchain_info.pic_behavior, + ): + node = link_infos[dep] + output_style = get_lib_output_style( + LinkStrategy("static_pic"), + node.preferred_linkage, + toolchain_info.pic_behavior, + ) + inputs.append(get_link_info_for_node( + node, + output_style, + prefer_stripped = ctx.attrs.prefer_stripped_objects, + )) + + # link the rule + link_result = cxx_link_shared_library( + ctx = ctx, + output = extension, + opts = link_options( + links = [LinkArgs(infos = inputs)], + category_suffix = "native_extension", + identifier = extension, + link_execution_preference = LinkExecutionPreference("any"), + ), + ) + extensions[extension] = link_result.linked_object + + if extensions: + srcs.append( + create_manifest_for_entries( + ctx, + name = "extensions.txt", + entries = [ + (name, extension.output, "") + for name, extension in extensions.items() + ], + ), + ) + + for manifest in srcs: + cmd.add(cmd_args(manifest.manifest, format = "--srcs={}")) + for a, _ in manifest.artifacts: + cmd.hidden(a) + + ctx.actions.run(cmd, category = "wheel") + providers.append(DefaultInfo(default_output = wheel)) + + return providers + +python_wheel = rule( + impl = _impl, + cfg = constraint_overrides_transition, + attrs = dict( + dist = attrs.option(attrs.string(), default = None), + version = attrs.string(default = "1.0.0"), + python = attrs.string( + default = select({ + "ovr_config//third-party/python/constraints:3.10": "py3.10", + "ovr_config//third-party/python/constraints:3.11": "py3.11", + "ovr_config//third-party/python/constraints:3.12": "py3.12", + "ovr_config//third-party/python/constraints:3.8": "py3.8", + "ovr_config//third-party/python/constraints:3.9": "py3.9", + }), + ), + abi = attrs.string(default 
= "none"), + platform = attrs.string( + default = select({ + "DEFAULT": "any", + "ovr_config//os:linux-arm64": "linux_aarch64", + "ovr_config//os:linux-x86_64": "linux_x86_64", + }), + ), + constraint_overrides = attrs.list(attrs.string(), default = []), + libraries = attrs.list(attrs.dep(providers = [PythonLibraryInfo]), default = []), + prefer_stripped_objects = attrs.default_only(attrs.bool(default = False)), + _wheel = attrs.default_only(attrs.exec_dep(default = "prelude//python/tools:wheel")), + _cxx_toolchain = toolchains_common.cxx(), + ), +) diff --git a/prelude/python/tools/BUCK.v2 b/prelude/python/tools/BUCK.v2 index 1e325372f..897ce3072 100644 --- a/prelude/python/tools/BUCK.v2 +++ b/prelude/python/tools/BUCK.v2 @@ -126,3 +126,9 @@ prelude.python_bootstrap_binary( main = "fail_with_message.py", visibility = ["PUBLIC"], ) + +prelude.python_bootstrap_binary( + name = "wheel", + main = "wheel.py", + visibility = ["PUBLIC"], +) diff --git a/prelude/python/tools/wheel.py b/prelude/python/tools/wheel.py new file mode 100644 index 000000000..6e825b6ae --- /dev/null +++ b/prelude/python/tools/wheel.py @@ -0,0 +1,101 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +import argparse +import contextlib +import json +import os +import sys +import zipfile +from types import TracebackType +from typing import List, Optional, Set, Type + + +# pyre-fixme[24]: Generic type `AbstractContextManager` expects 1 type parameter. 
+class WheelBuilder(contextlib.AbstractContextManager): + + def __init__(self, *, name: str, version: str, output: str) -> None: + self._name = name + self._version = version + self._record: list[str] = [] + self._outf = zipfile.ZipFile(output, mode="w") + + def write(self, dst: str, src: str) -> None: + self._record.append(dst) + self._outf.write(filename=src, arcname=dst) + + def writestr(self, dst: str, contents: str) -> None: + self._record.append(dst) + self._outf.writestr(zinfo_or_arcname=dst, data=contents) + + def _write_record(self) -> None: + record = f"{self._name}-{self._version}.dist-info/RECORD" + self._outf.writestr( + record, "".join(["{},,\n".format(f) for f in (self._record + [record])]) + ) + + def close(self) -> None: + self.writestr( + f"{self._name}-{self._version}.dist-info/METADATA", + f"""\ +Name: {self._name} +Version: {self._version} +""", + ) + self.writestr( + f"{self._name}-{self._version}.dist-info/WHEEL", + """\ +Wheel-Version: 1.0 +""", + ) + + self._write_record() + self._outf.close() + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + self.close() + + +def main(argv: List[str]) -> None: + parser = argparse.ArgumentParser() + parser.add_argument("--output", required=True) + parser.add_argument("--name", required=True) + parser.add_argument("--version", required=True) + parser.add_argument("--srcs", action="append", default=[]) + args = parser.parse_args(argv[1:]) + + pkgs: Set[str] = set() + pkgs_with_init = set() + + def _add_pkg(pkg: str) -> None: + pkgs.add(pkg) + parent = os.path.dirname(pkg) + if parent: + _add_pkg(parent) + + with WheelBuilder(name=args.name, version=args.version, output=args.output) as whl: + for src in args.srcs: + with open(src) as f: + manifest = json.load(f) + for dst, src, *_ in manifest: + if dst.endswith((".py", ".so")): + pkg = os.path.dirname(dst) + _add_pkg(pkg) + if os.path.basename(dst) == 
"__init__.py": + pkgs_with_init.add(pkg) + whl.write(dst, src) + + for pkg in pkgs - pkgs_with_init: + whl.writestr(os.path.join(pkg, "__init__.py"), "") + + +sys.exit(main(sys.argv)) From 1e0237e097a72c2b9de234d19b6310693a4d62d0 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Mon, 18 Mar 2024 11:09:35 -0700 Subject: [PATCH 0484/1133] Move decls/apple_common.bzl -> apple/apple_common.bzl Summary: Follow-up from https://www.internalfb.com/diff/D54923012?dst_version_fbid=2015633288852606&transaction_fbid=726456599318108 Reviewed By: chatura-atapattu Differential Revision: D54955320 fbshipit-source-id: ba756e3025844fae38d75e11af989d25972038f6 --- prelude/{decls => apple}/apple_common.bzl | 0 prelude/decls/cxx_rules.bzl | 2 +- prelude/decls/ios_rules.bzl | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) rename prelude/{decls => apple}/apple_common.bzl (100%) diff --git a/prelude/decls/apple_common.bzl b/prelude/apple/apple_common.bzl similarity index 100% rename from prelude/decls/apple_common.bzl rename to prelude/apple/apple_common.bzl diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index 225980ffb..64239dafe 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -10,9 +10,9 @@ # the generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) +load("@prelude//apple:apple_common.bzl", "apple_common") load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:types.bzl", "Linkage") -load(":apple_common.bzl", "apple_common") load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "LinkableDepType", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":genrule_common.bzl", "genrule_common") diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index 42d9267fa..cf2fd9f53 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -10,10 +10,10 @@ # the 
generated docs, and so those should be verified to be accurate and # well-formatted (and then delete this TODO) +load("@prelude//apple:apple_common.bzl", "apple_common") load("@prelude//apple:resource_groups.bzl", "INLINED_RESOURCE_GROUP_MAP_ATTR") load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:types.bzl", "Linkage") -load(":apple_common.bzl", "apple_common") load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "IncludeType", "buck", "prelude_rule") load(":cxx_common.bzl", "cxx_common") load(":native_common.bzl", "native_common") From 14517f336e456143c55d799fe9e9457a19505907 Mon Sep 17 00:00:00 2001 From: Max Ovtsin Date: Mon, 18 Mar 2024 12:27:24 -0700 Subject: [PATCH 0485/1133] Add Swift module map only if explicit modules are enabled Summary: We don't need to add Swift module map in case of not enabled Explicit modules. Reviewed By: rmaz Differential Revision: D55021263 fbshipit-source-id: 1c13e791be7c90deeb8951f93f1608026b9b2492 --- prelude/apple/swift/swift_compilation.bzl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 2c405e346..edce1d7fe 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -566,7 +566,8 @@ def _get_shared_flags( # 3. Transitive SDK deps of user-defined deps. 
# (This is the case, when a user-defined dep exports a type from SDK module, # thus such SDK module should be implicitly visible to consumers of that custom dep) - _add_swift_module_map_args(ctx, sdk_swift_deps_tset, pcm_deps_tset, sdk_clang_deps_tset, cmd) + if uses_explicit_modules(ctx): + _add_swift_module_map_args(ctx, sdk_swift_deps_tset, pcm_deps_tset, sdk_clang_deps_tset, cmd) _add_clang_deps_flags(ctx, pcm_deps_tset, cmd) _add_swift_deps_flags(ctx, cmd) From ee256cbff786dc084e793ab9af001d2e0d7631e4 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Mon, 18 Mar 2024 16:31:22 -0700 Subject: [PATCH 0486/1133] _should_include_entitlements -> should_include_entitlements Summary: will be used later in other file Reviewed By: rmaz Differential Revision: D55014092 fbshipit-source-id: 91d2da9d3f608d9e0a728375619563a99d3cb911 --- prelude/apple/apple_entitlements.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/apple/apple_entitlements.bzl b/prelude/apple/apple_entitlements.bzl index 6a44ffb45..dd51e4a19 100644 --- a/prelude/apple/apple_entitlements.bzl +++ b/prelude/apple/apple_entitlements.bzl @@ -12,12 +12,12 @@ load(":apple_sdk_metadata.bzl", "IPhoneSimulatorSdkMetadata", "MacOSXCatalystSdk load(":apple_toolchain_types.bzl", "AppleToolchainInfo") def get_entitlements_codesign_args(ctx: AnalysisContext, codesign_type: CodeSignType) -> list[ArgLike]: - include_entitlements = _should_include_entitlements(ctx, codesign_type) + include_entitlements = should_include_entitlements(ctx, codesign_type) maybe_entitlements = _entitlements_file(ctx) if include_entitlements else None entitlements_args = ["--entitlements", maybe_entitlements] if maybe_entitlements else [] return entitlements_args -def _should_include_entitlements(ctx: AnalysisContext, codesign_type: CodeSignType) -> bool: +def should_include_entitlements(ctx: AnalysisContext, codesign_type: CodeSignType) -> bool: if codesign_type.value == "distribution": return True From 
63e8d65ffc929ff4a6d4b705193213d8e74c7a97 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Mon, 18 Mar 2024 16:31:22 -0700 Subject: [PATCH 0487/1133] add `codesign_entitlements` attribute to resource Summary: Introduce a `codesign_entitlements` field to `apple_resource` which will be used when this resource is codesigned on copy. Do not use it anywhere yet so no behavior change. Reviewed By: rmaz Differential Revision: D54586251 fbshipit-source-id: d7f8f668f6adb1493e697d5131a40711b2844fed --- prelude/apple/apple_bundle_part.bzl | 3 +++ prelude/apple/apple_bundle_resources.bzl | 6 ++++-- prelude/apple/apple_resource.bzl | 1 + prelude/apple/apple_resource_types.bzl | 1 + prelude/apple/apple_rules_impl.bzl | 1 + 5 files changed, 10 insertions(+), 2 deletions(-) diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index 9ad42106c..21cd21746 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -30,6 +30,8 @@ AppleBundlePart = record( new_name = field([str, None], None), # Marks parts which should be code signed separately from the whole bundle. codesign_on_copy = field(bool, False), + # Entitlements to use when this part is code signed separately. 
+ codesign_entitlements = field([Artifact, None], None), ) SwiftStdlibArguments = record( @@ -148,6 +150,7 @@ def assemble_bundle( spec_file, ] + codesign_args + platform_args + swift_args) command.hidden([part.source for part in all_parts]) + command.hidden([part.codesign_entitlements for part in all_parts if part.codesign_entitlements]) run_incremental_args = {} incremental_state = ctx.actions.declare_output("incremental_state.json").as_output() diff --git a/prelude/apple/apple_bundle_resources.bzl b/prelude/apple/apple_bundle_resources.bzl index 8f84a9159..2a692d867 100644 --- a/prelude/apple/apple_bundle_resources.bzl +++ b/prelude/apple/apple_bundle_resources.bzl @@ -186,6 +186,7 @@ def _copy_resources(ctx: AnalysisContext, specs: list[AppleResourceSpec]) -> lis destination = bundle_destination, destination_relative_path = None, codesign_on_copy = spec.codesign_files_on_copy, + codesign_entitlements = spec.codesign_entitlements, ) for x in spec.files] result += _bundle_parts_for_dirs(spec.dirs, bundle_destination, False) result += _bundle_parts_for_dirs(spec.content_dirs, bundle_destination, True) @@ -352,7 +353,8 @@ def _process_apple_resource_file_if_needed( file: Artifact, destination: AppleBundleDestination, destination_relative_path: [str, None], - codesign_on_copy: bool = False) -> AppleBundlePart: + codesign_on_copy: bool = False, + codesign_entitlements: [Artifact, None] = None) -> AppleBundlePart: output_dir = "_ProcessedResources" basename = paths.basename(file.short_path) output_is_contents_dir = False @@ -383,7 +385,7 @@ def _process_apple_resource_file_if_needed( # When name is empty string only content of the directory will be copied, as opposed to the directory itself. # When name is `None`, directory or file will be copied as it is, without renaming. 
new_name = destination_relative_path if destination_relative_path else ("" if output_is_contents_dir else None) - return AppleBundlePart(source = processed, destination = destination, new_name = new_name, codesign_on_copy = codesign_on_copy) + return AppleBundlePart(source = processed, destination = destination, new_name = new_name, codesign_on_copy = codesign_on_copy, codesign_entitlements = codesign_entitlements) # Returns a path relative to the _parent_ of the lproj dir. # For example, given a variant file with a short path of`XX/YY.lproj/ZZ` diff --git a/prelude/apple/apple_resource.bzl b/prelude/apple/apple_resource.bzl index 7955fe1ea..e24538eec 100644 --- a/prelude/apple/apple_resource.bzl +++ b/prelude/apple/apple_resource.bzl @@ -18,6 +18,7 @@ def apple_resource_impl(ctx: AnalysisContext) -> list[Provider]: variant_files = ctx.attrs.variants or [], named_variant_files = ctx.attrs.named_variants or {}, codesign_files_on_copy = ctx.attrs.codesign_on_copy, + codesign_entitlements = ctx.attrs.codesign_entitlements, ) # `files` can contain `apple_library()` which in turn can have `apple_resource()` deps diff --git a/prelude/apple/apple_resource_types.bzl b/prelude/apple/apple_resource_types.bzl index 3104682fb..cbe3d2e3c 100644 --- a/prelude/apple/apple_resource_types.bzl +++ b/prelude/apple/apple_resource_types.bzl @@ -28,6 +28,7 @@ AppleResourceSpec = record( # `{ "ru.lproj" : ["Localizable.strings"] }` named_variant_files = field(dict[str, list[Artifact]], {}), codesign_files_on_copy = field(bool, False), + codesign_entitlements = field([Artifact, None], None), ) # Used when invoking `ibtool`, `actool` and `momc` diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index e7d634f6c..44afa7ba3 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -168,6 +168,7 @@ extra_attributes = { "_ipa_compression_level": attrs.enum(IpaCompressionLevel.values()), }, "apple_resource": { + 
"codesign_entitlements": attrs.option(attrs.source(), default = None), "codesign_on_copy": attrs.bool(default = False), "content_dirs": attrs.list(attrs.source(allow_directory = True), default = []), "dirs": attrs.list(attrs.source(allow_directory = True), default = []), From 201bbf6777c945eff3cc6da9b6a2c208248f59f4 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Mon, 18 Mar 2024 16:31:22 -0700 Subject: [PATCH 0488/1133] pass `codesign_entitlements` from rules to bundling script Summary: Pass separate entitlements for codesigned on copy path into bundling script/action. Do not use it anywhere yet except in JSON parsing so no behavior change. Reviewed By: rmaz Differential Revision: D55016668 fbshipit-source-id: 95ea78739508a797940d7790acab14536fb225e6 --- prelude/apple/apple_bundle_part.bzl | 11 +++++++---- .../apple/tools/bundling/assemble_bundle_types.py | 14 +++++++++++++- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index 21cd21746..9f2a26628 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -10,7 +10,7 @@ load("@prelude//utils:expect.bzl", "expect") load(":apple_bundle_destination.bzl", "AppleBundleDestination", "bundle_relative_path_for_destination") load(":apple_bundle_utility.bzl", "get_extension_attr", "get_product_name") load(":apple_code_signing_types.bzl", "CodeSignType") -load(":apple_entitlements.bzl", "get_entitlements_codesign_args") +load(":apple_entitlements.bzl", "get_entitlements_codesign_args", "should_include_entitlements") load(":apple_sdk.bzl", "get_apple_sdk_name") load(":apple_sdk_metadata.bzl", "get_apple_sdk_metadata_for_sdk_name") load(":apple_swift_stdlib.bzl", "should_copy_swift_stdlib") @@ -55,13 +55,13 @@ def assemble_bundle( Returns extra subtargets related to bundling. 
""" all_parts = parts + [info_plist_part] if info_plist_part else [] - spec_file = _bundle_spec_json(ctx, all_parts) + codesign_type = _detect_codesign_type(ctx, skip_adhoc_signing) + spec_file = _bundle_spec_json(ctx, all_parts, codesign_type) tools = ctx.attrs._apple_tools[AppleToolsInfo] tool = tools.assemble_bundle codesign_args = [] - codesign_type = _detect_codesign_type(ctx, skip_adhoc_signing) codesign_tool = ctx.attrs._apple_toolchain[AppleToolchainInfo].codesign if ctx.attrs._dry_run_code_signing: @@ -222,8 +222,9 @@ def get_apple_bundle_part_relative_destination_path(ctx: AnalysisContext, part: return paths.join(bundle_relative_path, destination_file_or_directory_name) # Returns JSON to be passed into bundle assembling tool. It should contain a dictionary which maps bundle relative destination paths to source paths." -def _bundle_spec_json(ctx: AnalysisContext, parts: list[AppleBundlePart]) -> Artifact: +def _bundle_spec_json(ctx: AnalysisContext, parts: list[AppleBundlePart], codesign_type: CodeSignType) -> Artifact: specs = [] + include_entitlements = should_include_entitlements(ctx, codesign_type) for part in parts: part_spec = { @@ -232,6 +233,8 @@ def _bundle_spec_json(ctx: AnalysisContext, parts: list[AppleBundlePart]) -> Art } if part.codesign_on_copy: part_spec["codesign_on_copy"] = True + if include_entitlements and part.codesign_entitlements: + part_spec["codesign_entitlements"] = part.codesign_entitlements specs.append(part_spec) return ctx.actions.write_json("bundle_spec.json", specs) diff --git a/prelude/apple/tools/bundling/assemble_bundle_types.py b/prelude/apple/tools/bundling/assemble_bundle_types.py index 88233489f..f95d16100 100644 --- a/prelude/apple/tools/bundling/assemble_bundle_types.py +++ b/prelude/apple/tools/bundling/assemble_bundle_types.py @@ -26,6 +26,7 @@ class BundleSpecItem: # Should be bundle relative path, empty string means the root of the bundle dst: str codesign_on_copy: bool = False + codesign_entitlements: 
Optional[str] = None def __eq__(self: BundleSpecItem, other: Optional[BundleSpecItem]) -> bool: return ( @@ -33,19 +34,30 @@ def __eq__(self: BundleSpecItem, other: Optional[BundleSpecItem]) -> bool: and self.src == other.src and self.dst == other.dst and self.codesign_on_copy == other.codesign_on_copy + and self.codesign_entitlements == other.codesign_entitlements ) def __ne__(self: BundleSpecItem, other: BundleSpecItem) -> bool: return not self.__eq__(other) def __hash__(self: BundleSpecItem) -> int: - return hash((self.src, self.dst, self.codesign_on_copy)) + return hash( + (self.src, self.dst, self.codesign_on_copy, self.codesign_entitlements) + ) def __lt__(self: BundleSpecItem, other: BundleSpecItem) -> bool: return ( self.src < other.src or self.dst < other.dst or self.codesign_on_copy < other.codesign_on_copy + or ( + self.codesign_entitlements < other.codesign_entitlements + if (self.codesign_entitlements and other.codesign_entitlements) + else ( + self.codesign_entitlements is None + and other.codesign_entitlements is not None + ) + ) ) From 9453d04e4643ad827aec3bf29f93e52f559b3efc Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Mon, 18 Mar 2024 17:47:27 -0700 Subject: [PATCH 0489/1133] Only allow the modern version of the resource group map API Summary: Followup from D54923012, we only use the newer API for resource_group_map where it must be a target that returns a `ResourceGroupInfo`. Delete `INLINED_RESOURCE_GROUP_MAP_ATTR`. 
Reviewed By: d16r Differential Revision: D55025628 fbshipit-source-id: 0c5ab1b5b6cd5d4e917d56604a7bd9a6e9273cee --- prelude/apple/resource_groups.bzl | 15 +-------------- prelude/apple/user/apple_watchos_bundle.bzl | 4 ++-- prelude/apple/user/resource_group_map.bzl | 18 +++++++++++++----- prelude/decls/ios_rules.bzl | 4 ++-- 4 files changed, 18 insertions(+), 23 deletions(-) diff --git a/prelude/apple/resource_groups.bzl b/prelude/apple/resource_groups.bzl index beb4fd62d..5a32cdd80 100644 --- a/prelude/apple/resource_groups.bzl +++ b/prelude/apple/resource_groups.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//cxx:groups_types.bzl", "Group", "MATCH_ALL_LABEL", "Traversal") +load("@prelude//cxx:groups_types.bzl", "Group", "MATCH_ALL_LABEL") load( "@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", @@ -33,19 +33,6 @@ ResourceGroupInfo = provider( RESOURCE_GROUP_MAP_ATTR = attrs.option(attrs.dep(providers = [ResourceGroupInfo]), default = None) -INLINED_RESOURCE_GROUP_MAP_ATTR = attrs.list( - attrs.tuple( - attrs.string(), - attrs.list( - attrs.tuple( - attrs.dep(), - attrs.enum(Traversal.values()), - attrs.option(attrs.string()), - ), - ), - ), -) - ResourceGraphNode = record( label = field(Label), # Attribute labels on the target. 
diff --git a/prelude/apple/user/apple_watchos_bundle.bzl b/prelude/apple/user/apple_watchos_bundle.bzl index ce0bd9ddb..2c2cf7fc0 100644 --- a/prelude/apple/user/apple_watchos_bundle.bzl +++ b/prelude/apple/user/apple_watchos_bundle.bzl @@ -7,7 +7,7 @@ load("@prelude//apple:apple_bundle.bzl", "apple_bundle_impl") load("@prelude//apple:apple_rules_impl_utility.bzl", "apple_bundle_extra_attrs") -load("@prelude//apple:resource_groups.bzl", "INLINED_RESOURCE_GROUP_MAP_ATTR") +load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") load("@prelude//decls/ios_rules.bzl", "AppleBundleExtension") load(":watch_transition.bzl", "watch_transition") @@ -34,7 +34,7 @@ def _apple_bundle_base_attrs(): "platform_binary": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.dep())), default = None), "product_name": attrs.option(attrs.string(), default = None), "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": attrs.option(INLINED_RESOURCE_GROUP_MAP_ATTR, default = None), + "resource_group_map": attrs.option(RESOURCE_GROUP_MAP_ATTR, default = None), "skip_copying_swift_stdlib": attrs.option(attrs.bool(), default = None), "try_skip_code_signing": attrs.option(attrs.bool(), default = None), "xcode_product_type": attrs.option(attrs.string(), default = None), diff --git a/prelude/apple/user/resource_group_map.bzl b/prelude/apple/user/resource_group_map.bzl index fd60d3175..d9bee033d 100644 --- a/prelude/apple/user/resource_group_map.bzl +++ b/prelude/apple/user/resource_group_map.bzl @@ -7,7 +7,6 @@ load( "@prelude//apple:resource_groups.bzl", - "INLINED_RESOURCE_GROUP_MAP_ATTR", "ResourceGraphNode", # @unused Used as a type "ResourceGroupInfo", "create_resource_graph", @@ -23,12 +22,10 @@ load( load( "@prelude//cxx:groups_types.bzl", "GroupMapping", # @unused Used as a type + "Traversal", ) load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") -def 
resource_group_map_attr(): - return attrs.option(attrs.dep(providers = [ResourceGroupInfo]), default = None) - def _impl(ctx: AnalysisContext) -> list[Provider]: resource_groups = parse_groups_definitions(ctx.attrs.map, lambda root: root.label) @@ -102,6 +99,17 @@ registration_spec = RuleRegistrationSpec( name = "resource_group_map", impl = _impl, attrs = { - "map": INLINED_RESOURCE_GROUP_MAP_ATTR, + "map": attrs.list( + attrs.tuple( + attrs.string(), + attrs.list( + attrs.tuple( + attrs.dep(), + attrs.enum(Traversal.values()), + attrs.option(attrs.string()), + ), + ), + ), + ), }, ) diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index cf2fd9f53..16f37e13a 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -11,7 +11,7 @@ # well-formatted (and then delete this TODO) load("@prelude//apple:apple_common.bzl", "apple_common") -load("@prelude//apple:resource_groups.bzl", "INLINED_RESOURCE_GROUP_MAP_ATTR") +load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:types.bzl", "Linkage") load(":common.bzl", "CxxRuntimeType", "CxxSourceType", "HeadersAsRawHeadersMode", "IncludeType", "buck", "prelude_rule") @@ -373,7 +373,7 @@ apple_bundle = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "platform_binary": attrs.option(attrs.list(attrs.tuple(attrs.regex(), attrs.dep())), default = None), "resource_group": attrs.option(attrs.string(), default = None), - "resource_group_map": attrs.option(INLINED_RESOURCE_GROUP_MAP_ATTR, default = None), + "resource_group_map": attrs.option(RESOURCE_GROUP_MAP_ATTR, default = None), "skip_copying_swift_stdlib": attrs.option(attrs.bool(), default = None), "try_skip_code_signing": attrs.option(attrs.bool(), default = None), "xcode_product_type": attrs.option(attrs.string(), default = None), From 7aef19ba1ad311b1a1321f32ea3ac14970cb91a0 Mon Sep 17 00:00:00 2001 From: Cesar 
Valdez Date: Mon, 18 Mar 2024 20:34:59 -0700 Subject: [PATCH 0490/1133] Add support to specify a module to apple core data rule Summary: The core_data_model rule takes the current product_name and uses it for the module parameter in `momc`. This is fine for core data models that use a global namespace (default for objc) but it doesn't work for classes that are defined inside a Swift library. This diff adds an optional `module` property to `apple_core_data` which is used if it is not null. Reviewed By: chatura-atapattu Differential Revision: D55033314 fbshipit-source-id: 183c365c1bc9baf2427761663f48a379943056b1 --- prelude/apple/apple_core_data.bzl | 3 ++- prelude/apple/apple_core_data_types.bzl | 1 + prelude/apple/apple_rules_impl.bzl | 1 + 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/prelude/apple/apple_core_data.bzl b/prelude/apple/apple_core_data.bzl index 82721a4af..97a5916a4 100644 --- a/prelude/apple/apple_core_data.bzl +++ b/prelude/apple/apple_core_data.bzl @@ -13,6 +13,7 @@ load(":resource_groups.bzl", "create_resource_graph") def apple_core_data_impl(ctx: AnalysisContext) -> list[Provider]: spec = AppleCoreDataSpec( + module = ctx.attrs.module, path = ctx.attrs.path, ) graph = create_resource_graph( @@ -64,7 +65,7 @@ def _get_momc_command(ctx: AnalysisContext, core_data_spec: AppleCoreDataSpec, p "--" + get_apple_sdk_name(ctx) + "-deployment-target", get_bundle_min_target_version(ctx, ctx.attrs.binary), "--module", - product_name, + core_data_spec.module if core_data_spec.module else product_name, core_data_spec.path, output_directory, ], delimiter = " ") diff --git a/prelude/apple/apple_core_data_types.bzl b/prelude/apple/apple_core_data_types.bzl index 700a5d602..a3cc54c04 100644 --- a/prelude/apple/apple_core_data_types.bzl +++ b/prelude/apple/apple_core_data_types.bzl @@ -6,5 +6,6 @@ # of this source tree.
AppleCoreDataSpec = record( + module = field(str | None), path = field(Artifact), ) diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 44afa7ba3..a6db6e97e 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -224,6 +224,7 @@ extra_attributes = { "apple_universal_executable": _apple_universal_executable_extra_attrs(), "apple_xcuitest": apple_xcuitest_extra_attrs(), "core_data_model": { + "module": attrs.option(attrs.string(), default = None), "path": attrs.source(allow_directory = True), }, "prebuilt_apple_framework": { From 84e0702d7bc8d02504950c8ae3c69a855e6b29ce Mon Sep 17 00:00:00 2001 From: generatedunixname89002005232357 Date: Tue, 19 Mar 2024 07:10:28 -0700 Subject: [PATCH 0491/1133] Revert D54123919: Multisect successfully blamed "D54123919: [fbcode] substitute_minimal_xcode_with_pika_fat = true" for one test failure Summary: This diff reverts D54123919 D54123919: [fbcode] substitute_minimal_xcode_with_pika_fat = true by rmaz causes the following test failure: Tests affected: - [cogwheel:cogwheel_ios_mobile_lab_instagram_startup_jest#main](https://www.internalfb.com/intern/test/281475023842520/) Here's the Multisect link: https://www.internalfb.com/multisect/4620748 Here are the tasks that are relevant to this breakage: The backout may land if someone accepts it. If this diff has been generated in error, you can Commandeer and Abandon it. 
Reviewed By: fejese Differential Revision: D55044017 fbshipit-source-id: 95f34494e9d69244e6343df946588e531e547135 --- prelude/cxx/linker.bzl | 2 -- 1 file changed, 2 deletions(-) diff --git a/prelude/cxx/linker.bzl b/prelude/cxx/linker.bzl index 17e7951fd..4e6180646 100644 --- a/prelude/cxx/linker.bzl +++ b/prelude/cxx/linker.bzl @@ -216,9 +216,7 @@ def get_ignore_undefined_symbols_flags(linker_type: str) -> list[str]: args.append("-Wl,--allow-shlib-undefined") args.append("-Wl,--unresolved-symbols=ignore-all") elif linker_type == "darwin": - # dynamic_lookup is incompatible with fixup chains args.append("-Wl,-undefined,dynamic_lookup") - args.append("-Wl,-no_fixup_chains") else: fail("Linker type {} not supported".format(linker_type)) From abd3ada7911c4da94f149c015718b7fc6d49d5c5 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 19 Mar 2024 12:17:32 -0700 Subject: [PATCH 0492/1133] introduce `CodesignedPath` and use in `_codesign_paths` Summary: Intruduce an auxiliary struct to bind together path and entitlements which are used when codesigning that path. No actual behavior change. 
Reviewed By: rmaz Differential Revision: D55016955 fbshipit-source-id: dbb61f7d62ea79270e1a348506995120006c9962 --- .../tools/code_signing/codesign_bundle.py | 46 +++++++++++-------- 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index a4145761c..9454434a1 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -56,6 +56,18 @@ _LOGGER: logging.Logger = logging.getLogger(__name__) +@dataclass +class CodesignedPath: + path: Path + """ + Path relative to bundle root which needs to be codesigned + """ + entitlements: Optional[Path] + """ + Path to entitlements to be used when codesigning, relative to buck project + """ + + def _select_provisioning_profile( info_plist_metadata: InfoPlistMetadata, provisioning_profiles_dir: Path, @@ -398,11 +410,13 @@ def _dry_codesign_everything( # First sign codesign-on-copy directory paths _codesign_paths( - paths=codesign_on_copy_directory_paths, + paths=[ + CodesignedPath(path=p, entitlements=None) + for p in codesign_on_copy_directory_paths + ], identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=None, platform=platform, codesign_args=codesign_args, ) @@ -418,11 +432,10 @@ def _dry_codesign_everything( # Lastly sign whole bundle _codesign_paths( - paths=[bundle_path], + paths=[CodesignedPath(path=bundle_path, entitlements=entitlements)], identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=entitlements, platform=platform, codesign_args=codesign_args, ) @@ -441,9 +454,11 @@ def _codesign_everything( ) -> None: # First sign codesign-on-copy paths codesign_on_copy_filtered_paths = _filter_out_fast_adhoc_paths( - paths=[bundle_path / path for path in codesign_on_copy_paths], + paths=[ + 
CodesignedPath(path=bundle_path / path, entitlements=None) + for path in codesign_on_copy_paths + ], identity_fingerprint=identity_fingerprint, - entitlements=entitlements, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) @@ -452,15 +467,13 @@ def _codesign_everything( identity_fingerprint, tmp_dir, codesign_command_factory, - None, platform, codesign_args, ) # Lastly sign whole bundle root_bundle_paths = _filter_out_fast_adhoc_paths( - paths=[bundle_path], + paths=[CodesignedPath(path=bundle_path, entitlements=entitlements)], identity_fingerprint=identity_fingerprint, - entitlements=entitlements, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) @@ -469,7 +482,6 @@ def _codesign_everything( identity_fingerprint, tmp_dir, codesign_command_factory, - entitlements, platform, codesign_args, ) @@ -535,11 +547,10 @@ def _spawn_codesign_process( def _codesign_paths( - paths: List[Path], + paths: List[CodesignedPath], identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], platform: ApplePlatform, codesign_args: List[str], ) -> None: @@ -548,11 +559,11 @@ def _codesign_paths( with ExitStack() as stack: for path in paths: process = _spawn_codesign_process( - path=path, + path=path.path, identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=entitlements, + entitlements=path.entitlements, stack=stack, codesign_args=codesign_args, ) @@ -564,12 +575,11 @@ def _codesign_paths( def _filter_out_fast_adhoc_paths( - paths: List[Path], + paths: List[CodesignedPath], identity_fingerprint: str, - entitlements: Optional[Path], platform: ApplePlatform, fast_adhoc_signing: bool, -) -> List[Path]: +) -> List[CodesignedPath]: if not fast_adhoc_signing: return paths # TODO(T149863217): Make skip checks run in parallel, they're usually fast (~15ms) @@ -578,6 +588,6 @@ def _filter_out_fast_adhoc_paths( p for p in paths if not 
should_skip_adhoc_signing_path( - p, identity_fingerprint, entitlements, platform + p.path, identity_fingerprint, p.entitlements, platform ) ] From 049a25ebe9739982d9217db52582ea8585ed3e30 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 19 Mar 2024 12:17:32 -0700 Subject: [PATCH 0493/1133] use `CodesignedPath` in `_codesign_everything` Summary: Refactoring, no behavior change Reviewed By: rmaz Differential Revision: D55016959 fbshipit-source-id: d66eb72e8d68ee17076a1a77f151bec42b37bbe7 --- .../tools/code_signing/codesign_bundle.py | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 9454434a1..e1f2e31e3 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -247,12 +247,16 @@ def codesign_bundle( ) _LOGGER.info(f"Fast adhoc signing enabled: {fast_adhoc_signing_enabled}") _codesign_everything( - bundle_path=bundle_path, - codesign_on_copy_paths=codesign_on_copy_paths, + root=CodesignedPath( + path=bundle_path, entitlements=prepared_entitlements_path + ), + codesign_on_copy_paths=[ + CodesignedPath(path=bundle_path / path, entitlements=None) + for path in codesign_on_copy_paths + ], identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=DefaultCodesignCommandFactory(codesign_tool), - entitlements=prepared_entitlements_path, platform=platform, fast_adhoc_signing=fast_adhoc_signing_enabled, codesign_args=codesign_args, @@ -442,22 +446,18 @@ def _dry_codesign_everything( def _codesign_everything( - bundle_path: Path, - codesign_on_copy_paths: List[str], + root: CodesignedPath, + codesign_on_copy_paths: List[CodesignedPath], identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], platform: ApplePlatform, fast_adhoc_signing: bool, 
codesign_args: List[str], ) -> None: # First sign codesign-on-copy paths codesign_on_copy_filtered_paths = _filter_out_fast_adhoc_paths( - paths=[ - CodesignedPath(path=bundle_path / path, entitlements=None) - for path in codesign_on_copy_paths - ], + paths=codesign_on_copy_paths, identity_fingerprint=identity_fingerprint, platform=platform, fast_adhoc_signing=fast_adhoc_signing, @@ -471,14 +471,14 @@ def _codesign_everything( codesign_args, ) # Lastly sign whole bundle - root_bundle_paths = _filter_out_fast_adhoc_paths( - paths=[CodesignedPath(path=bundle_path, entitlements=entitlements)], + root_filtered_paths = _filter_out_fast_adhoc_paths( + paths=[root], identity_fingerprint=identity_fingerprint, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) _codesign_paths( - root_bundle_paths, + root_filtered_paths, identity_fingerprint, tmp_dir, codesign_command_factory, From 4bbcc16bb037e41603c1a35d8ce318d66cbd96cd Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 19 Mar 2024 12:17:32 -0700 Subject: [PATCH 0494/1133] use `CodesignedPath` in `_dry_codesign_everything` Summary: Refactoring, no behavior change intended. 
Reviewed By: rmaz Differential Revision: D55016958 fbshipit-source-id: 122ad821e8f7152d57369cc7e45359b36f43dc87 --- .../tools/code_signing/codesign_bundle.py | 39 +++++++++---------- 1 file changed, 18 insertions(+), 21 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index e1f2e31e3..83b5b9e7a 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -225,18 +225,23 @@ def codesign_bundle( prepared_entitlements_path = entitlements_path selected_identity_fingerprint = signing_context.codesign_identity + root = CodesignedPath(path=bundle_path, entitlements=prepared_entitlements_path) + codesigned_on_copy = [ + CodesignedPath(path=bundle_path / path, entitlements=None) + for path in codesign_on_copy_paths + ] + if codesign_configuration is CodesignConfiguration.dryRun: if codesign_tool is None: raise RuntimeError( "Expected codesign tool not to be the default one when dry run codesigning is requested." 
) _dry_codesign_everything( - bundle_path=bundle_path, - codesign_on_copy_paths=codesign_on_copy_paths, + root=root, + codesign_on_copy_paths=codesigned_on_copy, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_tool=codesign_tool, - entitlements=prepared_entitlements_path, platform=platform, codesign_args=codesign_args, ) @@ -247,13 +252,8 @@ def codesign_bundle( ) _LOGGER.info(f"Fast adhoc signing enabled: {fast_adhoc_signing_enabled}") _codesign_everything( - root=CodesignedPath( - path=bundle_path, entitlements=prepared_entitlements_path - ), - codesign_on_copy_paths=[ - CodesignedPath(path=bundle_path / path, entitlements=None) - for path in codesign_on_copy_paths - ], + root=root, + codesign_on_copy_paths=codesigned_on_copy, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=DefaultCodesignCommandFactory(codesign_tool), @@ -396,28 +396,23 @@ def _read_entitlements_file(path: Optional[Path]) -> Optional[Dict[str, Any]]: def _dry_codesign_everything( - bundle_path: Path, - codesign_on_copy_paths: List[str], + root: CodesignedPath, + codesign_on_copy_paths: List[CodesignedPath], identity_fingerprint: str, tmp_dir: str, codesign_tool: Path, - entitlements: Optional[Path], platform: ApplePlatform, codesign_args: List[str], ) -> None: codesign_command_factory = DryRunCodesignCommandFactory(codesign_tool) - codesign_on_copy_abs_paths = [bundle_path / path for path in codesign_on_copy_paths] codesign_on_copy_directory_paths = [ - p for p in codesign_on_copy_abs_paths if p.is_dir() + p for p in codesign_on_copy_paths if p.path.is_dir() ] # First sign codesign-on-copy directory paths _codesign_paths( - paths=[ - CodesignedPath(path=p, entitlements=None) - for p in codesign_on_copy_directory_paths - ], + paths=codesign_on_copy_directory_paths, identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, @@ -428,7 +423,9 @@ def 
_dry_codesign_everything( # Dry codesigning creates a .plist inside every directory it signs. # That approach doesn't work for files so those files are written into .plist for root bundle. codesign_on_copy_file_paths = [ - p.relative_to(bundle_path) for p in codesign_on_copy_abs_paths if p.is_file() + p.path.relative_to(root.path) + for p in codesign_on_copy_paths + if p.path.is_file() ] codesign_command_factory.set_codesign_on_copy_file_paths( codesign_on_copy_file_paths @@ -436,7 +433,7 @@ def _dry_codesign_everything( # Lastly sign whole bundle _codesign_paths( - paths=[CodesignedPath(path=bundle_path, entitlements=entitlements)], + paths=[root], identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, From dc19f5519c2b6a0538c8bfa890d913a46367e257 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 19 Mar 2024 12:17:32 -0700 Subject: [PATCH 0495/1133] simplify _spawn_codesign_process signature Summary: No behavior change intended, refactoring. 
Reviewed By: rmaz Differential Revision: D55016957 fbshipit-source-id: 8896f8e19e888f00fc99cf633f9d714193af1c49 --- prelude/apple/tools/code_signing/codesign_bundle.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 83b5b9e7a..20203606c 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -529,16 +529,15 @@ def _spawn_process( def _spawn_codesign_process( - path: Path, + path: CodesignedPath, identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], stack: ExitStack, codesign_args: List[str], ) -> ParallelProcess: command = codesign_command_factory.codesign_command( - path, identity_fingerprint, entitlements, codesign_args + path.path, identity_fingerprint, path.entitlements, codesign_args ) return _spawn_process(command=command, tmp_dir=tmp_dir, stack=stack) @@ -556,11 +555,10 @@ def _codesign_paths( with ExitStack() as stack: for path in paths: process = _spawn_codesign_process( - path=path.path, + path=path, identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=path.entitlements, stack=stack, codesign_args=codesign_args, ) From 6b056c98532545066cec1d8bdfb1fb281462ed42 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 19 Mar 2024 12:17:32 -0700 Subject: [PATCH 0496/1133] use `CodesignPath` in `codesign_bundle` Summary: no behavior change intended, just refactoring Reviewed By: rmaz Differential Revision: D55016956 fbshipit-source-id: 10c9ba7abda96a5a6083cf4d7c4ca243efa3e8eb --- prelude/apple/tools/bundling/main.py | 13 +++-- .../tools/code_signing/codesign_bundle.py | 49 +++++++++---------- prelude/apple/tools/code_signing/main.py | 13 ++++- 3 files changed, 43 insertions(+), 32 deletions(-) diff --git 
a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index 851ad0a6d..c4652e97a 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -22,6 +22,7 @@ AdhocSigningContext, codesign_bundle, CodesignConfiguration, + CodesignedPath, signing_context_with_profile_selection, ) from apple.tools.code_signing.list_codesign_identities import ( @@ -384,12 +385,18 @@ def _main() -> None: codesign_on_copy_paths = [ i.dst for i in spec if i.codesign_on_copy ] + swift_stdlib_paths + + bundle_path = CodesignedPath(path=args.output, entitlements=args.entitlements) + codesigned_on_copy = [ + CodesignedPath(path=bundle_path.path / path, entitlements=None) + for path in codesign_on_copy_paths + ] + codesign_bundle( - bundle_path=args.output, + bundle_path=bundle_path, signing_context=signing_context, - entitlements_path=args.entitlements, platform=args.platform, - codesign_on_copy_paths=codesign_on_copy_paths, + codesign_on_copy_paths=codesigned_on_copy, codesign_args=args.codesign_args, codesign_tool=args.codesign_tool, codesign_configuration=args.codesign_configuration, diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 20203606c..27e4b526a 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -183,11 +183,10 @@ class CodesignConfiguration(str, Enum): def codesign_bundle( - bundle_path: Path, + bundle_path: CodesignedPath, signing_context: Union[AdhocSigningContext, SigningContextWithProfileSelection], - entitlements_path: Optional[Path], platform: ApplePlatform, - codesign_on_copy_paths: List[str], + codesign_on_copy_paths: List[CodesignedPath], codesign_args: List[str], codesign_tool: Optional[Path] = None, codesign_configuration: Optional[CodesignConfiguration] = None, @@ -203,12 +202,13 @@ def codesign_bundle( ) if selection_profile_context: - prepared_entitlements_path = 
_prepare_entitlements_and_info_plist( - bundle_path=bundle_path, - entitlements_path=entitlements_path, - platform=platform, - signing_context=selection_profile_context, - tmp_dir=tmp_dir, + bundle_path_with_prepared_entitlements = ( + _prepare_entitlements_and_info_plist( + bundle_path=bundle_path, + platform=platform, + signing_context=selection_profile_context, + tmp_dir=tmp_dir, + ) ) selected_identity_fingerprint = ( selection_profile_context.selected_profile_info.identity.fingerprint @@ -222,23 +222,17 @@ def codesign_bundle( raise AssertionError( "Expected no profile selection context in `AdhocSigningContext` when `selection_profile_context` is `None`." ) - prepared_entitlements_path = entitlements_path + bundle_path_with_prepared_entitlements = bundle_path selected_identity_fingerprint = signing_context.codesign_identity - root = CodesignedPath(path=bundle_path, entitlements=prepared_entitlements_path) - codesigned_on_copy = [ - CodesignedPath(path=bundle_path / path, entitlements=None) - for path in codesign_on_copy_paths - ] - if codesign_configuration is CodesignConfiguration.dryRun: if codesign_tool is None: raise RuntimeError( "Expected codesign tool not to be the default one when dry run codesigning is requested." 
) _dry_codesign_everything( - root=root, - codesign_on_copy_paths=codesigned_on_copy, + root=bundle_path_with_prepared_entitlements, + codesign_on_copy_paths=codesign_on_copy_paths, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_tool=codesign_tool, @@ -252,8 +246,8 @@ def codesign_bundle( ) _LOGGER.info(f"Fast adhoc signing enabled: {fast_adhoc_signing_enabled}") _codesign_everything( - root=root, - codesign_on_copy_paths=codesigned_on_copy, + root=bundle_path_with_prepared_entitlements, + codesign_on_copy_paths=codesign_on_copy_paths, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=DefaultCodesignCommandFactory(codesign_tool), @@ -264,16 +258,15 @@ def codesign_bundle( def _prepare_entitlements_and_info_plist( - bundle_path: Path, - entitlements_path: Optional[Path], + bundle_path: CodesignedPath, platform: ApplePlatform, signing_context: SigningContextWithProfileSelection, tmp_dir: str, -) -> Path: +) -> CodesignedPath: info_plist_metadata = signing_context.info_plist_metadata selected_profile = signing_context.selected_profile_info.profile prepared_entitlements_path = prepare_code_signing_entitlements( - entitlements_path, + bundle_path.entitlements, info_plist_metadata.bundle_id, selected_profile, tmp_dir, @@ -286,13 +279,15 @@ def _prepare_entitlements_and_info_plist( ) os.replace( prepared_info_plist_path, - bundle_path / signing_context.info_plist_destination, + bundle_path.path / signing_context.info_plist_destination, ) shutil.copy2( selected_profile.file_path, - bundle_path / platform.embedded_provisioning_profile_path(), + bundle_path.path / platform.embedded_provisioning_profile_path(), + ) + return CodesignedPath( + path=bundle_path.path, entitlements=prepared_entitlements_path ) - return prepared_entitlements_path async def _fast_read_provisioning_profiles_async( diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index 
eba6fbf2a..dd0c37ca6 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -15,6 +15,7 @@ from .codesign_bundle import ( AdhocSigningContext, codesign_bundle, + CodesignedPath, signing_context_with_profile_selection, ) from .list_codesign_identities import ListCodesignIdentities @@ -114,12 +115,20 @@ def _main() -> None: platform=args.platform, should_use_fast_provisioning_profile_parsing=args.fast_provisioning_profile_parsing, ) + + bundle_path = CodesignedPath( + path=args.bundle_path, entitlements=args.entitlements + ) + codesign_on_copy_paths = [ + CodesignedPath(path=bundle_path.path / path, entitlements=None) + for path in args.codesign_on_copy + ] + codesign_bundle( bundle_path=args.bundle_path, signing_context=signing_context, - entitlements_path=args.entitlements, platform=args.platform, - codesign_on_copy_paths=args.codesign_on_copy or [], + codesign_on_copy_paths=codesign_on_copy_paths, codesign_args=[], ) except CodeSignProvisioningError as e: From 881b07adc6f3550f0a212770fbd3c6876cbcae4e Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 19 Mar 2024 13:12:04 -0700 Subject: [PATCH 0497/1133] remove use_m1_simulator field Summary: cleanup Reviewed By: rmaz Differential Revision: D55073777 fbshipit-source-id: df2fa2758f342b79fb5b4c71d76800177ffa59f5 --- prelude/apple/apple_rules_impl_utility.bzl | 1 - prelude/apple/apple_test.bzl | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 285575bd6..00acf8da3 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -105,7 +105,6 @@ def apple_test_extra_attrs(): "sanitizer_runtime_enabled": attrs.option(attrs.bool(), default = None), "stripped": attrs.bool(default = False), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), - "use_m1_simulator": 
attrs.bool(default = False), "_apple_toolchain": get_apple_toolchain_attr(), "_ios_booted_simulator": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:ios_booted_simulator", providers = [LocalResourceInfo]), "_ios_unbooted_simulator": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:ios_unbooted_simulator", providers = [LocalResourceInfo]), diff --git a/prelude/apple/apple_test.bzl b/prelude/apple/apple_test.bzl index 251283a00..74119f747 100644 --- a/prelude/apple/apple_test.bzl +++ b/prelude/apple/apple_test.bzl @@ -206,10 +206,10 @@ def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_ else: # @oss-disable: requires_ios_booted_simulator = ctx.attrs.test_host_app != None or ctx.attrs.ui_test_target_app != None - # @oss-disable: remote_execution_properties = ios_test_re_capabilities(use_unbooted_simulator = not requires_ios_booted_simulator, use_m1_simulator = ctx.attrs.use_m1_simulator) + # @oss-disable: remote_execution_properties = ios_test_re_capabilities(use_unbooted_simulator = not requires_ios_booted_simulator) remote_execution_properties = None # @oss-enable - # @oss-disable: remote_execution_use_case = apple_test_re_use_case(macos_test = sdk_name == MacOSXSdkMetadata.name, use_m1_simulator = ctx.attrs.use_m1_simulator) + # @oss-disable: remote_execution_use_case = apple_test_re_use_case(macos_test = sdk_name == MacOSXSdkMetadata.name) remote_execution_use_case = None # @oss-enable local_enabled = remote_execution_use_case == None From 6ab241ae1f8f35c6505d90c7188720302fba7fda Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Wed, 20 Mar 2024 09:41:52 -0700 Subject: [PATCH 0498/1133] add package_name Summary: Add support for the `-package-name` flag: https://github.com/apple/swift-evolution/blob/main/proposals/0386-package-access-modifier.md Reviewed By: chatura-atapattu, drodriguez Differential Revision: D55091791 
fbshipit-source-id: 8a23f2c0638cda1b6a5e7ff73ed7d045bc0a973b --- prelude/apple/apple_rules_impl.bzl | 2 ++ prelude/apple/apple_rules_impl_utility.bzl | 1 + prelude/apple/swift/swift_compilation.bzl | 6 ++++++ 3 files changed, 9 insertions(+) diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index a6db6e97e..8298fbba2 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -92,6 +92,7 @@ def _apple_binary_extra_attrs(): "sanitizer_runtime_enabled": attrs.option(attrs.bool(), default = None), "stripped": attrs.option(attrs.bool(), default = None), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), + "swift_package_name": attrs.option(attrs.string(), default = None), "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), "_apple_xctoolchain": get_apple_xctoolchain_attr(), @@ -118,6 +119,7 @@ def _apple_library_extra_attrs(): "supports_header_symlink_subtarget": attrs.bool(default = False), "supports_shlib_interfaces": attrs.bool(default = True), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = "wmo"), + "swift_package_name": attrs.option(attrs.string(), default = None), "use_archive": attrs.option(attrs.bool(), default = None), "_apple_toolchain": _APPLE_TOOLCHAIN_ATTR, "_apple_tools": attrs.exec_dep(default = "prelude//apple/tools:apple-tools", providers = [AppleToolsInfo]), diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 00acf8da3..6a03ae599 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -105,6 +105,7 @@ def apple_test_extra_attrs(): "sanitizer_runtime_enabled": attrs.option(attrs.bool(), default = None), "stripped": attrs.bool(default = False), "swift_compilation_mode": attrs.enum(SwiftCompilationMode.values(), default = 
"wmo"), + "swift_package_name": attrs.option(attrs.string(), default = None), "_apple_toolchain": get_apple_toolchain_attr(), "_ios_booted_simulator": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:ios_booted_simulator", providers = [LocalResourceInfo]), "_ios_unbooted_simulator": attrs.transition_dep(cfg = apple_simulators_transition, default = "fbsource//xplat/buck2/platform/apple:ios_unbooted_simulator", providers = [LocalResourceInfo]), diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index edce1d7fe..695aa78f8 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -489,6 +489,12 @@ def _get_shared_flags( "-parse-as-library", ]) + if ctx.attrs.swift_package_name != None: + cmd.add([ + "-package-name", + ctx.attrs.swift_package_name, + ]) + if uses_explicit_modules(ctx): cmd.add([ "-Xcc", From 4a193244430b1cf76584a11732c8ad562f9ffaf1 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Wed, 20 Mar 2024 10:26:39 -0700 Subject: [PATCH 0499/1133] ignore_artifacts() -> ignore_artifacts= Summary: Working on immutable `cmd_args`. 
[Context](https://fb.workplace.com/groups/buck2dev/posts/3575178566103523) Reviewed By: JakobDegen Differential Revision: D55092308 fbshipit-source-id: 15a9e1b15f7169ab39af454d698596e473b91d6d --- prelude/cxx/argsfiles.bzl | 2 +- prelude/cxx/cxx_link_utility.bzl | 9 ++++----- prelude/cxx/dist_lto/dist_lto.bzl | 3 +-- prelude/cxx/headers.bzl | 2 +- 4 files changed, 7 insertions(+), 9 deletions(-) diff --git a/prelude/cxx/argsfiles.bzl b/prelude/cxx/argsfiles.bzl index 81dbdfeea..398a24c51 100644 --- a/prelude/cxx/argsfiles.bzl +++ b/prelude/cxx/argsfiles.bzl @@ -35,7 +35,7 @@ def get_argsfiles_output(ctx: AnalysisContext, argsfile_by_ext: dict[str, Compil dependent_outputs = [] for _, argsfile in argsfile_by_ext.items(): argsfiles.append(argsfile.file) - argsfile_names.add(cmd_args(argsfile.file).ignore_artifacts()) + argsfile_names.add(cmd_args(argsfile.file, ignore_artifacts = True)) dependent_outputs.extend(argsfile.input_args) argsfiles_summary = ctx.actions.write(summary_name, argsfile_names) diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index 87b0de947..320339cd5 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -178,7 +178,8 @@ def cxx_sanitizer_runtime_arguments( fail("C++ sanitizer runtime enabled but there are no runtime files") if linker_info.type == "darwin": - runtime_rpath = cmd_args() + # ignore_artifacts as the runtime directory is not required at _link_ time + runtime_rpath = cmd_args(ignore_artifacts = True) runtime_files = linker_info.sanitizer_runtime_files for runtime_shared_lib in runtime_files: # Rpath-relative dylibs have an install name of `@rpath/libName.dylib`, @@ -191,8 +192,6 @@ def cxx_sanitizer_runtime_arguments( runtime_shared_lib_rpath = cmd_args(runtime_shared_lib_dir, format = "-Wl,-rpath,@executable_path/{}").relative_to(output, parent = 1) runtime_rpath.add(runtime_shared_lib_rpath) - # Ignore_artifacts() as the runtime directory is not required at _link_ 
time - runtime_rpath = runtime_rpath.ignore_artifacts() return CxxSanitizerRuntimeArguments( extra_link_args = [ runtime_rpath, @@ -245,8 +244,8 @@ def executable_shared_lib_arguments( runtime_files.append(shared_libs_symlink_tree) rpath_reference = get_rpath_origin(linker_type) - # We ignore_artifacts() here since we don't want the symlink tree to actually be there for the link. - rpath_arg = cmd_args(shared_libs_symlink_tree, format = "-Wl,-rpath,{}/{{}}".format(rpath_reference)).relative_to(output, parent = 1).ignore_artifacts() + # We ignore_artifacts here since we don't want the symlink tree to actually be there for the link. + rpath_arg = cmd_args(shared_libs_symlink_tree, format = "-Wl,-rpath,{}/{{}}".format(rpath_reference), ignore_artifacts = True).relative_to(output, parent = 1) extra_link_args.append(rpath_arg) return ExecutableSharedLibArguments( diff --git a/prelude/cxx/dist_lto/dist_lto.bzl b/prelude/cxx/dist_lto/dist_lto.bzl index 1c49f435a..8a50ad707 100644 --- a/prelude/cxx/dist_lto/dist_lto.bzl +++ b/prelude/cxx/dist_lto/dist_lto.bzl @@ -366,8 +366,7 @@ def cxx_dist_link( index_cmd = index_cmd_parts.link_cmd index_cmd.add(cmd_args(index_argfile, format = "@{}")) - output_as_string = cmd_args(output) - output_as_string.ignore_artifacts() + output_as_string = cmd_args(output, ignore_artifacts = True) index_cmd.add("-o", output_as_string) index_cmd.add(cmd_args(index_file_out.as_output(), format = "-Wl,--thinlto-index-only={}")) index_cmd.add("-Wl,--thinlto-emit-imports-files") diff --git a/prelude/cxx/headers.bzl b/prelude/cxx/headers.bzl index 21faa8741..406a8b56d 100644 --- a/prelude/cxx/headers.bzl +++ b/prelude/cxx/headers.bzl @@ -353,7 +353,7 @@ def _mk_hmap(ctx: AnalysisContext, name: str, headers: dict[str, (Artifact, str) header_args.add(n) # We don't care about the header contents -- just their names. 
- header_args.add(cmd_args(path, format = fmt).ignore_artifacts()) + header_args.add(cmd_args(path, format = fmt, ignore_artifacts = True)) hmap_args_file = ctx.actions.write(output.basename + ".argsfile", cmd_args(header_args, quote = "shell")) cmd.add(["--mappings-file", hmap_args_file]).hidden(header_args) From 136b540b1d9331f2522a51ea29ee14fc7cc23fd8 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Wed, 20 Mar 2024 10:26:39 -0700 Subject: [PATCH 0500/1133] ignore_artifacts() -> ignore_artifacts= Summary: Working on immutable `cmd_args`. [Context](https://fb.workplace.com/groups/buck2dev/posts/3575178566103523) Reviewed By: JakobDegen Differential Revision: D55092309 fbshipit-source-id: f5cbd802b5fcb750d98e3897bfc699d0546c63fe --- prelude/genrule.bzl | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/prelude/genrule.bzl b/prelude/genrule.bzl index d999f92be..1ccc48d52 100644 --- a/prelude/genrule.bzl +++ b/prelude/genrule.bzl @@ -173,7 +173,7 @@ def process_genrule( cmd = ctx.attrs.bash if ctx.attrs.bash != None else ctx.attrs.cmd if cmd == None: fail("One of `cmd` or `bash` should be set.") - cmd = cmd_args(cmd) + cmd = cmd_args(cmd, ignore_artifacts = _ignore_artifacts(ctx)) # For backwards compatibility with Buck1. if is_windows: @@ -183,9 +183,6 @@ def process_genrule( for extra_env_var in extra_env_vars: cmd.replace_regex(regex("\\$(%s\\b|\\{%s\\})" % (extra_env_var, extra_env_var)), "%%%s%%" % extra_env_var) - if _ignore_artifacts(ctx): - cmd = cmd.ignore_artifacts() - if type(ctx.attrs.srcs) == type([]): # FIXME: We should always use the short_path, but currently that is sometimes blank. # See fbcode//buck2/tests/targets/rules/genrule:genrule-dot-input for a test that exposes it. 
From a3141c8a6a6d2edf569712e4534d15bdcf138a53 Mon Sep 17 00:00:00 2001 From: Joshua Selbo Date: Wed, 20 Mar 2024 11:15:55 -0700 Subject: [PATCH 0501/1133] Support Java agents on java,robolectric_test Summary: You can already specify agents via `vm_args`, but this allows providing `prebuilt_jar` targets as agents without hardcoding the jar path. Reviewed By: IanChilds Differential Revision: D55128092 fbshipit-source-id: 7365526203b3ddb845bcd2507cf23c84a7838fef --- prelude/android/android.bzl | 1 + prelude/java/java.bzl | 1 + prelude/java/java_test.bzl | 2 ++ prelude/kotlin/kotlin.bzl | 1 + 4 files changed, 5 insertions(+) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 868eccd20..6f959e573 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -215,6 +215,7 @@ extra_attributes = { }, "robolectric_test": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), + "java_agents": attrs.list(attrs.source(), default = []), "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), "robolectric_runtime_dependencies": attrs.list(attrs.source(), default = []), diff --git a/prelude/java/java.bzl b/prelude/java/java.bzl index f3be03b5a..ac0739580 100644 --- a/prelude/java/java.bzl +++ b/prelude/java/java.bzl @@ -78,6 +78,7 @@ extra_attributes = { }, "java_test": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), + "java_agents": attrs.list(attrs.source(), default = []), "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), "test_class_names_file": attrs.option(attrs.source(), default = None), diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index 86ed15509..697a0dccc 100644 --- a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -60,6 +60,8 @@ 
def build_junit_test( cmd.extend(java_test_toolchain.java_custom_class_loader_vm_args) classpath.append(java_test_toolchain.java_custom_class_loader_library_jar) + cmd.append(cmd_args(ctx.attrs.java_agents, format = "-javaagent:{}")) + classpath.extend( [java_test_toolchain.test_runner_library_jar] + [ diff --git a/prelude/kotlin/kotlin.bzl b/prelude/kotlin/kotlin.bzl index 6f60384e9..ca4785cac 100644 --- a/prelude/kotlin/kotlin.bzl +++ b/prelude/kotlin/kotlin.bzl @@ -33,6 +33,7 @@ extra_attributes = { }, "kotlin_test": { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), + "java_agents": attrs.list(attrs.source(), default = []), "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), "test_class_names_file": attrs.option(attrs.source(), default = None), From bd4370af47ec7d226c5b2901ba3fe78586e75520 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 20 Mar 2024 11:24:19 -0700 Subject: [PATCH 0502/1133] Fix local builds on Windows Summary: We have two issues with symlinks resolution here, it fails only on local execution, RE works fine Reviewed By: echistyakov Differential Revision: D55132433 fbshipit-source-id: 9a116878b469804c6002d6bbec781715903dca7a --- prelude/go/tools/filter_srcs.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/prelude/go/tools/filter_srcs.py b/prelude/go/tools/filter_srcs.py index 68fc68bdb..94e87fbde 100755 --- a/prelude/go/tools/filter_srcs.py +++ b/prelude/go/tools/filter_srcs.py @@ -82,9 +82,8 @@ def main(argv): types.extend(["SFiles"]) for typ in types: for src in obj.get(typ, []): - src = Path(obj["Dir"]) / src - # Resolve the symlink - src = Path(os.path.normpath(str(src.parent / os.readlink(str(src))))) + # Absolute path to the source file. + src = (args.srcdir / src).resolve() # Relativize to the CWD. 
src = src.relative_to(os.getcwd()) print(src, file=args.output) From c27b80784af743b79363beb05f0bf70c1b0a60ae Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Wed, 20 Mar 2024 11:25:35 -0700 Subject: [PATCH 0503/1133] ignore_artifacts() -> ignore_artifacts= Summary: Working on immutable `cmd_args`. [Context](https://fb.workplace.com/groups/buck2dev/posts/3575178566103523) Reviewed By: JakobDegen Differential Revision: D55092615 fbshipit-source-id: 355a10c6baa4913bffa074b5c816467fef007c68 --- prelude/rust/build.bzl | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index ff3d3480f..d8b6cbf88 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -801,9 +801,8 @@ def dynamic_symlinked_dirs( # Pass the list of rlibs to transitive_dependency_symlinks.py through a file # because there can be a lot of them. This avoids running out of command # line length, particularly on Windows. - relative_path = lambda artifact: (cmd_args(artifact, delimiter = "") - .relative_to(transitive_dependency_dir.project("i")) - .ignore_artifacts()) + relative_path = lambda artifact: (cmd_args(artifact, delimiter = "", ignore_artifacts = True) + .relative_to(transitive_dependency_dir.project("i"))) artifacts_json = ctx.actions.write_json( ctx.actions.declare_output("{}-dyn.json".format(prefix)), [ From f3f70e95fd43f288a82e45d9927b913e955cd571 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Wed, 20 Mar 2024 11:25:35 -0700 Subject: [PATCH 0504/1133] ignore_artifacts() -> ignore_artifacts= Summary: Working on immutable `cmd_args`. 
[Context](https://fb.workplace.com/groups/buck2dev/posts/3575178566103523) Reviewed By: JakobDegen Differential Revision: D55092618 fbshipit-source-id: 6b9e2cec8ae26be748e60fae3443f1955568a24a --- prelude/python/make_py_package.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 2b4726477..dfb0d616c 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -369,7 +369,7 @@ def _pex_bootstrap_args( else: cmd.add(["--main-function", main[1]]) if symlink_tree_path != None: - cmd.add(cmd_args(["--modules-dir", symlink_tree_path]).ignore_artifacts()) + cmd.add(cmd_args(["--modules-dir", symlink_tree_path], ignore_artifacts = True)) if toolchain.main_runner: cmd.add(["--main-runner", toolchain.main_runner]) From 8234bd49c92d2ab2ef49379dbc049cbb4d78bd5d Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Wed, 20 Mar 2024 11:25:35 -0700 Subject: [PATCH 0505/1133] ignore_artifacts() -> ignore_artifacts= Summary: Working on immutable `cmd_args`. 
[Context](https://fb.workplace.com/groups/buck2dev/posts/3575178566103523) Reviewed By: JakobDegen Differential Revision: D55092617 fbshipit-source-id: 193483e93344ce789d824b117bb23408de740551 --- prelude/ocaml/ocaml.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/ocaml/ocaml.bzl b/prelude/ocaml/ocaml.bzl index 67f0e6d4f..ebd89eaa9 100644 --- a/prelude/ocaml/ocaml.bzl +++ b/prelude/ocaml/ocaml.bzl @@ -321,7 +321,7 @@ def _preprocess(ctx: AnalysisContext, srcs: list[Artifact], build_mode: BuildMod parser_sig = ctx.actions.declare_output(name + ".mli") result.extend((parser_sig, parser)) - cmd = cmd_args([menhir, "--fixed-exception", "-b", cmd_args(prefix).ignore_artifacts(), src]) + cmd = cmd_args([menhir, "--fixed-exception", "-b", cmd_args(prefix, ignore_artifacts = True), src]) cmd.hidden(parser.as_output(), parser_sig.as_output()) ctx.actions.run(cmd, category = "ocaml_yacc_" + build_mode.value, identifier = src.short_path) From caac35856927aea0ed1cf54047002e07c6f047e2 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Wed, 20 Mar 2024 11:25:35 -0700 Subject: [PATCH 0506/1133] ignore_artifacts() -> ignore_artifacts= Summary: Working on immutable `cmd_args`. 
[Context](https://fb.workplace.com/groups/buck2dev/posts/3575178566103523) Reviewed By: JakobDegen Differential Revision: D55092616 fbshipit-source-id: d4f1316c03cb7e913db64cda25d62c317143d08b --- prelude/erlang/erlang_application.bzl | 3 +-- prelude/erlang/erlang_build.bzl | 4 +--- prelude/erlang/erlang_utils.bzl | 4 ++-- 3 files changed, 4 insertions(+), 7 deletions(-) diff --git a/prelude/erlang/erlang_application.bzl b/prelude/erlang/erlang_application.bzl index 5c8606ed5..7180c1830 100644 --- a/prelude/erlang/erlang_application.bzl +++ b/prelude/erlang/erlang_application.bzl @@ -298,8 +298,7 @@ def _app_info_content( srcs: list[Artifact], output: Artifact) -> Artifact: """build an app_info.term file that contains the meta information for building the .app file""" - sources_args = convert(srcs) - sources_args.ignore_artifacts() + sources_args = convert(srcs, ignore_artifacts = True) data = { "applications": [ app[ErlangAppInfo].name diff --git a/prelude/erlang/erlang_build.bzl b/prelude/erlang/erlang_build.bzl index d35dec5f1..f616bf163 100644 --- a/prelude/erlang/erlang_build.bzl +++ b/prelude/erlang/erlang_build.bzl @@ -648,7 +648,7 @@ def _erlc_dependency_args( # A: the whole string would get passed as a single argument, as if it was quoted in CLI e.g. '-I include_path' # ...which the escript cannot parse, as it expects two separate arguments, e.g. 
'-I' 'include_path' - args = cmd_args([]) + args = cmd_args([], ignore_artifacts = True) # build -I options if path_in_arg: @@ -668,8 +668,6 @@ def _erlc_dependency_args( args.add("-pa") args.add(code_path) - args.ignore_artifacts() - return args def _get_erl_opts( diff --git a/prelude/erlang/erlang_utils.bzl b/prelude/erlang/erlang_utils.bzl index dcb20b3df..ac46e5b43 100644 --- a/prelude/erlang/erlang_utils.bzl +++ b/prelude/erlang/erlang_utils.bzl @@ -48,7 +48,7 @@ build_paths = struct( linktree = linktree, ) -def convert(data: typing.Any) -> cmd_args: +def convert(data: typing.Any, ignore_artifacts: bool = False) -> cmd_args: """ converts a lists/tuple/map data structure to a sub-term that can be embedded in another to_term_args or convert """ if type(data) == "list": @@ -64,7 +64,7 @@ def convert(data: typing.Any) -> cmd_args: elif type(data) == "bool": return convert_bool(data) - args = cmd_args([]) + args = cmd_args([], ignore_artifacts = ignore_artifacts) args.add(cmd_args(["\"", data, "\""], delimiter = "")) return args From 542c32ff8a070c379e62b0b996d6041680dec7aa Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Wed, 20 Mar 2024 13:18:40 -0700 Subject: [PATCH 0507/1133] Expose modified JavaLibraryInfo from robolectric_test Summary: We stopped exposing `JavaLibraryInfo` from `robolectric_test`, since that stops you depending upon it at compile-time (which is generally bad, if you need to depend upon something from within a `robolectric_test` you should extract it to a common `java_library`). However, we should just stop exposing the `compiling_deps`, since we might still want access to the rest of the provider. 
Reviewed By: navidqar Differential Revision: D55128985 fbshipit-source-id: 5b77c5d38f7ea09fb06109b10d4f1f76f8c3c8e6 --- prelude/android/robolectric_test.bzl | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/prelude/android/robolectric_test.bzl b/prelude/android/robolectric_test.bzl index 475a2c847..ca7125e76 100644 --- a/prelude/android/robolectric_test.bzl +++ b/prelude/android/robolectric_test.bzl @@ -9,6 +9,7 @@ load("@prelude//android:android_binary_resources_rules.bzl", "get_android_binary load("@prelude//android:android_library.bzl", "build_android_library") load("@prelude//android:android_providers.bzl", "merge_android_packageable_info") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//java:java_providers.bzl", "JavaLibraryInfo") load("@prelude//java:java_test.bzl", "build_junit_test") load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//utils:expect.bzl", "expect") @@ -102,5 +103,12 @@ def robolectric_test_impl(ctx: AnalysisContext) -> list[Provider]: if ctx.attrs.used_as_dependency_deprecated_do_not_use: providers.append(java_providers.java_library_info) + else: + java_library_without_compiling_deps = JavaLibraryInfo( + compiling_deps = None, + library_output = java_providers.java_library_info.library_output, + output_for_classpath_macro = java_providers.java_library_info.output_for_classpath_macro, + ) + providers.append(java_library_without_compiling_deps) return providers From 01b0b71177938667e0c186d80490f0ede3ab330a Mon Sep 17 00:00:00 2001 From: Chris Ricca Date: Wed, 20 Mar 2024 13:25:02 -0700 Subject: [PATCH 0508/1133] Back out "use `CodesignPath` in `codesign_bundle`" Summary: Backing this out Differential Revision: D55142927 fbshipit-source-id: 248dfd76501cf0b5bcab0996f86da19d9e053f1b --- prelude/apple/tools/bundling/main.py | 13 ++--- .../tools/code_signing/codesign_bundle.py | 49 ++++++++++--------- prelude/apple/tools/code_signing/main.py | 13 +---- 3 files changed, 32 
insertions(+), 43 deletions(-) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index c4652e97a..851ad0a6d 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -22,7 +22,6 @@ AdhocSigningContext, codesign_bundle, CodesignConfiguration, - CodesignedPath, signing_context_with_profile_selection, ) from apple.tools.code_signing.list_codesign_identities import ( @@ -385,18 +384,12 @@ def _main() -> None: codesign_on_copy_paths = [ i.dst for i in spec if i.codesign_on_copy ] + swift_stdlib_paths - - bundle_path = CodesignedPath(path=args.output, entitlements=args.entitlements) - codesigned_on_copy = [ - CodesignedPath(path=bundle_path.path / path, entitlements=None) - for path in codesign_on_copy_paths - ] - codesign_bundle( - bundle_path=bundle_path, + bundle_path=args.output, signing_context=signing_context, + entitlements_path=args.entitlements, platform=args.platform, - codesign_on_copy_paths=codesigned_on_copy, + codesign_on_copy_paths=codesign_on_copy_paths, codesign_args=args.codesign_args, codesign_tool=args.codesign_tool, codesign_configuration=args.codesign_configuration, diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 27e4b526a..20203606c 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -183,10 +183,11 @@ class CodesignConfiguration(str, Enum): def codesign_bundle( - bundle_path: CodesignedPath, + bundle_path: Path, signing_context: Union[AdhocSigningContext, SigningContextWithProfileSelection], + entitlements_path: Optional[Path], platform: ApplePlatform, - codesign_on_copy_paths: List[CodesignedPath], + codesign_on_copy_paths: List[str], codesign_args: List[str], codesign_tool: Optional[Path] = None, codesign_configuration: Optional[CodesignConfiguration] = None, @@ -202,13 +203,12 @@ def codesign_bundle( ) if 
selection_profile_context: - bundle_path_with_prepared_entitlements = ( - _prepare_entitlements_and_info_plist( - bundle_path=bundle_path, - platform=platform, - signing_context=selection_profile_context, - tmp_dir=tmp_dir, - ) + prepared_entitlements_path = _prepare_entitlements_and_info_plist( + bundle_path=bundle_path, + entitlements_path=entitlements_path, + platform=platform, + signing_context=selection_profile_context, + tmp_dir=tmp_dir, ) selected_identity_fingerprint = ( selection_profile_context.selected_profile_info.identity.fingerprint @@ -222,17 +222,23 @@ def codesign_bundle( raise AssertionError( "Expected no profile selection context in `AdhocSigningContext` when `selection_profile_context` is `None`." ) - bundle_path_with_prepared_entitlements = bundle_path + prepared_entitlements_path = entitlements_path selected_identity_fingerprint = signing_context.codesign_identity + root = CodesignedPath(path=bundle_path, entitlements=prepared_entitlements_path) + codesigned_on_copy = [ + CodesignedPath(path=bundle_path / path, entitlements=None) + for path in codesign_on_copy_paths + ] + if codesign_configuration is CodesignConfiguration.dryRun: if codesign_tool is None: raise RuntimeError( "Expected codesign tool not to be the default one when dry run codesigning is requested." 
) _dry_codesign_everything( - root=bundle_path_with_prepared_entitlements, - codesign_on_copy_paths=codesign_on_copy_paths, + root=root, + codesign_on_copy_paths=codesigned_on_copy, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_tool=codesign_tool, @@ -246,8 +252,8 @@ def codesign_bundle( ) _LOGGER.info(f"Fast adhoc signing enabled: {fast_adhoc_signing_enabled}") _codesign_everything( - root=bundle_path_with_prepared_entitlements, - codesign_on_copy_paths=codesign_on_copy_paths, + root=root, + codesign_on_copy_paths=codesigned_on_copy, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=DefaultCodesignCommandFactory(codesign_tool), @@ -258,15 +264,16 @@ def codesign_bundle( def _prepare_entitlements_and_info_plist( - bundle_path: CodesignedPath, + bundle_path: Path, + entitlements_path: Optional[Path], platform: ApplePlatform, signing_context: SigningContextWithProfileSelection, tmp_dir: str, -) -> CodesignedPath: +) -> Path: info_plist_metadata = signing_context.info_plist_metadata selected_profile = signing_context.selected_profile_info.profile prepared_entitlements_path = prepare_code_signing_entitlements( - bundle_path.entitlements, + entitlements_path, info_plist_metadata.bundle_id, selected_profile, tmp_dir, @@ -279,15 +286,13 @@ def _prepare_entitlements_and_info_plist( ) os.replace( prepared_info_plist_path, - bundle_path.path / signing_context.info_plist_destination, + bundle_path / signing_context.info_plist_destination, ) shutil.copy2( selected_profile.file_path, - bundle_path.path / platform.embedded_provisioning_profile_path(), - ) - return CodesignedPath( - path=bundle_path.path, entitlements=prepared_entitlements_path + bundle_path / platform.embedded_provisioning_profile_path(), ) + return prepared_entitlements_path async def _fast_read_provisioning_profiles_async( diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index 
dd0c37ca6..eba6fbf2a 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -15,7 +15,6 @@ from .codesign_bundle import ( AdhocSigningContext, codesign_bundle, - CodesignedPath, signing_context_with_profile_selection, ) from .list_codesign_identities import ListCodesignIdentities @@ -115,20 +114,12 @@ def _main() -> None: platform=args.platform, should_use_fast_provisioning_profile_parsing=args.fast_provisioning_profile_parsing, ) - - bundle_path = CodesignedPath( - path=args.bundle_path, entitlements=args.entitlements - ) - codesign_on_copy_paths = [ - CodesignedPath(path=bundle_path.path / path, entitlements=None) - for path in args.codesign_on_copy - ] - codesign_bundle( bundle_path=args.bundle_path, signing_context=signing_context, + entitlements_path=args.entitlements, platform=args.platform, - codesign_on_copy_paths=codesign_on_copy_paths, + codesign_on_copy_paths=args.codesign_on_copy or [], codesign_args=[], ) except CodeSignProvisioningError as e: From e74db8ca1c0844bd71995c9af7a74766346c20f5 Mon Sep 17 00:00:00 2001 From: Chris Ricca Date: Wed, 20 Mar 2024 13:25:02 -0700 Subject: [PATCH 0509/1133] Back out "simplify _spawn_codesign_process signature" Summary: Backing this out Differential Revision: D55142926 fbshipit-source-id: 0eecd0bd51720e2b0a41bdcf7c5b82560a354620 --- prelude/apple/tools/code_signing/codesign_bundle.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 20203606c..83b5b9e7a 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -529,15 +529,16 @@ def _spawn_process( def _spawn_codesign_process( - path: CodesignedPath, + path: Path, identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, + entitlements: Optional[Path], stack: ExitStack, codesign_args: 
List[str], ) -> ParallelProcess: command = codesign_command_factory.codesign_command( - path.path, identity_fingerprint, path.entitlements, codesign_args + path, identity_fingerprint, entitlements, codesign_args ) return _spawn_process(command=command, tmp_dir=tmp_dir, stack=stack) @@ -555,10 +556,11 @@ def _codesign_paths( with ExitStack() as stack: for path in paths: process = _spawn_codesign_process( - path=path, + path=path.path, identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, + entitlements=path.entitlements, stack=stack, codesign_args=codesign_args, ) From e025e235d07077329137bde9aa6745a306de0dda Mon Sep 17 00:00:00 2001 From: Chris Ricca Date: Wed, 20 Mar 2024 13:25:02 -0700 Subject: [PATCH 0510/1133] Back out "use `CodesignedPath` in `_dry_codesign_everything`" Summary: Backing this out Differential Revision: D55142923 fbshipit-source-id: ac969ec327d3a4e3800d5a9db5734e43b75f28e9 --- .../tools/code_signing/codesign_bundle.py | 39 ++++++++++--------- 1 file changed, 21 insertions(+), 18 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 83b5b9e7a..e1f2e31e3 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -225,23 +225,18 @@ def codesign_bundle( prepared_entitlements_path = entitlements_path selected_identity_fingerprint = signing_context.codesign_identity - root = CodesignedPath(path=bundle_path, entitlements=prepared_entitlements_path) - codesigned_on_copy = [ - CodesignedPath(path=bundle_path / path, entitlements=None) - for path in codesign_on_copy_paths - ] - if codesign_configuration is CodesignConfiguration.dryRun: if codesign_tool is None: raise RuntimeError( "Expected codesign tool not to be the default one when dry run codesigning is requested." 
) _dry_codesign_everything( - root=root, - codesign_on_copy_paths=codesigned_on_copy, + bundle_path=bundle_path, + codesign_on_copy_paths=codesign_on_copy_paths, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_tool=codesign_tool, + entitlements=prepared_entitlements_path, platform=platform, codesign_args=codesign_args, ) @@ -252,8 +247,13 @@ def codesign_bundle( ) _LOGGER.info(f"Fast adhoc signing enabled: {fast_adhoc_signing_enabled}") _codesign_everything( - root=root, - codesign_on_copy_paths=codesigned_on_copy, + root=CodesignedPath( + path=bundle_path, entitlements=prepared_entitlements_path + ), + codesign_on_copy_paths=[ + CodesignedPath(path=bundle_path / path, entitlements=None) + for path in codesign_on_copy_paths + ], identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=DefaultCodesignCommandFactory(codesign_tool), @@ -396,23 +396,28 @@ def _read_entitlements_file(path: Optional[Path]) -> Optional[Dict[str, Any]]: def _dry_codesign_everything( - root: CodesignedPath, - codesign_on_copy_paths: List[CodesignedPath], + bundle_path: Path, + codesign_on_copy_paths: List[str], identity_fingerprint: str, tmp_dir: str, codesign_tool: Path, + entitlements: Optional[Path], platform: ApplePlatform, codesign_args: List[str], ) -> None: codesign_command_factory = DryRunCodesignCommandFactory(codesign_tool) + codesign_on_copy_abs_paths = [bundle_path / path for path in codesign_on_copy_paths] codesign_on_copy_directory_paths = [ - p for p in codesign_on_copy_paths if p.path.is_dir() + p for p in codesign_on_copy_abs_paths if p.is_dir() ] # First sign codesign-on-copy directory paths _codesign_paths( - paths=codesign_on_copy_directory_paths, + paths=[ + CodesignedPath(path=p, entitlements=None) + for p in codesign_on_copy_directory_paths + ], identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, @@ -423,9 +428,7 @@ def 
_dry_codesign_everything( # Dry codesigning creates a .plist inside every directory it signs. # That approach doesn't work for files so those files are written into .plist for root bundle. codesign_on_copy_file_paths = [ - p.path.relative_to(root.path) - for p in codesign_on_copy_paths - if p.path.is_file() + p.relative_to(bundle_path) for p in codesign_on_copy_abs_paths if p.is_file() ] codesign_command_factory.set_codesign_on_copy_file_paths( codesign_on_copy_file_paths @@ -433,7 +436,7 @@ def _dry_codesign_everything( # Lastly sign whole bundle _codesign_paths( - paths=[root], + paths=[CodesignedPath(path=bundle_path, entitlements=entitlements)], identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, From 9ad0b73c3f0e772e287573d5ccededba9c49d573 Mon Sep 17 00:00:00 2001 From: Chris Ricca Date: Wed, 20 Mar 2024 13:25:02 -0700 Subject: [PATCH 0511/1133] Back out "use `CodesignedPath` in `_codesign_everything`" Summary: Backing this out Differential Revision: D55142925 fbshipit-source-id: 3653ea2ef774eed938a747bb02bca83acafc43c1 --- .../tools/code_signing/codesign_bundle.py | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index e1f2e31e3..9454434a1 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -247,16 +247,12 @@ def codesign_bundle( ) _LOGGER.info(f"Fast adhoc signing enabled: {fast_adhoc_signing_enabled}") _codesign_everything( - root=CodesignedPath( - path=bundle_path, entitlements=prepared_entitlements_path - ), - codesign_on_copy_paths=[ - CodesignedPath(path=bundle_path / path, entitlements=None) - for path in codesign_on_copy_paths - ], + bundle_path=bundle_path, + codesign_on_copy_paths=codesign_on_copy_paths, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, 
codesign_command_factory=DefaultCodesignCommandFactory(codesign_tool), + entitlements=prepared_entitlements_path, platform=platform, fast_adhoc_signing=fast_adhoc_signing_enabled, codesign_args=codesign_args, @@ -446,18 +442,22 @@ def _dry_codesign_everything( def _codesign_everything( - root: CodesignedPath, - codesign_on_copy_paths: List[CodesignedPath], + bundle_path: Path, + codesign_on_copy_paths: List[str], identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, + entitlements: Optional[Path], platform: ApplePlatform, fast_adhoc_signing: bool, codesign_args: List[str], ) -> None: # First sign codesign-on-copy paths codesign_on_copy_filtered_paths = _filter_out_fast_adhoc_paths( - paths=codesign_on_copy_paths, + paths=[ + CodesignedPath(path=bundle_path / path, entitlements=None) + for path in codesign_on_copy_paths + ], identity_fingerprint=identity_fingerprint, platform=platform, fast_adhoc_signing=fast_adhoc_signing, @@ -471,14 +471,14 @@ def _codesign_everything( codesign_args, ) # Lastly sign whole bundle - root_filtered_paths = _filter_out_fast_adhoc_paths( - paths=[root], + root_bundle_paths = _filter_out_fast_adhoc_paths( + paths=[CodesignedPath(path=bundle_path, entitlements=entitlements)], identity_fingerprint=identity_fingerprint, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) _codesign_paths( - root_filtered_paths, + root_bundle_paths, identity_fingerprint, tmp_dir, codesign_command_factory, From 9e1b4f6535b8b7d5df4e905414ef497e1e971341 Mon Sep 17 00:00:00 2001 From: Chris Ricca Date: Wed, 20 Mar 2024 13:25:02 -0700 Subject: [PATCH 0512/1133] Back out "introduce `CodesignedPath` and use in `_codesign_paths`" Summary: Backing this out Differential Revision: D55142924 fbshipit-source-id: 5b752986460a3cbc170f73db1f81bf6d883623b4 --- .../tools/code_signing/codesign_bundle.py | 46 ++++++++----------- 1 file changed, 18 insertions(+), 28 deletions(-) diff --git 
a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 9454434a1..a4145761c 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -56,18 +56,6 @@ _LOGGER: logging.Logger = logging.getLogger(__name__) -@dataclass -class CodesignedPath: - path: Path - """ - Path relative to bundle root which needs to be codesigned - """ - entitlements: Optional[Path] - """ - Path to entitlements to be used when codesigning, relative to buck project - """ - - def _select_provisioning_profile( info_plist_metadata: InfoPlistMetadata, provisioning_profiles_dir: Path, @@ -410,13 +398,11 @@ def _dry_codesign_everything( # First sign codesign-on-copy directory paths _codesign_paths( - paths=[ - CodesignedPath(path=p, entitlements=None) - for p in codesign_on_copy_directory_paths - ], + paths=codesign_on_copy_directory_paths, identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, + entitlements=None, platform=platform, codesign_args=codesign_args, ) @@ -432,10 +418,11 @@ def _dry_codesign_everything( # Lastly sign whole bundle _codesign_paths( - paths=[CodesignedPath(path=bundle_path, entitlements=entitlements)], + paths=[bundle_path], identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, + entitlements=entitlements, platform=platform, codesign_args=codesign_args, ) @@ -454,11 +441,9 @@ def _codesign_everything( ) -> None: # First sign codesign-on-copy paths codesign_on_copy_filtered_paths = _filter_out_fast_adhoc_paths( - paths=[ - CodesignedPath(path=bundle_path / path, entitlements=None) - for path in codesign_on_copy_paths - ], + paths=[bundle_path / path for path in codesign_on_copy_paths], identity_fingerprint=identity_fingerprint, + entitlements=entitlements, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) @@ -467,13 +452,15 @@ def 
_codesign_everything( identity_fingerprint, tmp_dir, codesign_command_factory, + None, platform, codesign_args, ) # Lastly sign whole bundle root_bundle_paths = _filter_out_fast_adhoc_paths( - paths=[CodesignedPath(path=bundle_path, entitlements=entitlements)], + paths=[bundle_path], identity_fingerprint=identity_fingerprint, + entitlements=entitlements, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) @@ -482,6 +469,7 @@ def _codesign_everything( identity_fingerprint, tmp_dir, codesign_command_factory, + entitlements, platform, codesign_args, ) @@ -547,10 +535,11 @@ def _spawn_codesign_process( def _codesign_paths( - paths: List[CodesignedPath], + paths: List[Path], identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, + entitlements: Optional[Path], platform: ApplePlatform, codesign_args: List[str], ) -> None: @@ -559,11 +548,11 @@ def _codesign_paths( with ExitStack() as stack: for path in paths: process = _spawn_codesign_process( - path=path.path, + path=path, identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=path.entitlements, + entitlements=entitlements, stack=stack, codesign_args=codesign_args, ) @@ -575,11 +564,12 @@ def _codesign_paths( def _filter_out_fast_adhoc_paths( - paths: List[CodesignedPath], + paths: List[Path], identity_fingerprint: str, + entitlements: Optional[Path], platform: ApplePlatform, fast_adhoc_signing: bool, -) -> List[CodesignedPath]: +) -> List[Path]: if not fast_adhoc_signing: return paths # TODO(T149863217): Make skip checks run in parallel, they're usually fast (~15ms) @@ -588,6 +578,6 @@ def _filter_out_fast_adhoc_paths( p for p in paths if not should_skip_adhoc_signing_path( - p.path, identity_fingerprint, p.entitlements, platform + p, identity_fingerprint, entitlements, platform ) ] From 063ff0917613d3f21f26607d3eee4dcbe7d80d11 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 13:32:37 
-0700 Subject: [PATCH 0513/1133] Inline _build_library_artifacts Summary: This piece of code currently instantiates actions for all 3 of Emit("link"), Emit("metadata-full"), and Emit("metadata-fast") for all linkage languages and all link styles. Many of the cases being instantiated are redundant and their artifacts may not even make it into any Provider. There is a comment originating from D34773346 that says this doesn't hurt, but it does: it means all those redundant actions must be assigned distinct action identifiers. https://www.internalfb.com/code/fbsource/[f6841574cf9bcc03b6659bd62358e25f33acda4b][blame]/fbcode/buck2/prelude/rust/build.bzl?lines=1015%2C1276 This results in horrendous identifiers like the following, which appear in Buck console output. - `rustc rlib-static-static-metadata/foo-metadata rlib,static,metadata [diag]` - `rustc proc-macro-pic-static_pic-link/foo-link proc-macro,pic,link [diag]` - `rustc staticlib-pic-static_pic-link/foo-link staticlib,pic,link [diag]` - `rustc rlib-static-static-metadata/foo-metadata rlib,static,metadata [clippy]` With some care about which actions we instantiate, these same actions can be given more immediately comprehensible names: - `rustc check` - `rustc proc-macro` - `rustc staticlib [pic]` - `clippy` The identifier changes are made elsewhere in this stack. For now this diff just moves the body of `_build_library_artifacts` into `rust_library_impl` because it will need to get intertwined with some other stuff going on in this function (`rust_param_artifact`, `native_param_artifact`, `check_params`) to instantiate only the necessary set of actions. 
Reviewed By: JakobDegen Differential Revision: D55113083 fbshipit-source-id: 423db3e0209957547620c83de4c525aa0c98a012 --- prelude/rust/rust_library.bzl | 53 +++++++++++++++-------------------- 1 file changed, 22 insertions(+), 31 deletions(-) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 85f498d3e..2931250d2 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -259,7 +259,28 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: # distinct kinds of build we actually need to deal with. param_lang, lang_style_param = _build_params_for_styles(ctx, compile_ctx) - artifacts = _build_library_artifacts(ctx, compile_ctx, param_lang.keys()) + # Generate the actions to build various output artifacts. Given the set of + # parameters we need, populate a mapping to the linkable and metadata + # artifacts. + artifacts = {} + for params in param_lang.keys(): + # Separate actions for each emit type. + # + # In principle we don't need metadata for C++-only artifacts, but I + # don't think it hurts. + link, meta_full, meta_fast = rust_compile_multi( + ctx = ctx, + compile_ctx = compile_ctx, + emits = [Emit("link"), Emit("metadata-full"), Emit("metadata-fast")], + params = params, + default_roots = ["lib.rs"], + ) + + artifacts[params] = { + MetadataKind("link"): link, + MetadataKind("full"): meta_full, + MetadataKind("fast"): meta_fast, + } rust_param_artifact = {} native_param_artifact = {} @@ -454,36 +475,6 @@ def _build_params_for_styles( return (param_lang, style_param) -def _build_library_artifacts( - ctx: AnalysisContext, - compile_ctx: CompileContext, - params: list[BuildParams]) -> dict[BuildParams, dict[MetadataKind, RustcOutput]]: - """ - Generate the actual actions to build various output artifacts. Given the set - parameters we need, return a mapping to the linkable and metadata artifacts. 
- """ - param_artifact = {} - - for params in params: - # Separate actions for each emit type - # - # In principle we don't really need metadata for C++-only artifacts, but I don't think it hurts - link, meta_full, meta_fast = rust_compile_multi( - ctx = ctx, - compile_ctx = compile_ctx, - emits = [Emit("link"), Emit("metadata-full"), Emit("metadata-fast")], - params = params, - default_roots = ["lib.rs"], - ) - - param_artifact[params] = { - MetadataKind("link"): link, - MetadataKind("full"): meta_full, - MetadataKind("fast"): meta_fast, - } - - return param_artifact - def _handle_rust_artifact( ctx: AnalysisContext, dep_ctx: DepCollectionContext, From 03025a537c12396db4a1722fccf1e59f1926e83b Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 13:33:18 -0700 Subject: [PATCH 0514/1133] Rearrange the construction of rust vs native artifact maps Summary: Context: see summary of D55113083. There is no behavior change intended in this diff; this is just a simplification enabled by the previous diff that enables several actions to be eliminated later in D55113080 + D55113078. Reviewed By: JakobDegen Differential Revision: D55113084 fbshipit-source-id: 58ffd23ee3016898595779e63dda88ea70cb7df8 --- prelude/rust/rust_library.bzl | 33 ++++++++++++++------------------- 1 file changed, 14 insertions(+), 19 deletions(-) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 2931250d2..d1c19c8f2 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -260,10 +260,11 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: param_lang, lang_style_param = _build_params_for_styles(ctx, compile_ctx) # Generate the actions to build various output artifacts. Given the set of - # parameters we need, populate a mapping to the linkable and metadata - # artifacts. - artifacts = {} - for params in param_lang.keys(): + # parameters we need, populate maps to the linkable and metadata + # artifacts by linkage lang. 
+ rust_param_artifact = {} + native_param_artifact = {} + for params, langs in param_lang.items(): # Separate actions for each emit type. # # In principle we don't need metadata for C++-only artifacts, but I @@ -276,26 +277,20 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: default_roots = ["lib.rs"], ) - artifacts[params] = { - MetadataKind("link"): link, - MetadataKind("full"): meta_full, - MetadataKind("fast"): meta_fast, - } - - rust_param_artifact = {} - native_param_artifact = {} - - for params, outputs in artifacts.items(): - if LinkageLang("rust") in param_lang[params]: - rust_param_artifact[params] = outputs - if LinkageLang("native") in param_lang[params] or LinkageLang("native-unbundled") in param_lang[params]: - native_param_artifact[params] = outputs[MetadataKind("link")] + if LinkageLang("rust") in langs: + rust_param_artifact[params] = { + MetadataKind("link"): link, + MetadataKind("full"): meta_full, + MetadataKind("fast"): meta_fast, + } + if LinkageLang("native") in langs or LinkageLang("native-unbundled") in langs: + native_param_artifact[params] = link # Grab the artifacts to use for the check subtargets. Picking a good # `LibOutputStyle` ensures that the subtarget shares work with the main # build if possible check_params = lang_style_param[(LinkageLang("rust"), LibOutputStyle("archive"))] - check_artifacts = artifacts[check_params] + check_artifacts = rust_param_artifact[check_params] # For doctests, we need to know two things to know how to link them. The # first is that we need a link strategy, which affects how deps of this From e968824925f3bb7937f4d327a17258b773e278f9 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 13:54:38 -0700 Subject: [PATCH 0515/1133] Skip instantiating metadata actions for native-only params Summary: Context: see summary of D55113083. 
By avoiding some unused instantiations of actions in the Emit("metadata-full") and Emit("metadata-fast") case, the instantiations we're keeping will get to have nice and short action identifiers such as `rustc check`. This diff does not yet change any action identifiers. That happens in D55113079 but requires this diff. Reviewed By: JakobDegen Differential Revision: D55113080 fbshipit-source-id: 1f289569a077c05f0eb070e768ff11b2d7944c32 --- prelude/rust/rust_library.bzl | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index d1c19c8f2..31120970c 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -265,26 +265,29 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: rust_param_artifact = {} native_param_artifact = {} for params, langs in param_lang.items(): - # Separate actions for each emit type. - # - # In principle we don't need metadata for C++-only artifacts, but I - # don't think it hurts. - link, meta_full, meta_fast = rust_compile_multi( + link_rust = LinkageLang("rust") in langs + link_native = LinkageLang("native") in langs or LinkageLang("native-unbundled") in langs + + # We don't need metadata to go with C++-only artifacts. 
+ emits = [Emit("link")] + \ + ([Emit("metadata-full"), Emit("metadata-fast")] if link_rust else []) + + outputs = rust_compile_multi( ctx = ctx, compile_ctx = compile_ctx, - emits = [Emit("link"), Emit("metadata-full"), Emit("metadata-fast")], + emits = emits, params = params, default_roots = ["lib.rs"], ) - if LinkageLang("rust") in langs: + if link_rust: rust_param_artifact[params] = { - MetadataKind("link"): link, - MetadataKind("full"): meta_full, - MetadataKind("fast"): meta_fast, + MetadataKind("link"): outputs[0], + MetadataKind("full"): outputs[1], + MetadataKind("fast"): outputs[2], } - if LinkageLang("native") in langs or LinkageLang("native-unbundled") in langs: - native_param_artifact[params] = link + if link_native: + native_param_artifact[params] = outputs[0] # Grab the artifacts to use for the check subtargets. Picking a good # `LibOutputStyle` ensures that the subtarget shares work with the main From 072996fc98aa6d25293e9b982b884d6464481386 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 13:55:54 -0700 Subject: [PATCH 0516/1133] Instantiate clippy actions for only a single emit and single params Summary: Context: see summary of D55113083. There is no need to have different clippy actions for various link strategies of the same Rust target. Clippy only looks at metadata; it does not run a linker. 
Reviewed By: JakobDegen Differential Revision: D55113081 fbshipit-source-id: 504ea1a1d9474155d419a796c5627a19b9ab6a36 --- prelude/rust/build.bzl | 7 +++++-- prelude/rust/rust_binary.bzl | 1 + prelude/rust/rust_library.bzl | 13 +++++++------ 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index d8b6cbf88..71c04fb7e 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -426,6 +426,7 @@ def rust_compile_multi( predeclared_outputs: dict[Emit, Artifact] = {}, extra_flags: list[[str, ResolvedStringWithMacros]] = [], is_binary: bool = False, + designated_clippy: bool = False, allow_cache_upload: bool = False, rust_cxx_link_group_info: [RustCxxLinkGroupInfo, None] = None) -> list[RustcOutput]: outputs = [] @@ -441,6 +442,7 @@ def rust_compile_multi( predeclared_outputs = predeclared_outputs, extra_flags = extra_flags, is_binary = is_binary, + designated_clippy = designated_clippy and emit == Emit("metadata-full"), allow_cache_upload = allow_cache_upload, rust_cxx_link_group_info = rust_cxx_link_group_info, ) @@ -461,6 +463,7 @@ def rust_compile( predeclared_outputs: dict[Emit, Artifact] = {}, extra_flags: list[[str, ResolvedStringWithMacros]] = [], is_binary: bool = False, + designated_clippy: bool = False, allow_cache_upload: bool = False, rust_cxx_link_group_info: [RustCxxLinkGroupInfo, None] = None) -> RustcOutput: exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" @@ -575,8 +578,8 @@ def rust_compile( env = emit_op.env, ) - # Add clippy diagnostic targets for check builds - if common_args.is_check: + # Add clippy diagnostic targets next to the designated check build + if designated_clippy: # We don't really need the outputs from this build, just to keep the artifact accounting straight clippy_emit_op = _rustc_emit( ctx = ctx, diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 00a3e2b3e..31970e961 100644 --- a/prelude/rust/rust_binary.bzl +++ 
b/prelude/rust/rust_binary.bzl @@ -307,6 +307,7 @@ def _rust_binary_common( params = strategy_param[specified_link_strategy], default_roots = default_roots, extra_flags = extra_flags, + designated_clippy = True, ) providers = [RustcExtraOutputsInfo( diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 31120970c..482d2fb1b 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -259,6 +259,11 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: # distinct kinds of build we actually need to deal with. param_lang, lang_style_param = _build_params_for_styles(ctx, compile_ctx) + # Grab the artifacts to use for the check subtargets. Picking a good + # `LibOutputStyle` ensures that the subtarget shares work with the main + # build if possible + check_params = lang_style_param[(LinkageLang("rust"), LibOutputStyle("archive"))] + # Generate the actions to build various output artifacts. Given the set of # parameters we need, populate maps to the linkable and metadata # artifacts by linkage lang. @@ -278,6 +283,7 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: emits = emits, params = params, default_roots = ["lib.rs"], + designated_clippy = params == check_params, ) if link_rust: @@ -289,12 +295,6 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: if link_native: native_param_artifact[params] = outputs[0] - # Grab the artifacts to use for the check subtargets. Picking a good - # `LibOutputStyle` ensures that the subtarget shares work with the main - # build if possible - check_params = lang_style_param[(LinkageLang("rust"), LibOutputStyle("archive"))] - check_artifacts = rust_param_artifact[check_params] - # For doctests, we need to know two things to know how to link them. 
The # first is that we need a link strategy, which affects how deps of this # target are handled @@ -385,6 +385,7 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: param_artifact = native_param_artifact, ) + check_artifacts = rust_param_artifact[check_params] providers += _default_providers( lang_style_param = lang_style_param, param_artifact = rust_param_artifact, From 71dfa92646f2c7aeba733f47ad0dbe3b1b6d971d Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 13:58:51 -0700 Subject: [PATCH 0517/1133] Shorten the action identifier of clippy actions Summary: Context: see summary of D55113083. As of the previous diff, there is only a single Clippy action for each Rust target, and we can just call it `clippy`. Previously these actions were called something like `bin-pic-shared-metadata-full/foo-metadata-full bin,pic,metadata-full [clippy]` or `rustc rlib-static-static-metadata/foo-metadata rlib,static,metadata [clippy]`. Reviewed By: JakobDegen Differential Revision: D55113082 fbshipit-source-id: 9ec5a32c9468aaf1f6b274bd744bd18ed24cf136 --- prelude/rust/build.bzl | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 71c04fb7e..9e6db04cb 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -569,10 +569,10 @@ def rust_compile( compile_ctx = compile_ctx, prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), rustc_cmd = cmd_args(toolchain_info.compiler, rustc_cmd, emit_op.args), - diag = "diag", required_outputs = [emit_op.output], short_cmd = common_args.short_cmd, is_binary = is_binary, + is_clippy = False, allow_cache_upload = allow_cache_upload, crate_map = common_args.crate_map, env = emit_op.env, @@ -608,10 +608,10 @@ def rust_compile( # Lints go first to allow other args to override them. 
rustc_cmd = cmd_args(compile_ctx.clippy_wrapper, clippy_lints, rustc_cmd, clippy_emit_op.args), env = clippy_env, - diag = "clippy", required_outputs = [clippy_emit_op.output], short_cmd = common_args.short_cmd, is_binary = False, + is_clippy = True, allow_cache_upload = False, crate_map = common_args.crate_map, ) @@ -1214,10 +1214,10 @@ def _rustc_invoke( compile_ctx: CompileContext, prefix: str, rustc_cmd: cmd_args, - diag: str, required_outputs: list[Artifact], short_cmd: str, is_binary: bool, + is_clippy: bool, allow_cache_upload: bool, crate_map: list[(CrateName, Label)], env: dict[str, str | ResolvedStringWithMacros | Artifact]) -> (Artifact, Artifact, [Artifact, None]): @@ -1232,6 +1232,7 @@ def _rustc_invoke( path_env.update(more_path_env) # Save diagnostic outputs + diag = "clippy" if is_clippy else "diag" json_diag = ctx.actions.declare_output("{}-{}.json".format(prefix, diag)) txt_diag = ctx.actions.declare_output("{}-{}.txt".format(prefix, diag)) @@ -1275,12 +1276,18 @@ def _rustc_invoke( elif is_binary and link_cxx_binary_locally(ctx): prefer_local = True - identifier = "{} {} [{}]".format(prefix, short_cmd, diag) + if is_clippy: + category = "clippy" + identifier = None + else: + category = "rustc" + identifier = "{} {} [{}]".format(prefix, short_cmd, diag) + ctx.actions.run( compile_cmd, local_only = local_only, prefer_local = prefer_local, - category = "rustc", + category = category, identifier = identifier, no_outputs_cleanup = incremental_enabled, allow_cache_upload = allow_cache_upload, From d0346fb721cbdb0cfbf9f547ff5535aadb20a746 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 14:01:31 -0700 Subject: [PATCH 0518/1133] Instantiate metadata-fast actions only once Summary: Context: see summary of D55113083. Buck can produce Rust crate metadata in either "full" or "fast" fashion. "Full" metadata does `-Zno-codegen --emit=link=libfoo.rlib`, while "fast" metadata does `--emit=metadata=libfoo.rmeta`. 
The "full" metadata is important for pipelined builds: producing codegen for a crate needs "full" metadata of that crate's dependencies, but not codegen of those dependencies. Meanwhile the "fast" metadata is unusable for pipelined builds, but can be produced 12.5% faster than "full". For "full" metadata, link strategy is relevant even though metadata itself does not involve doing any linking. It matters because it affects which codegen builds can reuse artifacts produced by which metadata builds. If someone is alternating back and forth between `buck2 build :foo[check]` and `buck2 test :foo`, we prefer for both those commands to be reusing the same metadata artifacts for all of foo's transitive Rust dependencies, regardless of the preferred_linkage set by foo. But for "fast" metadata, link strategy never comes into play. "Fast" metadata can never be reused by a pipelined build. There should never be a reason to distinguish multiple flavors of "fast" metadata for the same crate. Reviewed By: JakobDegen Differential Revision: D55113078 fbshipit-source-id: 26efb42c026d373b0d31cfbef2738992bc618154 --- prelude/rust/build.bzl | 36 ------------------------------ prelude/rust/rust_binary.bzl | 14 +++++++++--- prelude/rust/rust_library.bzl | 41 ++++++++++++++++++++--------------- 3 files changed, 35 insertions(+), 56 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 9e6db04cb..0d5263e83 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -414,42 +414,6 @@ def generate_rustdoc_test( argfile_name = "{}.args".format(common_args.subdir), ) -# Generate multiple compile artifacts so that distinct sets of artifacts can be -# generated concurrently. 
-def rust_compile_multi( - ctx: AnalysisContext, - compile_ctx: CompileContext, - emits: list[Emit], - params: BuildParams, - default_roots: list[str], - extra_link_args: list[typing.Any] = [], - predeclared_outputs: dict[Emit, Artifact] = {}, - extra_flags: list[[str, ResolvedStringWithMacros]] = [], - is_binary: bool = False, - designated_clippy: bool = False, - allow_cache_upload: bool = False, - rust_cxx_link_group_info: [RustCxxLinkGroupInfo, None] = None) -> list[RustcOutput]: - outputs = [] - - for emit in emits: - outs = rust_compile( - ctx = ctx, - compile_ctx = compile_ctx, - emit = emit, - params = params, - default_roots = default_roots, - extra_link_args = extra_link_args, - predeclared_outputs = predeclared_outputs, - extra_flags = extra_flags, - is_binary = is_binary, - designated_clippy = designated_clippy and emit == Emit("metadata-full"), - allow_cache_upload = allow_cache_upload, - rust_cxx_link_group_info = rust_cxx_link_group_info, - ) - outputs.append(outs) - - return outputs - # Generate a compilation action. A single instance of rustc can emit # numerous output artifacts, so return an artifact object for each of # them. diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 31970e961..6f4e73251 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -56,7 +56,6 @@ load( "compile_context", "generate_rustdoc", "rust_compile", - "rust_compile_multi", ) load( ":build_params.bzl", @@ -300,16 +299,25 @@ def _rust_binary_common( # FIXME(JakobDegen): It's a bit weird that this uses the specified link # strategy but rustdoc and expand use the default link strategy. Figure out # what's going on there. 
- meta_full, meta_fast = rust_compile_multi( + meta_full = rust_compile( ctx = ctx, compile_ctx = compile_ctx, - emits = [Emit("metadata-full"), Emit("metadata-fast")], + emit = Emit("metadata-full"), params = strategy_param[specified_link_strategy], default_roots = default_roots, extra_flags = extra_flags, designated_clippy = True, ) + meta_fast = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("metadata-fast"), + params = strategy_param[LinkStrategy("static")], + default_roots = default_roots, + extra_flags = extra_flags, + ) + providers = [RustcExtraOutputsInfo( metadata_full = meta_full, metadata_fast = meta_fast, diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 482d2fb1b..793b552ad 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -74,7 +74,6 @@ load( "generate_rustdoc_coverage", "generate_rustdoc_test", "rust_compile", - "rust_compile_multi", ) load( ":build_params.bzl", @@ -264,36 +263,44 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: # build if possible check_params = lang_style_param[(LinkageLang("rust"), LibOutputStyle("archive"))] + meta_fast = rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("metadata-fast"), + params = check_params, + default_roots = ["lib.rs"], + ) + # Generate the actions to build various output artifacts. Given the set of # parameters we need, populate maps to the linkable and metadata # artifacts by linkage lang. rust_param_artifact = {} native_param_artifact = {} for params, langs in param_lang.items(): - link_rust = LinkageLang("rust") in langs - link_native = LinkageLang("native") in langs or LinkageLang("native-unbundled") in langs - - # We don't need metadata to go with C++-only artifacts. 
- emits = [Emit("link")] + \ - ([Emit("metadata-full"), Emit("metadata-fast")] if link_rust else []) - - outputs = rust_compile_multi( + link = rust_compile( ctx = ctx, compile_ctx = compile_ctx, - emits = emits, + emit = Emit("link"), params = params, default_roots = ["lib.rs"], - designated_clippy = params == check_params, ) - if link_rust: + if LinkageLang("rust") in langs: rust_param_artifact[params] = { - MetadataKind("link"): outputs[0], - MetadataKind("full"): outputs[1], - MetadataKind("fast"): outputs[2], + MetadataKind("link"): link, + MetadataKind("full"): rust_compile( + ctx = ctx, + compile_ctx = compile_ctx, + emit = Emit("metadata-full"), + params = params, + default_roots = ["lib.rs"], + designated_clippy = params == check_params, + ), + MetadataKind("fast"): meta_fast, } - if link_native: - native_param_artifact[params] = outputs[0] + + if LinkageLang("native") in langs or LinkageLang("native-unbundled") in langs: + native_param_artifact[params] = link # For doctests, we need to know two things to know how to link them. The # first is that we need a link strategy, which affects how deps of this From 918a29b00d4a66c7662b825d848ed01dcfeec6d4 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 14:03:51 -0700 Subject: [PATCH 0519/1133] Remove RustFailureFilter provider Summary: No reason this needs to be a `provider`. It is only used for arguments for one function call. It can be a `record`, but even that does not seem worthwhile. 
Reviewed By: JakobDegen Differential Revision: D55146869 fbshipit-source-id: bb0fce3287eaa6f80e6ca9bec8fdf34bc7145458 --- prelude/rust/build.bzl | 11 +++-------- prelude/rust/failure_filter.bzl | 20 ++++---------------- 2 files changed, 7 insertions(+), 24 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 0d5263e83..2513e4724 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -64,7 +64,6 @@ load( load(":extern.bzl", "crate_map_arg", "extern_arg") load( ":failure_filter.bzl", - "RustFailureFilter", "failure_filter", ) load( @@ -587,18 +586,14 @@ def rust_compile( # This is only needed when this action's output is being used as an # input, so we only need standard diagnostics (clippy is always # asked for explicitly). - filter_prov = RustFailureFilter( - buildstatus = build_status, - required = emit_op.output, - stderr = diag_txt, - ) - filtered_output = failure_filter( ctx = ctx, compile_ctx = compile_ctx, prefix = "{}/{}".format(common_args.subdir, emit.value), predecl_out = predeclared_outputs.get(emit), - failprov = filter_prov, + build_status = build_status, + required = emit_op.output, + stderr = diag_txt, short_cmd = common_args.short_cmd, ) else: diff --git a/prelude/rust/failure_filter.bzl b/prelude/rust/failure_filter.bzl index 7a8fa9ff3..f6ffa4121 100644 --- a/prelude/rust/failure_filter.bzl +++ b/prelude/rust/failure_filter.bzl @@ -7,16 +7,6 @@ load(":context.bzl", "CompileContext") -# Inputs to the fail filter -RustFailureFilter = provider(fields = { - # Build status json - "buildstatus": typing.Any, - # Required files - "required": typing.Any, - # stderr - "stderr": typing.Any, -}) - # This creates an action which takes a buildstatus json artifact as an input, and a list of other # artifacts. If all those artifacts are present in the buildstatus as successfully generated, then # the action will succeed with those artifacts as outputs. Otherwise it fails. 
@@ -26,15 +16,13 @@ def failure_filter( compile_ctx: CompileContext, prefix: str, predecl_out: [Artifact, None], - failprov: RustFailureFilter, + build_status: Artifact, + required: Artifact, + stderr: Artifact, short_cmd: str) -> Artifact: toolchain_info = compile_ctx.toolchain_info failure_filter_action = toolchain_info.failure_filter_action - buildstatus = failprov.buildstatus - required = failprov.required - stderr = failprov.stderr - if predecl_out: output = predecl_out else: @@ -49,7 +37,7 @@ def failure_filter( required, output.as_output(), "--build-status", - buildstatus, + build_status, ) ctx.actions.run(cmd, category = "failure_filter", identifier = "{} {}".format(prefix, short_cmd)) From 45831de365f414a9353c35b28917d4199d612762 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 14:06:35 -0700 Subject: [PATCH 0520/1133] Package _rustc_invoke's return values into a record Summary: I need to add another return value to `_rustc_invoke` in {D55113079}, and going to a 4-tuple seemed unwise. 
Reviewed By: JakobDegen Differential Revision: D55147290 fbshipit-source-id: e9ad5f1c66f745efa68bea0cf9772db9dae609d3 --- prelude/rust/build.bzl | 41 +++++++++++++++++++++++++---------------- 1 file changed, 25 insertions(+), 16 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 2513e4724..a74852811 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -527,7 +527,7 @@ def rust_compile( rustc_cmd.add(cmd_args(linker_argsfile, format = "-Clink-arg=@{}")) rustc_cmd.hidden(link_args_output.hidden) - diag_txt, diag_json, build_status = _rustc_invoke( + invoke = _rustc_invoke( ctx = ctx, compile_ctx = compile_ctx, prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), @@ -564,7 +564,7 @@ def rust_compile( {"clippy.toml": toolchain_info.clippy_toml}, ) clippy_env["CLIPPY_CONF_DIR"] = clippy_conf_dir - clippy_txt, clippy_json, _ = _rustc_invoke( + clippy_invoke = _rustc_invoke( ctx = ctx, compile_ctx = compile_ctx, prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), @@ -579,8 +579,7 @@ def rust_compile( crate_map = common_args.crate_map, ) else: - clippy_txt = None - clippy_json = None + clippy_invoke = None if toolchain_info.failure_filter: # This is only needed when this action's output is being used as an @@ -591,9 +590,9 @@ def rust_compile( compile_ctx = compile_ctx, prefix = "{}/{}".format(common_args.subdir, emit.value), predecl_out = predeclared_outputs.get(emit), - build_status = build_status, + build_status = invoke.build_status, required = emit_op.output, - stderr = diag_txt, + stderr = invoke.diag_txt, short_cmd = common_args.short_cmd, ) else: @@ -657,11 +656,11 @@ def rust_compile( return RustcOutput( output = filtered_output, stripped_output = stripped_output, - diag_txt = diag_txt, - diag_json = diag_json, + diag_txt = invoke.diag_txt, + diag_json = invoke.diag_json, # Only available on metadata-like emits - clippy_txt = clippy_txt, - clippy_json = clippy_json, + clippy_txt = 
clippy_invoke.diag_txt if clippy_invoke else None, + clippy_json = clippy_invoke.diag_json if clippy_invoke else None, pdb = pdb_artifact, dwp_output = dwp_output, dwo_output_directory = dwo_output_directory, @@ -1167,6 +1166,12 @@ def _rustc_emit( extra_out = extra_out, ) +Invoke = record( + diag_txt = field(Artifact), + diag_json = field(Artifact), + build_status = field([Artifact, None]), +) + # Invoke rustc and capture outputs def _rustc_invoke( ctx: AnalysisContext, @@ -1179,7 +1184,7 @@ def _rustc_invoke( is_clippy: bool, allow_cache_upload: bool, crate_map: list[(CrateName, Label)], - env: dict[str, str | ResolvedStringWithMacros | Artifact]) -> (Artifact, Artifact, [Artifact, None]): + env: dict[str, str | ResolvedStringWithMacros | Artifact]) -> Invoke: exec_is_windows = ctx.attrs._exec_os_type[OsLookup].platform == "windows" toolchain_info = compile_ctx.toolchain_info @@ -1192,12 +1197,12 @@ def _rustc_invoke( # Save diagnostic outputs diag = "clippy" if is_clippy else "diag" - json_diag = ctx.actions.declare_output("{}-{}.json".format(prefix, diag)) - txt_diag = ctx.actions.declare_output("{}-{}.txt".format(prefix, diag)) + diag_json = ctx.actions.declare_output("{}-{}.json".format(prefix, diag)) + diag_txt = ctx.actions.declare_output("{}-{}.txt".format(prefix, diag)) compile_cmd = cmd_args( - cmd_args(json_diag.as_output(), format = "--diag-json={}"), - cmd_args(txt_diag.as_output(), format = "--diag-txt={}"), + cmd_args(diag_json.as_output(), format = "--diag-json={}"), + cmd_args(diag_txt.as_output(), format = "--diag-txt={}"), "--remap-cwd-prefix=.", "--buck-target={}".format(ctx.label.raw_target()), ) @@ -1252,7 +1257,11 @@ def _rustc_invoke( allow_cache_upload = allow_cache_upload, ) - return (txt_diag, json_diag, build_status) + return Invoke( + diag_txt = diag_txt, + diag_json = diag_json, + build_status = build_status, + ) # Our rustc and rustdoc commands can have arbitrarily large number of `--extern` # flags, so write to file to avoid hitting 
the platform's limit on command line From 96ea14b5253b4c0f53c8fae3fa13bb585eebb1e6 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 14:12:30 -0700 Subject: [PATCH 0521/1133] Shorten Rust action names Summary: Context: see summary of D55113083. **Before:** actions like: - `rustc rlib-static-static-metadata/foo-metadata rlib,static,metadata [diag]` - `rustc proc-macro-pic-static_pic-link/foo-link proc-macro,pic,link [diag]` - `rustc staticlib-pic-static_pic-link/foo-link staticlib,pic,link [diag]` - `rustc rlib-static-static-metadata/foo-metadata rlib,static,metadata [clippy]` **After:** - `rustc check` - `rustc proc-macro` - `rustc staticlib [pic]` - `clippy` Reviewed By: JakobDegen Differential Revision: D55113079 fbshipit-source-id: 7663658250e7f322a6f4f5f4729da8ecf511f11a --- prelude/rust/build.bzl | 55 ++++++++++++++++++++++++++++----- prelude/rust/context.bzl | 6 ++-- prelude/rust/failure_filter.bzl | 5 ++- 3 files changed, 53 insertions(+), 13 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index a74852811..62ad3ad03 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -530,10 +530,10 @@ def rust_compile( invoke = _rustc_invoke( ctx = ctx, compile_ctx = compile_ctx, + common_args = common_args, prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), rustc_cmd = cmd_args(toolchain_info.compiler, rustc_cmd, emit_op.args), required_outputs = [emit_op.output], - short_cmd = common_args.short_cmd, is_binary = is_binary, is_clippy = False, allow_cache_upload = allow_cache_upload, @@ -567,12 +567,12 @@ def rust_compile( clippy_invoke = _rustc_invoke( ctx = ctx, compile_ctx = compile_ctx, + common_args = common_args, prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), # Lints go first to allow other args to override them. 
rustc_cmd = cmd_args(compile_ctx.clippy_wrapper, clippy_lints, rustc_cmd, clippy_emit_op.args), env = clippy_env, required_outputs = [clippy_emit_op.output], - short_cmd = common_args.short_cmd, is_binary = False, is_clippy = True, allow_cache_upload = False, @@ -588,12 +588,11 @@ def rust_compile( filtered_output = failure_filter( ctx = ctx, compile_ctx = compile_ctx, - prefix = "{}/{}".format(common_args.subdir, emit.value), predecl_out = predeclared_outputs.get(emit), build_status = invoke.build_status, required = emit_op.output, stderr = invoke.diag_txt, - short_cmd = common_args.short_cmd, + identifier = invoke.identifier, ) else: filtered_output = emit_op.output @@ -780,7 +779,7 @@ def dynamic_symlinked_dirs( cmd_args(transitive_dependency_dir.as_output(), format = "--out-dir={}"), cmd_args(artifacts_json, format = "--artifacts={}"), ], - category = "tdep_symlinks", + category = "deps", identifier = str(len(compile_ctx.transitive_dependency_dirs)), ) @@ -972,7 +971,9 @@ def _compute_common_args( args = args, subdir = subdir, tempfile = tempfile, - short_cmd = "{},{},{}".format(crate_type.value, params.reloc_model.value, emit.value), + crate_type = crate_type, + params = params, + emit = emit, is_check = is_check, crate_map = crate_map, ) @@ -1080,6 +1081,38 @@ def _crate_root( fail("Could not infer crate_root. candidates=%s\nAdd 'crate_root = \"src/example.rs\"' to your attributes to disambiguate." 
% candidates.list()) +def _explain(crate_type: CrateType, link_strategy: LinkStrategy, emit: Emit) -> str: + link_strategy_suffix = { + LinkStrategy("static"): "", + LinkStrategy("static_pic"): " [pic]", + LinkStrategy("shared"): " [shared]", + }[link_strategy] + + if emit == Emit("metadata-full"): + return "check" + link_strategy_suffix + + if emit == Emit("metadata-fast"): + return "check [fast]" + + if emit == Emit("link"): + if crate_type == CrateType("bin"): + return "link" + link_strategy_suffix + if crate_type == CrateType("rlib"): + return "rlib" + link_strategy_suffix + if crate_type == CrateType("dylib"): + return "dylib" # always shared + if crate_type == CrateType("proc-macro"): + return "proc-macro" # always static_pic + if crate_type == CrateType("cdylib"): + return "cdylib" # always shared + if crate_type == CrateType("staticlib"): + return "staticlib" + link_strategy_suffix + + if emit == Emit("expand"): + return "expand" + + fail("unrecognized rustc action:", crate_type, link_strategy, emit) + EmitOperation = record( output = field(Artifact), args = field(cmd_args), @@ -1170,16 +1203,17 @@ Invoke = record( diag_txt = field(Artifact), diag_json = field(Artifact), build_status = field([Artifact, None]), + identifier = field([str, None]), ) # Invoke rustc and capture outputs def _rustc_invoke( ctx: AnalysisContext, compile_ctx: CompileContext, + common_args: CommonArgsInfo, prefix: str, rustc_cmd: cmd_args, required_outputs: list[Artifact], - short_cmd: str, is_binary: bool, is_clippy: bool, allow_cache_upload: bool, @@ -1245,7 +1279,11 @@ def _rustc_invoke( identifier = None else: category = "rustc" - identifier = "{} {} [{}]".format(prefix, short_cmd, diag) + identifier = _explain( + crate_type = common_args.crate_type, + link_strategy = common_args.params.dep_link_strategy, + emit = common_args.emit, + ) ctx.actions.run( compile_cmd, @@ -1261,6 +1299,7 @@ def _rustc_invoke( diag_txt = diag_txt, diag_json = diag_json, build_status = build_status, + 
identifier = identifier, ) # Our rustc and rustdoc commands can have arbitrarily large number of `--extern` diff --git a/prelude/rust/context.bzl b/prelude/rust/context.bzl index 74e76837f..f59400b09 100644 --- a/prelude/rust/context.bzl +++ b/prelude/rust/context.bzl @@ -7,7 +7,7 @@ load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") load("@prelude//linking:link_info.bzl", "LinkStrategy") -load(":build_params.bzl", "CrateType", "Emit") +load(":build_params.bzl", "BuildParams", "CrateType", "Emit") load(":rust_toolchain.bzl", "PanicRuntime", "RustExplicitSysrootDeps", "RustToolchainInfo") CrateName = record( @@ -21,7 +21,9 @@ CommonArgsInfo = record( args = field(cmd_args), subdir = field(str), tempfile = field(str), - short_cmd = field(str), + crate_type = field(CrateType), + params = field(BuildParams), + emit = field(Emit), is_check = field(bool), crate_map = field(list[(CrateName, Label)]), ) diff --git a/prelude/rust/failure_filter.bzl b/prelude/rust/failure_filter.bzl index f6ffa4121..96d7cac31 100644 --- a/prelude/rust/failure_filter.bzl +++ b/prelude/rust/failure_filter.bzl @@ -14,12 +14,11 @@ load(":context.bzl", "CompileContext") def failure_filter( ctx: AnalysisContext, compile_ctx: CompileContext, - prefix: str, predecl_out: [Artifact, None], build_status: Artifact, required: Artifact, stderr: Artifact, - short_cmd: str) -> Artifact: + identifier: str) -> Artifact: toolchain_info = compile_ctx.toolchain_info failure_filter_action = toolchain_info.failure_filter_action @@ -40,6 +39,6 @@ def failure_filter( build_status, ) - ctx.actions.run(cmd, category = "failure_filter", identifier = "{} {}".format(prefix, short_cmd)) + ctx.actions.run(cmd, category = "failure_filter", identifier = identifier) return output From b89f80967f0357b9208c387c5ff5407a7e4c29cb Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Wed, 20 Mar 2024 17:28:51 -0700 Subject: [PATCH 0522/1133] Fix erlang_tests_macro **kargs type Summary: `**kwargs: dict` means type of 
`kwargs` is `dict[str, dict]`. Following diff D55109746 fixes dict typechecking which reveals this problem. Reviewed By: JakobDegen Differential Revision: D55109913 fbshipit-source-id: 8e120527410f0f9ca03578926e1a78e75ecb50c3 --- prelude/erlang/erlang_tests.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/erlang/erlang_tests.bzl b/prelude/erlang/erlang_tests.bzl index 15a4bf797..478da71ee 100644 --- a/prelude/erlang/erlang_tests.bzl +++ b/prelude/erlang/erlang_tests.bzl @@ -43,7 +43,7 @@ def erlang_tests_macro( property_tests: list[str] = [], srcs: list[str] = [], prefix: str | None = None, - **common_attributes: dict) -> None: + **common_attributes) -> None: """ Generate multiple erlang_test targets based on the `suites` field. Also adds the default 'config' and 'deps' from the buck2 config. From a3037a0859f00474b34ec0096b4b53152328bbe5 Mon Sep 17 00:00:00 2001 From: Wei Han Date: Wed, 20 Mar 2024 17:43:07 -0700 Subject: [PATCH 0523/1133] Support validation_deps in android_library Summary: Now that validation_deps is supported in android_binary, apple_library, apple_binary, porting it to android_library. 
Reviewed By: IanChilds Differential Revision: D55047080 fbshipit-source-id: 6041351b3aca95a89fe3bc25c0056778f074725a --- prelude/android/android.bzl | 1 + prelude/android/android_library.bzl | 11 +++++++++-- prelude/java/java.bzl | 2 ++ prelude/java/java_library.bzl | 18 +++++++++++++++--- prelude/kotlin/kotlin.bzl | 2 ++ prelude/kotlin/kotlin_library.bzl | 16 ++++++++++++++-- 6 files changed, 43 insertions(+), 7 deletions(-) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 6f959e573..7231139ab 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -167,6 +167,7 @@ extra_attributes = { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.dep(), sorted = True, default = []), "_android_toolchain": toolchains_common.android(), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_dex_min_sdk_version": attrs.default_only(attrs.option(attrs.int(), default = dex_min_sdk_version())), diff --git a/prelude/android/android_library.bzl b/prelude/android/android_library.bzl index 5cd501499..18f2e501a 100644 --- a/prelude/android/android_library.bzl +++ b/prelude/android/android_library.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load( "@prelude//android:android_providers.bzl", "AndroidLibraryIntellijInfo", @@ -39,7 +40,10 @@ def android_library_impl(ctx: AnalysisContext) -> list[Provider]: }), ] - java_providers, android_library_intellij_info = build_android_library(ctx) + java_providers, android_library_intellij_info = build_android_library( + ctx = ctx, + validation_deps_outputs = get_validation_deps_outputs(ctx), + ) android_providers = [android_library_intellij_info] if android_library_intellij_info else [] return to_list(java_providers) + [ @@ -55,7 +59,8 @@ def android_library_impl(ctx: AnalysisContext) -> list[Provider]: def build_android_library( ctx: AnalysisContext, r_dot_java: [Artifact, None] = None, - extra_sub_targets = {}) -> (JavaProviders, [AndroidLibraryIntellijInfo, None]): + extra_sub_targets = {}, + validation_deps_outputs: [list[Artifact], None] = None) -> (JavaProviders, [AndroidLibraryIntellijInfo, None]): bootclasspath_entries = [] + ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath additional_classpath_entries = [] @@ -74,6 +79,7 @@ def build_android_library( additional_classpath_entries = additional_classpath_entries, bootclasspath_entries = bootclasspath_entries, extra_sub_targets = extra_sub_targets, + validation_deps_outputs = validation_deps_outputs, ), android_library_intellij_info else: return build_java_library( @@ -82,6 +88,7 @@ def build_android_library( additional_classpath_entries = additional_classpath_entries, bootclasspath_entries = bootclasspath_entries, extra_sub_targets = extra_sub_targets, + validation_deps_outputs = validation_deps_outputs, ), android_library_intellij_info def _get_dummy_r_dot_java( diff --git a/prelude/java/java.bzl b/prelude/java/java.bzl index ac0739580..87602f3c1 100644 --- a/prelude/java/java.bzl +++ b/prelude/java/java.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this 
source tree. +load("@prelude//:validation_deps.bzl", "VALIDATION_DEPS_ATTR_NAME") load("@prelude//android:build_only_native_code.bzl", "is_build_only_native_code") load("@prelude//android:configuration.bzl", "is_building_android_binary_attr") load("@prelude//android:min_sdk_version.bzl", "get_min_sdk_version_constraint_value_name", "get_min_sdk_version_range") @@ -66,6 +67,7 @@ extra_attributes = { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), "javac": attrs.option(attrs.one_of(attrs.exec_dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.dep(), sorted = True, default = []), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_dex_min_sdk_version": attrs.option(attrs.int(), default = dex_min_sdk_version()), "_dex_toolchain": toolchains_common.dex(), diff --git a/prelude/java/java_library.bzl b/prelude/java/java_library.bzl index 5c3571633..564194889 100644 --- a/prelude/java/java_library.bzl +++ b/prelude/java/java_library.bzl @@ -6,6 +6,7 @@ # of this source tree. 
load("@prelude//:paths.bzl", "paths") +load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_providers.bzl", "merge_android_packageable_info") load( "@prelude//java:java_providers.bzl", @@ -502,7 +503,11 @@ def java_library_impl(ctx: AnalysisContext) -> list[Provider]: _check_dep_types(ctx.attrs.exported_provided_deps) _check_dep_types(ctx.attrs.runtime_deps) - java_providers = build_java_library(ctx, ctx.attrs.srcs) + java_providers = build_java_library( + ctx = ctx, + srcs = ctx.attrs.srcs, + validation_deps_outputs = get_validation_deps_outputs(ctx), + ) return to_list(java_providers) + [android_packageable_info] @@ -515,7 +520,8 @@ def build_java_library( additional_compiled_srcs: [Artifact, None] = None, generated_sources: list[Artifact] = [], override_abi_generation_mode: [AbiGenerationMode, None] = None, - extra_sub_targets: dict = {}) -> JavaProviders: + extra_sub_targets: dict = {}, + validation_deps_outputs: [list[Artifact], None] = None) -> JavaProviders: expect( not getattr(ctx.attrs, "_build_only_native_code", False), "Shouldn't call build_java_library if we're only building native code!", @@ -577,10 +583,16 @@ def build_java_library( "target_level": target_level, } + # The outputs of validation_deps need to be added as hidden arguments + # to an action for the validation_deps targets to be built and enforced. + extra_arguments = cmd_args(ctx.attrs.extra_arguments) + if validation_deps_outputs: + extra_arguments.hidden(validation_deps_outputs) + outputs = compile_to_jar( ctx, plugin_params = plugin_params, - extra_arguments = cmd_args(ctx.attrs.extra_arguments), + extra_arguments = extra_arguments, **common_compile_kwargs ) diff --git a/prelude/kotlin/kotlin.bzl b/prelude/kotlin/kotlin.bzl index ca4785cac..ce9032c67 100644 --- a/prelude/kotlin/kotlin.bzl +++ b/prelude/kotlin/kotlin.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//:validation_deps.bzl", "VALIDATION_DEPS_ATTR_NAME") load("@prelude//android:build_only_native_code.bzl", "is_build_only_native_code") load("@prelude//android:configuration.bzl", "is_building_android_binary_attr") load("@prelude//java:java.bzl", "AbiGenerationMode", "dex_min_sdk_version") @@ -23,6 +24,7 @@ extra_attributes = { "abi_generation_mode": attrs.option(attrs.enum(AbiGenerationMode), default = None), "javac": attrs.option(attrs.one_of(attrs.dep(), attrs.source()), default = None), "resources_root": attrs.option(attrs.string(), default = None), + VALIDATION_DEPS_ATTR_NAME: attrs.set(attrs.dep(), sorted = True, default = []), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_dex_min_sdk_version": attrs.option(attrs.int(), default = dex_min_sdk_version()), "_dex_toolchain": toolchains_common.dex(), diff --git a/prelude/kotlin/kotlin_library.bzl b/prelude/kotlin/kotlin_library.bzl index b58fd9fbc..c3eefe0f7 100644 --- a/prelude/kotlin/kotlin_library.bzl +++ b/prelude/kotlin/kotlin_library.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_providers.bzl", "merge_android_packageable_info") load( "@prelude//java:java_library.bzl", @@ -264,14 +265,18 @@ def kotlin_library_impl(ctx: AnalysisContext) -> list[Provider]: android_packageable_info, ] - java_providers = build_kotlin_library(ctx) + java_providers = build_kotlin_library( + ctx = ctx, + validation_deps_outputs = get_validation_deps_outputs(ctx), + ) return to_list(java_providers) + [android_packageable_info] def build_kotlin_library( ctx: AnalysisContext, additional_classpath_entries: list[Artifact] = [], bootclasspath_entries: list[Artifact] = [], - extra_sub_targets: dict = {}) -> JavaProviders: + extra_sub_targets: dict = {}, + validation_deps_outputs: [list[Artifact], None] = None) -> JavaProviders: srcs = ctx.attrs.srcs has_kotlin_srcs = lazy.is_any(lambda src: src.extension == ".kt" or src.basename.endswith(".src.zip") or src.basename.endswith("-sources.jar"), srcs) @@ -284,6 +289,7 @@ def build_kotlin_library( # Match buck1, which always does class ABI generation for Kotlin targets unless explicitly specified. override_abi_generation_mode = get_abi_generation_mode(ctx.attrs.abi_generation_mode) or AbiGenerationMode("class"), extra_sub_targets = extra_sub_targets, + validation_deps_outputs = validation_deps_outputs, ) else: @@ -331,11 +337,17 @@ def build_kotlin_library( additional_compiled_srcs = kotlinc_classes, generated_sources = filter(None, [kapt_generated_sources, ksp_generated_sources]), extra_sub_targets = extra_sub_targets, + validation_deps_outputs = validation_deps_outputs, ) return java_lib elif kotlin_toolchain.kotlinc_protocol == "kotlincd": source_level, target_level = get_java_version_attributes(ctx) extra_arguments = cmd_args(ctx.attrs.extra_arguments) + + # The outputs of validation_deps need to be added as hidden arguments + # to an action for the validation_deps targets to be built and enforced. 
+ if validation_deps_outputs: + extra_arguments.hidden(validation_deps_outputs) common_kotlincd_kwargs = { "abi_generation_mode": get_abi_generation_mode(ctx.attrs.abi_generation_mode), "actions": ctx.actions, From e169e5fbac4f49934b91ab5361681437b52c4112 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 21:43:33 -0700 Subject: [PATCH 0524/1133] Convert predeclared_outputs from dict to single optional Artifact Summary: Having `predeclared_outputs: dict[Emit, Artifact]` made more sense when `rust_compile_multi` used to exist prior to D55113078. These days `rust_compile` is only ever used with a single `emit` at a time, so passing either 0 or 1 `predeclared_output` for that emit is sufficient. Reviewed By: JakobDegen Differential Revision: D55168997 fbshipit-source-id: 77dd582fb0f82e92b0784ff77dcb037c9ac22c31 --- prelude/rust/build.bzl | 16 +++++++--------- prelude/rust/failure_filter.bzl | 6 +++--- prelude/rust/rust_binary.bzl | 2 +- 3 files changed, 11 insertions(+), 13 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 62ad3ad03..ec6352fcd 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -423,7 +423,7 @@ def rust_compile( params: BuildParams, default_roots: list[str], extra_link_args: list[typing.Any] = [], - predeclared_outputs: dict[Emit, Artifact] = {}, + predeclared_output: [Artifact, None] = None, extra_flags: list[[str, ResolvedStringWithMacros]] = [], is_binary: bool = False, designated_clippy: bool = False, @@ -468,7 +468,6 @@ def rust_compile( ctx = ctx, compile_ctx = compile_ctx, emit = emit, - predeclared_outputs = {}, subdir = common_args.subdir, params = params, ) @@ -477,9 +476,9 @@ def rust_compile( ctx = ctx, compile_ctx = compile_ctx, emit = emit, - predeclared_outputs = predeclared_outputs, subdir = common_args.subdir, params = params, + predeclared_output = predeclared_output, ) pdb_artifact = None @@ -548,7 +547,6 @@ def rust_compile( ctx = ctx, compile_ctx = compile_ctx, emit = 
emit, - predeclared_outputs = {}, subdir = common_args.subdir + "-clippy", params = params, ) @@ -588,7 +586,7 @@ def rust_compile( filtered_output = failure_filter( ctx = ctx, compile_ctx = compile_ctx, - predecl_out = predeclared_outputs.get(emit), + predeclared_output = predeclared_output, build_status = invoke.build_status, required = emit_op.output, stderr = invoke.diag_txt, @@ -1125,9 +1123,9 @@ def _rustc_emit( ctx: AnalysisContext, compile_ctx: CompileContext, emit: Emit, - predeclared_outputs: dict[Emit, Artifact], subdir: str, - params: BuildParams) -> EmitOperation: + params: BuildParams, + predeclared_output: [Artifact, None] = None) -> EmitOperation: toolchain_info = compile_ctx.toolchain_info simple_crate = attr_simple_crate_for_filenames(ctx) crate_type = params.crate_type @@ -1136,8 +1134,8 @@ def _rustc_emit( emit_env = {} extra_out = None - if emit in predeclared_outputs: - emit_output = predeclared_outputs[emit] + if predeclared_output: + emit_output = predeclared_output else: extra_hash = "-" + _metadata(ctx.label, False)[1] emit_args.add("-Cextra-filename={}".format(extra_hash)) diff --git a/prelude/rust/failure_filter.bzl b/prelude/rust/failure_filter.bzl index 96d7cac31..67d1482f0 100644 --- a/prelude/rust/failure_filter.bzl +++ b/prelude/rust/failure_filter.bzl @@ -14,7 +14,7 @@ load(":context.bzl", "CompileContext") def failure_filter( ctx: AnalysisContext, compile_ctx: CompileContext, - predecl_out: [Artifact, None], + predeclared_output: [Artifact, None], build_status: Artifact, required: Artifact, stderr: Artifact, @@ -22,8 +22,8 @@ def failure_filter( toolchain_info = compile_ctx.toolchain_info failure_filter_action = toolchain_info.failure_filter_action - if predecl_out: - output = predecl_out + if predeclared_output: + output = predeclared_output else: output = ctx.actions.declare_output("out/" + required.short_path) diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 6f4e73251..08b388bfc 100644 --- 
a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -199,7 +199,7 @@ def _rust_binary_common( params = params, default_roots = default_roots, extra_link_args = executable_args.extra_link_args, - predeclared_outputs = {Emit("link"): output}, + predeclared_output = output, extra_flags = extra_flags, is_binary = True, allow_cache_upload = allow_cache_upload, From e5c7d0ac155ba0b0cb88512ebe3c42bda7a61fbc Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 21:45:19 -0700 Subject: [PATCH 0525/1133] Eliminate is_binary arg in favor of looking at crate type Summary: `is_binary` has been around since the early days (D33184736, and even before that under a different name). However D54977134 recently changed `is_binary` from being True to False for the "metadata-full" build of binary targets. And even before that, the "expand" subtarget for binary targets always used to have `is_binary` False. This makes a difference to the choice of `prefer_local`. Build-stamped Rust binaries are fine to check and expand in RE, but should link locally. This diff sidesteps trying to define what `is_binary` is supposed to mean by just looking directly at `crate_type` and `emit` in the 2 places that cared. 
Reviewed By: JakobDegen Differential Revision: D55169143 fbshipit-source-id: 5fbad47454163f6bf3e483f45c1901a48a70e318 --- prelude/rust/build.bzl | 12 ++++++------ prelude/rust/rust_binary.bzl | 1 - 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index ec6352fcd..d2fb71ec7 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -425,7 +425,6 @@ def rust_compile( extra_link_args: list[typing.Any] = [], predeclared_output: [Artifact, None] = None, extra_flags: list[[str, ResolvedStringWithMacros]] = [], - is_binary: bool = False, designated_clippy: bool = False, allow_cache_upload: bool = False, rust_cxx_link_group_info: [RustCxxLinkGroupInfo, None] = None) -> RustcOutput: @@ -533,7 +532,6 @@ def rust_compile( prefix = "{}/{}".format(common_args.subdir, common_args.tempfile), rustc_cmd = cmd_args(toolchain_info.compiler, rustc_cmd, emit_op.args), required_outputs = [emit_op.output], - is_binary = is_binary, is_clippy = False, allow_cache_upload = allow_cache_upload, crate_map = common_args.crate_map, @@ -571,7 +569,6 @@ def rust_compile( rustc_cmd = cmd_args(compile_ctx.clippy_wrapper, clippy_lints, rustc_cmd, clippy_emit_op.args), env = clippy_env, required_outputs = [clippy_emit_op.output], - is_binary = False, is_clippy = True, allow_cache_upload = False, crate_map = common_args.crate_map, @@ -624,7 +621,9 @@ def rust_compile( dwo_output_directory = None extra_external_debug_info = [] - if is_binary and dwp_available(compile_ctx.cxx_toolchain_info): + if params.crate_type == CrateType("bin") and \ + emit == Emit("link") and \ + dwp_available(compile_ctx.cxx_toolchain_info): dwp_output = dwp( ctx, compile_ctx.cxx_toolchain_info, @@ -1212,7 +1211,6 @@ def _rustc_invoke( prefix: str, rustc_cmd: cmd_args, required_outputs: list[Artifact], - is_binary: bool, is_clippy: bool, allow_cache_upload: bool, crate_map: list[(CrateName, Label)], @@ -1269,7 +1267,9 @@ def _rustc_invoke( prefer_local = False 
if incremental_enabled: local_only = True - elif is_binary and link_cxx_binary_locally(ctx): + elif common_args.crate_type == CrateType("bin") and \ + common_args.emit == Emit("link") and \ + link_cxx_binary_locally(ctx): prefer_local = True if is_clippy: diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 08b388bfc..1e1818032 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -201,7 +201,6 @@ def _rust_binary_common( extra_link_args = executable_args.extra_link_args, predeclared_output = output, extra_flags = extra_flags, - is_binary = True, allow_cache_upload = allow_cache_upload, rust_cxx_link_group_info = rust_cxx_link_group_info, ) From 61a0fde87b7a9288dd1960002df37ad1b3b6fa76 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 21:48:55 -0700 Subject: [PATCH 0526/1133] More consistently stick to static_pic as the default static linkage Summary: We were defaulting things to "static" in some places and "static_pic" in others. This led to a large number of targets building metadata redundantly as both "static" and "static_pic" when running `check_all.sh` on fbcode. This diff tries to stamp out places that were using "static" as a default static link style where they could have been "static_pic". 
Reviewed By: diliop, JakobDegen Differential Revision: D55169694 fbshipit-source-id: f97932abe60d239323e77cdc2be0e97da8e661bd --- prelude/rust/cargo_buildscript.bzl | 23 ++++++++++++++--------- prelude/rust/link_info.bzl | 9 +++++++++ prelude/rust/rust_binary.bzl | 2 +- prelude/rust/rust_library.bzl | 7 ++++--- 4 files changed, 28 insertions(+), 13 deletions(-) diff --git a/prelude/rust/cargo_buildscript.bzl b/prelude/rust/cargo_buildscript.bzl index 6136d7a6d..69da19168 100644 --- a/prelude/rust/cargo_buildscript.bzl +++ b/prelude/rust/cargo_buildscript.bzl @@ -20,7 +20,6 @@ load("@prelude//:prelude.bzl", "native") load("@prelude//decls:common.bzl", "buck") -load("@prelude//linking:link_info.bzl", "LinkStrategy") load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//rust:rust_toolchain.bzl", "RustToolchainInfo") load("@prelude//rust:targets.bzl", "targets") @@ -28,7 +27,13 @@ load("@prelude//decls/toolchains_common.bzl", "toolchains_common") load(":build.bzl", "dependency_args") load(":build_params.bzl", "MetadataKind") load(":context.bzl", "DepCollectionContext") -load(":link_info.bzl", "RustProcMacroPlugin", "gather_explicit_sysroot_deps", "resolve_rust_deps_inner") +load( + ":link_info.bzl", + "DEFAULT_STATIC_LINK_STRATEGY", + "RustProcMacroPlugin", + "gather_explicit_sysroot_deps", + "resolve_rust_deps_inner", +) load(":rust_toolchain.bzl", "PanicRuntime") def _make_rustc_shim(ctx: AnalysisContext, cwd: Artifact) -> cmd_args: @@ -48,13 +53,13 @@ def _make_rustc_shim(ctx: AnalysisContext, cwd: Artifact) -> cmd_args: deps = gather_explicit_sysroot_deps(dep_ctx) deps = resolve_rust_deps_inner(ctx, deps) dep_args, _ = dependency_args( - ctx, - None, # compile_ctx - deps, - "any", # subdir - LinkStrategy("static_pic"), - MetadataKind("full"), - False, # is_rustdoc_test + ctx = ctx, + compile_ctx = None, + deps = deps, + subdir = "any", + dep_link_strategy = DEFAULT_STATIC_LINK_STRATEGY, + dep_metadata_kind = MetadataKind("full"), + is_rustdoc_test = 
False, ) null_path = "nul" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "/dev/null" diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 8c7a10f92..737d3d14b 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -43,6 +43,7 @@ load( ) load( "@prelude//linking:link_info.bzl", + "LibOutputStyle", "LinkInfo", "LinkStrategy", "MergedLinkInfo", @@ -75,7 +76,15 @@ load( load(":rust_toolchain.bzl", "PanicRuntime") # Link strategy for targets which do not set an explicit `link_style` attribute. +# +# These values are also used as the defaults for check/clippy subtargets on +# libraries, and are the only way in which metadata-fast output can be built. +# +# Internally at Meta, these are a good choice for a default because they allow +# sharing work between check builds and dev mode builds, which have shared link +# strategy, and so consume their dependencies as `static_pic`. DEFAULT_STATIC_LINK_STRATEGY = LinkStrategy("static_pic") +DEFAULT_STATIC_LIB_OUTPUT_STYLE = LibOutputStyle("pic_archive") # Override dylib crates to static_pic, so that Rust code is always # statically linked. 
diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 1e1818032..c001cae2a 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -312,7 +312,7 @@ def _rust_binary_common( ctx = ctx, compile_ctx = compile_ctx, emit = Emit("metadata-fast"), - params = strategy_param[LinkStrategy("static")], + params = strategy_param[DEFAULT_STATIC_LINK_STRATEGY], default_roots = default_roots, extra_flags = extra_flags, ) diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 793b552ad..885286b4b 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -93,6 +93,7 @@ load( ) load( ":link_info.bzl", + "DEFAULT_STATIC_LIB_OUTPUT_STYLE", "DEFAULT_STATIC_LINK_STRATEGY", "RustLinkInfo", "RustLinkStrategyInfo", @@ -261,7 +262,7 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: # Grab the artifacts to use for the check subtargets. Picking a good # `LibOutputStyle` ensures that the subtarget shares work with the main # build if possible - check_params = lang_style_param[(LinkageLang("rust"), LibOutputStyle("archive"))] + check_params = lang_style_param[(LinkageLang("rust"), DEFAULT_STATIC_LIB_OUTPUT_STYLE)] meta_fast = rust_compile( ctx = ctx, @@ -324,12 +325,12 @@ def rust_library_impl(ctx: AnalysisContext) -> list[Provider]: # being built in a "shared" way well, so this must be a static output style. 
if ctx.attrs.doc_link_style: doc_output_style = { - "shared": LibOutputStyle("pic_archive"), + "shared": DEFAULT_STATIC_LIB_OUTPUT_STYLE, "static": LibOutputStyle("archive"), "static_pic": LibOutputStyle("pic_archive"), }[ctx.attrs.doc_link_style] else: - doc_output_style = LibOutputStyle("pic_archive") + doc_output_style = DEFAULT_STATIC_LIB_OUTPUT_STYLE static_library_params = lang_style_param[(LinkageLang("rust"), doc_output_style)] # Among {rustdoc, doctests, macro expand}, doctests are the only one which From 19d2cb56bcfbe9325de83b933507eb53fb4eb994 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 20 Mar 2024 21:53:36 -0700 Subject: [PATCH 0527/1133] Disregard link_style="static" for Rust binary's metadata subtargets MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: According to `fbgs -f TARGETS 'link_style = "static"' | wc -l` (and `"static_pic"`), there are **182** targets in fbcode using link_style="static" and **40** using link_style="static_pic". Not all of these are Rust targets but many are. It's hard to know whether the link_style="static" ones really prefer non-PIC over PIC. It's more likely all they care about is avoiding dynamic linking in mode/dev. This diff changes the `…[check]` and `diag.json` and `diag.txt` and `clippy.json` and `clippy.txt` subtargets on Rust binary and Rust unittest targets to ignore the target's specified `link_style`. The implications are: - 👍 This will speed up `check_all.sh` by avoiding redundantly building both "static" and "static_pic" metadata of library targets which are dependencies of both a plain Rust binary and some other Rust binary which uses `link_style = "static"`. - 👎 This will reduce sharing of metadata between `arc rust-check` and `buck2 build` of binaries that use `link_style = "static"` with pipelining enabled. - 👍 This will improve cache hit rate for dependencies when using `arc rust-check` on binaries that have `link_style = "static"`. 
Reviewed By: diliop, JakobDegen Differential Revision: D55170631 fbshipit-source-id: c97eb6563e6e6f4752d8120143e2dbc5424744e4 --- prelude/rust/rust_binary.bzl | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index c001cae2a..62912c8e8 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -295,14 +295,11 @@ def _rust_binary_common( if link_strategy == specified_link_strategy and link.pdb: pdb = link.pdb - # FIXME(JakobDegen): It's a bit weird that this uses the specified link - # strategy but rustdoc and expand use the default link strategy. Figure out - # what's going on there. meta_full = rust_compile( ctx = ctx, compile_ctx = compile_ctx, emit = Emit("metadata-full"), - params = strategy_param[specified_link_strategy], + params = strategy_param[DEFAULT_STATIC_LINK_STRATEGY], default_roots = default_roots, extra_flags = extra_flags, designated_clippy = True, From ce0cd7be51676319cc23017e01c29205b81c31dd Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Thu, 21 Mar 2024 08:18:48 -0700 Subject: [PATCH 0528/1133] remove produce_interface_from_stub_shared_library Summary: Refactor `produce_interface_from_stub_shared_library` to be another enum value for `shared_library_interface_mode` on `cxx_toolchain`. Later diffs will add another mode to generate shared library interface from headers. 
Reviewed By: blackm00n Differential Revision: D55036775 fbshipit-source-id: 9ca69418fc7b23d7903c82004ecd594fcb27b039 --- prelude/cxx/cxx_library.bzl | 5 +++-- prelude/cxx/cxx_toolchain.bzl | 2 -- prelude/cxx/cxx_toolchain_types.bzl | 3 +-- prelude/cxx/user/cxx_toolchain_override.bzl | 2 -- prelude/toolchains/cxx.bzl | 1 - prelude/toolchains/cxx/zig/defs.bzl | 1 - 6 files changed, 4 insertions(+), 10 deletions(-) diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 926472621..257189731 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -146,7 +146,7 @@ load( "cxx_platform_supported", "cxx_use_shlib_intfs", ) -load(":cxx_toolchain_types.bzl", "is_bitcode_format") +load(":cxx_toolchain_types.bzl", "ShlibInterfacesMode", "is_bitcode_format") load( ":cxx_types.bzl", "CxxRuleConstructorParams", # @unused Used as a type @@ -1421,7 +1421,8 @@ def _shared_library( # If shared library interfaces are enabled, link that and use it as # the shared lib that dependents will link against. 
if cxx_use_shlib_intfs(ctx): - if not linker_info.produce_interface_from_stub_shared_library: + mode = get_cxx_toolchain_info(ctx).linker_info.shlib_interfaces + if mode == ShlibInterfacesMode("stub_from_library"): shlib_for_interface = exported_shlib elif not gnu_use_link_groups: # TODO(agallagher): There's a bug in shlib intfs interacting with link diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl index 8eca13ea8..dc2fbf894 100644 --- a/prelude/cxx/cxx_toolchain.bzl +++ b/prelude/cxx/cxx_toolchain.bzl @@ -123,7 +123,6 @@ def cxx_toolchain_impl(ctx): static_pic_dep_runtime_ld_flags = ctx.attrs.static_pic_dep_runtime_ld_flags, type = ctx.attrs.linker_type, use_archiver_flags = ctx.attrs.use_archiver_flags, - produce_interface_from_stub_shared_library = ctx.attrs.produce_interface_from_stub_shared_library, ) utilities_info = BinaryUtilitiesInfo( @@ -212,7 +211,6 @@ def cxx_toolchain_extra_attributes(is_toolchain_rule): # Used for resolving any 'platform_*' attributes. 
"platform_name": attrs.option(attrs.string(), default = None), "private_headers_symlinks_enabled": attrs.bool(default = True), - "produce_interface_from_stub_shared_library": attrs.bool(default = False), "public_headers_symlinks_enabled": attrs.bool(default = True), "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), "rc_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index ffd05a17e..de10a5b39 100644 --- a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -9,7 +9,7 @@ load("@prelude//cxx:debug.bzl", "SplitDebugMode") LinkerType = ["gnu", "darwin", "windows", "wasm"] -ShlibInterfacesMode = enum("disabled", "enabled", "defined_only") +ShlibInterfacesMode = enum("disabled", "enabled", "defined_only", "stub_from_library") # TODO(T110378149): Consider whether it makes sense to move these things to # configurations/constraints rather than part of the toolchain. 
@@ -65,7 +65,6 @@ LinkerInfo = provider( "use_archiver_flags": provider_field(typing.Any, default = None), "force_full_hybrid_if_capable": provider_field(typing.Any, default = None), "is_pdb_generated": provider_field(typing.Any, default = None), # bool - "produce_interface_from_stub_shared_library": provider_field(typing.Any, default = None), # bool }, ) diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl index b7f6111a1..f8dbb5bff 100644 --- a/prelude/cxx/user/cxx_toolchain_override.bzl +++ b/prelude/cxx/user/cxx_toolchain_override.bzl @@ -116,7 +116,6 @@ def _cxx_toolchain_override(ctx): use_archiver_flags = value_or(ctx.attrs.use_archiver_flags, base_linker_info.use_archiver_flags), force_full_hybrid_if_capable = value_or(ctx.attrs.force_full_hybrid_if_capable, base_linker_info.force_full_hybrid_if_capable), is_pdb_generated = pdb_expected, - produce_interface_from_stub_shared_library = value_or(ctx.attrs.produce_interface_from_stub_shared_library, base_linker_info.produce_interface_from_stub_shared_library), ) base_binary_utilities_info = base_toolchain.binary_utilities_info @@ -212,7 +211,6 @@ def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): "platform_deps_aliases": attrs.option(attrs.list(attrs.string()), default = None), "platform_name": attrs.option(attrs.string(), default = None), "post_linker_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "produce_interface_from_stub_shared_library": attrs.option(attrs.bool(), default = None), "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), "sanitizer_runtime_enabled": attrs.bool(default = False), "sanitizer_runtime_files": attrs.option(attrs.set(attrs.dep(), sorted = True, default = []), default = None), # Use `attrs.dep()` as it's not a tool, always propagate target platform diff --git a/prelude/toolchains/cxx.bzl b/prelude/toolchains/cxx.bzl index d1b1dc159..3bbbeab42 100644 --- 
a/prelude/toolchains/cxx.bzl +++ b/prelude/toolchains/cxx.bzl @@ -118,7 +118,6 @@ def _system_cxx_toolchain_impl(ctx: AnalysisContext): static_library_extension = static_library_extension, force_full_hybrid_if_capable = False, is_pdb_generated = is_pdb_generated(linker_type, ctx.attrs.link_flags), - produce_interface_from_stub_shared_library = True, link_ordering = ctx.attrs.link_ordering, ), bolt_enabled = False, diff --git a/prelude/toolchains/cxx/zig/defs.bzl b/prelude/toolchains/cxx/zig/defs.bzl index 4b8a7ffd8..bc6bfcccb 100644 --- a/prelude/toolchains/cxx/zig/defs.bzl +++ b/prelude/toolchains/cxx/zig/defs.bzl @@ -376,7 +376,6 @@ def _cxx_zig_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: #lto_mode = None, # TODO support LTO object_file_extension = "o", #mk_shlib_intf = None, # not needed if shlib_interfaces = "disabled" - produce_interface_from_stub_shared_library = True, shlib_interfaces = ShlibInterfacesMode("disabled"), shared_dep_runtime_ld_flags = ctx.attrs.shared_dep_runtime_ld_flags, shared_library_name_default_prefix = "lib", From b74b3bbbbfd5b7e6d6dd82d04ef372ee0d8090f9 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Thu, 21 Mar 2024 08:18:48 -0700 Subject: [PATCH 0529/1133] add tbd output Summary: Add a new `stub_from_headers` shared library interface mode to control tbd output for libraries. When set, generate tbd files as part of Swift compilation, and add a new `tbd` subtarget. 
Reviewed By: maxovtsin Differential Revision: D55138851 fbshipit-source-id: fe87fa259901f1f1c3acb678b72c49654670afca --- prelude/apple/apple_library.bzl | 5 +++++ prelude/apple/swift/swift_compilation.bzl | 25 ++++++++++++++++++++--- prelude/cxx/cxx_library.bzl | 3 +++ prelude/cxx/cxx_toolchain_types.bzl | 2 +- 4 files changed, 31 insertions(+), 4 deletions(-) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index 2a3232e89..94fed9f2e 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -276,6 +276,11 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte default_output = swift_compile.output_map_artifact if swift_compile else None, ), ], + "swift-tbd": [ + DefaultInfo( + default_output = swift_compile.tbd if swift_compile else None, + ), + ], "swiftmodule": [ DefaultInfo( default_output = swift_compile.swiftmodule if swift_compile else None, diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 695aa78f8..ae845beef 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -21,6 +21,9 @@ load( "@prelude//cxx:compile.bzl", "CxxSrcWithFlags", # @unused Used as a type ) +load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") +load("@prelude//cxx:cxx_library_utility.bzl", "cxx_use_shlib_intfs") +load("@prelude//cxx:cxx_toolchain_types.bzl", "ShlibInterfacesMode") load("@prelude//cxx:headers.bzl", "CHeader") load( "@prelude//cxx:link_groups.bzl", @@ -113,6 +116,8 @@ SwiftCompilationOutput = record( compilation_database = field(SwiftCompilationDatabase), # An artifact that represent the Swift module map for this target. output_map_artifact = field([Artifact, None]), + # An optional artifact of the partial tbd file emitted for this module. 
+ tbd = field([Artifact, None]), ) SwiftDebugInfo = record( @@ -255,12 +260,17 @@ def compile_swift( output_header = ctx.actions.declare_output(module_name + "-Swift.h") output_swiftmodule = ctx.actions.declare_output(module_name + SWIFTMODULE_EXTENSION) + output_tbd = None + + if cxx_use_shlib_intfs(ctx) and \ + get_cxx_toolchain_info(ctx).linker_info.shlib_interfaces == ShlibInterfacesMode("stub_from_headers"): + output_tbd = ctx.actions.declare_output(module_name + ".tbd") if toolchain.can_toolchain_emit_obj_c_header_textually: - _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, output_header) + _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, output_header, output_tbd) else: unprocessed_header = ctx.actions.declare_output(module_name + "-SwiftUnprocessed.h") - _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, unprocessed_header) + _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, unprocessed_header, output_tbd) _perform_swift_postprocessing(ctx, module_name, unprocessed_header, output_header) object_output = _compile_object(ctx, toolchain, shared_flags, srcs) @@ -303,6 +313,7 @@ def compile_swift( swift_debug_info = extract_and_merge_swift_debug_infos(ctx, deps_providers, [output_swiftmodule]), clang_debug_info = extract_and_merge_clang_debug_infos(ctx, deps_providers), compilation_database = _create_compilation_database(ctx, srcs, object_output.argsfiles.absolute[SWIFT_EXTENSION]), + tbd = output_tbd, ), swift_interface_info) # Swift headers are postprocessed to make them compatible with Objective-C @@ -339,7 +350,8 @@ def _compile_swiftmodule( shared_flags: cmd_args, srcs: list[CxxSrcWithFlags], output_swiftmodule: Artifact, - output_header: Artifact) -> CompileArgsfiles: + output_header: Artifact, + output_tbd: [Artifact, None]) -> CompileArgsfiles: argfile_cmd = cmd_args(shared_flags) argfile_cmd.add([ "-emit-module", @@ -366,6 +378,13 @@ def 
_compile_swiftmodule( "-wmo", ]) + if output_tbd != None: + cmd.add([ + "-emit-tbd", + "-emit-tbd-path", + output_tbd.as_output(), + ]) + return _compile_with_argsfile(ctx, "swiftmodule_compile", SWIFTMODULE_EXTENSION, argfile_cmd, srcs, cmd, toolchain) def _compile_object( diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 257189731..1a8bbfaf2 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -1424,6 +1424,9 @@ def _shared_library( mode = get_cxx_toolchain_info(ctx).linker_info.shlib_interfaces if mode == ShlibInterfacesMode("stub_from_library"): shlib_for_interface = exported_shlib + elif mode == ShlibInterfacesMode("stub_from_headers"): + # TODO: collect tbd output from providers and merge + shlib_for_interface = None elif not gnu_use_link_groups: # TODO(agallagher): There's a bug in shlib intfs interacting with link # groups, where we don't include the symbols we're meant to export from diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/prelude/cxx/cxx_toolchain_types.bzl index de10a5b39..906a23da5 100644 --- a/prelude/cxx/cxx_toolchain_types.bzl +++ b/prelude/cxx/cxx_toolchain_types.bzl @@ -9,7 +9,7 @@ load("@prelude//cxx:debug.bzl", "SplitDebugMode") LinkerType = ["gnu", "darwin", "windows", "wasm"] -ShlibInterfacesMode = enum("disabled", "enabled", "defined_only", "stub_from_library") +ShlibInterfacesMode = enum("disabled", "enabled", "defined_only", "stub_from_library", "stub_from_headers") # TODO(T110378149): Consider whether it makes sense to move these things to # configurations/constraints rather than part of the toolchain. From 94792fea81d6757ecbeed35e74cbb7729ffd4ad8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andreas=20L=C3=B6scher?= Date: Thu, 21 Mar 2024 12:05:10 -0700 Subject: [PATCH 0530/1133] add unbundled escript mode Summary: This adds an unbundled escript mode, that doesn't build a bundled .escript file. Instead it creates a release-like lib/ directory structure.
It additionally creates an escript trampoline that sets the emu args, and calls the actual main module's main function. Reviewed By: acw224 Differential Revision: D55164320 fbshipit-source-id: 6aed405cc641b56d57ac32855d28bf37a436b9a5 --- prelude/decls/erlang_rules.bzl | 3 + prelude/erlang/erlang_escript.bzl | 143 +++++++++++++----- prelude/erlang/erlang_info.bzl | 1 + prelude/erlang/erlang_release.bzl | 9 +- prelude/erlang/erlang_toolchain.bzl | 6 + prelude/erlang/toolchain/BUCK.v2 | 1 + .../erlang/toolchain/escript_trampoline.sh | 11 ++ 7 files changed, 136 insertions(+), 38 deletions(-) create mode 100755 prelude/erlang/toolchain/escript_trampoline.sh diff --git a/prelude/decls/erlang_rules.bzl b/prelude/decls/erlang_rules.bzl index dbee8c448..01d726411 100644 --- a/prelude/decls/erlang_rules.bzl +++ b/prelude/decls/erlang_rules.bzl @@ -160,6 +160,9 @@ rules_attributes = { "_toolchain": attrs.toolchain_dep(default = "toolchains//:erlang-default"), }, "erlang_escript": { + "bundled": attrs.bool(default = True, doc = """ + Setting bundled to `False` generates a folder structure and escript trampoline instead of an archive. + """), "deps": attrs.list(attrs.dep(), doc = """ List of Erlang applications that are bundled in the escript. This includes all transitive dependencies as well.
"""), diff --git a/prelude/erlang/erlang_escript.bzl b/prelude/erlang/erlang_escript.bzl index ef3cf834d..da1ec05d5 100644 --- a/prelude/erlang/erlang_escript.bzl +++ b/prelude/erlang/erlang_escript.bzl @@ -7,8 +7,9 @@ load("@prelude//:paths.bzl", "paths") load(":erlang_build.bzl", "erlang_build") -load(":erlang_dependencies.bzl", "check_dependencies", "flatten_dependencies") +load(":erlang_dependencies.bzl", "ErlAppDependencies", "check_dependencies", "flatten_dependencies") load(":erlang_info.bzl", "ErlangAppInfo") +load(":erlang_release.bzl", "build_lib_dir") load( ":erlang_toolchain.bzl", "Toolchain", # @unused Used as type @@ -17,43 +18,56 @@ load( ) load(":erlang_utils.bzl", "action_identifier", "to_term_args") -def create_escript( - ctx: AnalysisContext, - spec_file: Artifact, - toolchain: Toolchain, - files: list[Artifact], - output: Artifact, - escript_name: str) -> None: - """ build the escript with the escript builder tool - """ - script = toolchain.escript_builder - - escript_build_cmd = cmd_args( - [ - toolchain.otp_binaries.escript, - script, - spec_file, - ], - ) - escript_build_cmd.hidden(output.as_output()) - escript_build_cmd.hidden(files) - erlang_build.utils.run_with_env( - ctx, - toolchain, - escript_build_cmd, - category = "escript", - identifier = action_identifier(toolchain, escript_name), - ) - return None - def erlang_escript_impl(ctx: AnalysisContext) -> list[Provider]: # select the correct tools from the toolchain - toolchain_name = get_primary(ctx) toolchain = select_toolchains(ctx)[get_primary(ctx)] # collect all dependencies dependencies = flatten_dependencies(ctx, check_dependencies(ctx.attrs.deps, [ErlangAppInfo])) + if ctx.attrs.bundled: + return _bundled_escript_impl(ctx, dependencies, toolchain) + else: + return _unbundles_escript_impl(ctx, dependencies, toolchain) + +def _unbundles_escript_impl(ctx: AnalysisContext, dependencies: ErlAppDependencies, toolchain: Toolchain) -> list[Provider]: + if ctx.attrs.resources: + 
fail("resources are not supported with unbundled escripts, add them to an applications priv/ directory instead") + + escript_name = _escript_name(ctx) + + lib_dir = build_lib_dir( + ctx, + toolchain, + escript_name, + dependencies, + ) + + escript_trampoline = build_escript_trampoline(ctx, toolchain) + + trampoline = { + "run.escript": escript_trampoline, + } + + all_outputs = {} + for outputs in [lib_dir, trampoline]: + all_outputs.update(outputs) + + output = ctx.actions.symlinked_dir( + escript_name, + all_outputs, + ) + + cmd = cmd_args([ + toolchain.escript_trampoline, + output, + toolchain.otp_binaries.escript, + ]) + + return [DefaultInfo(default_output = output), RunInfo(cmd)] + +def _bundled_escript_impl(ctx: AnalysisContext, dependencies: ErlAppDependencies, toolchain: Toolchain) -> list[Provider]: + toolchain_name = get_primary(ctx) artifacts = {} for dep in dependencies.values(): @@ -81,10 +95,7 @@ def erlang_escript_impl(ctx: AnalysisContext) -> list[Provider]: fail("multiple artifacts defined for path %s", (artifact.short_path)) artifacts[artifact.short_path] = artifact - if ctx.attrs.script_name: - escript_name = ctx.attrs.script_name - else: - escript_name = ctx.attrs.name + ".escript" + escript_name = _escript_name(ctx) output = ctx.actions.declare_output(escript_name) args = ctx.attrs.emu_args @@ -116,6 +127,68 @@ def erlang_escript_impl(ctx: AnalysisContext) -> list[Provider]: RunInfo(escript_cmd), ] +def create_escript( + ctx: AnalysisContext, + spec_file: Artifact, + toolchain: Toolchain, + files: list[Artifact], + output: Artifact, + escript_name: str) -> None: + """ build the escript with the escript builder tool + """ + script = toolchain.escript_builder + + escript_build_cmd = cmd_args( + [ + toolchain.otp_binaries.escript, + script, + spec_file, + ], + ) + escript_build_cmd.hidden(output.as_output()) + escript_build_cmd.hidden(files) + erlang_build.utils.run_with_env( + ctx, + toolchain, + escript_build_cmd, + category = "escript", + 
identifier = action_identifier(toolchain, escript_name), + ) + return None + +def _escript_name(ctx: AnalysisContext) -> str: + if ctx.attrs.script_name: + return ctx.attrs.script_name + else: + return ctx.attrs.name + ".escript" + +def _main_module(ctx: AnalysisContext) -> str: + if ctx.attrs.main_module: + return ctx.attrs.main_module + else: + return ctx.attrs.name + +def build_escript_trampoline(ctx: AnalysisContext, toolchain) -> Artifact: + data = cmd_args() + + data.add("#!/usr/bin/env escript") + data.add("%% -*- erlang -*-") + data.add("%%! {}".format(" ".join(ctx.attrs.emu_args))) + + data.add("-module('{}').".format(_escript_name(ctx))) + data.add("-export([main/1]).") + data.add("main(Args) ->") + data.add(" ScriptDir = filename:dirname(escript:script_name()),") + data.add(' EBinDirs = filelib:wildcard(filename:join([ScriptDir, "lib", "*", "ebin"])),') + data.add(" code:add_paths(EBinDirs),") + data.add(" {}:main(Args).".format(_main_module(ctx))) + + return ctx.actions.write( + paths.join(erlang_build.utils.build_dir(toolchain), "run.escript"), + data, + is_executable = True, + ) + def _ebin_path(file: Artifact, app_name: str) -> str: return paths.join(app_name, "ebin", file.basename) diff --git a/prelude/erlang/erlang_info.bzl b/prelude/erlang/erlang_info.bzl index eb4c3d085..f1fab0250 100644 --- a/prelude/erlang/erlang_info.bzl +++ b/prelude/erlang/erlang_info.bzl @@ -91,6 +91,7 @@ ErlangToolchainInfo = provider( "dependency_finalizer": provider_field(typing.Any, default = None), # trampoline rerouting stdout to stderr "erlc_trampoline": provider_field(typing.Any, default = None), + "escript_trampoline": provider_field(typing.Any, default = None), # name to parse_transform artifacts mapping for core parse_transforms (that are always used) and # user defines ones "core_parse_transforms": provider_field(typing.Any, default = None), diff --git a/prelude/erlang/erlang_release.bzl b/prelude/erlang/erlang_release.bzl index 1da7d2c3e..864c7813c 100644 --- 
a/prelude/erlang/erlang_release.bzl +++ b/prelude/erlang/erlang_release.bzl @@ -75,7 +75,7 @@ def _build_primary_release(ctx: AnalysisContext, apps: ErlAppDependencies) -> li def _build_release(ctx: AnalysisContext, toolchain: Toolchain, apps: ErlAppDependencies) -> dict[str, Artifact]: # OTP base structure - lib_dir = _build_lib_dir(ctx, toolchain, apps) + lib_dir = build_lib_dir(ctx, toolchain, _relname(ctx), apps) boot_scripts = _build_boot_script(ctx, toolchain, lib_dir["lib"]) # release specific variables in bin/release_variables @@ -100,12 +100,15 @@ def _build_release(ctx: AnalysisContext, toolchain: Toolchain, apps: ErlAppDepen return all_outputs -def _build_lib_dir(ctx: AnalysisContext, toolchain: Toolchain, all_apps: ErlAppDependencies) -> dict[str, Artifact]: +def build_lib_dir( + ctx: AnalysisContext, + toolchain: Toolchain, + release_name: str, + all_apps: ErlAppDependencies) -> dict[str, Artifact]: """Build lib dir according to OTP specifications. .. seealso:: `OTP Design Principles Release Structure `_ """ - release_name = _relname(ctx) build_dir = erlang_build.utils.build_dir(toolchain) link_spec = { diff --git a/prelude/erlang/erlang_toolchain.bzl b/prelude/erlang/erlang_toolchain.bzl index 94106e769..9ae28bca6 100644 --- a/prelude/erlang/erlang_toolchain.bzl +++ b/prelude/erlang/erlang_toolchain.bzl @@ -41,6 +41,7 @@ Toolchain = record( dependency_analyzer = field(Artifact), dependency_finalizer = field(Artifact), erlc_trampoline = field(Artifact), + escript_trampoline = field(Artifact), escript_builder = field(Artifact), otp_binaries = field(Tools), release_variables_builder = field(Artifact), @@ -65,6 +66,7 @@ ToolchainUtillInfo = provider( "dependency_finalizer": provider_field(typing.Any, default = None), "edoc": provider_field(typing.Any, default = None), "erlc_trampoline": provider_field(typing.Any, default = None), + "escript_trampoline": provider_field(typing.Any, default = None), "escript_builder": provider_field(typing.Any, default = 
None), "release_variables_builder": provider_field(typing.Any, default = None), "include_erts": provider_field(typing.Any, default = None), @@ -97,6 +99,7 @@ def _multi_version_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: dependency_finalizer = toolchain_info.dependency_finalizer, erl_opts = toolchain_info.erl_opts, erlc_trampoline = toolchain_info.erlc_trampoline, + escript_trampoline = toolchain_info.escript_trampoline, escript_builder = toolchain_info.escript_builder, otp_binaries = toolchain_info.otp_binaries, release_variables_builder = toolchain_info.release_variables_builder, @@ -191,6 +194,7 @@ def _config_erlang_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: env = ctx.attrs.env, emu_flags = emu_flags, erlc_trampoline = utils.erlc_trampoline, + escript_trampoline = utils.escript_trampoline, escript_builder = utils.escript_builder, otp_binaries = otp_binaries, release_variables_builder = utils.release_variables_builder, @@ -362,6 +366,7 @@ def _toolchain_utils(ctx: AnalysisContext) -> list[Provider]: dependency_finalizer = ctx.attrs.dependency_finalizer, edoc = ctx.attrs.edoc, erlc_trampoline = ctx.attrs.erlc_trampoline, + escript_trampoline = ctx.attrs.escript_trampoline, escript_builder = ctx.attrs.escript_builder, release_variables_builder = ctx.attrs.release_variables_builder, include_erts = ctx.attrs.include_erts, @@ -380,6 +385,7 @@ toolchain_utilities = rule( "edoc": attrs.source(), "erlc_trampoline": attrs.source(), "escript_builder": attrs.source(), + "escript_trampoline": attrs.source(), "include_erts": attrs.source(), "release_variables_builder": attrs.source(), "utility_modules": attrs.list(attrs.source()), diff --git a/prelude/erlang/toolchain/BUCK.v2 b/prelude/erlang/toolchain/BUCK.v2 index 3e0d26ce0..82e381c8c 100644 --- a/prelude/erlang/toolchain/BUCK.v2 +++ b/prelude/erlang/toolchain/BUCK.v2 @@ -27,6 +27,7 @@ toolchain_utilities( edoc = "edoc_cli.escript", erlc_trampoline = "erlc_trampoline.sh", escript_builder = 
"escript_builder.escript", + escript_trampoline = "escript_trampoline.sh", include_erts = "include_erts.escript", release_variables_builder = "release_variables_builder.escript", utility_modules = [ diff --git a/prelude/erlang/toolchain/escript_trampoline.sh b/prelude/erlang/toolchain/escript_trampoline.sh new file mode 100755 index 000000000..dbf39844b --- /dev/null +++ b/prelude/erlang/toolchain/escript_trampoline.sh @@ -0,0 +1,11 @@ +#! /usr/bin/env bash +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +cmd=("$2" "$1/run.escript" "${@:3}") + +"${cmd[@]}" From 45e75dbe1c4fb7c3ce5ec2b995c1f1767e483c34 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Thu, 21 Mar 2024 14:02:35 -0700 Subject: [PATCH 0531/1133] rpath shouldn't depend on package_style Summary: There are a few problems with this code: 1. The inplace RPATH doesn't apply to binaries built with `[inplace]` 2. They hardcode implementation details like the `#link-tree` path or `runtime/lib` 3. They don't seem to work properly :) Reviewed By: fried, itamaro, aleivag, cxxxs Differential Revision: D55157385 fbshipit-source-id: 69d747c4c69ae33aa5fcb3a8f00176ef4de24638 --- prelude/python/python_binary.bzl | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 3e3250ee3..f085fc455 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -590,13 +590,8 @@ def _convert_python_library_to_executable( # Set rpaths to find 1) the shared libs dir and the 2) runtime libs dir. 
rpath_ref = get_rpath_origin(get_cxx_toolchain_info(ctx).linker_info.type) rpath_ldflag = "-Wl,-rpath,{}/".format(rpath_ref) - if package_style == PackageStyle("standalone"): - extra_binary_link_flags.append(rpath_ldflag + "../..") - extra_binary_link_flags.append(rpath_ldflag + "../lib") - else: - rpath_ldflag_prefix = rpath_ldflag + "{}#link-tree".format(ctx.attrs.name) - extra_binary_link_flags.append(rpath_ldflag_prefix + "/runtime/lib") - extra_binary_link_flags.append(rpath_ldflag_prefix) + extra_binary_link_flags.append(rpath_ldflag + "../..") + extra_binary_link_flags.append(rpath_ldflag + "../lib") impl_params = CxxRuleConstructorParams( rule_type = "python_binary", From ccd404f0dc3b22d078d69dce7d448a049ba2d2dc Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Thu, 21 Mar 2024 14:05:06 -0700 Subject: [PATCH 0532/1133] Move the universal binary constraint from build_mode to cpu Summary: The `universal` constraint used for universal binaries is not a build mode, but more a cpu constraint. Move it there. 
Reviewed By: milend Differential Revision: D55146158 fbshipit-source-id: e949894e903da3d734596f4ae72bf3744169b068 --- prelude/apple/user/apple_resource_transition.bzl | 4 ++-- prelude/apple/user/cpu_split_transition.bzl | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/prelude/apple/user/apple_resource_transition.bzl b/prelude/apple/user/apple_resource_transition.bzl index 99464c6a5..5d28b1149 100644 --- a/prelude/apple/user/apple_resource_transition.bzl +++ b/prelude/apple/user/apple_resource_transition.bzl @@ -32,8 +32,8 @@ apple_resource_transition = transition( impl = _impl, refs = { "cpu": "config//cpu/constraints:cpu", - "universal": "config//build_mode/apple/constraints:universal", - "universal_enabled": "config//build_mode/apple/constraints:universal-enabled", + "universal": "config//cpu/constraints:universal", + "universal_enabled": "config//cpu/constraints:universal-enabled", }, attrs = [ "skip_universal_resource_dedupe", diff --git a/prelude/apple/user/cpu_split_transition.bzl b/prelude/apple/user/cpu_split_transition.bzl index 4beda5002..e11ddafa0 100644 --- a/prelude/apple/user/cpu_split_transition.bzl +++ b/prelude/apple/user/cpu_split_transition.bzl @@ -110,8 +110,8 @@ cpu_split_transition = transition( "macos": "config//os/constraints:macos", "os": "config//os/constraints:os", "sdk": "config//os/sdk/apple/constraints:_", - "universal": "config//build_mode/apple/constraints:universal", - "universal_enabled": "config//build_mode/apple/constraints:universal-enabled", + "universal": "config//cpu/constraints:universal", + "universal_enabled": "config//cpu/constraints:universal-enabled", "watchos": "config//os/constraints:watchos", "watchos_device_sdk": "config//os/sdk/apple/constraints:watchos", "watchos_simulator_sdk": "config//os/sdk/apple/constraints:watchsimulator", From 85dbde32a56a3fcac7fe5b2d6fd7333a25729e9b Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Thu, 21 Mar 2024 14:05:06 -0700 Subject: [PATCH 0533/1133] Define 
arches and sdk as constants Summary: We treat these like constants, change the typing to reflect that. Reviewed By: rmaz Differential Revision: D55152121 fbshipit-source-id: 0fbffa404129a821217fcdbe2f8aad174bca3bd0 --- prelude/platforms/apple/arch.bzl | 10 ++++++++-- prelude/platforms/apple/sdk.bzl | 13 +++++++++++-- 2 files changed, 19 insertions(+), 4 deletions(-) diff --git a/prelude/platforms/apple/arch.bzl b/prelude/platforms/apple/arch.bzl index f6258b04b..e351bed4e 100644 --- a/prelude/platforms/apple/arch.bzl +++ b/prelude/platforms/apple/arch.bzl @@ -5,6 +5,12 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -AppleArches = ["i386", "x86_64", "arm64", "arm64_32", "armv7k"] +_APPLE_ARCHES = [ + "arm64", + "arm64_32", + "armv7k", + "i386", + "x86_64", +] -AppleArch = enum(*AppleArches) +AppleArch = enum(*_APPLE_ARCHES) diff --git a/prelude/platforms/apple/sdk.bzl b/prelude/platforms/apple/sdk.bzl index 857896304..e03573ee0 100644 --- a/prelude/platforms/apple/sdk.bzl +++ b/prelude/platforms/apple/sdk.bzl @@ -5,6 +5,15 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -AppleSdks = ["iphoneos", "iphonesimulator", "maccatalyst", "macosx", "visionos", "visionsimulator", "watchos", "watchsimulator"] +_APPLE_SDKS = [ + "iphoneos", + "iphonesimulator", + "maccatalyst", + "macosx", + "visionos", + "visionsimulator", + "watchos", + "watchsimulator", +] -AppleSdk = enum(*AppleSdks) +AppleSdk = enum(*_APPLE_SDKS) From ffdda90fb7f68fdb9018ad4e5e3fbf84742bcaac Mon Sep 17 00:00:00 2001 From: Conner Nilsen Date: Thu, 21 Mar 2024 14:21:09 -0700 Subject: [PATCH 0534/1133] Remigrate nested configurations [1/5] Summary: (This diff retries the configurationless re-migration after rerunning due to CI errors) When doing the Pyre Configurationless migration, I didn't realize that .pyre_configuration.local files could be nested, and how that would affect the migration. 
When the migration actually happened, and since we were processing configurations alphabetically, parent configurations got migrated first. These parent configurations often have the nested projects in the ignore all errors list, so those headers got applied instead of the actual headers that they should be using. This diff fixes the headers for a number of projects and applies suppressions for any type errors that may have slipped through since the project got migrated. ignore-nocommit Reviewed By: grievejia Differential Revision: D55148328 fbshipit-source-id: 5a618b747c27246434020fb140dc851543a16fb9 --- prelude/python/tools/sourcedb_merger/inputs.py | 3 ++- prelude/python/tools/sourcedb_merger/legacy_merge.py | 3 ++- prelude/python/tools/sourcedb_merger/legacy_outputs.py | 3 ++- prelude/python/tools/sourcedb_merger/merge.py | 3 ++- prelude/python/tools/sourcedb_merger/outputs.py | 3 ++- prelude/python/tools/sourcedb_merger/tests/__init__.py | 3 ++- prelude/python/tools/sourcedb_merger/tests/inputs_test.py | 3 ++- .../python/tools/sourcedb_merger/tests/legacy_output_test.py | 3 ++- prelude/python/tools/sourcedb_merger/tests/outputs_test.py | 3 ++- 9 files changed, 18 insertions(+), 9 deletions(-) diff --git a/prelude/python/tools/sourcedb_merger/inputs.py b/prelude/python/tools/sourcedb_merger/inputs.py index db2d4ed24..2f298a258 100644 --- a/prelude/python/tools/sourcedb_merger/inputs.py +++ b/prelude/python/tools/sourcedb_merger/inputs.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-# pyre-ignore-all-errors +# pyre-strict + import dataclasses import json diff --git a/prelude/python/tools/sourcedb_merger/legacy_merge.py b/prelude/python/tools/sourcedb_merger/legacy_merge.py index b6d977d11..0eb16221f 100644 --- a/prelude/python/tools/sourcedb_merger/legacy_merge.py +++ b/prelude/python/tools/sourcedb_merger/legacy_merge.py @@ -6,7 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + import argparse import pathlib diff --git a/prelude/python/tools/sourcedb_merger/legacy_outputs.py b/prelude/python/tools/sourcedb_merger/legacy_outputs.py index 89ecd93fe..ab225f9b0 100644 --- a/prelude/python/tools/sourcedb_merger/legacy_outputs.py +++ b/prelude/python/tools/sourcedb_merger/legacy_outputs.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + import dataclasses import json diff --git a/prelude/python/tools/sourcedb_merger/merge.py b/prelude/python/tools/sourcedb_merger/merge.py index b21a00a9e..f4dd4845b 100644 --- a/prelude/python/tools/sourcedb_merger/merge.py +++ b/prelude/python/tools/sourcedb_merger/merge.py @@ -6,7 +6,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + import argparse import pathlib diff --git a/prelude/python/tools/sourcedb_merger/outputs.py b/prelude/python/tools/sourcedb_merger/outputs.py index 40e6ef688..2565f75c5 100644 --- a/prelude/python/tools/sourcedb_merger/outputs.py +++ b/prelude/python/tools/sourcedb_merger/outputs.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-# pyre-ignore-all-errors +# pyre-strict + import dataclasses import json diff --git a/prelude/python/tools/sourcedb_merger/tests/__init__.py b/prelude/python/tools/sourcedb_merger/tests/__init__.py index 858c58d3c..1e666ca2a 100644 --- a/prelude/python/tools/sourcedb_merger/tests/__init__.py +++ b/prelude/python/tools/sourcedb_merger/tests/__init__.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + from .inputs_test import * # noqa from .legacy_output_test import * # noqa diff --git a/prelude/python/tools/sourcedb_merger/tests/inputs_test.py b/prelude/python/tools/sourcedb_merger/tests/inputs_test.py index da77a5463..75a8ce7e3 100644 --- a/prelude/python/tools/sourcedb_merger/tests/inputs_test.py +++ b/prelude/python/tools/sourcedb_merger/tests/inputs_test.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + import contextlib import json diff --git a/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py b/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py index 4457ac417..3d587cbb8 100644 --- a/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py +++ b/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-# pyre-ignore-all-errors +# pyre-strict + import unittest from typing import Mapping diff --git a/prelude/python/tools/sourcedb_merger/tests/outputs_test.py b/prelude/python/tools/sourcedb_merger/tests/outputs_test.py index 7b3db01c2..cb147a3db 100644 --- a/prelude/python/tools/sourcedb_merger/tests/outputs_test.py +++ b/prelude/python/tools/sourcedb_merger/tests/outputs_test.py @@ -5,7 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -# pyre-ignore-all-errors +# pyre-strict + import unittest From 7437a309f54b3da0f3a85a5a9765a08c1b18cb05 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Thu, 21 Mar 2024 17:20:30 -0700 Subject: [PATCH 0535/1133] No need for `-Cprefer-dynamic` with explicit sysroot deps Summary: Reference: Rustc needs to be able to load metadata about direct as well as transitive dependencies. For direct dependencies, these are passed by `--extern` with a path to the specific artifact to use, so there is no ambiguity. For transitive dependencies, they are passed in bulk by zero or more `-Ldependency` flags, which refer to directories containing artifacts. Within those directories, information about a specific crate might be available from more than one artifact, such as a dylib and rlib for the same crate. The behavior of `-Cprefer-dynamic=no` (the default) is that when a transitive dependency exists as both rlib and dylib, metadata is loaded from the rlib. If some dependencies are available in dylib but not rlib, the dylib is used for those. The behavior of `-Cprefer-dynamic=yes` is that when a transitive dependency exists as both rlib and dylib, instead the dylib is used. Crucially, the ambiguity on whether to use rlib or dylib for a particular transitive dependency only occurs if the rlib and dylib both describe the same crate i.e. contain the same crate hash. Buck never produces an rlib and dylib containing the same crate hash. 
Cargo does, because when you specify `[lib] crate-type = ["rlib", "dylib"]` in Cargo.toml, both artifacts get built using a single rustc invocation with `--crate-type=rlib --crate-type=dylib`. Whereas Buck only ever produces a single crate type from a single rustc invocation. https://www.internalfb.com/code/fbsource/[c5e44abca21d3aa361e512b968a5d96e94b7788d]/fbcode/buck2/prelude/rust/build.bzl?lines=947 There would be one invocation with `--crate-type=rlib` and a different one with `--crate-type=dylib`. And the `--crate-type` argument is factored into the crate hash, so the two artifacts will get different hashes. **Cargo:** ``` $ rustc /dev/null --edition=2021 --crate-name=repro --crate-type=rlib --crate-type=dylib $ rustc -Zls=root librepro.rlib | head -3 Crate info: name repro hash 4277f4de69076fd163e90406835f7518 stable_crate_id StableCrateId(18217904404856821084) $ rustc -Zls=root librepro.so | head -3 Crate info: name repro hash 4277f4de69076fd163e90406835f7518 stable_crate_id StableCrateId(18217904404856821084) ``` **Buck:** ``` $ rustc /dev/null --edition=2021 --crate-name=repro --crate-type=rlib $ rustc /dev/null --edition=2021 --crate-name=repro --crate-type=dylib $ rustc -Zls=root librepro.rlib | head -3 Crate info: name repro hash 64886f5a31d5e5b679ce5de8a8fe3986 stable_crate_id StableCrateId(18217904404856821084) $ rustc -Zls=root librepro.so | head -3 Crate info: name repro hash 03b1fa7a919af62730e34f648ab9b247 stable_crate_id StableCrateId(18217904404856821084) ``` Reviewed By: JakobDegen Differential Revision: D55179027 fbshipit-source-id: d11ba48bb080c38fa929e775e8bec81daf0670c6 --- prelude/rust/build.bzl | 42 +++++++++++++++++++++++++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index d2fb71ec7..04b297f02 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -899,6 +899,46 @@ def _compute_common_args( else: crate_name_arg = cmd_args("--crate-name=", 
crate.simple, delimiter = "") + # The `-Cprefer-dynamic` flag controls rustc's choice of artifacts for + # transitive dependencies, both for loading metadata and linking them. + # Direct dependencies are given to rustc one-by-one using `--extern` with a + # path to a specific artifact, so there is never ambiguity what artifact to + # use for a direct dependency. But transitive dependencies are passed in + # bulk via zero or more `-Ldependency` flags, which are directories + # containing artifacts. Within those directories, information about a + # specific crate might be available from more than one artifact, such as a + # dylib and rlib for the same crate. + # + # With `-Cprefer-dynamic=no` (the default), when a transitive dependency + # exists as both rlib and dylib, metadata is loaded from the rlib. If some + # dependencies are available in dylib but not rlib, the dylib is used for + # those. With `-Cprefer-dynamic=yes`, when a transitive dependency exists as + # both rlib and dylib, instead the dylib is used. + # + # The ambiguity over whether to use rlib or dylib for a particular + # transitive dependency only occurs if the rlib and dylib both describe the + # same crate i.e. contain the same crate hash. + # + # Buck-built libraries never produce an rlib and dylib containing the same + # crate hash, since that only occurs when outputting multiple crate types + # through a single rustc invocation: `--crate-type=rlib --crate-type=dylib`. + # In Buck, different crate types are built by different rustc invocations. + # But Cargo does invoke rustc with multiple crate types when you write + # `[lib] crate-type = ["rlib", "dylib"]` in Cargo.toml, and in fact the + # standard libraries built by x.py and distributed by Rustup are built this + # way. + if toolchain_info.explicit_sysroot_deps: + # Standard libraries are being passed explicitly, and Buck-built + # dependencies never collide on crate hash, so `-Cprefer-dynamic` cannot + # make a difference. 
+ prefer_dynamic_flags = [] + elif crate_type == CrateType("dylib"): + # Use standard library dylibs from the implicit sysroot. + prefer_dynamic_flags = ["-Cprefer-dynamic=yes"] + else: + # Use standard library rlibs from the implicit sysroot. + prefer_dynamic_flags = ["-Cprefer-dynamic=no"] # (the default) + split_debuginfo_flags = { # Rustc's default behavior: debug info is put into every rlib and # staticlib, then copied into the executables and shared libraries by @@ -950,7 +990,7 @@ def _compute_common_args( "-Cmetadata={}".format(_metadata(ctx.label, is_rustdoc_test)[0]), # Make diagnostics json with the option to extract rendered text ["--error-format=json", "--json=diagnostic-rendered-ansi"] if not is_rustdoc_test else [], - ["-Cprefer-dynamic=yes"] if crate_type == CrateType("dylib") else [], + prefer_dynamic_flags, ["--target={}".format(toolchain_info.rustc_target_triple)] if toolchain_info.rustc_target_triple else [], split_debuginfo_flags, compile_ctx.sysroot_args, From a1cddde2db891160d7301afa48cd03766f77096d Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Thu, 21 Mar 2024 17:21:13 -0700 Subject: [PATCH 0536/1133] Respect FORCE_RLIB for implicit sysroot deps Summary: As discussed in D55179027, it's not clear we have a good reason to be passing `-Cprefer-dynamic=yes`. Buck always does Rust->Rust dependencies using rlib, and there isn't a reason the standard library would need to diverge from that. Let's revisit this if we ever start using dylib for Rust->Rust dependencies i.e. if FORCE_RLIB goes away or becomes configurable on the toolchain. 
https://www.internalfb.com/code/fbsource/[002deff2c4c3b317fc6946be7e7dab5ce6ee17d9]/fbcode/buck2/prelude/rust/link_info.bzl?lines=92-94 Reviewed By: JakobDegen Differential Revision: D55220508 fbshipit-source-id: c74912bae88bed5e417173e5e05e58162215a192 --- prelude/rust/build.bzl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 04b297f02..82ceec7cd 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -68,6 +68,7 @@ load( ) load( ":link_info.bzl", + "FORCE_RLIB", "RustCxxLinkGroupInfo", #@unused Used as a type "RustDependency", "RustLinkInfo", @@ -932,7 +933,7 @@ def _compute_common_args( # dependencies never collide on crate hash, so `-Cprefer-dynamic` cannot # make a difference. prefer_dynamic_flags = [] - elif crate_type == CrateType("dylib"): + elif crate_type == CrateType("dylib") and not FORCE_RLIB: # Use standard library dylibs from the implicit sysroot. prefer_dynamic_flags = ["-Cprefer-dynamic=yes"] else: From 35f71ca759099cd1a1bcea1c6f286e4e527e08a4 Mon Sep 17 00:00:00 2001 From: generatedunixname89002005307016 Date: Thu, 21 Mar 2024 17:52:48 -0700 Subject: [PATCH 0537/1133] suppress errors in `buck2/prelude/python/tools/sourcedb_merger` Differential Revision: D55225350 fbshipit-source-id: acd6978256719b96ec92a03a56866a0fc40ed425 --- .../tools/sourcedb_merger/tests/__init__.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/prelude/python/tools/sourcedb_merger/tests/__init__.py b/prelude/python/tools/sourcedb_merger/tests/__init__.py index 1e666ca2a..2444a8d0f 100644 --- a/prelude/python/tools/sourcedb_merger/tests/__init__.py +++ b/prelude/python/tools/sourcedb_merger/tests/__init__.py @@ -8,6 +8,29 @@ # pyre-strict +# pyre-fixme[21]: Could not find name `BuildMapLoadError` in `tests.inputs_test`. +# pyre-fixme[21]: Could not find name `PartialBuildMap` in `tests.inputs_test`. 
+# pyre-fixme[21]: Could not find name `Target` in `tests.inputs_test`. +# pyre-fixme[21]: Could not find name `TargetEntry` in `tests.inputs_test`. +# pyre-fixme[21]: Could not find name `load_targets_and_build_maps_from_json` in +# `tests.inputs_test`. from .inputs_test import * # noqa + +# pyre-fixme[21]: Could not find name `ConflictInfo` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `ConflictMap` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `FullBuildMap` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `MergeResult` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `PartialBuildMap` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `SourceInfo` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `Target` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `TargetEntry` in `tests.legacy_output_test`. +# pyre-fixme[21]: Could not find name `merge_partial_build_maps` in +# `tests.legacy_output_test`. from .legacy_output_test import * # noqa + +# pyre-fixme[21]: Could not find name `PartialBuildMap` in `tests.outputs_test`. +# pyre-fixme[21]: Could not find name `Target` in `tests.outputs_test`. +# pyre-fixme[21]: Could not find name `TargetEntry` in `tests.outputs_test`. +# pyre-fixme[21]: Could not find name `merge_partial_build_maps` in +# `tests.outputs_test`. from .outputs_test import * # noqa From 2ba65ec9038d6c40c9c65cdec422271da5c9a7f0 Mon Sep 17 00:00:00 2001 From: Artem Pianykh Date: Fri, 22 Mar 2024 01:34:59 -0700 Subject: [PATCH 0538/1133] Fix extremely long linking with link groups and sanitizers enabled Summary: Long story short, a combination of factors triggered a pathological behavior in lld where it would repeatedly do a linear scan of all symbols thousands of times. 
Reviewed By: Nekitosss, asm89 Differential Revision: D55061860 fbshipit-source-id: 8fb9dbbcd371981e9dc48fa04b0c7cfd3eb8f4f8 --- prelude/cxx/symbols.bzl | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/prelude/cxx/symbols.bzl b/prelude/cxx/symbols.bzl index d423b4450..3948f614c 100644 --- a/prelude/cxx/symbols.bzl +++ b/prelude/cxx/symbols.bzl @@ -56,6 +56,7 @@ def _extract_symbol_names( $lines = $result -split '`n' $lines = $lines | ForEach-Object {{ ($_ -split ' ')[1] }} $lines = $lines | ForEach-Object {{ ($_ -split '@')[0] }} + $lines = $lines | Where-Object {{ $_ -notmatch '__odr_asan_gen_.*' }} $lines = $lines | Sort-Object -Unique [IO.File]::WriteAllLines('{{}}', $lines) }}""".format(nm_flags) @@ -73,6 +74,15 @@ def _extract_symbol_names( ' | cut -d" " -f2 ' + # Strip off ABI Version (@...) when using llvm-nm to keep compat with buck1 " | cut -d@ -f1 " + + # Remove ASAN ODR generated symbols: __odr_asan_gen_*. They are + # handled by a separate asan_dynamic_list.txt list of asan patterns. + # BUT MORE IMPORTANTLY, symbols like __odr_asan_XXX[abi:cxx11] force + # lld into a code path that repeatedly does a linear scan of all + # symbols for O(num_patterns_with_bracket * num_symbols). This + # totally tanks link time for builds with sanitizers! Anecdotally, + # a binary with 3.7M symbols and 2K __odr_asan_XXX[abi:cxx11] can + # spend 6 mins processing patterns and 10s actually linking. + " | grep -v -E '__odr_asan_gen_.*'" + # Sort and dedup symbols. Use the `C` locale and do it in-memory to # make it significantly faster. CAUTION: if ten of these processes # run in parallel, they'll have cumulative allocations larger than RAM. From 6e099b8826c65c9c197ff6c96fd9d039a31ebf24 Mon Sep 17 00:00:00 2001 From: Milen Dzhumerov Date: Fri, 22 Mar 2024 03:30:12 -0700 Subject: [PATCH 0539/1133] Sanitizers: add sanitizer flags at the end of linker flags Summary: Sanitizer args need to be at the end in case there's an embedded Swift runtime in the app bundle. 
Differential Revision: D55189393 fbshipit-source-id: 9a680c91e74ba8d58f646720d59be2fd7bb2afab --- prelude/cxx/link.bzl | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/prelude/cxx/link.bzl b/prelude/cxx/link.bzl index 624662886..4a4ff3789 100644 --- a/prelude/cxx/link.bzl +++ b/prelude/cxx/link.bzl @@ -166,9 +166,6 @@ def cxx_link_into( all_link_args = cmd_args(link_cmd_parts.linker_flags) all_link_args.add(get_output_flags(linker_info.type, output)) - sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain_info, output) - all_link_args.add(sanitizer_runtime_args.extra_link_args) - # Darwin LTO requires extra link outputs to preserve debug info split_debug_output = None split_debug_lto_info = get_split_debug_lto_info(ctx.actions, cxx_toolchain_info, output.short_path) @@ -200,6 +197,12 @@ def cxx_link_into( ) all_link_args.add(link_args_output.link_args) + # Sanitizer runtime args must appear at the end because it can affect + # behavior of Swift runtime loading when the app also has an embedded + # Swift runtime. + sanitizer_runtime_args = cxx_sanitizer_runtime_arguments(ctx, cxx_toolchain_info, output) + all_link_args.add(sanitizer_runtime_args.extra_link_args) + bitcode_linkables = [] for link_item in opts.links: if link_item.infos == None: From 336efd55eb2d9d85768a4901f4a2f17e55650cf0 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Fri, 22 Mar 2024 10:35:23 -0700 Subject: [PATCH 0540/1133] Don't pass in full shared libs dict unnecessarily Summary: The `_make_py_package_impl` helper only cares if shared libs exist, and so doesn't need all bookkeeping about them. 
Reviewed By: dtolnay Differential Revision: D54970779 fbshipit-source-id: da314766ea7e14b3aa8afb02d8c554e46ae245b3 --- prelude/python/make_py_package.bzl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index dfb0d616c..61db2f5cf 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -160,7 +160,7 @@ def make_py_package( make_py_package_cmd, package_style, build_args, - shared_libraries, + len(shared_libraries) > 0, preload_libraries, common_modules_args, dep_artifacts, @@ -179,7 +179,7 @@ def make_py_package( make_py_package_cmd, PackageStyle(style), build_args, - shared_libraries, + len(shared_libraries) > 0, preload_libraries, common_modules_args, dep_artifacts, @@ -204,7 +204,7 @@ def _make_py_package_impl( make_py_package_cmd: RunInfo | None, package_style: PackageStyle, build_args: list[ArgLike], - shared_libraries: dict[str, (LinkedObject, bool)], + shared_libraries: bool, preload_libraries: cmd_args, common_modules_args: cmd_args, dep_artifacts: list[(ArgLike, str)], @@ -351,7 +351,7 @@ def _pex_bootstrap_args( toolchain: PythonToolchainInfo, main: EntryPoint, output: Artifact, - shared_libraries: dict[str, (LinkedObject, bool)], + shared_libraries: bool, preload_libraries: cmd_args, symlink_tree_path: Artifact | None, package_style: PackageStyle, From 5b15f064bb887f689fabdab92e7ab9270a0ec517 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Fri, 22 Mar 2024 10:35:23 -0700 Subject: [PATCH 0541/1133] Simplify debuginfo file handling Summary: We only actually use the debuginfo artifacts, so avoid wrapping in a manifest object and maintaining a dict of names to artifacts, just to throw the names away at the end. 
Reviewed By: dtolnay Differential Revision: D54980209 fbshipit-source-id: b34989dbeaf50d83c7f2a91f48fcfbfebd9275a9 --- prelude/python/make_py_package.bzl | 13 +++++++------ prelude/python/python_binary.bzl | 13 +++++++------ 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 61db2f5cf..abda071b9 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -34,7 +34,6 @@ PexModules = record( manifests = field(PythonLibraryManifestsInterface), extensions = field(ManifestInfo | None, None), extra_manifests = field(ManifestInfo | None, None), - debuginfo_manifest = field(ManifestInfo | None, None), compile = field(bool, False), ) @@ -121,7 +120,8 @@ def make_py_package( shared_libraries: dict[str, (LinkedObject, bool)], main: EntryPoint, hidden_resources: list[ArgLike] | None, - allow_cache_upload: bool) -> PexProviders: + allow_cache_upload: bool, + debuginfo_files: list[Artifact] = []) -> PexProviders: """ Passes a standardized set of flags to a `make_py_package` binary to create a python "executable". 
@@ -152,6 +152,7 @@ def make_py_package( pex_modules, [startup_function] if startup_function else [], {name: lib for name, (lib, _) in shared_libraries.items()}, + debuginfo_files = debuginfo_files, ) default = _make_py_package_impl( @@ -388,7 +389,8 @@ def _pex_modules_common_args( ctx: AnalysisContext, pex_modules: PexModules, extra_manifests: list[ArgLike], - shared_libraries: dict[str, LinkedObject]) -> (cmd_args, list[(ArgLike, str)], list[(ArgLike, str)]): + shared_libraries: dict[str, LinkedObject], + debuginfo_files: list[Artifact]) -> (cmd_args, list[(ArgLike, str)], list[(ArgLike, str)]): srcs = [] src_artifacts = [] deps = [] @@ -441,11 +443,10 @@ def _pex_modules_common_args( cmd.add(cmd_args(native_library_srcs_args, format = "@{}")) cmd.add(cmd_args(native_library_dests_path, format = "@{}")) - if pex_modules.debuginfo_manifest: - debuginfo_files = pex_modules.debuginfo_manifest.artifacts + if debuginfo_files: debuginfo_srcs_path = ctx.actions.write( "__debuginfo___srcs.txt", - _srcs([src for src, _ in debuginfo_files], format = "--debuginfo-src={}"), + _srcs(debuginfo_files, format = "--debuginfo-src={}"), ) debuginfo_srcs_args = cmd_args(debuginfo_srcs_path) cmd.add(cmd_args(debuginfo_srcs_args, format = "@{}")) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index f085fc455..9d3ec9562 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -205,13 +205,13 @@ def _get_root_link_group_specs( return specs -def _split_debuginfo(ctx, data: dict[str, (typing.Any, Label | bool)]) -> (dict[str, (LinkedObject, Label | bool)], dict[str, Artifact]): - debuginfo_artifacts = {} +def _split_debuginfo(ctx, data: dict[str, (typing.Any, Label | bool)]) -> (dict[str, (LinkedObject, Label | bool)], list[Artifact]): + debuginfo_artifacts = [] transformed = {} for name, (artifact, extra) in data.items(): stripped_binary, debuginfo = strip_debug_with_gnu_debuglink(ctx, name, artifact.unstripped_output) 
transformed[name] = LinkedObject(output = stripped_binary, unstripped_output = artifact.unstripped_output, dwp = artifact.dwp), extra - debuginfo_artifacts[name + ".debuginfo"] = debuginfo + debuginfo_artifacts.append(debuginfo) return transformed, debuginfo_artifacts def _get_shared_only_groups(shared_only_libs: list[LinkableProviders]) -> list[Group]: @@ -671,7 +671,6 @@ def _convert_python_library_to_executable( extra_manifests = create_manifest_for_source_map(ctx, "extra_manifests", extra_artifacts) shared_libraries = {} - debuginfo_artifacts = {} # Create the map of native libraries to their artifacts and whether they # need to be preloaded. Note that we merge preload deps into regular deps @@ -682,16 +681,17 @@ def _convert_python_library_to_executable( # Strip native libraries and extensions and update the .gnu_debuglink references if we are extracting # debug symbols from the par + debuginfo_files = [] if ctx.attrs.strip_libpar == "extract" and package_style == PackageStyle("standalone") and cxx_is_gnu(ctx): shared_libraries, library_debuginfo = _split_debuginfo(ctx, shared_libraries) extensions, extension_debuginfo = _split_debuginfo(ctx, extensions) - debuginfo_artifacts = library_debuginfo | extension_debuginfo + debuginfo_files += library_debuginfo + debuginfo_files += extension_debuginfo # Combine sources and extensions into a map of all modules. 
pex_modules = PexModules( manifests = library.manifests(), extra_manifests = extra_manifests, - debuginfo_manifest = create_manifest_for_source_map(ctx, "debuginfo", debuginfo_artifacts) if debuginfo_artifacts else None, compile = compile, extensions = create_manifest_for_extensions( ctx, @@ -714,6 +714,7 @@ def _convert_python_library_to_executable( main, hidden_resources, allow_cache_upload, + debuginfo_files = debuginfo_files, ) pex.sub_targets.update(extra) From b358f243a0b131993d559158d40cfefc96e559f6 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Fri, 22 Mar 2024 10:35:23 -0700 Subject: [PATCH 0542/1133] Simplify extra dep handling Summary: A lot of the extra dep/runtime files/debuginfo handling would needlessly propagate path information separately, just to throw it away. Reviewed By: dtolnay Differential Revision: D54987076 fbshipit-source-id: adde45ed7d2df85f653b27cca9b87f9fd91d1c43 --- prelude/python/make_py_package.bzl | 38 +++++++++++++++--------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index abda071b9..141b70ce3 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -41,7 +41,7 @@ PexModules = record( # providers. 
PexProviders = record( default_output = field(Artifact), - other_outputs = list[(ArgLike, str)], + other_outputs = list[ArgLike], other_outputs_prefix = str | None, hidden_resources = list[ArgLike], sub_targets = dict[str, list[Provider]], @@ -59,7 +59,7 @@ def make_py_package_providers( def make_default_info(pex: PexProviders) -> Provider: return DefaultInfo( default_output = pex.default_output, - other_outputs = [a for a, _ in pex.other_outputs] + pex.hidden_resources, + other_outputs = pex.other_outputs + pex.hidden_resources, sub_targets = pex.sub_targets, ) @@ -208,8 +208,8 @@ def _make_py_package_impl( shared_libraries: bool, preload_libraries: cmd_args, common_modules_args: cmd_args, - dep_artifacts: list[(ArgLike, str)], - debug_artifacts: list[(ArgLike, str)], + dep_artifacts: list[ArgLike], + debug_artifacts: list[ArgLike], main: EntryPoint, hidden_resources: list[ArgLike] | None, manifest_module: ArgLike | None, @@ -275,7 +275,7 @@ def _make_py_package_impl( # For inplace builds add local artifacts to outputs so they get properly materialized runtime_files.extend(dep_artifacts) - runtime_files.append((symlink_tree_path, symlink_tree_path.short_path)) + runtime_files.append(symlink_tree_path) # For standalone builds, or builds setting make_py_package we generate args for calling make_par.py if standalone or make_py_package_cmd != None: @@ -330,12 +330,12 @@ def _make_py_package_impl( other_outputs_prefix = symlink_tree_path.short_path if symlink_tree_path != None else None, hidden_resources = hidden_resources, sub_targets = {}, - run_cmd = cmd_args(run_args).hidden([a for a, _ in runtime_files] + hidden_resources), + run_cmd = cmd_args(run_args).hidden(runtime_files + hidden_resources), ) -def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[(ArgLike, str)]) -> list[Provider]: +def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[ArgLike]) -> list[Provider]: out = ctx.actions.write_json("debuginfo.manifest.json", 
debug_artifacts) - return [DefaultInfo(default_output = out, other_outputs = [a for a, _ in debug_artifacts])] + return [DefaultInfo(default_output = out, other_outputs = debug_artifacts)] def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: dict[str, (LinkedObject, bool)]) -> cmd_args: preload_libraries_path = ctx.actions.write( @@ -390,7 +390,7 @@ def _pex_modules_common_args( pex_modules: PexModules, extra_manifests: list[ArgLike], shared_libraries: dict[str, LinkedObject], - debuginfo_files: list[Artifact]) -> (cmd_args, list[(ArgLike, str)], list[(ArgLike, str)]): + debuginfo_files: list[Artifact]) -> (cmd_args, list[ArgLike], list[ArgLike]): srcs = [] src_artifacts = [] deps = [] @@ -410,9 +410,9 @@ def _pex_modules_common_args( if extra_manifests: srcs.extend(extra_manifests) - deps.extend(src_artifacts) + deps.extend([a[0] for a in src_artifacts]) resources = pex_modules.manifests.resource_manifests() - deps.extend(pex_modules.manifests.resource_artifacts_with_paths()) + deps.extend([a[0] for a in pex_modules.manifests.resource_artifacts_with_paths()]) src_manifests_path = ctx.actions.write( "__src_manifests.txt", @@ -470,9 +470,9 @@ def _pex_modules_common_args( cmd.add(cmd_args(dwp_srcs_args, format = "@{}")) cmd.add(cmd_args(dwp_dests_path, format = "@{}")) - debug_artifacts.extend(dwp) + debug_artifacts.extend([d for d, _ in dwp]) - deps.extend([(lib.output, name) for name, lib in shared_libraries.items()]) + deps.extend([lib.output for lib in shared_libraries.values()]) external_debug_info = project_artifacts( ctx.actions, @@ -480,15 +480,15 @@ def _pex_modules_common_args( ) # HACK: external_debug_info has an empty path - debug_artifacts.extend([(d, "") for d in external_debug_info]) + debug_artifacts.extend(external_debug_info) return (cmd, deps, debug_artifacts) def _pex_modules_args( ctx: AnalysisContext, common_args: cmd_args, - dep_artifacts: list[(ArgLike, str)], - debug_artifacts: list[(ArgLike, str)], + dep_artifacts: 
list[ArgLike], + debug_artifacts: list[ArgLike], symlink_tree_path: Artifact | None, manifest_module: ArgLike | None, pex_modules: PexModules, @@ -508,7 +508,7 @@ def _pex_modules_args( if pex_modules.compile: pyc_mode = PycInvalidationMode("UNCHECKED_HASH") if symlink_tree_path == None else PycInvalidationMode("CHECKED_HASH") bytecode_manifests = pex_modules.manifests.bytecode_manifests(pyc_mode) - dep_artifacts.extend(pex_modules.manifests.bytecode_artifacts_with_paths(pyc_mode)) + dep_artifacts.extend([a[0] for a in pex_modules.manifests.bytecode_artifacts_with_paths(pyc_mode)]) bytecode_manifests_path = ctx.actions.write( "__bytecode_manifests{}.txt".format(output_suffix), @@ -525,9 +525,9 @@ def _pex_modules_args( else: # Accumulate all the artifacts we depend on. Only add them to the command # if we are not going to create symlinks. - cmd.hidden([a for a, _ in dep_artifacts]) + cmd.hidden(dep_artifacts) - cmd.hidden([a for a, _ in debug_artifacts]) + cmd.hidden(debug_artifacts) return cmd From dec718b036a0c33a7ec18004b7db5d891f2a0c06 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Fri, 22 Mar 2024 10:35:23 -0700 Subject: [PATCH 0543/1133] Represent shared libs as a list instead of dict Summary: This refactors internal bookkeeping of shared libs to use `list`s of `SharedLibrary` objects, instead of `dict`s mapping the libs `SONAME` to the `SharedLibrary` and defers the process of "merging" them to `dict` until needed. This helps move towards supporting shared libs where we don't know the `SONAME` at analysis time. 
Reviewed By: dtolnay Differential Revision: D54964964 fbshipit-source-id: 03dbd1401c99bf0d6672a5d107bca575531c60b9 --- .../android_binary_native_library_rules.bzl | 5 +- prelude/android/voltron.bzl | 6 +- prelude/cxx/cxx_executable.bzl | 49 ++++---- prelude/cxx/cxx_link_utility.bzl | 23 ++-- prelude/cxx/cxx_types.bzl | 2 +- prelude/go/link.bzl | 4 +- prelude/haskell/haskell.bzl | 20 ++-- prelude/haskell/haskell_ghci.bzl | 7 +- prelude/java/java_binary.bzl | 8 +- prelude/java/java_test.bzl | 16 ++- prelude/julia/julia_binary.bzl | 16 ++- prelude/linking/shared_libraries.bzl | 105 ++++++++++-------- prelude/python/python_binary.bzl | 14 +-- prelude/rust/build.bzl | 5 +- prelude/rust/rust_binary.bzl | 38 ++++--- 15 files changed, 180 insertions(+), 138 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index e7a07bf5d..33dc0e55a 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -57,6 +57,7 @@ load( "get_strip_non_global_flags", "merge_shared_libraries", "traverse_shared_library_info", + "with_unique_sonames", ) load("@prelude//linking:strip.bzl", "strip_object") load("@prelude//linking:types.bzl", "Linkage") @@ -870,8 +871,8 @@ def get_default_shared_libs(ctx: AnalysisContext, deps: list[Dependency], shared deps = filter(None, [x.get(SharedLibraryInfo) for x in deps]), ) return { - so_name: shared_lib - for so_name, shared_lib in traverse_shared_library_info(shared_library_info).items() + soname: shared_lib + for soname, shared_lib in with_unique_sonames(traverse_shared_library_info(shared_library_info)).items() if not (shared_libraries_to_exclude and shared_libraries_to_exclude.contains(shared_lib.label.raw_target())) } diff --git a/prelude/android/voltron.bzl b/prelude/android/voltron.bzl index d6622c261..288194878 100644 --- a/prelude/android/voltron.bzl +++ b/prelude/android/voltron.bzl @@ 
-68,7 +68,7 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: ctx.actions, ctx.label, [android_packageable_info], - traversed_shared_library_info.values(), + traversed_shared_library_info, ctx.attrs._android_toolchain[AndroidToolchainInfo], ctx.attrs.application_module_configs, ctx.attrs.application_module_dependencies, @@ -86,7 +86,7 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: ]).hidden(targets_to_jars_args) if ctx.attrs.should_include_libraries: - targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), so_name], delimiter = " ") for so_name, shared_lib in traversed_shared_library_info.items()] + targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), shared_lib.soname], delimiter = " ") for shared_lib in traversed_shared_library_info] targets_to_so_names = ctx.actions.write("targets_to_so_names.txt", targets_to_so_names_args) cmd.add([ "--targets-to-so-names", @@ -121,7 +121,7 @@ def get_target_to_module_mapping(ctx: AnalysisContext, deps_by_platform: dict[st ctx.actions, deps = filter(None, [x.get(SharedLibraryInfo) for x in deps]), ) - shared_libraries.extend(traverse_shared_library_info(shared_library_info).values()) + shared_libraries.extend(traverse_shared_library_info(shared_library_info)) cmd, output = _get_base_cmd_and_output( ctx.actions, diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index ee9c4184b..683f3ecfa 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -69,6 +69,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", # @unused Used as a type "merge_shared_libraries", "traverse_shared_library_info", ) @@ -177,7 +178,7 @@ CxxExecutableOutput = record( # materialized when this executable is the output of a build, not when it is # used by other rules. They become other_outputs on DefaultInfo. 
external_debug_info_artifacts = list[TransitiveSetArgsProjection], - shared_libs = dict[str, LinkedObject], + shared_libs = list[SharedLibrary], # All link group links that were generated in the executable. auto_link_groups = field(dict[str, LinkedObject], {}), compilation_db = CxxCompilationDbInfo, @@ -418,11 +419,10 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, sub_targets[LINK_GROUP_MAP_DATABASE_SUB_TARGET] = [get_link_group_map_json(ctx, filtered_targets)] # Set up shared libraries symlink tree only when needed - shared_libs = {} + shared_libs = [] # Add in extra, rule-specific shared libs. - for name, shlib in impl_params.extra_shared_libs.items(): - shared_libs[name] = shlib.lib + shared_libs.extend(impl_params.extra_shared_libs) # Only setup a shared library symlink tree when shared linkage or link_groups is used gnu_use_link_groups = cxx_is_gnu(ctx) and link_group_mappings @@ -442,17 +442,18 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, labels_to_links_map = labels_to_links_map, ) - def shlib_filter(_name, shared_lib): - return not gnu_use_link_groups or is_link_group_shlib(shared_lib.label, link_group_ctx) - - for name, shared_lib in traverse_shared_library_info(shlib_info, filter_func = shlib_filter).items(): - shared_libs[name] = shared_lib.lib + for shlib in traverse_shared_library_info(shlib_info): + if not gnu_use_link_groups or is_link_group_shlib(shlib.label, link_group_ctx): + shared_libs.append(shlib) if gnu_use_link_groups: # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - shared_libs.update(link_group_lib.shared_libs) + shared_libs.extend([ + SharedLibrary(soname = name, lib = lib, label = ctx.label) + for name, lib in link_group_lib.shared_libs.items() + ]) toolchain_info = get_cxx_toolchain_info(ctx) linker_info = toolchain_info.linker_info @@ -490,7 +491,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, ctx, # If shlib lib tree generation is enabled, pass in the shared libs (which # will trigger the necessary link tree and link args). - shared_libs if impl_params.exe_shared_libs_link_tree else {}, + shared_libs if impl_params.exe_shared_libs_link_tree else [], impl_params.executable_name, linker_info.binary_extension, link_options( @@ -530,29 +531,29 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, sub_targets["rpath-tree"] = [DefaultInfo( default_output = shared_libs_symlink_tree, other_outputs = [ - lib.output - for lib in shared_libs.values() + shlib.lib.output + for shlib in shared_libs ] + [ - lib.dwp - for lib in shared_libs.values() - if lib.dwp + shlib.lib.dwp + for shlib in shared_libs + if shlib.lib.dwp ], )] sub_targets["shared-libraries"] = [DefaultInfo( default_output = ctx.actions.write_json( binary.output.basename + ".shared-libraries.json", { - "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, name) for name in shared_libs.keys()], - "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, name) for name, lib in shared_libs.items() if lib.dwp], + "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs], + "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs if shlib.lib.dwp], "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if shared_libs_symlink_tree else [], 
}, ), sub_targets = { - name: [DefaultInfo( - default_output = lib.output, - sub_targets = {"dwp": [DefaultInfo(default_output = lib.dwp)]} if lib.dwp else {}, + shlib.soname: [DefaultInfo( + default_output = shlib.lib.output, + sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, )] - for name, lib in shared_libs.items() + for shlib in shared_libs }, )] if link_group_mappings: @@ -640,7 +641,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, actions = ctx.actions, children = ( [binary.external_debug_info] + - [s.external_debug_info for s in shared_libs.values()] + + [s.lib.external_debug_info for s in shared_libs] + impl_params.additional.static_external_debug_info ), ) @@ -698,7 +699,7 @@ _CxxLinkExecutableResult = record( def _link_into_executable( ctx: AnalysisContext, - shared_libs: dict[str, LinkedObject], + shared_libs: list[SharedLibrary], executable_name: [str, None], binary_extension: str, opts: LinkOptions) -> _CxxLinkExecutableResult: diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index 320339cd5..079eeab99 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -14,11 +14,15 @@ load( "@prelude//linking:link_info.bzl", "LinkArgs", "LinkOrdering", # @unused Used as a type - "LinkedObject", # @unused Used as a type "unpack_link_args", "unpack_link_args_filelist", ) load("@prelude//linking:lto.bzl", "LtoMode") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", # @unused Used as a type + "create_shlib_symlink_tree", +) load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type def generates_split_debug(toolchain: CxxToolchainInfo): @@ -213,7 +217,7 @@ def executable_shared_lib_arguments( ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, output: Artifact, - shared_libs: dict[str, LinkedObject]) -> ExecutableSharedLibArguments: + shared_libs: list[SharedLibrary]) -> 
ExecutableSharedLibArguments: extra_link_args = [] runtime_files = [] shared_libs_symlink_tree = None @@ -222,7 +226,7 @@ def executable_shared_lib_arguments( # of a build. Do not add to runtime_files. external_debug_info = project_artifacts( actions = ctx.actions, - tsets = [shlib.external_debug_info for shlib in shared_libs.values()], + tsets = [shlib.lib.external_debug_info for shlib in shared_libs], ) linker_type = cxx_toolchain.linker_info.type @@ -230,16 +234,17 @@ def executable_shared_lib_arguments( if len(shared_libs) > 0: if linker_type == "windows": shared_libs_symlink_tree = [ctx.actions.symlink_file( - shlib.output.basename, - shlib.output, - ) for _, shlib in shared_libs.items()] + shlib.lib.output.basename, + shlib.lib.output, + ) for shlib in shared_libs] runtime_files.extend(shared_libs_symlink_tree) # Windows doesn't support rpath. else: - shared_libs_symlink_tree = ctx.actions.symlinked_dir( - shared_libs_symlink_tree_name(output), - {name: shlib.output for name, shlib in shared_libs.items()}, + shared_libs_symlink_tree = create_shlib_symlink_tree( + actions = ctx.actions, + out = shared_libs_symlink_tree_name(output), + shared_libs = shared_libs, ) runtime_files.append(shared_libs_symlink_tree) rpath_reference = get_rpath_origin(linker_type) diff --git a/prelude/cxx/cxx_types.bzl b/prelude/cxx/cxx_types.bzl index e6ba9f3a5..e9e814b32 100644 --- a/prelude/cxx/cxx_types.bzl +++ b/prelude/cxx/cxx_types.bzl @@ -178,7 +178,7 @@ CxxRuleConstructorParams = record( # shared libs to include in the symlink tree). extra_link_roots = field(list[LinkableProviders], []), # Additional shared libs to "package". - extra_shared_libs = field(dict[str, SharedLibrary], {}), + extra_shared_libs = field(list[SharedLibrary], []), auto_link_group_specs = field([list[LinkGroupLibSpec], None], None), link_group_info = field([LinkGroupInfo, None], None), # Whether to use pre-stripped objects when linking. 
diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 600c5d51f..fb57250d5 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -79,9 +79,7 @@ def _process_shared_dependencies( ctx.actions, deps = filter(None, map_idx(SharedLibraryInfo, deps)), ) - shared_libs = {} - for name, shared_lib in traverse_shared_library_info(shlib_info).items(): - shared_libs[name] = shared_lib.lib + shared_libs = traverse_shared_library_info(shlib_info) return executable_shared_lib_arguments( ctx, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 2a105b645..b87e09f36 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -128,8 +128,10 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", "SharedLibraryInfo", "create_shared_libraries", + "create_shlib_symlink_tree", "merge_shared_libraries", "traverse_shared_library_info", ) @@ -1020,7 +1022,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: deps = slis, ) - sos = {} + sos = [] if link_group_info != None: own_binary_link_flags = [] @@ -1115,15 +1117,16 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: labels_to_links_map = labels_to_links_map, ) - for name, shared_lib in traverse_shared_library_info(shlib_info).items(): + for shared_lib in traverse_shared_library_info(shlib_info): label = shared_lib.label if is_link_group_shlib(label, link_group_ctx): - sos[name] = shared_lib.lib + sos.append(shared_lib) # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - sos.update(link_group_lib.shared_libs) + for soname, lib in link_group_lib.shared_libs.items(): + sos.append(SharedLibrary(soname = soname, lib = lib, label = ctx.label)) else: nlis = [] @@ -1136,8 +1139,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: li = lib.get(MergedLinkInfo) if li != None: nlis.append(li) - for name, shared_lib in traverse_shared_library_info(shlib_info).items(): - sos[name] = shared_lib.lib + sos.extend(traverse_shared_library_info(shlib_info)) infos = get_link_args_for_strategy(ctx, nlis, to_link_strategy(link_style)) link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) @@ -1151,7 +1153,11 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: rpath_ref = get_rpath_origin(get_cxx_toolchain_info(ctx).linker_info.type) rpath_ldflag = "-Wl,{}/{}".format(rpath_ref, sos_dir) link.add("-optl", "-Wl,-rpath", "-optl", rpath_ldflag) - symlink_dir = ctx.actions.symlinked_dir(sos_dir, {n: o.output for n, o in sos.items()}) + symlink_dir = create_shlib_symlink_tree( + actions = ctx.actions, + out = sos_dir, + shared_libs = sos, + ) run.hidden(symlink_dir) providers = [ diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index faea3eb2f..c4e1bb03b 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -49,6 +49,7 @@ load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "traverse_shared_library_info", + "with_unique_sonames", ) load("@prelude//linking:types.bzl", "Linkage") load( @@ -480,10 +481,10 @@ def _build_preload_deps_root( if SharedLibraryInfo in preload_dep: slib_info = preload_dep[SharedLibraryInfo] - shlib = traverse_shared_library_info(slib_info).items() + shlib = traverse_shared_library_info(slib_info) - for shlib_name, shared_lib in shlib: - preload_symlinks[shlib_name] = shared_lib.lib.output + for soname, shared_lib in with_unique_sonames(shlib).items(): + 
preload_symlinks[soname] = shared_lib.lib.output # TODO(T150785851): build or get SO for direct preload_deps # TODO(T150785851): find out why the only SOs missing are the ones from diff --git a/prelude/java/java_binary.bzl b/prelude/java/java_binary.bzl index d80498ef9..d11069b35 100644 --- a/prelude/java/java_binary.bzl +++ b/prelude/java/java_binary.bzl @@ -22,7 +22,7 @@ load( "get_java_packaging_info", ) -def _generate_script(generate_wrapper: bool, native_libs: dict[str, SharedLibrary]) -> bool: +def _generate_script(generate_wrapper: bool, native_libs: list[SharedLibrary]) -> bool: # if `generate_wrapper` is set and no native libs then it should be a wrapper script as result, # otherwise fat jar will be generated (inner jar or script will be included inside a final fat jar) return generate_wrapper and len(native_libs) == 0 @@ -31,7 +31,7 @@ def _create_fat_jar( ctx: AnalysisContext, java_toolchain: JavaToolchainInfo, jars: cmd_args, - native_libs: dict[str, SharedLibrary], + native_libs: list[SharedLibrary], do_not_create_inner_jar: bool, generate_wrapper: bool) -> list[Artifact]: extension = "sh" if _generate_script(generate_wrapper, native_libs) else "jar" @@ -55,7 +55,7 @@ def _create_fat_jar( ) args += [ "--native_libs_file", - ctx.actions.write("native_libs", [cmd_args([so_name, native_lib.lib.output], delimiter = " ") for so_name, native_lib in native_libs.items()]), + ctx.actions.write("native_libs", [cmd_args([native_lib.soname, native_lib.lib.output], delimiter = " ") for native_lib in native_libs]), ] if do_not_create_inner_jar: args += [ @@ -107,7 +107,7 @@ def _create_fat_jar( outputs.append(classpath_args_output) fat_jar_cmd = cmd_args(args) - fat_jar_cmd.hidden(jars, [native_lib.lib.output for native_lib in native_libs.values()]) + fat_jar_cmd.hidden(jars, [native_lib.lib.output for native_lib in native_libs]) ctx.actions.run( fat_jar_cmd, diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index 697a0dccc..a61fc0cb7 100644 --- 
a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -14,7 +14,13 @@ load("@prelude//java:java_library.bzl", "build_java_library") load("@prelude//java:java_providers.bzl", "JavaLibraryInfo", "JavaPackagingInfo", "get_all_java_packaging_deps_tset") load("@prelude//java:java_toolchain.bzl", "JavaTestToolchainInfo", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") -load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "merge_shared_libraries", "traverse_shared_library_info") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraryInfo", + "create_shlib_symlink_tree", + "merge_shared_libraries", + "traverse_shared_library_info", +) load( "@prelude//tests:re_utils.bzl", "get_re_executors_from_props", @@ -182,8 +188,10 @@ def _get_native_libs_env(ctx: AnalysisContext) -> dict: deps = shared_library_infos, ) - native_linkables = traverse_shared_library_info(shared_library_info) - cxx_library_symlink_tree_dict = {so_name: shared_lib.lib.output for so_name, shared_lib in native_linkables.items()} - cxx_library_symlink_tree = ctx.actions.symlinked_dir("cxx_library_symlink_tree", cxx_library_symlink_tree_dict) + cxx_library_symlink_tree = create_shlib_symlink_tree( + actions = ctx.actions, + out = "cxx_library_symlink_tree", + shared_libs = traverse_shared_library_info(shared_library_info), + ) return {"BUCK_LD_SYMLINK_TREE": cxx_library_symlink_tree} diff --git a/prelude/julia/julia_binary.bzl b/prelude/julia/julia_binary.bzl index c1cedffee..0aeb2a016 100644 --- a/prelude/julia/julia_binary.bzl +++ b/prelude/julia/julia_binary.bzl @@ -5,7 +5,12 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info") +load( + "@prelude//linking:shared_libraries.bzl", + "create_shlib_symlink_tree", + "merge_shared_libraries", + "traverse_shared_library_info", +) load("@prelude//utils:utils.bzl", "flatten") load(":julia_info.bzl", "JuliaLibraryInfo", "JuliaLibraryTSet", "JuliaToolchainInfo") @@ -47,12 +52,13 @@ def build_jll_shlibs_mapping(ctx: AnalysisContext, json_info_file: Artifact): filter(None, [d.shared_library_info for d in deps]), )) - shared_libs_symlink_tree = ctx.actions.symlinked_dir( - "__shared_libs_symlink_tree__", - {name: shlib.lib.output for name, shlib in shlibs.items()}, + shared_libs_symlink_tree = create_shlib_symlink_tree( + actions = ctx.actions, + out = "__shared_libs_symlink_tree__", + shared_libs = shlibs, ) - shlib_label_to_soname = {shlib.label: name for name, shlib in shlibs.items()} + shlib_label_to_soname = {shlib.label: shlib.soname for shlib in shlibs} # iterate through all the jll libraries json_info = [] diff --git a/prelude/linking/shared_libraries.bzl b/prelude/linking/shared_libraries.bzl index 7d95e5345..a93310fa8 100644 --- a/prelude/linking/shared_libraries.bzl +++ b/prelude/linking/shared_libraries.bzl @@ -19,13 +19,13 @@ SharedLibrary = record( # for downstream rules to reproduce the shared library with some modifications (for example # android relinker will link again with an added version script argument). # TODO(cjhopman): This is currently always available. - link_args = field(list[LinkArgs] | None), + link_args = field(list[LinkArgs] | None, None), # The sonames of the shared libraries that this links against. # TODO(cjhopman): This is currently always available. 
- shlib_deps = field(list[str] | None), - stripped_lib = field([Artifact, None]), - can_be_asset = field(bool), - for_primary_apk = field(bool), + shlib_deps = field(list[str] | None, None), + stripped_lib = field(Artifact | None, None), + can_be_asset = field(bool, False), + for_primary_apk = field(bool, False), soname = field(str), label = field(Label), ) @@ -35,7 +35,7 @@ SharedLibraries = record( # Since the SONAME is what the dynamic loader uses to uniquely identify # libraries, using this as the key allows easily detecting conflicts from # dependencies. - libraries = field(dict[str, SharedLibrary]), + libraries = field(list[SharedLibrary]), ) # T-set of SharedLibraries @@ -62,7 +62,7 @@ def create_shared_libraries( """ cxx_toolchain = getattr(ctx.attrs, "_cxx_toolchain", None) return SharedLibraries( - libraries = {name: SharedLibrary( + libraries = [SharedLibrary( lib = shlib, stripped_lib = strip_object( ctx, @@ -76,41 +76,9 @@ def create_shared_libraries( for_primary_apk = getattr(ctx.attrs, "used_by_wrap_script", False), label = ctx.label, soname = name, - ) for (name, shlib) in libraries.items()}, + ) for (name, shlib) in libraries.items()], ) -# We do a lot of merging library maps, so don't use O(n) type annotations -def _merge_lib_map( - # dict[str, SharedLibrary] - dest_mapping, - # [dict[str, SharedLibrary] - mapping_to_merge, - filter_func) -> None: - """ - Merges a mapping_to_merge into `dest_mapping`. Fails if different libraries - map to the same name. - """ - for (name, src) in mapping_to_merge.items(): - if filter_func != None and not filter_func(name, src): - continue - existing = dest_mapping.get(name) - if existing != None and existing.lib != src.lib: - error = ( - "Duplicate library {}! Conflicting mappings:\n" + - "{} from {}\n" + - "{} from {}" - ) - fail( - error.format( - name, - existing.lib, - existing.label, - src.lib, - src.label, - ), - ) - dest_mapping[name] = src - # Merge multiple SharedLibraryInfo. 
The value in `node` represents a set of # SharedLibraries that is provided by the target being analyzed. It's optional # because that might not always exist, e.g. a Python library can pass through @@ -131,11 +99,58 @@ def merge_shared_libraries( set = actions.tset(SharedLibrariesTSet, **kwargs) if kwargs else None return SharedLibraryInfo(set = set) -def traverse_shared_library_info( - info: SharedLibraryInfo, - filter_func = None): # -> dict[str, SharedLibrary]: - libraries = {} +def traverse_shared_library_info(info: SharedLibraryInfo): # -> list[SharedLibrary]: + libraries = [] if info.set: for libs in info.set.traverse(): - _merge_lib_map(libraries, libs.libraries, filter_func) + libraries.extend(libs.libraries) return libraries + +# Helper to merge shlibs, throwing an error if more than one have the same SONAME. +def _merge_shlibs( + shared_libs: list[SharedLibrary], + resolve_soname: typing.Callable) -> dict[str, SharedLibrary]: + merged = {} + for shlib in shared_libs: + soname = resolve_soname(shlib.soname) + existing = merged.get(soname) + if existing != None and existing.lib != shlib.lib: + error = ( + "Duplicate library {}! Conflicting mappings:\n" + + "{} from {}\n" + + "{} from {}" + ) + fail( + error.format( + shlib.soname, + existing.lib, + existing.label, + shlib.lib, + shlib.label, + ), + ) + merged[soname] = shlib + return merged + +def with_unique_sonames(shared_libs: list[SharedLibrary]) -> dict[str, SharedLibrary]: + """ + Convert a list of `SharedLibrary`s to a map of unique SONAMEs to the + corresponding `SharedLibrary`. + + Will fail if the same SONAME maps to multiple `SharedLibrary`s. + """ + return _merge_shlibs( + shared_libs = shared_libs, + resolve_soname = lambda s: s, + ) + +def create_shlib_symlink_tree(actions: AnalysisActions, out: str, shared_libs: list[SharedLibrary]): + """ + Merged shared libs into a symlink tree mapping the library's SONAME to + it's artifact. 
+ """ + merged = with_unique_sonames(shared_libs = shared_libs) + return actions.symlinked_dir( + out, + {name: shlib.lib.output for name, shlib in merged.items()}, + ) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 9d3ec9562..93436837e 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -455,8 +455,8 @@ def _convert_python_library_to_executable( # Convert preloaded deps to a set of their names to be loaded by. preload_labels = {d.label: None for d in ctx.attrs.preload_deps} preload_names = { - name: None - for name, shared_lib in library.shared_libraries().items() + shared_lib.soname: None + for shared_lib in library.shared_libraries() if shared_lib.label in preload_labels } @@ -627,8 +627,8 @@ def _convert_python_library_to_executable( extra["native-executable"] = [DefaultInfo(default_output = executable_info.binary, sub_targets = executable_info.sub_targets)] # Add sub-targets for libs. - for name, lib in executable_info.shared_libs.items(): - extra[name] = [DefaultInfo(default_output = lib.output)] + for shlib in executable_info.shared_libs: + extra[shlib.soname] = [DefaultInfo(default_output = shlib.lib.output)] for name, group in executable_info.auto_link_groups.items(): extra[name] = [DefaultInfo(default_output = group.output)] @@ -645,8 +645,8 @@ def _convert_python_library_to_executable( # Put native libraries into the runtime location, as we need to unpack # potentially all of them before startup. 
native_libs = { - paths.join("runtime", "lib", name): lib - for name, lib in executable_info.shared_libs.items() + paths.join("runtime", "lib", shlib.soname): shlib.lib + for shlib in executable_info.shared_libs } preload_names = [paths.join("runtime", "lib", n) for n in preload_names] @@ -660,7 +660,7 @@ def _convert_python_library_to_executable( extra_artifacts["static_extension_finder.py"] = ctx.attrs.static_extension_finder else: - native_libs = {name: shared_lib.lib for name, shared_lib in library.shared_libraries().items()} + native_libs = {shared_lib.soname: shared_lib.lib for shared_lib in library.shared_libraries()} if dbg_source_db: extra_artifacts["dbg-db.json"] = dbg_source_db.default_outputs[0] diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 82ceec7cd..745f3331d 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -315,14 +315,13 @@ def generate_rustdoc_test( ) # Gather and setup symlink tree of transitive shared library deps. - shared_libs = {} + shared_libs = [] if link_strategy == LinkStrategy("shared"): shlib_info = merge_shared_libraries( ctx.actions, deps = inherited_shared_libs(ctx, doc_dep_ctx), ) - for soname, shared_lib in traverse_shared_library_info(shlib_info).items(): - shared_libs[soname] = shared_lib.lib + shared_libs.extend(traverse_shared_library_info(shlib_info)) executable_args = executable_shared_lib_arguments( ctx, compile_ctx.cxx_toolchain_info, diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 62912c8e8..72b05f635 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -40,6 +40,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", "merge_shared_libraries", "traverse_shared_library_info", ) @@ -133,7 +134,7 @@ def _rust_binary_common( output = ctx.actions.declare_output(name) # Gather and setup symlink tree of transitive shared library deps. 
- shared_libs = {} + shared_libs = [] rust_cxx_link_group_info = None link_group_mappings = {} @@ -170,17 +171,18 @@ def _rust_binary_common( labels_to_links_map = labels_to_links_map, ) - def shlib_filter(_name, shared_lib): - return not rust_cxx_link_group_info or is_link_group_shlib(shared_lib.label, link_group_ctx) - - for soname, shared_lib in traverse_shared_library_info(shlib_info, filter_func = shlib_filter).items(): - shared_libs[soname] = shared_lib.lib + for shlib in traverse_shared_library_info(shlib_info): + if not rust_cxx_link_group_info or is_link_group_shlib(shlib.label, link_group_ctx): + shared_libs.append(shlib) if rust_cxx_link_group_info: # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. for _name, link_group_lib in link_group_libs.items(): - shared_libs.update(link_group_lib.shared_libs) + shared_libs.extend([ + SharedLibrary(soname = name, lib = lib, label = ctx.label) + for name, lib in link_group_lib.shared_libs.items() + ]) # link groups shared libraries link args are directly added to the link command, # we don't have to add them here @@ -239,17 +241,17 @@ def _rust_binary_common( default_output = ctx.actions.write_json( name + ".shared-libraries.json", { - "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, name) for name in shared_libs.keys()], - "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, name) for name, lib in shared_libs.items() if lib.dwp], + "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs], + "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs if shlib.lib.dwp], "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if executable_args.shared_libs_symlink_tree else [], }, ), sub_targets = { - name: 
[DefaultInfo( - default_output = lib.output, - sub_targets = {"dwp": [DefaultInfo(default_output = lib.dwp)]} if lib.dwp else {}, + shlib.soname: [DefaultInfo( + default_output = shlib.lib.output, + sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, )] - for name, lib in shared_libs.items() + for shlib in shared_libs }, )] @@ -257,12 +259,12 @@ def _rust_binary_common( sub_targets_for_link_strategy["rpath-tree"] = [DefaultInfo( default_output = executable_args.shared_libs_symlink_tree, other_outputs = [ - lib.output - for lib in shared_libs.values() + shlib.lib.output + for shlib in shared_libs ] + [ - lib.dwp - for lib in shared_libs.values() - if lib.dwp + shlib.lib.dwp + for shlib in shared_libs + if shlib.lib.dwp ], )] From 91f09e778f8fd3fcdd1279150ff79a9b84cdecc5 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Fri, 22 Mar 2024 10:35:23 -0700 Subject: [PATCH 0544/1133] Use `SharedLibrary` to present shlibs Summary: Make consistent with `SharedLibraryInfo` and use `SharedLibrary` objects to hold the node shlibs instead of `LinkedObject`. 
Reviewed By: dtolnay Differential Revision: D55001128 fbshipit-source-id: bff3285a5669538c10a8a97f8917871124c46217 --- .../android_binary_native_library_rules.bzl | 8 +++-- prelude/cxx/cxx.bzl | 5 +-- prelude/cxx/cxx_executable.bzl | 5 +-- prelude/cxx/cxx_library.bzl | 7 +++-- prelude/cxx/link_groups.bzl | 9 +++++- prelude/cxx/omnibus.bzl | 31 ++++++++++++++----- prelude/cxx/prebuilt_cxx_library_group.bzl | 5 +-- prelude/haskell/haskell.bzl | 14 ++++----- prelude/haskell/haskell_ghci.bzl | 4 +-- prelude/linking/link_groups.bzl | 9 ++++-- prelude/linking/linkable_graph.bzl | 9 ++++-- prelude/python/python_binary.bzl | 2 +- prelude/rust/rust_binary.bzl | 6 +--- prelude/rust/rust_library.bzl | 5 +-- 14 files changed, 73 insertions(+), 46 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 33dc0e55a..f6b71a7e6 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -1045,7 +1045,7 @@ def _shared_lib_for_prebuilt_shared( transitive_linkable_cache: dict[Label, bool], platform: [str, None] = None) -> SharedLibrary: expect( - len(node_data.shared_libs) == 1, + len(node_data.shared_libs.libraries) == 1, "unexpected shared_libs length for somerge of {} ({})".format(target, node_data.shared_libs), ) @@ -1063,7 +1063,9 @@ def _shared_lib_for_prebuilt_shared( "prebuilt shared library `{}` with exported_deps not supported by somerge".format(target), ) - soname, shlib = node_data.shared_libs.items()[0] + shlib = node_data.shared_libs.libraries[0] + soname = shlib.soname + shlib = shlib.lib output_path = _platform_output_path(soname, platform) return SharedLibrary( lib = shlib, @@ -1270,7 +1272,7 @@ def _get_merged_linkables_for_platform( expect(target_to_link_group[key] == group) node = linkable_nodes[key] - default_solibs = list(node.shared_libs.keys()) + default_solibs = list([shlib.soname for shlib in 
node.shared_libs.libraries]) if not default_solibs and node.preferred_linkage == Linkage("static"): default_solibs = [node.default_soname] diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index 4783ab695..dea2b33c4 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -556,9 +556,10 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: )) # Propagate shared libraries up the tree. + shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, filter(None, [x.get(SharedLibraryInfo) for x in exported_first_order_deps]), )) @@ -603,7 +604,7 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: # If we don't have link input for this link style, we pass in `None` so # that omnibus knows to avoid it. link_infos = libraries, - shared_libs = solibs, + shared_libs = shared_libs, linker_flags = linker_flags, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, ), diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 683f3ecfa..066597c67 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -450,10 +450,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - shared_libs.extend([ - SharedLibrary(soname = name, lib = lib, label = ctx.label) - for name, lib in link_group_lib.shared_libs.items() - ]) + shared_libs.extend(link_group_lib.shared_libs.libraries) toolchain_info = get_cxx_toolchain_info(ctx) linker_info = toolchain_info.linker_info diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 1a8bbfaf2..56ad1b67c 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -501,6 +501,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc link_execution_preference = link_execution_preference, ) solib_as_dict = {library_outputs.solib[0]: library_outputs.solib[1]} if library_outputs.solib else {} + shared_libs = create_shared_libraries(ctx, solib_as_dict) for _, link_style_output in library_outputs.outputs.items(): for key in link_style_output.sub_targets.keys(): @@ -608,7 +609,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc if impl_params.generate_providers.shared_libraries: providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solib_as_dict), + shared_libs, filter(None, [x.get(SharedLibraryInfo) for x in non_exported_deps]) + filter(None, [x.get(SharedLibraryInfo) for x in exported_deps]), )) @@ -717,7 +718,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # that omnibus knows to avoid it. 
include_in_android_mergemap = getattr(ctx.attrs, "include_in_android_merge_map_output", True) and default_output != None, link_infos = library_outputs.link_infos, - shared_libs = solib_as_dict, + shared_libs = shared_libs, linker_flags = linker_flags, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, ), @@ -831,7 +832,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc merge_link_group_lib_info( label = ctx.label, name = link_group, - shared_libs = solib_as_dict, + shared_libs = shared_libs, shared_link_infos = library_outputs.link_infos.get(LibOutputStyle("shared_lib")), deps = exported_deps + non_exported_deps, ), diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 9081c44c3..86661ab90 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -45,6 +45,10 @@ load( "get_linkable_graph_node_map_func", "get_transitive_deps", ) +load( + "@prelude//linking:shared_libraries.bzl", + "create_shared_libraries", +) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") @@ -874,7 +878,10 @@ def create_link_groups( linked_link_groups[link_group_spec.group.name] = _LinkedLinkGroup( artifact = link_group_lib, library = None if not link_group_spec.is_shared_lib else LinkGroupLib( - shared_libs = {link_group_spec.name: link_group_lib}, + shared_libs = create_shared_libraries( + ctx = ctx, + libraries = {link_group_spec.name: link_group_lib}, + ), shared_link_infos = LinkInfos( default = wrap_link_info( link_info, diff --git a/prelude/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl index 15e2f52f2..283ad6239 100644 --- a/prelude/cxx/omnibus.bzl +++ b/prelude/cxx/omnibus.bzl @@ -37,6 +37,10 @@ load( "linkable_deps", "linkable_graph", ) +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", +) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:expect.bzl", "expect") load( @@ 
-118,7 +122,7 @@ OmnibusRootProduct = record( # The result of the omnibus link. OmnibusSharedLibraries = record( omnibus = field([CxxLinkResult, None], None), - libraries = field(dict[str, LinkedObject], {}), + libraries = field(list[SharedLibrary], []), roots = field(dict[Label, OmnibusRootProduct], {}), exclusion_roots = field(list[Label]), excluded = field(list[Label]), @@ -512,9 +516,9 @@ def _create_omnibus( root_products.values(), # ... and the shared libs from excluded nodes. [ - shared_lib.output + shared_lib.lib.output for label in spec.excluded - for shared_lib in spec.link_infos[label].shared_libs.values() + for shared_lib in spec.link_infos[label].shared_libs.libraries ], # Extract explicit global symbol names from flags in all body link args. global_symbols_link_args, @@ -685,7 +689,7 @@ def create_omnibus_libraries( # Create dummy omnibus dummy_omnibus = create_dummy_omnibus(ctx, extra_ldflags) - libraries = {} + libraries = [] root_products = {} # Link all root nodes against the dummy libomnibus lib. @@ -704,7 +708,13 @@ def create_omnibus_libraries( allow_cache_upload = True, ) if root.name != None: - libraries[root.name] = product.shared_library + libraries.append( + SharedLibrary( + soname = root.name, + lib = product.shared_library, + label = label, + ), + ) root_products[label] = product # If we have body nodes, then link them into the monolithic libomnibus.so. @@ -719,12 +729,17 @@ def create_omnibus_libraries( prefer_stripped_objects, allow_cache_upload = True, ) - libraries[_omnibus_soname(ctx)] = omnibus.linked_object + libraries.append( + SharedLibrary( + soname = _omnibus_soname(ctx), + lib = omnibus.linked_object, + label = ctx.label, + ), + ) # For all excluded nodes, just add their regular shared libs. 
for label in spec.excluded: - for name, lib in spec.link_infos[label].shared_libs.items(): - libraries[name] = lib + libraries.extend(spec.link_infos[label].shared_libs.libraries) return OmnibusSharedLibraries( omnibus = omnibus, diff --git a/prelude/cxx/prebuilt_cxx_library_group.bzl b/prelude/cxx/prebuilt_cxx_library_group.bzl index 2cfbdae5f..2effc957f 100644 --- a/prelude/cxx/prebuilt_cxx_library_group.bzl +++ b/prelude/cxx/prebuilt_cxx_library_group.bzl @@ -335,9 +335,10 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: )) # Propagate shared libraries up the tree. + shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, filter(None, [x.get(SharedLibraryInfo) for x in deps + exported_deps]), )) @@ -352,7 +353,7 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: exported_deps = exported_deps, preferred_linkage = preferred_linkage, link_infos = libraries, - shared_libs = solibs, + shared_libs = shared_libs, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, # TODO(cjhopman): this should be set to non-None default_soname = None, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index b87e09f36..8bcd786a9 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -128,7 +128,6 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", "SharedLibraryInfo", "create_shared_libraries", "create_shlib_symlink_tree", @@ -342,6 +341,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: solibs = {} for soname, lib in ctx.attrs.shared_libs.items(): solibs[soname] = LinkedObject(output = lib, unstripped_output = lib) + shared_libs = create_shared_libraries(ctx, solibs) linkable_graph = create_linkable_graph( ctx, @@ -351,7 +351,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx = 
ctx, exported_deps = ctx.attrs.deps, link_infos = {_to_lib_output_style(s): v for s, v in link_infos.items()}, - shared_libs = solibs, + shared_libs = shared_libs, default_soname = None, ), ), @@ -369,7 +369,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: cxx_merge_cpreprocessors(ctx, [own_pp_info], inherited_pp_info), merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, shared_library_infos, ), merge_link_group_lib_info(deps = ctx.attrs.deps), @@ -820,6 +820,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: preferred_linkage, pic_behavior, ) + shared_libs = create_shared_libraries(ctx, solibs) # TODO(cjhopman): this haskell implementation does not consistently handle LibOutputStyle # and LinkStrategy as expected and it's hard to tell what the intent of the existing code is @@ -862,7 +863,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: preferred_linkage = preferred_linkage, exported_deps = ctx.attrs.deps, link_infos = {_to_lib_output_style(s): v for s, v in link_infos.items()}, - shared_libs = solibs, + shared_libs = shared_libs, # TODO(cjhopman): this should be set to non-None default_soname = None, ), @@ -908,7 +909,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: cxx_merge_cpreprocessors(ctx, pp, inherited_pp_info), merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, shared_library_infos, ), haskell_haddock_lib(ctx, pkgname), @@ -1125,8 +1126,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - for soname, lib in link_group_lib.shared_libs.items(): - sos.append(SharedLibrary(soname = soname, lib = lib, label = ctx.label)) + sos.extend(link_group_lib.shared_libs.libraries) else: nlis = [] diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index c4e1bb03b..d65330f43 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -286,8 +286,8 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: shared_li = node.link_infos.get(output_style, None) if shared_li != None: tp_deps_shared_link_infos[node_label] = shared_li.default - for libname, linkObject in node.shared_libs.items(): - so_symlinks[libname] = linkObject.output + for shlib in node.shared_libs.libraries: + so_symlinks[shlib.soname] = shlib.lib.output # Create symlinks to the TP dependencies' SOs so_symlinks_root_path = ctx.label.name + ".so-symlinks" diff --git a/prelude/linking/link_groups.bzl b/prelude/linking/link_groups.bzl index 6d6bdbef3..a3fb2e1c5 100644 --- a/prelude/linking/link_groups.bzl +++ b/prelude/linking/link_groups.bzl @@ -12,7 +12,10 @@ load( load( ":link_info.bzl", "LinkInfos", - "LinkedObject", +) +load( + ":shared_libraries.bzl", + "SharedLibraries", ) # Information about a linkable node which explicitly sets `link_group`. @@ -20,7 +23,7 @@ LinkGroupLib = record( # The label of the owning target (if any). label = field([Label, None], None), # The shared libs to package for this link group. - shared_libs = field(dict[str, LinkedObject]), + shared_libs = field(SharedLibraries), # The link info to link against this link group. 
shared_link_infos = field(LinkInfos), ) @@ -48,7 +51,7 @@ def gather_link_group_libs( def merge_link_group_lib_info( label: [Label, None] = None, name: [str, None] = None, - shared_libs: [dict[str, LinkedObject], None] = None, + shared_libs: [SharedLibraries, None] = None, shared_link_infos: [LinkInfos, None] = None, deps: list[Dependency] = [], children: list[LinkGroupLibInfo] = []) -> LinkGroupLibInfo: diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index c47b5d035..0f70b1d98 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -20,13 +20,16 @@ load( "LinkInfo", # @unused Used as a type "LinkInfos", "LinkStrategy", - "LinkedObject", "LinkerFlags", "MergedLinkInfo", "get_lib_output_style", "get_output_styles_for_linkage", _get_link_info = "get_link_info", ) +load( + ":shared_libraries.bzl", + "SharedLibraries", +) # A provider with information used to link a rule into a shared library. # Potential omnibus roots must provide this so that omnibus can link them @@ -74,7 +77,7 @@ LinkableNode = record( # Shared libraries provided by this target. Used if this target is # excluded. - shared_libs = field(dict[str, LinkedObject], {}), + shared_libs = field(SharedLibraries, SharedLibraries(libraries = [])), # The soname this node would use in default link strategies. May be used by non-default # link strategies as a lib's soname. 
@@ -143,7 +146,7 @@ def create_linkable_node( deps: list[Dependency | LinkableGraph] = [], exported_deps: list[Dependency | LinkableGraph] = [], link_infos: dict[LibOutputStyle, LinkInfos] = {}, - shared_libs: dict[str, LinkedObject] = {}, + shared_libs: SharedLibraries = SharedLibraries(libraries = []), can_be_asset: bool = True, include_in_android_mergemap: bool = True, linker_flags: [LinkerFlags, None] = None, diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 93436837e..cf4f9cab3 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -497,7 +497,7 @@ def _convert_python_library_to_executable( dest: (omnibus_libs.roots[label].shared_library, label) for dest, (_, label) in extensions.items() } - native_libs = omnibus_libs.libraries + native_libs = {shlib.soname: shlib.lib for shlib in omnibus_libs.libraries} omnibus_providers = [] diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 72b05f635..2364a5b6d 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -40,7 +40,6 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", "merge_shared_libraries", "traverse_shared_library_info", ) @@ -179,10 +178,7 @@ def _rust_binary_common( # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - shared_libs.extend([ - SharedLibrary(soname = name, lib = lib, label = ctx.label) - for name, lib in link_group_lib.shared_libs.items() - ]) + shared_libs.extend(link_group_lib.shared_libs.libraries) # link groups shared libraries link args are directly added to the link command, # we don't have to add them here diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 885286b4b..89917b34c 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -851,9 +851,10 @@ def _native_providers( ) # Native shared library provider. + shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, inherited_shlibs, )) @@ -890,7 +891,7 @@ def _native_providers( deps = inherited_link_graphs, exported_deps = inherited_exported_deps, link_infos = link_infos, - shared_libs = solibs, + shared_libs = shared_libs, default_soname = shlib_name, include_in_android_mergemap = False, ), From b756b503e2b9b9617ff92dad3d220db43534e556 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Fri, 22 Mar 2024 10:35:23 -0700 Subject: [PATCH 0545/1133] Defer resolving shared lib sonames Summary: This pushes shared lib SONAME resolution down into Python package handling, to where the SONAMEs are actually used. This will facilitate later changes to support libraries with SONAMEs which aren't known at analysis time. 
Reviewed By: igorsugak Differential Revision: D54987032 fbshipit-source-id: d1015cfd71fd2830987fd1054f775bb9df87f5de --- prelude/cxx/link_groups.bzl | 15 ++-- prelude/python/make_py_package.bzl | 54 ++++++++++----- prelude/python/python_binary.bzl | 108 +++++++++++++++++------------ 3 files changed, 112 insertions(+), 65 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 86661ab90..b979b65e2 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -47,7 +47,8 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "create_shared_libraries", + "SharedLibraries", + "SharedLibrary", ) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") @@ -140,6 +141,7 @@ LinkGroupLibSpec = record( root = field([LinkableRootInfo, None], None), # The link group to link. group = field(Group), + label = field(Label | None, None), ) _LinkedLinkGroup = record( @@ -878,9 +880,14 @@ def create_link_groups( linked_link_groups[link_group_spec.group.name] = _LinkedLinkGroup( artifact = link_group_lib, library = None if not link_group_spec.is_shared_lib else LinkGroupLib( - shared_libs = create_shared_libraries( - ctx = ctx, - libraries = {link_group_spec.name: link_group_lib}, + shared_libs = SharedLibraries( + libraries = [ + SharedLibrary( + label = link_group_spec.label or ctx.label, + soname = link_group_spec.name, + lib = link_group_lib, + ), + ], ), shared_link_infos = LinkInfos( default = wrap_link_info( diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 141b70ce3..4c97530ac 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -12,13 +12,14 @@ execution load("@prelude//:artifact_tset.bzl", "project_artifacts") load("@prelude//:local_only.bzl", "package_python_locally") +load("@prelude//:paths.bzl", "paths") load( "@prelude//cxx:cxx_library_utility.bzl", "cxx_is_gnu", ) load( - 
"@prelude//linking:link_info.bzl", - "LinkedObject", # @unused Used as a type + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", # @unused Used as a type ) load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//utils:arglike.bzl", "ArgLike") @@ -117,7 +118,7 @@ def make_py_package( package_style: PackageStyle, build_args: list[ArgLike], pex_modules: PexModules, - shared_libraries: dict[str, (LinkedObject, bool)], + shared_libraries: list[(str, SharedLibrary, bool)], main: EntryPoint, hidden_resources: list[ArgLike] | None, allow_cache_upload: bool, @@ -144,14 +145,21 @@ def make_py_package( if pex_modules.extensions: srcs.append(pex_modules.extensions.manifest) - preload_libraries = _preload_libraries_args(ctx, shared_libraries) + preload_libraries = _preload_libraries_args( + ctx = ctx, + shared_libraries = [ + (libdir, shlib) + for libdir, shlib, preload in shared_libraries + if preload + ], + ) startup_function = generate_startup_function_loader(ctx) manifest_module = generate_manifest_module(ctx, python_toolchain, srcs) common_modules_args, dep_artifacts, debug_artifacts = _pex_modules_common_args( ctx, pex_modules, [startup_function] if startup_function else [], - {name: lib for name, (lib, _) in shared_libraries.items()}, + [(shlib, libdir) for libdir, shlib, _ in shared_libraries], debuginfo_files = debuginfo_files, ) @@ -337,13 +345,12 @@ def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[ArgLike]) - out = ctx.actions.write_json("debuginfo.manifest.json", debug_artifacts) return [DefaultInfo(default_output = out, other_outputs = debug_artifacts)] -def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: dict[str, (LinkedObject, bool)]) -> cmd_args: +def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: list[(str, SharedLibrary)]) -> cmd_args: preload_libraries_path = ctx.actions.write( "__preload_libraries.txt", cmd_args([ - "--preload={}".format(name) - for name, (_, preload) in 
shared_libraries.items() - if preload + "--preload={}".format(paths.join(libdir, shlib.soname)) + for libdir, shlib in shared_libraries ]), ) return cmd_args(preload_libraries_path, format = "@{}") @@ -389,7 +396,7 @@ def _pex_modules_common_args( ctx: AnalysisContext, pex_modules: PexModules, extra_manifests: list[ArgLike], - shared_libraries: dict[str, LinkedObject], + shared_libraries: list[(SharedLibrary, str)], debuginfo_files: list[Artifact]) -> (cmd_args, list[ArgLike], list[ArgLike]): srcs = [] src_artifacts = [] @@ -423,14 +430,17 @@ def _pex_modules_common_args( _srcs(resources, format = "--resource-manifest={}"), ) - native_libraries = [s.output for s in shared_libraries.values()] + native_libraries = [shlib.lib.output for shlib, _ in shared_libraries] native_library_srcs_path = ctx.actions.write( "__native_libraries___srcs.txt", _srcs(native_libraries, format = "--native-library-src={}"), ) native_library_dests_path = ctx.actions.write( "__native_libraries___dests.txt", - ["--native-library-dest={}".format(lib) for lib in shared_libraries], + [ + "--native-library-dest={}".format(paths.join(libdir, shlib.soname)) + for shlib, libdir in shared_libraries + ], ) src_manifest_args = cmd_args(src_manifests_path).hidden(srcs) @@ -455,9 +465,17 @@ def _pex_modules_common_args( if ctx.attrs.package_split_dwarf_dwp: if ctx.attrs.strip_libpar == "extract" and get_package_style(ctx) == PackageStyle("standalone") and cxx_is_gnu(ctx): # rename to match extracted debuginfo package - dwp = [(s.dwp, "{}.debuginfo.dwp".format(n)) for n, s in shared_libraries.items() if s.dwp != None] + dwp = [ + (shlib.lib.dwp, paths.join(libdir, "{}.debuginfo.dwp".format(shlib.soname))) + for shlib, libdir in shared_libraries + if shlib.lib.dwp != None + ] else: - dwp = [(s.dwp, "{}.dwp".format(n)) for n, s in shared_libraries.items() if s.dwp != None] + dwp = [ + (shlib.lib.dwp, paths.join(libdir, "{}.dwp".format(shlib.soname))) + for shlib, libdir in shared_libraries + if 
shlib.lib.dwp != None + ] dwp_srcs_path = ctx.actions.write( "__dwp___srcs.txt", _srcs([src for src, _ in dwp], format = "--dwp-src={}"), @@ -472,11 +490,15 @@ def _pex_modules_common_args( debug_artifacts.extend([d for d, _ in dwp]) - deps.extend([lib.output for lib in shared_libraries.values()]) + for shlib, _ in shared_libraries: + deps.append(shlib.lib.output) external_debug_info = project_artifacts( ctx.actions, - [lib.external_debug_info for lib in shared_libraries.values()], + [ + shlib.lib.external_debug_info + for shlib, _ in shared_libraries + ], ) # HACK: external_debug_info has an empty path diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index cf4f9cab3..5b56a7ba5 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -71,7 +71,12 @@ load( "LinkableProviders", # @unused Used as a type "linkables", ) -load("@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", + "merge_shared_libraries", + "traverse_shared_library_info", +) load("@prelude//linking:strip.bzl", "strip_debug_with_gnu_debuglink") load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:utils.bzl", "flatten", "value_or") @@ -164,6 +169,7 @@ def _get_root_link_group_specs( name = dep.linkable_root_info.name, is_shared_lib = True, root = dep.linkable_root_info, + label = dep.linkable_graph.nodes.value.label, group = Group( name = dep.linkable_root_info.name, mappings = [ @@ -205,15 +211,6 @@ def _get_root_link_group_specs( return specs -def _split_debuginfo(ctx, data: dict[str, (typing.Any, Label | bool)]) -> (dict[str, (LinkedObject, Label | bool)], list[Artifact]): - debuginfo_artifacts = [] - transformed = {} - for name, (artifact, extra) in data.items(): - stripped_binary, debuginfo = strip_debug_with_gnu_debuglink(ctx, name, artifact.unstripped_output) - transformed[name] = LinkedObject(output = 
stripped_binary, unstripped_output = artifact.unstripped_output, dwp = artifact.dwp), extra - debuginfo_artifacts.append(debuginfo) - return transformed, debuginfo_artifacts - def _get_shared_only_groups(shared_only_libs: list[LinkableProviders]) -> list[Group]: """ Create link group mappings for shared-only libs that'll force the link to @@ -454,11 +451,6 @@ def _convert_python_library_to_executable( # Convert preloaded deps to a set of their names to be loaded by. preload_labels = {d.label: None for d in ctx.attrs.preload_deps} - preload_names = { - shared_lib.soname: None - for shared_lib in library.shared_libraries() - if shared_lib.label in preload_labels - } extensions = {} extra_artifacts = {} @@ -497,7 +489,7 @@ def _convert_python_library_to_executable( dest: (omnibus_libs.roots[label].shared_library, label) for dest, (_, label) in extensions.items() } - native_libs = {shlib.soname: shlib.lib for shlib in omnibus_libs.libraries} + shared_libs = [("", shlib) for shlib in omnibus_libs.libraries] omnibus_providers = [] @@ -644,23 +636,29 @@ def _convert_python_library_to_executable( # Put native libraries into the runtime location, as we need to unpack # potentially all of them before startup. 
- native_libs = { - paths.join("runtime", "lib", shlib.soname): shlib.lib - for shlib in executable_info.shared_libs - } - preload_names = [paths.join("runtime", "lib", n) for n in preload_names] + shared_libs = [("runtime/lib", s) for s in executable_info.shared_libs] # TODO expect(len(executable_info.runtime_files) == 0, "OH NO THERE ARE RUNTIME FILES") extra_artifacts.update(dict(extension_info.artifacts)) - native_libs["runtime/bin/{}".format(ctx.attrs.executable_name)] = LinkedObject( - output = executable_info.binary, - unstripped_output = executable_info.binary, - dwp = executable_info.dwp, - ) + shared_libs.append(( + "runtime/bin", + SharedLibrary( + soname = ctx.attrs.executable_name, + label = ctx.label, + lib = LinkedObject( + output = executable_info.binary, + unstripped_output = executable_info.binary, + dwp = executable_info.dwp, + ), + ), + )) extra_artifacts["static_extension_finder.py"] = ctx.attrs.static_extension_finder else: - native_libs = {shared_lib.soname: shared_lib.lib for shared_lib in library.shared_libraries()} + shared_libs = [ + ("", shared_lib) + for shared_lib in library.shared_libraries() + ] if dbg_source_db: extra_artifacts["dbg-db.json"] = dbg_source_db.default_outputs[0] @@ -670,23 +668,43 @@ def _convert_python_library_to_executable( extra_manifests = create_manifest_for_source_map(ctx, "extra_manifests", extra_artifacts) - shared_libraries = {} - # Create the map of native libraries to their artifacts and whether they # need to be preloaded. Note that we merge preload deps into regular deps # above, before gathering up all native libraries, so we're guaranteed to # have all preload libraries (and their transitive deps) here. 
- for name, lib in native_libs.items(): - shared_libraries[name] = lib, name in preload_names + shared_libs = [ + (libdir, shlib, shlib.label in preload_labels) + for libdir, shlib in shared_libs + ] # Strip native libraries and extensions and update the .gnu_debuglink references if we are extracting # debug symbols from the par debuginfo_files = [] if ctx.attrs.strip_libpar == "extract" and package_style == PackageStyle("standalone") and cxx_is_gnu(ctx): - shared_libraries, library_debuginfo = _split_debuginfo(ctx, shared_libraries) - extensions, extension_debuginfo = _split_debuginfo(ctx, extensions) - debuginfo_files += library_debuginfo - debuginfo_files += extension_debuginfo + for shlib in shared_libs: + stripped, debuginfo = strip_debug_with_gnu_debuglink( + ctx = ctx, + name = shlib.lib.unstripped_output.basename, + obj = shlib.lib.unstripped_output, + ) + shlib[1] = LinkedObject( + output = stripped, + unstripped_output = shlib.lib.unstripped_output, + dwp = shlib.lib.dwp, + ) + debuginfo_files.append(debuginfo) + for name, extension in extensions.items(): + stripped, debuginfo = strip_debug_with_gnu_debuglink( + ctx = ctx, + name = extension.unstripped_output.basename, + obj = extension.unstripped_output, + ) + extensions[name] = LinkedObject( + output = stripped, + unstripped_output = extension.unstripped_output, + dwp = extension.dwp, + ) + debuginfo_files.append(debuginfo) # Combine sources and extensions into a map of all modules. pex_modules = PexModules( @@ -704,16 +722,16 @@ def _convert_python_library_to_executable( # Build the PEX. 
pex = make_py_package( - ctx, - python_toolchain, - ctx.attrs.make_py_package[RunInfo] if ctx.attrs.make_py_package != None else None, - package_style, - ctx.attrs.build_args, - pex_modules, - shared_libraries, - main, - hidden_resources, - allow_cache_upload, + ctx = ctx, + python_toolchain = python_toolchain, + make_py_package_cmd = ctx.attrs.make_py_package[RunInfo] if ctx.attrs.make_py_package != None else None, + package_style = package_style, + build_args = ctx.attrs.build_args, + pex_modules = pex_modules, + shared_libraries = shared_libs, + main = main, + hidden_resources = hidden_resources, + allow_cache_upload = allow_cache_upload, debuginfo_files = debuginfo_files, ) From 3f597c54ad7945b566b6f1db5cf9df936e4b9b7c Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Fri, 22 Mar 2024 10:35:23 -0700 Subject: [PATCH 0546/1133] Make shared lib soname opaque Summary: Use a new, opaque type for the`soname` field on `SharedLibrary`, which can hold either a static `str` or an `Artifact` that contains a build-time generated SONAME, and updated users that require a static SONAME to enumerate this clearly in the code. This will allow migrating some rules to support SONAMEs which aren't known at analysis time (see D55000520). 
Reviewed By: shayne-fletcher, igorsugak Differential Revision: D54989682 fbshipit-source-id: ef3a6b613237d28523f088f0c054e5e34732d6c7 --- .../android_binary_native_library_rules.bzl | 25 ++-- prelude/android/voltron.bzl | 2 +- prelude/cxx/cxx_executable.bzl | 25 +++- prelude/cxx/link_groups.bzl | 4 +- prelude/cxx/omnibus.bzl | 7 +- prelude/haskell/haskell_ghci.bzl | 17 +-- prelude/java/java_binary.bzl | 2 +- prelude/julia/julia_binary.bzl | 2 +- prelude/linking/shared_libraries.bzl | 107 ++++++++++++++++-- prelude/python/make_py_package.bzl | 8 +- prelude/python/python_binary.bzl | 10 +- prelude/rust/rust_binary.bzl | 23 +++- 12 files changed, 180 insertions(+), 52 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index f6b71a7e6..73120a3a3 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -54,10 +54,11 @@ load( "@prelude//linking:shared_libraries.bzl", "SharedLibrary", # @unused Used as a type "SharedLibraryInfo", # @unused Used as a type + "create_shlib", "get_strip_non_global_flags", "merge_shared_libraries", "traverse_shared_library_info", - "with_unique_sonames", + "with_unique_str_sonames", ) load("@prelude//linking:strip.bzl", "strip_object") load("@prelude//linking:types.bzl", "Linkage") @@ -872,7 +873,7 @@ def get_default_shared_libs(ctx: AnalysisContext, deps: list[Dependency], shared ) return { soname: shared_lib - for soname, shared_lib in with_unique_sonames(traverse_shared_library_info(shared_library_info)).items() + for soname, shared_lib in with_unique_str_sonames(traverse_shared_library_info(shared_library_info)).items() if not (shared_libraries_to_exclude and shared_libraries_to_exclude.contains(shared_lib.label.raw_target())) } @@ -1064,10 +1065,10 @@ def _shared_lib_for_prebuilt_shared( ) shlib = node_data.shared_libs.libraries[0] - soname = shlib.soname + soname = 
shlib.soname.ensure_str() shlib = shlib.lib output_path = _platform_output_path(soname, platform) - return SharedLibrary( + return create_shlib( lib = shlib, stripped_lib = strip_lib(ctx, cxx_toolchain, shlib.output, output_path), link_args = None, @@ -1248,8 +1249,8 @@ def _get_merged_linkables_for_platform( # exported linker flags for shared libs are in their linkinfo itself and are not exported from dependents exported_linker_flags = None, ) - group_shared_libs[shared_lib.soname] = MergedSharedLibrary( - soname = shared_lib.soname, + group_shared_libs[shared_lib.soname.ensure_str()] = MergedSharedLibrary( + soname = shared_lib.soname.ensure_str(), lib = shared_lib, apk_module = group_data.apk_module, solib_constituents = [], @@ -1272,7 +1273,7 @@ def _get_merged_linkables_for_platform( expect(target_to_link_group[key] == group) node = linkable_nodes[key] - default_solibs = list([shlib.soname for shlib in node.shared_libs.libraries]) + default_solibs = list([shlib.soname.ensure_str() for shlib in node.shared_libs.libraries]) if not default_solibs and node.preferred_linkage == Linkage("static"): default_solibs = [node.default_soname] @@ -1328,7 +1329,7 @@ def _get_merged_linkables_for_platform( soname = soname, link_args = link_args, cxx_toolchain = cxx_toolchain, - shared_lib_deps = [link_group_linkable_nodes[label].shared_lib.soname for label in shlib_deps], + shared_lib_deps = [link_group_linkable_nodes[label].shared_lib.soname.ensure_str() for label in shlib_deps], label = group_data.constituents[0], can_be_asset = can_be_asset, ) @@ -1473,16 +1474,16 @@ def _create_relinkable_links( can_be_asset = node.can_be_asset, ) shared_lib_overrides[target] = LinkInfo( - name = shared_lib.soname, + name = shared_lib.soname.ensure_str(), pre_flags = node.linker_flags.exported_flags, linkables = [SharedLibLinkable( lib = shared_lib.lib.output, )], post_flags = node.linker_flags.exported_post_flags, ) - shared_libs[shared_lib.soname] = shared_lib + 
shared_libs[shared_lib.soname.ensure_str()] = shared_lib - return {lib.soname: lib for lib in shared_libs.values()}, debug_link_deps + return {lib.soname.ensure_str(): lib for lib in shared_libs.values()}, debug_link_deps # To support migration from a tset-based link strategy, we are trying to match buck's internal tset # traversal logic here. Look for implementation of TopologicalTransitiveSetIteratorGen @@ -1805,7 +1806,7 @@ def create_shared_lib( ) shlib = link_result.linked_object - return SharedLibrary( + return create_shlib( lib = shlib, stripped_lib = strip_lib(ctx, cxx_toolchain, shlib.output), shlib_deps = shared_lib_deps, diff --git a/prelude/android/voltron.bzl b/prelude/android/voltron.bzl index 288194878..aca1242d8 100644 --- a/prelude/android/voltron.bzl +++ b/prelude/android/voltron.bzl @@ -86,7 +86,7 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: ]).hidden(targets_to_jars_args) if ctx.attrs.should_include_libraries: - targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), shared_lib.soname], delimiter = " ") for shared_lib in traversed_shared_library_info] + targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), shared_lib.soname.ensure_str()], delimiter = " ") for shared_lib in traversed_shared_library_info] targets_to_so_names = ctx.actions.write("targets_to_so_names.txt", targets_to_so_names_args) cmd.add([ "--targets-to-so-names", diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 066597c67..52ce5c115 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -536,23 +536,40 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, if shlib.lib.dwp ], )] + + # TODO(agallagher) There appears to be pre-existing soname conflicts + # when building this (when using link groups), which prevents using + # `with_unique_str_sonames`. 
+ str_soname_shlibs = { + shlib.soname.ensure_str(): shlib + for shlib in shared_libs + if shlib.soname.is_str() + } sub_targets["shared-libraries"] = [DefaultInfo( default_output = ctx.actions.write_json( binary.output.basename + ".shared-libraries.json", { - "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs], - "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs if shlib.lib.dwp], + "libraries": [ + "{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, soname) + for soname in str_soname_shlibs + ], + "librariesdwp": [ + "{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, soname) + for soname, shlib in str_soname_shlibs.items() + if shlib.lib.dwp + ], "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if shared_libs_symlink_tree else [], }, ), sub_targets = { - shlib.soname: [DefaultInfo( + soname: [DefaultInfo( default_output = shlib.lib.output, sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, )] - for shlib in shared_libs + for soname, shlib in str_soname_shlibs.items() }, )] + if link_group_mappings: readable_mappings = {} for node, group in link_group_mappings.items(): diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index b979b65e2..d605d48fe 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -48,7 +48,7 @@ load( load( "@prelude//linking:shared_libraries.bzl", "SharedLibraries", - "SharedLibrary", + "create_shlib", ) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") @@ -882,7 +882,7 @@ def create_link_groups( library = None if not link_group_spec.is_shared_lib else LinkGroupLib( shared_libs = SharedLibraries( libraries = [ - SharedLibrary( + create_shlib( label = link_group_spec.label or ctx.label, soname = 
link_group_spec.name, lib = link_group_lib, diff --git a/prelude/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl index 283ad6239..5dc708615 100644 --- a/prelude/cxx/omnibus.bzl +++ b/prelude/cxx/omnibus.bzl @@ -39,7 +39,8 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", + "SharedLibrary", # @unused Used as a type + "create_shlib", ) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:expect.bzl", "expect") @@ -709,7 +710,7 @@ def create_omnibus_libraries( ) if root.name != None: libraries.append( - SharedLibrary( + create_shlib( soname = root.name, lib = product.shared_library, label = label, @@ -730,7 +731,7 @@ def create_omnibus_libraries( allow_cache_upload = True, ) libraries.append( - SharedLibrary( + create_shlib( soname = _omnibus_soname(ctx), lib = omnibus.linked_object, label = ctx.label, diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index d65330f43..1ffa39632 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -48,8 +48,9 @@ load( load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", + "create_shlib_symlink_tree", "traverse_shared_library_info", - "with_unique_sonames", + "with_unique_str_sonames", ) load("@prelude//linking:types.bzl", "Linkage") load( @@ -272,7 +273,7 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: # Handle third-party dependencies of the omnibus SO tp_deps_shared_link_infos = {} - so_symlinks = {} + prebuilt_shlibs = [] for node_label in prebuilt_so_deps.keys(): node = graph_nodes[node_label] @@ -286,14 +287,14 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: shared_li = node.link_infos.get(output_style, None) if shared_li != None: tp_deps_shared_link_infos[node_label] = shared_li.default - for shlib in node.shared_libs.libraries: - so_symlinks[shlib.soname] = shlib.lib.output + prebuilt_shlibs.extend(node.shared_libs.libraries) # Create symlinks to the 
TP dependencies' SOs so_symlinks_root_path = ctx.label.name + ".so-symlinks" - so_symlinks_root = ctx.actions.symlinked_dir( - so_symlinks_root_path, - so_symlinks, + so_symlinks_root = create_shlib_symlink_tree( + actions = ctx.actions, + out = so_symlinks_root_path, + shared_libs = prebuilt_shlibs, ) linker_info = get_cxx_toolchain_info(ctx).linker_info @@ -483,7 +484,7 @@ def _build_preload_deps_root( shlib = traverse_shared_library_info(slib_info) - for soname, shared_lib in with_unique_sonames(shlib).items(): + for soname, shared_lib in with_unique_str_sonames(shlib).items(): preload_symlinks[soname] = shared_lib.lib.output # TODO(T150785851): build or get SO for direct preload_deps diff --git a/prelude/java/java_binary.bzl b/prelude/java/java_binary.bzl index d11069b35..abd32259e 100644 --- a/prelude/java/java_binary.bzl +++ b/prelude/java/java_binary.bzl @@ -55,7 +55,7 @@ def _create_fat_jar( ) args += [ "--native_libs_file", - ctx.actions.write("native_libs", [cmd_args([native_lib.soname, native_lib.lib.output], delimiter = " ") for native_lib in native_libs]), + ctx.actions.write("native_libs", [cmd_args([native_lib.soname.ensure_str(), native_lib.lib.output], delimiter = " ") for native_lib in native_libs]), ] if do_not_create_inner_jar: args += [ diff --git a/prelude/julia/julia_binary.bzl b/prelude/julia/julia_binary.bzl index 0aeb2a016..ea649408c 100644 --- a/prelude/julia/julia_binary.bzl +++ b/prelude/julia/julia_binary.bzl @@ -58,7 +58,7 @@ def build_jll_shlibs_mapping(ctx: AnalysisContext, json_info_file: Artifact): shared_libs = shlibs, ) - shlib_label_to_soname = {shlib.label: shlib.soname for shlib in shlibs} + shlib_label_to_soname = {shlib.label: shlib.soname.ensure_str() for shlib in shlibs} # iterate through all the jll libraries json_info = [] diff --git a/prelude/linking/shared_libraries.bzl b/prelude/linking/shared_libraries.bzl index a93310fa8..f9ba6ffeb 100644 --- a/prelude/linking/shared_libraries.bzl +++ 
b/prelude/linking/shared_libraries.bzl @@ -12,6 +12,18 @@ load( "LinkedObject", # @unused Used as a type ) load("@prelude//linking:strip.bzl", "strip_object") +load("@prelude//utils:expect.bzl", "expect") + +_Soname = record( + # Return the SONAME as a string, throwing an error if it is actually an + # artifact. + ensure_str = field(typing.Callable), + # Return `True` if the SONAME is respresented as a string. + is_str = field(typing.Callable), + # The the actual SONAME can be rerepsented by a static string, or the + # contents of a file genrated at build time. + _soname = field(str | Artifact), +) SharedLibrary = record( lib = field(LinkedObject), @@ -26,10 +38,31 @@ SharedLibrary = record( stripped_lib = field(Artifact | None, None), can_be_asset = field(bool, False), for_primary_apk = field(bool, False), - soname = field(str), + soname = field(_Soname), label = field(Label), ) +def _ensure_str(soname: str | Artifact) -> str: + expect(type(soname) == type(""), "SONAME is not a `str`: {}", soname) + return soname + +def _soname(soname: str | Artifact) -> _Soname: + return _Soname( + ensure_str = lambda: _ensure_str(soname), + is_str = lambda: type(soname) == type(""), + _soname = soname, + ) + +def create_shlib( + # The soname can either be a string or an artifact with the soname in + # text form. + soname: str | Artifact, + **kwargs): + return SharedLibrary( + soname = _soname(soname), + **kwargs + ) + SharedLibraries = record( # A mapping of shared library SONAME (e.g. `libfoo.so.2`) to the artifact. 
# Since the SONAME is what the dynamic loader uses to uniquely identify @@ -62,7 +95,7 @@ def create_shared_libraries( """ cxx_toolchain = getattr(ctx.attrs, "_cxx_toolchain", None) return SharedLibraries( - libraries = [SharedLibrary( + libraries = [create_shlib( lib = shlib, stripped_lib = strip_object( ctx, @@ -132,7 +165,9 @@ def _merge_shlibs( merged[soname] = shlib return merged -def with_unique_sonames(shared_libs: list[SharedLibrary]) -> dict[str, SharedLibrary]: +def with_unique_str_sonames( + shared_libs: list[SharedLibrary], + skip_dynamic: bool = False) -> dict[str, SharedLibrary]: """ Convert a list of `SharedLibrary`s to a map of unique SONAMEs to the corresponding `SharedLibrary`. @@ -140,17 +175,71 @@ def with_unique_sonames(shared_libs: list[SharedLibrary]) -> dict[str, SharedLib Will fail if the same SONAME maps to multiple `SharedLibrary`s. """ return _merge_shlibs( - shared_libs = shared_libs, - resolve_soname = lambda s: s, + shared_libs = [ + shlib + for shlib in shared_libs + if shlib.soname.is_str() or not skip_dynamic + ], + resolve_soname = lambda s: s.ensure_str(), ) +def gen_shared_libs_action( + actions: AnalysisActions, + out: str, + shared_libs: list[SharedLibrary], + gen_action: typing.Callable, + dir = False): + """ + Produce an action by first resolving all SONAME of the given shlibs and + enforcing that each SONAME is unique. + + The provided `gen_action` callable is called with a map of unique SONAMEs + to the corresponding shlibs. 
+ """ + + output = actions.declare_output(out, dir = dir) + + def func(actions, artifacts, output): + def resolve_soname(soname): + if soname.is_str(): + return soname._soname + else: + return artifacts[soname._soname].read_string().strip() + + gen_action( + actions, + output, + _merge_shlibs( + shared_libs = shared_libs, + resolve_soname = resolve_soname, + ), + ) + + dynamic_sonames = [shlib.soname._soname for shlib in shared_libs if not shlib.soname.is_str()] + if dynamic_sonames: + actions.dynamic_output( + dynamic = [shlib.soname._soname for shlib in shared_libs if not shlib.soname.is_str()], + inputs = [], + outputs = [output], + f = lambda ctx, artifacts, outputs: func(ctx.actions, artifacts, outputs[output]), + ) + else: + func(actions, {}, output) + + return output + def create_shlib_symlink_tree(actions: AnalysisActions, out: str, shared_libs: list[SharedLibrary]): """ Merged shared libs into a symlink tree mapping the library's SONAME to it's artifact. """ - merged = with_unique_sonames(shared_libs = shared_libs) - return actions.symlinked_dir( - out, - {name: shlib.lib.output for name, shlib in merged.items()}, + return gen_shared_libs_action( + actions = actions, + out = out, + shared_libs = shared_libs, + gen_action = lambda actions, output, shared_libs: actions.symlinked_dir( + output, + {name: shlib.lib.output for name, shlib in shared_libs.items()}, + ), + dir = True, ) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 4c97530ac..ad8334917 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -349,7 +349,7 @@ def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: list[(str, S preload_libraries_path = ctx.actions.write( "__preload_libraries.txt", cmd_args([ - "--preload={}".format(paths.join(libdir, shlib.soname)) + "--preload={}".format(paths.join(libdir, shlib.soname.ensure_str())) for libdir, shlib in shared_libraries ]), ) @@ -438,7 +438,7 @@ def 
_pex_modules_common_args( native_library_dests_path = ctx.actions.write( "__native_libraries___dests.txt", [ - "--native-library-dest={}".format(paths.join(libdir, shlib.soname)) + "--native-library-dest={}".format(paths.join(libdir, shlib.soname.ensure_str())) for shlib, libdir in shared_libraries ], ) @@ -466,13 +466,13 @@ def _pex_modules_common_args( if ctx.attrs.strip_libpar == "extract" and get_package_style(ctx) == PackageStyle("standalone") and cxx_is_gnu(ctx): # rename to match extracted debuginfo package dwp = [ - (shlib.lib.dwp, paths.join(libdir, "{}.debuginfo.dwp".format(shlib.soname))) + (shlib.lib.dwp, paths.join(libdir, "{}.debuginfo.dwp".format(shlib.soname.ensure_str()))) for shlib, libdir in shared_libraries if shlib.lib.dwp != None ] else: dwp = [ - (shlib.lib.dwp, paths.join(libdir, "{}.dwp".format(shlib.soname))) + (shlib.lib.dwp, paths.join(libdir, "{}.dwp".format(shlib.soname.ensure_str()))) for shlib, libdir in shared_libraries if shlib.lib.dwp != None ] diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 5b56a7ba5..982fb1002 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -73,7 +73,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", + "create_shlib", "merge_shared_libraries", "traverse_shared_library_info", ) @@ -620,7 +620,11 @@ def _convert_python_library_to_executable( # Add sub-targets for libs. for shlib in executable_info.shared_libs: - extra[shlib.soname] = [DefaultInfo(default_output = shlib.lib.output)] + # TODO(agallagher) There appears to be pre-existing soname conflicts + # when building this (when using link groups), which prevents using + # `with_unique_str_sonames`. 
+ if shlib.soname.is_str(): + extra[shlib.soname.ensure_str()] = [DefaultInfo(default_output = shlib.lib.output)] for name, group in executable_info.auto_link_groups.items(): extra[name] = [DefaultInfo(default_output = group.output)] @@ -642,7 +646,7 @@ def _convert_python_library_to_executable( extra_artifacts.update(dict(extension_info.artifacts)) shared_libs.append(( "runtime/bin", - SharedLibrary( + create_shlib( soname = ctx.attrs.executable_name, label = ctx.label, lib = LinkedObject( diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 2364a5b6d..c623b11ea 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -233,21 +233,36 @@ def _rust_binary_common( sub_targets_for_link_strategy = {} + # TODO(agallagher) There appears to be pre-existing soname conflicts + # when building this (when using link groups), which prevents using + # `with_unique_str_sonames`. + str_soname_shlibs = { + shlib.soname.ensure_str(): shlib + for shlib in shared_libs + if shlib.soname.is_str() + } sub_targets_for_link_strategy["shared-libraries"] = [DefaultInfo( default_output = ctx.actions.write_json( name + ".shared-libraries.json", { - "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs], - "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs if shlib.lib.dwp], + "libraries": [ + "{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, soname) + for soname in str_soname_shlibs + ], + "librariesdwp": [ + "{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, soname) + for soname, shlib in str_soname_shlibs.items() + if shlib.lib.dwp + ], "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if executable_args.shared_libs_symlink_tree else [], }, ), sub_targets = { - shlib.soname: [DefaultInfo( + soname: [DefaultInfo( default_output = 
shlib.lib.output, sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, )] - for shlib in shared_libs + for soname, shlib in str_soname_shlibs.items() }, )] From c730a26c2325fd241b8f91e240e6a2489396a363 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Fri, 22 Mar 2024 10:35:23 -0700 Subject: [PATCH 0547/1133] Don't require `str` SONAMEs when packaging Summary: Switch to use `gen_shared_libs_action` when generating shared lib manifests, which support resolving both `str` and `Artifact` SONAMEs. Reviewed By: igorsugak Differential Revision: D55000520 fbshipit-source-id: a52b19c560c7a056595cef6c475a3baf7f93c3cd --- prelude/linking/shared_libraries.bzl | 23 +++++++ prelude/python/make_py_package.bzl | 90 +++++++++++++++++----------- 2 files changed, 77 insertions(+), 36 deletions(-) diff --git a/prelude/linking/shared_libraries.bzl b/prelude/linking/shared_libraries.bzl index f9ba6ffeb..a419c82b7 100644 --- a/prelude/linking/shared_libraries.bzl +++ b/prelude/linking/shared_libraries.bzl @@ -228,6 +228,29 @@ def gen_shared_libs_action( return output +def zip_shlibs( + merged: dict[str, SharedLibrary], + vals: list[(SharedLibrary, typing.Any)]) -> list[(str, SharedLibrary, typing.Any)]: + """ + Helper to "zip" together the soname->shlib map to a list with associated + shared lib values. + + This is useful for callers of `gen_shared_libs_action` to combine the merged + shared libs, in dedup'd dict form, with some additional data. 
+ """ + + zipped = [] + + # Walk through the shlib and val tuples + idx = 0 + for soname, shlib in merged.items(): + for idx in range(idx, len(vals)): + if vals[idx][0] == shlib: + break + zipped.append((soname, shlib, vals[idx][1])) + + return zipped + def create_shlib_symlink_tree(actions: AnalysisActions, out: str, shared_libs: list[SharedLibrary]): """ Merged shared libs into a symlink tree mapping the library's SONAME to diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index ad8334917..3ffa2ac28 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -20,6 +20,8 @@ load( load( "@prelude//linking:shared_libraries.bzl", "SharedLibrary", # @unused Used as a type + "gen_shared_libs_action", + "zip_shlibs", ) load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//utils:arglike.bzl", "ArgLike") @@ -148,7 +150,7 @@ def make_py_package( preload_libraries = _preload_libraries_args( ctx = ctx, shared_libraries = [ - (libdir, shlib) + (shlib, libdir) for libdir, shlib, preload in shared_libraries if preload ], @@ -345,13 +347,18 @@ def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[ArgLike]) - out = ctx.actions.write_json("debuginfo.manifest.json", debug_artifacts) return [DefaultInfo(default_output = out, other_outputs = debug_artifacts)] -def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: list[(str, SharedLibrary)]) -> cmd_args: - preload_libraries_path = ctx.actions.write( - "__preload_libraries.txt", - cmd_args([ - "--preload={}".format(paths.join(libdir, shlib.soname.ensure_str())) - for libdir, shlib in shared_libraries - ]), +def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: list[(SharedLibrary, str)]) -> cmd_args: + preload_libraries_path = gen_shared_libs_action( + actions = ctx.actions, + out = "__preload_libraries.txt", + shared_libs = [shlib for shlib, _ in shared_libraries], + gen_action = lambda actions, output, shared_libs: 
actions.write( + output, + [ + "--preload={}".format(paths.join(libdir, soname)) + for soname, _, libdir in zip_shlibs(shared_libs, shared_libraries) + ], + ), ) return cmd_args(preload_libraries_path, format = "@{}") @@ -430,17 +437,29 @@ def _pex_modules_common_args( _srcs(resources, format = "--resource-manifest={}"), ) - native_libraries = [shlib.lib.output for shlib, _ in shared_libraries] - native_library_srcs_path = ctx.actions.write( - "__native_libraries___srcs.txt", - _srcs(native_libraries, format = "--native-library-src={}"), + native_library_srcs_path = gen_shared_libs_action( + actions = ctx.actions, + out = "__native_libraries___srcs.txt", + shared_libs = [shlib for shlib, _ in shared_libraries], + gen_action = lambda actions, output, shared_libs: actions.write( + output, + _srcs( + [shlib.lib.output for shlib in shared_libs.values()], + format = "--native-library-src={}", + ), + ), ) - native_library_dests_path = ctx.actions.write( - "__native_libraries___dests.txt", - [ - "--native-library-dest={}".format(paths.join(libdir, shlib.soname.ensure_str())) - for shlib, libdir in shared_libraries - ], + native_library_dests_path = gen_shared_libs_action( + actions = ctx.actions, + out = "__native_libraries___dests.txt", + shared_libs = [shlib for shlib, _ in shared_libraries], + gen_action = lambda actions, output, shared_libs: actions.write( + output, + [ + "--native-library-dest={}".format(paths.join(libdir, soname)) + for soname, _, libdir in zip_shlibs(shared_libs, shared_libraries) + ], + ), ) src_manifest_args = cmd_args(src_manifests_path).hidden(srcs) @@ -463,32 +482,31 @@ def _pex_modules_common_args( debug_artifacts.extend(debuginfo_files) if ctx.attrs.package_split_dwarf_dwp: - if ctx.attrs.strip_libpar == "extract" and get_package_style(ctx) == PackageStyle("standalone") and cxx_is_gnu(ctx): - # rename to match extracted debuginfo package - dwp = [ - (shlib.lib.dwp, paths.join(libdir, "{}.debuginfo.dwp".format(shlib.soname.ensure_str()))) - 
for shlib, libdir in shared_libraries - if shlib.lib.dwp != None - ] - else: - dwp = [ - (shlib.lib.dwp, paths.join(libdir, "{}.dwp".format(shlib.soname.ensure_str()))) - for shlib, libdir in shared_libraries - if shlib.lib.dwp != None - ] dwp_srcs_path = ctx.actions.write( "__dwp___srcs.txt", - _srcs([src for src, _ in dwp], format = "--dwp-src={}"), + _srcs([shlib.lib.dwp for shlib, _ in shared_libraries if shlib.lib.dwp != None], format = "--dwp-src={}"), ) - dwp_dests_path = ctx.actions.write( - "__dwp___dests.txt", - _srcs([dest for _, dest in dwp], format = "--dwp-dest={}"), + if ctx.attrs.strip_libpar == "extract" and get_package_style(ctx) == PackageStyle("standalone") and cxx_is_gnu(ctx): + dwp_tmpl = "{}.debuginfo.dwp" + else: + dwp_tmpl = "{}.dwp" + dwp_dests_path = gen_shared_libs_action( + actions = ctx.actions, + out = "__dwp___dests.txt", + shared_libs = [shlib for shlib, _ in shared_libraries], + gen_action = lambda actions, output, shared_libs: actions.write( + output, + _srcs([ + paths.join(libdir, dwp_tmpl.format(soname)) + for soname, _, libdir in zip_shlibs(shared_libs, shared_libraries) + ]), + ), ) dwp_srcs_args = cmd_args(dwp_srcs_path) cmd.add(cmd_args(dwp_srcs_args, format = "@{}")) cmd.add(cmd_args(dwp_dests_path, format = "@{}")) - debug_artifacts.extend([d for d, _ in dwp]) + debug_artifacts.extend([shlib.lib.dwp for shlib, _ in shared_libraries if shlib.lib.dwp != None]) for shlib, _ in shared_libraries: deps.append(shlib.lib.output) From aa342de33ab242e15dbf3cb100f094cb59cf9d04 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Fri, 22 Mar 2024 11:14:38 -0700 Subject: [PATCH 0548/1133] Use some pure code instead of cmd_args.add Summary: The plan is to make all `cmd_args` calls pure, so it could be made immutable. 
Reviewed By: IanChilds Differential Revision: D55230411 fbshipit-source-id: 511bfe899abb7935fdfe0250c877e0576f0c8c1f --- prelude/zip_file/zip_file.bzl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/prelude/zip_file/zip_file.bzl b/prelude/zip_file/zip_file.bzl index 79b890215..9c98b55db 100644 --- a/prelude/zip_file/zip_file.bzl +++ b/prelude/zip_file/zip_file.bzl @@ -38,13 +38,13 @@ def zip_file_impl(ctx: AnalysisContext) -> list[Provider]: ]) if srcs: - srcs_file_cmd = cmd_args() - # add artifact and is_source flag pair - for src in srcs: - srcs_file_cmd.add(src) - srcs_file_cmd.add(src.short_path) - srcs_file_cmd.add(str(src.is_source)) + srcs_file_cmd = cmd_args( + [ + [src, src.short_path, str(src.is_source)] + for src in srcs + ], + ) entries_file = ctx.actions.write("entries", srcs_file_cmd) create_zip_cmd.add("--entries_file") From c2d5d7918d4da96514835593af6983d0fa6527b0 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Fri, 22 Mar 2024 11:14:38 -0700 Subject: [PATCH 0549/1133] Use some pure code instead of cmd_args.add Summary: Not that is matters much here, but this is also cheaper. 
Reviewed By: IanChilds Differential Revision: D55230412 fbshipit-source-id: e1b953aafebacef5faba25b680f99f1fe6e2a494 --- prelude/linking/strip.bzl | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/prelude/linking/strip.bzl b/prelude/linking/strip.bzl index 6db252413..28bbc2998 100644 --- a/prelude/linking/strip.bzl +++ b/prelude/linking/strip.bzl @@ -80,10 +80,13 @@ def strip_object(ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, unstripp stripped_lib = ctx.actions.declare_output("stripped/{}".format(output_path)) # TODO(T109996375) support configuring the flags used for stripping - cmd = cmd_args() - cmd.add(strip) - cmd.add(strip_flags) - cmd.add([unstripped, "-o", stripped_lib.as_output()]) + cmd = cmd_args( + strip, + strip_flags, + unstripped, + "-o", + stripped_lib.as_output(), + ) effective_category_suffix = category_suffix if category_suffix else "shared_lib" category = "strip_{}".format(effective_category_suffix) From 66974579b2b073b99878fa9b180b7cd040bdf6df Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Fri, 22 Mar 2024 11:14:38 -0700 Subject: [PATCH 0550/1133] Use some pure code instead of cmd_args.add Reviewed By: IanChilds Differential Revision: D55230413 fbshipit-source-id: 39351264ae771454d0d1420fd7480b4332c7ebfd --- prelude/cxx/windows_resource.bzl | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/prelude/cxx/windows_resource.bzl b/prelude/cxx/windows_resource.bzl index b144c2988..1be963028 100644 --- a/prelude/cxx/windows_resource.bzl +++ b/prelude/cxx/windows_resource.bzl @@ -20,10 +20,12 @@ def windows_resource_impl(ctx: AnalysisContext) -> list[Provider]: "__objects__", "{}.res".format(src.short_path), ) - rc_cmd = cmd_args(toolchain.rc_compiler_info.compiler) - rc_cmd.add(toolchain.rc_compiler_info.compiler_flags) - rc_cmd.add(cmd_args(rc_output.as_output(), format = "/fo{}")) - rc_cmd.add(src) + rc_cmd = cmd_args( + toolchain.rc_compiler_info.compiler, + 
toolchain.rc_compiler_info.compiler_flags, + cmd_args(rc_output.as_output(), format = "/fo{}"), + src, + ) ctx.actions.run( rc_cmd, @@ -34,10 +36,12 @@ def windows_resource_impl(ctx: AnalysisContext) -> list[Provider]: "__objects__", "{}.obj".format(src.short_path), ) - cvtres_cmd = cmd_args(toolchain.cvtres_compiler_info.compiler) - cvtres_cmd.add(toolchain.cvtres_compiler_info.compiler_flags) - cvtres_cmd.add(cmd_args(cvtres_output.as_output(), format = "/OUT:{}")) - cvtres_cmd.add(rc_output) + cvtres_cmd = cmd_args( + toolchain.cvtres_compiler_info.compiler, + toolchain.cvtres_compiler_info.compiler_flags, + cmd_args(cvtres_output.as_output(), format = "/OUT:{}"), + rc_output, + ) ctx.actions.run( cvtres_cmd, From 0b345e43268d3be31e3237457b00e4033ba5ce9d Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Fri, 22 Mar 2024 11:14:38 -0700 Subject: [PATCH 0551/1133] Use some pure code instead of cmd_args.add Reviewed By: IanChilds Differential Revision: D55230559 fbshipit-source-id: e087261bded541e568197b755b5a3685c84b83d0 --- prelude/cxx/cxx_toolchain.bzl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prelude/cxx/cxx_toolchain.bzl b/prelude/cxx/cxx_toolchain.bzl index dc2fbf894..a818b3336 100644 --- a/prelude/cxx/cxx_toolchain.bzl +++ b/prelude/cxx/cxx_toolchain.bzl @@ -30,7 +30,7 @@ def cxx_toolchain_impl(ctx): c_info = CCompilerInfo( compiler = c_compiler, compiler_type = ctx.attrs.c_compiler_type or ctx.attrs.compiler_type, - compiler_flags = cmd_args(ctx.attrs.c_compiler_flags).add(c_lto_flags), + compiler_flags = cmd_args(ctx.attrs.c_compiler_flags, c_lto_flags), preprocessor = c_compiler, preprocessor_flags = cmd_args(ctx.attrs.c_preprocessor_flags), dep_files_processor = ctx.attrs._dep_files_processor[RunInfo], @@ -40,7 +40,7 @@ def cxx_toolchain_impl(ctx): cxx_info = CxxCompilerInfo( compiler = cxx_compiler, compiler_type = ctx.attrs.cxx_compiler_type or ctx.attrs.compiler_type, - compiler_flags = 
cmd_args(ctx.attrs.cxx_compiler_flags).add(c_lto_flags), + compiler_flags = cmd_args(ctx.attrs.cxx_compiler_flags, c_lto_flags), preprocessor = cxx_compiler, preprocessor_flags = cmd_args(ctx.attrs.cxx_preprocessor_flags), dep_files_processor = ctx.attrs._dep_files_processor[RunInfo], @@ -102,7 +102,7 @@ def cxx_toolchain_impl(ctx): link_weight = ctx.attrs.link_weight, link_ordering = ctx.attrs.link_ordering, linker = ctx.attrs.linker[RunInfo], - linker_flags = cmd_args(ctx.attrs.linker_flags).add(c_lto_flags), + linker_flags = cmd_args(ctx.attrs.linker_flags, c_lto_flags), post_linker_flags = cmd_args(ctx.attrs.post_linker_flags), lto_mode = lto_mode, mk_shlib_intf = ctx.attrs.shared_library_interface_producer, From afe02f8d185d12c000e8f2a695002a7da8dcf83d Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Fri, 22 Mar 2024 11:14:38 -0700 Subject: [PATCH 0552/1133] Use some pure code instead of cmd_args.add Reviewed By: IanChilds Differential Revision: D55230557 fbshipit-source-id: 9b1c52cea9eddb197dbbb7533942a86c6dfa24fd --- prelude/cxx/cxx_bolt.bzl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/prelude/cxx/cxx_bolt.bzl b/prelude/cxx/cxx_bolt.bzl index 271ab5e80..b26ac90cb 100644 --- a/prelude/cxx/cxx_bolt.bzl +++ b/prelude/cxx/cxx_bolt.bzl @@ -21,11 +21,10 @@ def bolt(ctx: AnalysisContext, prebolt_output: Artifact, identifier: [str, None] if not bolt_msdk or not cxx_use_bolt(ctx): fail("Cannot use bolt if bolt_msdk is not available or bolt profile is not available") - args = cmd_args() # bolt command format: # {llvm_bolt} {input_bin} -o $OUT -data={fdata} {args} - args.add( + args = cmd_args( cmd_args(bolt_msdk, format = "{}/bin/llvm-bolt"), prebolt_output, "-o", From 0688cc9763ce73d53c07b9cea6a1dbecc25501b6 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Fri, 22 Mar 2024 12:35:42 -0700 Subject: [PATCH 0553/1133] Revert D55000520: Don't require `str` SONAMEs when packaging Differential Revision: D55000520 Original commit changeset: 
a52b19c560c7 Original Phabricator Diff: D55000520 fbshipit-source-id: cdb63a08fa7b803ee7151cfa455558941684332c --- prelude/linking/shared_libraries.bzl | 23 ------- prelude/python/make_py_package.bzl | 90 +++++++++++----------------- 2 files changed, 36 insertions(+), 77 deletions(-) diff --git a/prelude/linking/shared_libraries.bzl b/prelude/linking/shared_libraries.bzl index a419c82b7..f9ba6ffeb 100644 --- a/prelude/linking/shared_libraries.bzl +++ b/prelude/linking/shared_libraries.bzl @@ -228,29 +228,6 @@ def gen_shared_libs_action( return output -def zip_shlibs( - merged: dict[str, SharedLibrary], - vals: list[(SharedLibrary, typing.Any)]) -> list[(str, SharedLibrary, typing.Any)]: - """ - Helper to "zip" together the soname->shlib map to a list with associated - shared lib values. - - This is useful for callers of `gen_shared_libs_action` to combine the merged - shared libs, in dedup'd dict form, with some additional data. - """ - - zipped = [] - - # Walk through the shlib and val tuples - idx = 0 - for soname, shlib in merged.items(): - for idx in range(idx, len(vals)): - if vals[idx][0] == shlib: - break - zipped.append((soname, shlib, vals[idx][1])) - - return zipped - def create_shlib_symlink_tree(actions: AnalysisActions, out: str, shared_libs: list[SharedLibrary]): """ Merged shared libs into a symlink tree mapping the library's SONAME to diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 3ffa2ac28..ad8334917 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -20,8 +20,6 @@ load( load( "@prelude//linking:shared_libraries.bzl", "SharedLibrary", # @unused Used as a type - "gen_shared_libs_action", - "zip_shlibs", ) load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//utils:arglike.bzl", "ArgLike") @@ -150,7 +148,7 @@ def make_py_package( preload_libraries = _preload_libraries_args( ctx = ctx, shared_libraries = [ - (shlib, libdir) + (libdir, shlib) for libdir, 
shlib, preload in shared_libraries if preload ], @@ -347,18 +345,13 @@ def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[ArgLike]) - out = ctx.actions.write_json("debuginfo.manifest.json", debug_artifacts) return [DefaultInfo(default_output = out, other_outputs = debug_artifacts)] -def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: list[(SharedLibrary, str)]) -> cmd_args: - preload_libraries_path = gen_shared_libs_action( - actions = ctx.actions, - out = "__preload_libraries.txt", - shared_libs = [shlib for shlib, _ in shared_libraries], - gen_action = lambda actions, output, shared_libs: actions.write( - output, - [ - "--preload={}".format(paths.join(libdir, soname)) - for soname, _, libdir in zip_shlibs(shared_libs, shared_libraries) - ], - ), +def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: list[(str, SharedLibrary)]) -> cmd_args: + preload_libraries_path = ctx.actions.write( + "__preload_libraries.txt", + cmd_args([ + "--preload={}".format(paths.join(libdir, shlib.soname.ensure_str())) + for libdir, shlib in shared_libraries + ]), ) return cmd_args(preload_libraries_path, format = "@{}") @@ -437,29 +430,17 @@ def _pex_modules_common_args( _srcs(resources, format = "--resource-manifest={}"), ) - native_library_srcs_path = gen_shared_libs_action( - actions = ctx.actions, - out = "__native_libraries___srcs.txt", - shared_libs = [shlib for shlib, _ in shared_libraries], - gen_action = lambda actions, output, shared_libs: actions.write( - output, - _srcs( - [shlib.lib.output for shlib in shared_libs.values()], - format = "--native-library-src={}", - ), - ), + native_libraries = [shlib.lib.output for shlib, _ in shared_libraries] + native_library_srcs_path = ctx.actions.write( + "__native_libraries___srcs.txt", + _srcs(native_libraries, format = "--native-library-src={}"), ) - native_library_dests_path = gen_shared_libs_action( - actions = ctx.actions, - out = "__native_libraries___dests.txt", - shared_libs = 
[shlib for shlib, _ in shared_libraries], - gen_action = lambda actions, output, shared_libs: actions.write( - output, - [ - "--native-library-dest={}".format(paths.join(libdir, soname)) - for soname, _, libdir in zip_shlibs(shared_libs, shared_libraries) - ], - ), + native_library_dests_path = ctx.actions.write( + "__native_libraries___dests.txt", + [ + "--native-library-dest={}".format(paths.join(libdir, shlib.soname.ensure_str())) + for shlib, libdir in shared_libraries + ], ) src_manifest_args = cmd_args(src_manifests_path).hidden(srcs) @@ -482,31 +463,32 @@ def _pex_modules_common_args( debug_artifacts.extend(debuginfo_files) if ctx.attrs.package_split_dwarf_dwp: + if ctx.attrs.strip_libpar == "extract" and get_package_style(ctx) == PackageStyle("standalone") and cxx_is_gnu(ctx): + # rename to match extracted debuginfo package + dwp = [ + (shlib.lib.dwp, paths.join(libdir, "{}.debuginfo.dwp".format(shlib.soname.ensure_str()))) + for shlib, libdir in shared_libraries + if shlib.lib.dwp != None + ] + else: + dwp = [ + (shlib.lib.dwp, paths.join(libdir, "{}.dwp".format(shlib.soname.ensure_str()))) + for shlib, libdir in shared_libraries + if shlib.lib.dwp != None + ] dwp_srcs_path = ctx.actions.write( "__dwp___srcs.txt", - _srcs([shlib.lib.dwp for shlib, _ in shared_libraries if shlib.lib.dwp != None], format = "--dwp-src={}"), + _srcs([src for src, _ in dwp], format = "--dwp-src={}"), ) - if ctx.attrs.strip_libpar == "extract" and get_package_style(ctx) == PackageStyle("standalone") and cxx_is_gnu(ctx): - dwp_tmpl = "{}.debuginfo.dwp" - else: - dwp_tmpl = "{}.dwp" - dwp_dests_path = gen_shared_libs_action( - actions = ctx.actions, - out = "__dwp___dests.txt", - shared_libs = [shlib for shlib, _ in shared_libraries], - gen_action = lambda actions, output, shared_libs: actions.write( - output, - _srcs([ - paths.join(libdir, dwp_tmpl.format(soname)) - for soname, _, libdir in zip_shlibs(shared_libs, shared_libraries) - ]), - ), + dwp_dests_path = ctx.actions.write( 
+ "__dwp___dests.txt", + _srcs([dest for _, dest in dwp], format = "--dwp-dest={}"), ) dwp_srcs_args = cmd_args(dwp_srcs_path) cmd.add(cmd_args(dwp_srcs_args, format = "@{}")) cmd.add(cmd_args(dwp_dests_path, format = "@{}")) - debug_artifacts.extend([shlib.lib.dwp for shlib, _ in shared_libraries if shlib.lib.dwp != None]) + debug_artifacts.extend([d for d, _ in dwp]) for shlib, _ in shared_libraries: deps.append(shlib.lib.output) From be631d3e271ac7f8a0937f2e3a890923586a9857 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Fri, 22 Mar 2024 12:35:42 -0700 Subject: [PATCH 0554/1133] Revert D54989682: Make shared lib soname opaque Differential Revision: D54989682 Original commit changeset: ef3a6b613237 Original Phabricator Diff: D54989682 fbshipit-source-id: ca8bf15b6b9333f8c4a0d604cd8c3676b769131a --- .../android_binary_native_library_rules.bzl | 25 ++-- prelude/android/voltron.bzl | 2 +- prelude/cxx/cxx_executable.bzl | 25 +--- prelude/cxx/link_groups.bzl | 4 +- prelude/cxx/omnibus.bzl | 7 +- prelude/haskell/haskell_ghci.bzl | 17 ++- prelude/java/java_binary.bzl | 2 +- prelude/julia/julia_binary.bzl | 2 +- prelude/linking/shared_libraries.bzl | 107 ++---------------- prelude/python/make_py_package.bzl | 8 +- prelude/python/python_binary.bzl | 10 +- prelude/rust/rust_binary.bzl | 23 +--- 12 files changed, 52 insertions(+), 180 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 73120a3a3..f6b71a7e6 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -54,11 +54,10 @@ load( "@prelude//linking:shared_libraries.bzl", "SharedLibrary", # @unused Used as a type "SharedLibraryInfo", # @unused Used as a type - "create_shlib", "get_strip_non_global_flags", "merge_shared_libraries", "traverse_shared_library_info", - "with_unique_str_sonames", + "with_unique_sonames", ) 
load("@prelude//linking:strip.bzl", "strip_object") load("@prelude//linking:types.bzl", "Linkage") @@ -873,7 +872,7 @@ def get_default_shared_libs(ctx: AnalysisContext, deps: list[Dependency], shared ) return { soname: shared_lib - for soname, shared_lib in with_unique_str_sonames(traverse_shared_library_info(shared_library_info)).items() + for soname, shared_lib in with_unique_sonames(traverse_shared_library_info(shared_library_info)).items() if not (shared_libraries_to_exclude and shared_libraries_to_exclude.contains(shared_lib.label.raw_target())) } @@ -1065,10 +1064,10 @@ def _shared_lib_for_prebuilt_shared( ) shlib = node_data.shared_libs.libraries[0] - soname = shlib.soname.ensure_str() + soname = shlib.soname shlib = shlib.lib output_path = _platform_output_path(soname, platform) - return create_shlib( + return SharedLibrary( lib = shlib, stripped_lib = strip_lib(ctx, cxx_toolchain, shlib.output, output_path), link_args = None, @@ -1249,8 +1248,8 @@ def _get_merged_linkables_for_platform( # exported linker flags for shared libs are in their linkinfo itself and are not exported from dependents exported_linker_flags = None, ) - group_shared_libs[shared_lib.soname.ensure_str()] = MergedSharedLibrary( - soname = shared_lib.soname.ensure_str(), + group_shared_libs[shared_lib.soname] = MergedSharedLibrary( + soname = shared_lib.soname, lib = shared_lib, apk_module = group_data.apk_module, solib_constituents = [], @@ -1273,7 +1272,7 @@ def _get_merged_linkables_for_platform( expect(target_to_link_group[key] == group) node = linkable_nodes[key] - default_solibs = list([shlib.soname.ensure_str() for shlib in node.shared_libs.libraries]) + default_solibs = list([shlib.soname for shlib in node.shared_libs.libraries]) if not default_solibs and node.preferred_linkage == Linkage("static"): default_solibs = [node.default_soname] @@ -1329,7 +1328,7 @@ def _get_merged_linkables_for_platform( soname = soname, link_args = link_args, cxx_toolchain = cxx_toolchain, - 
shared_lib_deps = [link_group_linkable_nodes[label].shared_lib.soname.ensure_str() for label in shlib_deps], + shared_lib_deps = [link_group_linkable_nodes[label].shared_lib.soname for label in shlib_deps], label = group_data.constituents[0], can_be_asset = can_be_asset, ) @@ -1474,16 +1473,16 @@ def _create_relinkable_links( can_be_asset = node.can_be_asset, ) shared_lib_overrides[target] = LinkInfo( - name = shared_lib.soname.ensure_str(), + name = shared_lib.soname, pre_flags = node.linker_flags.exported_flags, linkables = [SharedLibLinkable( lib = shared_lib.lib.output, )], post_flags = node.linker_flags.exported_post_flags, ) - shared_libs[shared_lib.soname.ensure_str()] = shared_lib + shared_libs[shared_lib.soname] = shared_lib - return {lib.soname.ensure_str(): lib for lib in shared_libs.values()}, debug_link_deps + return {lib.soname: lib for lib in shared_libs.values()}, debug_link_deps # To support migration from a tset-based link strategy, we are trying to match buck's internal tset # traversal logic here. 
Look for implementation of TopologicalTransitiveSetIteratorGen @@ -1806,7 +1805,7 @@ def create_shared_lib( ) shlib = link_result.linked_object - return create_shlib( + return SharedLibrary( lib = shlib, stripped_lib = strip_lib(ctx, cxx_toolchain, shlib.output), shlib_deps = shared_lib_deps, diff --git a/prelude/android/voltron.bzl b/prelude/android/voltron.bzl index aca1242d8..288194878 100644 --- a/prelude/android/voltron.bzl +++ b/prelude/android/voltron.bzl @@ -86,7 +86,7 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: ]).hidden(targets_to_jars_args) if ctx.attrs.should_include_libraries: - targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), shared_lib.soname.ensure_str()], delimiter = " ") for shared_lib in traversed_shared_library_info] + targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), shared_lib.soname], delimiter = " ") for shared_lib in traversed_shared_library_info] targets_to_so_names = ctx.actions.write("targets_to_so_names.txt", targets_to_so_names_args) cmd.add([ "--targets-to-so-names", diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 52ce5c115..066597c67 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -536,40 +536,23 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, if shlib.lib.dwp ], )] - - # TODO(agallagher) There appears to be pre-existing soname conflicts - # when building this (when using link groups), which prevents using - # `with_unique_str_sonames`. 
- str_soname_shlibs = { - shlib.soname.ensure_str(): shlib - for shlib in shared_libs - if shlib.soname.is_str() - } sub_targets["shared-libraries"] = [DefaultInfo( default_output = ctx.actions.write_json( binary.output.basename + ".shared-libraries.json", { - "libraries": [ - "{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, soname) - for soname in str_soname_shlibs - ], - "librariesdwp": [ - "{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, soname) - for soname, shlib in str_soname_shlibs.items() - if shlib.lib.dwp - ], + "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs], + "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs if shlib.lib.dwp], "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if shared_libs_symlink_tree else [], }, ), sub_targets = { - soname: [DefaultInfo( + shlib.soname: [DefaultInfo( default_output = shlib.lib.output, sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, )] - for soname, shlib in str_soname_shlibs.items() + for shlib in shared_libs }, )] - if link_group_mappings: readable_mappings = {} for node, group in link_group_mappings.items(): diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index d605d48fe..b979b65e2 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -48,7 +48,7 @@ load( load( "@prelude//linking:shared_libraries.bzl", "SharedLibraries", - "create_shlib", + "SharedLibrary", ) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") @@ -882,7 +882,7 @@ def create_link_groups( library = None if not link_group_spec.is_shared_lib else LinkGroupLib( shared_libs = SharedLibraries( libraries = [ - create_shlib( + SharedLibrary( label = link_group_spec.label or ctx.label, soname = 
link_group_spec.name, lib = link_group_lib, diff --git a/prelude/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl index 5dc708615..283ad6239 100644 --- a/prelude/cxx/omnibus.bzl +++ b/prelude/cxx/omnibus.bzl @@ -39,8 +39,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", # @unused Used as a type - "create_shlib", + "SharedLibrary", ) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:expect.bzl", "expect") @@ -710,7 +709,7 @@ def create_omnibus_libraries( ) if root.name != None: libraries.append( - create_shlib( + SharedLibrary( soname = root.name, lib = product.shared_library, label = label, @@ -731,7 +730,7 @@ def create_omnibus_libraries( allow_cache_upload = True, ) libraries.append( - create_shlib( + SharedLibrary( soname = _omnibus_soname(ctx), lib = omnibus.linked_object, label = ctx.label, diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index 1ffa39632..d65330f43 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -48,9 +48,8 @@ load( load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", - "create_shlib_symlink_tree", "traverse_shared_library_info", - "with_unique_str_sonames", + "with_unique_sonames", ) load("@prelude//linking:types.bzl", "Linkage") load( @@ -273,7 +272,7 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: # Handle third-party dependencies of the omnibus SO tp_deps_shared_link_infos = {} - prebuilt_shlibs = [] + so_symlinks = {} for node_label in prebuilt_so_deps.keys(): node = graph_nodes[node_label] @@ -287,14 +286,14 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: shared_li = node.link_infos.get(output_style, None) if shared_li != None: tp_deps_shared_link_infos[node_label] = shared_li.default - prebuilt_shlibs.extend(node.shared_libs.libraries) + for shlib in node.shared_libs.libraries: + so_symlinks[shlib.soname] = shlib.lib.output # Create symlinks to the 
TP dependencies' SOs so_symlinks_root_path = ctx.label.name + ".so-symlinks" - so_symlinks_root = create_shlib_symlink_tree( - actions = ctx.actions, - out = so_symlinks_root_path, - shared_libs = prebuilt_shlibs, + so_symlinks_root = ctx.actions.symlinked_dir( + so_symlinks_root_path, + so_symlinks, ) linker_info = get_cxx_toolchain_info(ctx).linker_info @@ -484,7 +483,7 @@ def _build_preload_deps_root( shlib = traverse_shared_library_info(slib_info) - for soname, shared_lib in with_unique_str_sonames(shlib).items(): + for soname, shared_lib in with_unique_sonames(shlib).items(): preload_symlinks[soname] = shared_lib.lib.output # TODO(T150785851): build or get SO for direct preload_deps diff --git a/prelude/java/java_binary.bzl b/prelude/java/java_binary.bzl index abd32259e..d11069b35 100644 --- a/prelude/java/java_binary.bzl +++ b/prelude/java/java_binary.bzl @@ -55,7 +55,7 @@ def _create_fat_jar( ) args += [ "--native_libs_file", - ctx.actions.write("native_libs", [cmd_args([native_lib.soname.ensure_str(), native_lib.lib.output], delimiter = " ") for native_lib in native_libs]), + ctx.actions.write("native_libs", [cmd_args([native_lib.soname, native_lib.lib.output], delimiter = " ") for native_lib in native_libs]), ] if do_not_create_inner_jar: args += [ diff --git a/prelude/julia/julia_binary.bzl b/prelude/julia/julia_binary.bzl index ea649408c..0aeb2a016 100644 --- a/prelude/julia/julia_binary.bzl +++ b/prelude/julia/julia_binary.bzl @@ -58,7 +58,7 @@ def build_jll_shlibs_mapping(ctx: AnalysisContext, json_info_file: Artifact): shared_libs = shlibs, ) - shlib_label_to_soname = {shlib.label: shlib.soname.ensure_str() for shlib in shlibs} + shlib_label_to_soname = {shlib.label: shlib.soname for shlib in shlibs} # iterate through all the jll libraries json_info = [] diff --git a/prelude/linking/shared_libraries.bzl b/prelude/linking/shared_libraries.bzl index f9ba6ffeb..a93310fa8 100644 --- a/prelude/linking/shared_libraries.bzl +++ 
b/prelude/linking/shared_libraries.bzl @@ -12,18 +12,6 @@ load( "LinkedObject", # @unused Used as a type ) load("@prelude//linking:strip.bzl", "strip_object") -load("@prelude//utils:expect.bzl", "expect") - -_Soname = record( - # Return the SONAME as a string, throwing an error if it is actually an - # artifact. - ensure_str = field(typing.Callable), - # Return `True` if the SONAME is respresented as a string. - is_str = field(typing.Callable), - # The the actual SONAME can be rerepsented by a static string, or the - # contents of a file genrated at build time. - _soname = field(str | Artifact), -) SharedLibrary = record( lib = field(LinkedObject), @@ -38,31 +26,10 @@ SharedLibrary = record( stripped_lib = field(Artifact | None, None), can_be_asset = field(bool, False), for_primary_apk = field(bool, False), - soname = field(_Soname), + soname = field(str), label = field(Label), ) -def _ensure_str(soname: str | Artifact) -> str: - expect(type(soname) == type(""), "SONAME is not a `str`: {}", soname) - return soname - -def _soname(soname: str | Artifact) -> _Soname: - return _Soname( - ensure_str = lambda: _ensure_str(soname), - is_str = lambda: type(soname) == type(""), - _soname = soname, - ) - -def create_shlib( - # The soname can either be a string or an artifact with the soname in - # text form. - soname: str | Artifact, - **kwargs): - return SharedLibrary( - soname = _soname(soname), - **kwargs - ) - SharedLibraries = record( # A mapping of shared library SONAME (e.g. `libfoo.so.2`) to the artifact. 
# Since the SONAME is what the dynamic loader uses to uniquely identify @@ -95,7 +62,7 @@ def create_shared_libraries( """ cxx_toolchain = getattr(ctx.attrs, "_cxx_toolchain", None) return SharedLibraries( - libraries = [create_shlib( + libraries = [SharedLibrary( lib = shlib, stripped_lib = strip_object( ctx, @@ -165,9 +132,7 @@ def _merge_shlibs( merged[soname] = shlib return merged -def with_unique_str_sonames( - shared_libs: list[SharedLibrary], - skip_dynamic: bool = False) -> dict[str, SharedLibrary]: +def with_unique_sonames(shared_libs: list[SharedLibrary]) -> dict[str, SharedLibrary]: """ Convert a list of `SharedLibrary`s to a map of unique SONAMEs to the corresponding `SharedLibrary`. @@ -175,71 +140,17 @@ def with_unique_str_sonames( Will fail if the same SONAME maps to multiple `SharedLibrary`s. """ return _merge_shlibs( - shared_libs = [ - shlib - for shlib in shared_libs - if shlib.soname.is_str() or not skip_dynamic - ], - resolve_soname = lambda s: s.ensure_str(), + shared_libs = shared_libs, + resolve_soname = lambda s: s, ) -def gen_shared_libs_action( - actions: AnalysisActions, - out: str, - shared_libs: list[SharedLibrary], - gen_action: typing.Callable, - dir = False): - """ - Produce an action by first resolving all SONAME of the given shlibs and - enforcing that each SONAME is unique. - - The provided `gen_action` callable is called with a map of unique SONAMEs - to the corresponding shlibs. 
- """ - - output = actions.declare_output(out, dir = dir) - - def func(actions, artifacts, output): - def resolve_soname(soname): - if soname.is_str(): - return soname._soname - else: - return artifacts[soname._soname].read_string().strip() - - gen_action( - actions, - output, - _merge_shlibs( - shared_libs = shared_libs, - resolve_soname = resolve_soname, - ), - ) - - dynamic_sonames = [shlib.soname._soname for shlib in shared_libs if not shlib.soname.is_str()] - if dynamic_sonames: - actions.dynamic_output( - dynamic = [shlib.soname._soname for shlib in shared_libs if not shlib.soname.is_str()], - inputs = [], - outputs = [output], - f = lambda ctx, artifacts, outputs: func(ctx.actions, artifacts, outputs[output]), - ) - else: - func(actions, {}, output) - - return output - def create_shlib_symlink_tree(actions: AnalysisActions, out: str, shared_libs: list[SharedLibrary]): """ Merged shared libs into a symlink tree mapping the library's SONAME to it's artifact. """ - return gen_shared_libs_action( - actions = actions, - out = out, - shared_libs = shared_libs, - gen_action = lambda actions, output, shared_libs: actions.symlinked_dir( - output, - {name: shlib.lib.output for name, shlib in shared_libs.items()}, - ), - dir = True, + merged = with_unique_sonames(shared_libs = shared_libs) + return actions.symlinked_dir( + out, + {name: shlib.lib.output for name, shlib in merged.items()}, ) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index ad8334917..4c97530ac 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -349,7 +349,7 @@ def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: list[(str, S preload_libraries_path = ctx.actions.write( "__preload_libraries.txt", cmd_args([ - "--preload={}".format(paths.join(libdir, shlib.soname.ensure_str())) + "--preload={}".format(paths.join(libdir, shlib.soname)) for libdir, shlib in shared_libraries ]), ) @@ -438,7 +438,7 @@ def 
_pex_modules_common_args( native_library_dests_path = ctx.actions.write( "__native_libraries___dests.txt", [ - "--native-library-dest={}".format(paths.join(libdir, shlib.soname.ensure_str())) + "--native-library-dest={}".format(paths.join(libdir, shlib.soname)) for shlib, libdir in shared_libraries ], ) @@ -466,13 +466,13 @@ def _pex_modules_common_args( if ctx.attrs.strip_libpar == "extract" and get_package_style(ctx) == PackageStyle("standalone") and cxx_is_gnu(ctx): # rename to match extracted debuginfo package dwp = [ - (shlib.lib.dwp, paths.join(libdir, "{}.debuginfo.dwp".format(shlib.soname.ensure_str()))) + (shlib.lib.dwp, paths.join(libdir, "{}.debuginfo.dwp".format(shlib.soname))) for shlib, libdir in shared_libraries if shlib.lib.dwp != None ] else: dwp = [ - (shlib.lib.dwp, paths.join(libdir, "{}.dwp".format(shlib.soname.ensure_str()))) + (shlib.lib.dwp, paths.join(libdir, "{}.dwp".format(shlib.soname))) for shlib, libdir in shared_libraries if shlib.lib.dwp != None ] diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 982fb1002..5b56a7ba5 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -73,7 +73,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "create_shlib", + "SharedLibrary", "merge_shared_libraries", "traverse_shared_library_info", ) @@ -620,11 +620,7 @@ def _convert_python_library_to_executable( # Add sub-targets for libs. for shlib in executable_info.shared_libs: - # TODO(agallagher) There appears to be pre-existing soname conflicts - # when building this (when using link groups), which prevents using - # `with_unique_str_sonames`. 
- if shlib.soname.is_str(): - extra[shlib.soname.ensure_str()] = [DefaultInfo(default_output = shlib.lib.output)] + extra[shlib.soname] = [DefaultInfo(default_output = shlib.lib.output)] for name, group in executable_info.auto_link_groups.items(): extra[name] = [DefaultInfo(default_output = group.output)] @@ -646,7 +642,7 @@ def _convert_python_library_to_executable( extra_artifacts.update(dict(extension_info.artifacts)) shared_libs.append(( "runtime/bin", - create_shlib( + SharedLibrary( soname = ctx.attrs.executable_name, label = ctx.label, lib = LinkedObject( diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index c623b11ea..2364a5b6d 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -233,36 +233,21 @@ def _rust_binary_common( sub_targets_for_link_strategy = {} - # TODO(agallagher) There appears to be pre-existing soname conflicts - # when building this (when using link groups), which prevents using - # `with_unique_str_sonames`. - str_soname_shlibs = { - shlib.soname.ensure_str(): shlib - for shlib in shared_libs - if shlib.soname.is_str() - } sub_targets_for_link_strategy["shared-libraries"] = [DefaultInfo( default_output = ctx.actions.write_json( name + ".shared-libraries.json", { - "libraries": [ - "{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, soname) - for soname in str_soname_shlibs - ], - "librariesdwp": [ - "{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, soname) - for soname, shlib in str_soname_shlibs.items() - if shlib.lib.dwp - ], + "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs], + "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs if shlib.lib.dwp], "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if executable_args.shared_libs_symlink_tree else [], }, ), sub_targets = { - 
soname: [DefaultInfo( + shlib.soname: [DefaultInfo( default_output = shlib.lib.output, sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, )] - for soname, shlib in str_soname_shlibs.items() + for shlib in shared_libs }, )] From 99c590d79e303daac64dae779ec35a768051d495 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Fri, 22 Mar 2024 12:35:42 -0700 Subject: [PATCH 0555/1133] Revert D54987032: Defer resolving shared lib sonames Differential Revision: D54987032 Original commit changeset: d1015cfd71fd Original Phabricator Diff: D54987032 fbshipit-source-id: ca39a184bacff0b305bd160c8f1c4520ba10494c --- prelude/cxx/link_groups.bzl | 15 ++-- prelude/python/make_py_package.bzl | 54 +++++---------- prelude/python/python_binary.bzl | 108 ++++++++++++----------------- 3 files changed, 65 insertions(+), 112 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index b979b65e2..86661ab90 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -47,8 +47,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibraries", - "SharedLibrary", + "create_shared_libraries", ) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") @@ -141,7 +140,6 @@ LinkGroupLibSpec = record( root = field([LinkableRootInfo, None], None), # The link group to link. 
group = field(Group), - label = field(Label | None, None), ) _LinkedLinkGroup = record( @@ -880,14 +878,9 @@ def create_link_groups( linked_link_groups[link_group_spec.group.name] = _LinkedLinkGroup( artifact = link_group_lib, library = None if not link_group_spec.is_shared_lib else LinkGroupLib( - shared_libs = SharedLibraries( - libraries = [ - SharedLibrary( - label = link_group_spec.label or ctx.label, - soname = link_group_spec.name, - lib = link_group_lib, - ), - ], + shared_libs = create_shared_libraries( + ctx = ctx, + libraries = {link_group_spec.name: link_group_lib}, ), shared_link_infos = LinkInfos( default = wrap_link_info( diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 4c97530ac..141b70ce3 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -12,14 +12,13 @@ execution load("@prelude//:artifact_tset.bzl", "project_artifacts") load("@prelude//:local_only.bzl", "package_python_locally") -load("@prelude//:paths.bzl", "paths") load( "@prelude//cxx:cxx_library_utility.bzl", "cxx_is_gnu", ) load( - "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", # @unused Used as a type + "@prelude//linking:link_info.bzl", + "LinkedObject", # @unused Used as a type ) load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//utils:arglike.bzl", "ArgLike") @@ -118,7 +117,7 @@ def make_py_package( package_style: PackageStyle, build_args: list[ArgLike], pex_modules: PexModules, - shared_libraries: list[(str, SharedLibrary, bool)], + shared_libraries: dict[str, (LinkedObject, bool)], main: EntryPoint, hidden_resources: list[ArgLike] | None, allow_cache_upload: bool, @@ -145,21 +144,14 @@ def make_py_package( if pex_modules.extensions: srcs.append(pex_modules.extensions.manifest) - preload_libraries = _preload_libraries_args( - ctx = ctx, - shared_libraries = [ - (libdir, shlib) - for libdir, shlib, preload in shared_libraries - if preload - ], - ) + preload_libraries = 
_preload_libraries_args(ctx, shared_libraries) startup_function = generate_startup_function_loader(ctx) manifest_module = generate_manifest_module(ctx, python_toolchain, srcs) common_modules_args, dep_artifacts, debug_artifacts = _pex_modules_common_args( ctx, pex_modules, [startup_function] if startup_function else [], - [(shlib, libdir) for libdir, shlib, _ in shared_libraries], + {name: lib for name, (lib, _) in shared_libraries.items()}, debuginfo_files = debuginfo_files, ) @@ -345,12 +337,13 @@ def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[ArgLike]) - out = ctx.actions.write_json("debuginfo.manifest.json", debug_artifacts) return [DefaultInfo(default_output = out, other_outputs = debug_artifacts)] -def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: list[(str, SharedLibrary)]) -> cmd_args: +def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: dict[str, (LinkedObject, bool)]) -> cmd_args: preload_libraries_path = ctx.actions.write( "__preload_libraries.txt", cmd_args([ - "--preload={}".format(paths.join(libdir, shlib.soname)) - for libdir, shlib in shared_libraries + "--preload={}".format(name) + for name, (_, preload) in shared_libraries.items() + if preload ]), ) return cmd_args(preload_libraries_path, format = "@{}") @@ -396,7 +389,7 @@ def _pex_modules_common_args( ctx: AnalysisContext, pex_modules: PexModules, extra_manifests: list[ArgLike], - shared_libraries: list[(SharedLibrary, str)], + shared_libraries: dict[str, LinkedObject], debuginfo_files: list[Artifact]) -> (cmd_args, list[ArgLike], list[ArgLike]): srcs = [] src_artifacts = [] @@ -430,17 +423,14 @@ def _pex_modules_common_args( _srcs(resources, format = "--resource-manifest={}"), ) - native_libraries = [shlib.lib.output for shlib, _ in shared_libraries] + native_libraries = [s.output for s in shared_libraries.values()] native_library_srcs_path = ctx.actions.write( "__native_libraries___srcs.txt", _srcs(native_libraries, format = 
"--native-library-src={}"), ) native_library_dests_path = ctx.actions.write( "__native_libraries___dests.txt", - [ - "--native-library-dest={}".format(paths.join(libdir, shlib.soname)) - for shlib, libdir in shared_libraries - ], + ["--native-library-dest={}".format(lib) for lib in shared_libraries], ) src_manifest_args = cmd_args(src_manifests_path).hidden(srcs) @@ -465,17 +455,9 @@ def _pex_modules_common_args( if ctx.attrs.package_split_dwarf_dwp: if ctx.attrs.strip_libpar == "extract" and get_package_style(ctx) == PackageStyle("standalone") and cxx_is_gnu(ctx): # rename to match extracted debuginfo package - dwp = [ - (shlib.lib.dwp, paths.join(libdir, "{}.debuginfo.dwp".format(shlib.soname))) - for shlib, libdir in shared_libraries - if shlib.lib.dwp != None - ] + dwp = [(s.dwp, "{}.debuginfo.dwp".format(n)) for n, s in shared_libraries.items() if s.dwp != None] else: - dwp = [ - (shlib.lib.dwp, paths.join(libdir, "{}.dwp".format(shlib.soname))) - for shlib, libdir in shared_libraries - if shlib.lib.dwp != None - ] + dwp = [(s.dwp, "{}.dwp".format(n)) for n, s in shared_libraries.items() if s.dwp != None] dwp_srcs_path = ctx.actions.write( "__dwp___srcs.txt", _srcs([src for src, _ in dwp], format = "--dwp-src={}"), @@ -490,15 +472,11 @@ def _pex_modules_common_args( debug_artifacts.extend([d for d, _ in dwp]) - for shlib, _ in shared_libraries: - deps.append(shlib.lib.output) + deps.extend([lib.output for lib in shared_libraries.values()]) external_debug_info = project_artifacts( ctx.actions, - [ - shlib.lib.external_debug_info - for shlib, _ in shared_libraries - ], + [lib.external_debug_info for lib in shared_libraries.values()], ) # HACK: external_debug_info has an empty path diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 5b56a7ba5..cf4f9cab3 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -71,12 +71,7 @@ load( "LinkableProviders", # @unused Used as a type "linkables", ) -load( 
- "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", - "merge_shared_libraries", - "traverse_shared_library_info", -) +load("@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info") load("@prelude//linking:strip.bzl", "strip_debug_with_gnu_debuglink") load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:utils.bzl", "flatten", "value_or") @@ -169,7 +164,6 @@ def _get_root_link_group_specs( name = dep.linkable_root_info.name, is_shared_lib = True, root = dep.linkable_root_info, - label = dep.linkable_graph.nodes.value.label, group = Group( name = dep.linkable_root_info.name, mappings = [ @@ -211,6 +205,15 @@ def _get_root_link_group_specs( return specs +def _split_debuginfo(ctx, data: dict[str, (typing.Any, Label | bool)]) -> (dict[str, (LinkedObject, Label | bool)], list[Artifact]): + debuginfo_artifacts = [] + transformed = {} + for name, (artifact, extra) in data.items(): + stripped_binary, debuginfo = strip_debug_with_gnu_debuglink(ctx, name, artifact.unstripped_output) + transformed[name] = LinkedObject(output = stripped_binary, unstripped_output = artifact.unstripped_output, dwp = artifact.dwp), extra + debuginfo_artifacts.append(debuginfo) + return transformed, debuginfo_artifacts + def _get_shared_only_groups(shared_only_libs: list[LinkableProviders]) -> list[Group]: """ Create link group mappings for shared-only libs that'll force the link to @@ -451,6 +454,11 @@ def _convert_python_library_to_executable( # Convert preloaded deps to a set of their names to be loaded by. 
preload_labels = {d.label: None for d in ctx.attrs.preload_deps} + preload_names = { + shared_lib.soname: None + for shared_lib in library.shared_libraries() + if shared_lib.label in preload_labels + } extensions = {} extra_artifacts = {} @@ -489,7 +497,7 @@ def _convert_python_library_to_executable( dest: (omnibus_libs.roots[label].shared_library, label) for dest, (_, label) in extensions.items() } - shared_libs = [("", shlib) for shlib in omnibus_libs.libraries] + native_libs = {shlib.soname: shlib.lib for shlib in omnibus_libs.libraries} omnibus_providers = [] @@ -636,29 +644,23 @@ def _convert_python_library_to_executable( # Put native libraries into the runtime location, as we need to unpack # potentially all of them before startup. - shared_libs = [("runtime/lib", s) for s in executable_info.shared_libs] + native_libs = { + paths.join("runtime", "lib", shlib.soname): shlib.lib + for shlib in executable_info.shared_libs + } + preload_names = [paths.join("runtime", "lib", n) for n in preload_names] # TODO expect(len(executable_info.runtime_files) == 0, "OH NO THERE ARE RUNTIME FILES") extra_artifacts.update(dict(extension_info.artifacts)) - shared_libs.append(( - "runtime/bin", - SharedLibrary( - soname = ctx.attrs.executable_name, - label = ctx.label, - lib = LinkedObject( - output = executable_info.binary, - unstripped_output = executable_info.binary, - dwp = executable_info.dwp, - ), - ), - )) + native_libs["runtime/bin/{}".format(ctx.attrs.executable_name)] = LinkedObject( + output = executable_info.binary, + unstripped_output = executable_info.binary, + dwp = executable_info.dwp, + ) extra_artifacts["static_extension_finder.py"] = ctx.attrs.static_extension_finder else: - shared_libs = [ - ("", shared_lib) - for shared_lib in library.shared_libraries() - ] + native_libs = {shared_lib.soname: shared_lib.lib for shared_lib in library.shared_libraries()} if dbg_source_db: extra_artifacts["dbg-db.json"] = dbg_source_db.default_outputs[0] @@ -668,43 +670,23 @@ 
def _convert_python_library_to_executable( extra_manifests = create_manifest_for_source_map(ctx, "extra_manifests", extra_artifacts) + shared_libraries = {} + # Create the map of native libraries to their artifacts and whether they # need to be preloaded. Note that we merge preload deps into regular deps # above, before gathering up all native libraries, so we're guaranteed to # have all preload libraries (and their transitive deps) here. - shared_libs = [ - (libdir, shlib, shlib.label in preload_labels) - for libdir, shlib in shared_libs - ] + for name, lib in native_libs.items(): + shared_libraries[name] = lib, name in preload_names # Strip native libraries and extensions and update the .gnu_debuglink references if we are extracting # debug symbols from the par debuginfo_files = [] if ctx.attrs.strip_libpar == "extract" and package_style == PackageStyle("standalone") and cxx_is_gnu(ctx): - for shlib in shared_libs: - stripped, debuginfo = strip_debug_with_gnu_debuglink( - ctx = ctx, - name = shlib.lib.unstripped_output.basename, - obj = shlib.lib.unstripped_output, - ) - shlib[1] = LinkedObject( - output = stripped, - unstripped_output = shlib.lib.unstripped_output, - dwp = shlib.lib.dwp, - ) - debuginfo_files.append(debuginfo) - for name, extension in extensions.items(): - stripped, debuginfo = strip_debug_with_gnu_debuglink( - ctx = ctx, - name = extension.unstripped_output.basename, - obj = extension.unstripped_output, - ) - extensions[name] = LinkedObject( - output = stripped, - unstripped_output = extension.unstripped_output, - dwp = extension.dwp, - ) - debuginfo_files.append(debuginfo) + shared_libraries, library_debuginfo = _split_debuginfo(ctx, shared_libraries) + extensions, extension_debuginfo = _split_debuginfo(ctx, extensions) + debuginfo_files += library_debuginfo + debuginfo_files += extension_debuginfo # Combine sources and extensions into a map of all modules. 
pex_modules = PexModules( @@ -722,16 +704,16 @@ def _convert_python_library_to_executable( # Build the PEX. pex = make_py_package( - ctx = ctx, - python_toolchain = python_toolchain, - make_py_package_cmd = ctx.attrs.make_py_package[RunInfo] if ctx.attrs.make_py_package != None else None, - package_style = package_style, - build_args = ctx.attrs.build_args, - pex_modules = pex_modules, - shared_libraries = shared_libs, - main = main, - hidden_resources = hidden_resources, - allow_cache_upload = allow_cache_upload, + ctx, + python_toolchain, + ctx.attrs.make_py_package[RunInfo] if ctx.attrs.make_py_package != None else None, + package_style, + ctx.attrs.build_args, + pex_modules, + shared_libraries, + main, + hidden_resources, + allow_cache_upload, debuginfo_files = debuginfo_files, ) From 30e648a7933f70ee61c7234f4d4c490a77502242 Mon Sep 17 00:00:00 2001 From: Vlad Fridman Date: Fri, 22 Mar 2024 12:36:12 -0700 Subject: [PATCH 0556/1133] Fix aast codemod --jast Summary: When running jast codemods using aast there would be an error: P1198500168 This fixes it Reviewed By: jsendros Differential Revision: D55244876 fbshipit-source-id: 86086875498dc4eb8ca4cbb995ec58473516f149 --- prelude/jvm/cd_jar_creator_util.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl index c25aff571..f476bead5 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl +++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -427,6 +427,7 @@ def prepare_cd_exe( "--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED", "--add-exports=jdk.compiler/com.sun.tools.javac.model=ALL-UNNAMED", "--add-exports=jdk.compiler/com.sun.tools.javac.processing=ALL-UNNAMED", + "--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED", "--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED", "--add-opens=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED", "--add-opens=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED", From 
10cb02fd38b9a97795d847943bd9b37bba5f5945 Mon Sep 17 00:00:00 2001 From: Chris Tolliday Date: Fri, 22 Mar 2024 14:04:52 -0700 Subject: [PATCH 0557/1133] Fix other_outputs for exopackage apks to fix validation_deps Summary: Using files for install_info should ensure we don't miss anything. Reviewed By: IanChilds Differential Revision: D55224732 fbshipit-source-id: 837ece0f6ec369ff9266a8064e03736268abf178 --- prelude/android/android_apk.bzl | 31 ++++--------------------------- 1 file changed, 4 insertions(+), 27 deletions(-) diff --git a/prelude/android/android_apk.bzl b/prelude/android/android_apk.bzl index 94ab2a5c4..5a1f996f1 100644 --- a/prelude/android/android_apk.bzl +++ b/prelude/android/android_apk.bzl @@ -41,12 +41,10 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: native_library_info = native_library_info.exopackage_info, resources_info = resources_info.exopackage_info, ) - exopackage_outputs = _get_exopackage_outputs(exopackage_info) default_output = ctx.actions.write("exopackage_apk_warning", "exopackage apks should not be used externally, try buck install or building with exopackage disabled\n") sub_targets["exo_apk"] = [DefaultInfo(default_output = output_apk)] # Used by tests else: exopackage_info = None - exopackage_outputs = [] default_output = output_apk class_to_srcs, class_to_srcs_subtargets = get_class_to_source_map_info( @@ -58,6 +56,8 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: # We can only be sure that an APK has native libs if it has any shared libraries. Prebuilt native libraries dirs can exist but be empty. 
definitely_has_native_libs = bool(native_library_info.shared_libraries) + install_info = get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info, definitely_has_native_libs = definitely_has_native_libs) + return [ AndroidApkInfo(apk = output_apk, manifest = resources_info.manifest, materialized_artifacts = android_binary_info.materialized_artifacts), AndroidApkUnderTestInfo( @@ -71,8 +71,8 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: r_dot_java_packages = set([info.specified_r_dot_java_package for info in resources_info.unfiltered_resource_infos if info.specified_r_dot_java_package]), shared_libraries = set(native_library_info.shared_libraries), ), - DefaultInfo(default_output = default_output, other_outputs = exopackage_outputs + android_binary_info.materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets), - get_install_info(ctx, output_apk = output_apk, manifest = resources_info.manifest, exopackage_info = exopackage_info, definitely_has_native_libs = definitely_has_native_libs), + DefaultInfo(default_output = default_output, other_outputs = install_info.files.values(), sub_targets = sub_targets | class_to_srcs_subtargets), + install_info, TemplatePlaceholderInfo( keyed_variables = { "classpath": cmd_args([dep.jar for dep in java_packaging_deps if dep.jar], delimiter = get_path_separator_for_exec_os(ctx)), @@ -200,29 +200,6 @@ def get_install_info( files = files, ) -def _get_exopackage_outputs(exopackage_info: ExopackageInfo) -> list[Artifact]: - outputs = [] - secondary_dex_exopackage_info = exopackage_info.secondary_dex_info - if secondary_dex_exopackage_info: - outputs.append(secondary_dex_exopackage_info.metadata) - outputs.append(secondary_dex_exopackage_info.directory) - - native_library_exopackage_info = exopackage_info.native_library_info - if native_library_exopackage_info: - outputs.append(native_library_exopackage_info.metadata) - 
outputs.append(native_library_exopackage_info.directory) - - resources_info = exopackage_info.resources_info - if resources_info: - outputs.append(resources_info.res) - outputs.append(resources_info.res_hash) - - if resources_info.assets: - outputs.append(resources_info.assets) - outputs.append(resources_info.assets_hash) - - return outputs - def generate_install_config(ctx: AnalysisContext) -> Artifact: data = get_install_config() return ctx.actions.write_json("install_android_options.json", data) From 637d949367dc117e3abe86f2d57fc556a0522368 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Fri, 22 Mar 2024 14:41:53 -0700 Subject: [PATCH 0558/1133] Use some pure code instead of cmd_args.add Reviewed By: igorsugak Differential Revision: D55230560 fbshipit-source-id: 6a3040b9fdcface4028b6076e9a2c2079a13d9df --- prelude/cxx/argsfiles.bzl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prelude/cxx/argsfiles.bzl b/prelude/cxx/argsfiles.bzl index 398a24c51..327fccaca 100644 --- a/prelude/cxx/argsfiles.bzl +++ b/prelude/cxx/argsfiles.bzl @@ -31,13 +31,13 @@ CompileArgsfiles = record( def get_argsfiles_output(ctx: AnalysisContext, argsfile_by_ext: dict[str, CompileArgsfile], summary_name: str) -> DefaultInfo: argsfiles = [] - argsfile_names = cmd_args() + argsfile_names = [] dependent_outputs = [] for _, argsfile in argsfile_by_ext.items(): argsfiles.append(argsfile.file) - argsfile_names.add(cmd_args(argsfile.file, ignore_artifacts = True)) + argsfile_names.append(cmd_args(argsfile.file, ignore_artifacts = True)) dependent_outputs.extend(argsfile.input_args) - argsfiles_summary = ctx.actions.write(summary_name, argsfile_names) + argsfiles_summary = ctx.actions.write(summary_name, cmd_args(argsfile_names)) return DefaultInfo(default_outputs = [argsfiles_summary] + argsfiles, other_outputs = dependent_outputs) From aa9c1f9924d59afb5fd631a4143fe987fb625d17 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Fri, 22 Mar 2024 16:45:20 -0700 
Subject: [PATCH 0559/1133] introduce `CodesignedPath` and use in `_codesign_paths`; take 2 Summary: Same as original diff: D55016955 Reviewed By: rmaz Differential Revision: D55241470 fbshipit-source-id: a1c04e3e44b7a89ee50fffc1611ce1af46bb6b7b --- .../tools/code_signing/codesign_bundle.py | 46 +++++++++++-------- 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index a4145761c..9454434a1 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -56,6 +56,18 @@ _LOGGER: logging.Logger = logging.getLogger(__name__) +@dataclass +class CodesignedPath: + path: Path + """ + Path relative to bundle root which needs to be codesigned + """ + entitlements: Optional[Path] + """ + Path to entitlements to be used when codesigning, relative to buck project + """ + + def _select_provisioning_profile( info_plist_metadata: InfoPlistMetadata, provisioning_profiles_dir: Path, @@ -398,11 +410,13 @@ def _dry_codesign_everything( # First sign codesign-on-copy directory paths _codesign_paths( - paths=codesign_on_copy_directory_paths, + paths=[ + CodesignedPath(path=p, entitlements=None) + for p in codesign_on_copy_directory_paths + ], identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=None, platform=platform, codesign_args=codesign_args, ) @@ -418,11 +432,10 @@ def _dry_codesign_everything( # Lastly sign whole bundle _codesign_paths( - paths=[bundle_path], + paths=[CodesignedPath(path=bundle_path, entitlements=entitlements)], identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=entitlements, platform=platform, codesign_args=codesign_args, ) @@ -441,9 +454,11 @@ def _codesign_everything( ) -> None: # First sign codesign-on-copy paths 
codesign_on_copy_filtered_paths = _filter_out_fast_adhoc_paths( - paths=[bundle_path / path for path in codesign_on_copy_paths], + paths=[ + CodesignedPath(path=bundle_path / path, entitlements=None) + for path in codesign_on_copy_paths + ], identity_fingerprint=identity_fingerprint, - entitlements=entitlements, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) @@ -452,15 +467,13 @@ def _codesign_everything( identity_fingerprint, tmp_dir, codesign_command_factory, - None, platform, codesign_args, ) # Lastly sign whole bundle root_bundle_paths = _filter_out_fast_adhoc_paths( - paths=[bundle_path], + paths=[CodesignedPath(path=bundle_path, entitlements=entitlements)], identity_fingerprint=identity_fingerprint, - entitlements=entitlements, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) @@ -469,7 +482,6 @@ def _codesign_everything( identity_fingerprint, tmp_dir, codesign_command_factory, - entitlements, platform, codesign_args, ) @@ -535,11 +547,10 @@ def _spawn_codesign_process( def _codesign_paths( - paths: List[Path], + paths: List[CodesignedPath], identity_fingerprint: str, tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], platform: ApplePlatform, codesign_args: List[str], ) -> None: @@ -548,11 +559,11 @@ def _codesign_paths( with ExitStack() as stack: for path in paths: process = _spawn_codesign_process( - path=path, + path=path.path, identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=entitlements, + entitlements=path.entitlements, stack=stack, codesign_args=codesign_args, ) @@ -564,12 +575,11 @@ def _codesign_paths( def _filter_out_fast_adhoc_paths( - paths: List[Path], + paths: List[CodesignedPath], identity_fingerprint: str, - entitlements: Optional[Path], platform: ApplePlatform, fast_adhoc_signing: bool, -) -> List[Path]: +) -> List[CodesignedPath]: if not fast_adhoc_signing: return paths # TODO(T149863217): Make 
skip checks run in parallel, they're usually fast (~15ms) @@ -578,6 +588,6 @@ def _filter_out_fast_adhoc_paths( p for p in paths if not should_skip_adhoc_signing_path( - p, identity_fingerprint, entitlements, platform + p.path, identity_fingerprint, p.entitlements, platform ) ] From 895991b17a354c8e70ea9d775168edc509b226b5 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Fri, 22 Mar 2024 16:45:20 -0700 Subject: [PATCH 0560/1133] use `CodesignedPath` in `_codesign_everything`; take 2 Summary: Same as original diff: D55016959 Reviewed By: rmaz Differential Revision: D55241469 fbshipit-source-id: dfc6e26fff1a1d901cea1f783e44a9681dfff5df --- .../tools/code_signing/codesign_bundle.py | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 9454434a1..e1f2e31e3 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -247,12 +247,16 @@ def codesign_bundle( ) _LOGGER.info(f"Fast adhoc signing enabled: {fast_adhoc_signing_enabled}") _codesign_everything( - bundle_path=bundle_path, - codesign_on_copy_paths=codesign_on_copy_paths, + root=CodesignedPath( + path=bundle_path, entitlements=prepared_entitlements_path + ), + codesign_on_copy_paths=[ + CodesignedPath(path=bundle_path / path, entitlements=None) + for path in codesign_on_copy_paths + ], identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=DefaultCodesignCommandFactory(codesign_tool), - entitlements=prepared_entitlements_path, platform=platform, fast_adhoc_signing=fast_adhoc_signing_enabled, codesign_args=codesign_args, @@ -442,22 +446,18 @@ def _dry_codesign_everything( def _codesign_everything( - bundle_path: Path, - codesign_on_copy_paths: List[str], + root: CodesignedPath, + codesign_on_copy_paths: List[CodesignedPath], identity_fingerprint: str, tmp_dir: 
str, codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], platform: ApplePlatform, fast_adhoc_signing: bool, codesign_args: List[str], ) -> None: # First sign codesign-on-copy paths codesign_on_copy_filtered_paths = _filter_out_fast_adhoc_paths( - paths=[ - CodesignedPath(path=bundle_path / path, entitlements=None) - for path in codesign_on_copy_paths - ], + paths=codesign_on_copy_paths, identity_fingerprint=identity_fingerprint, platform=platform, fast_adhoc_signing=fast_adhoc_signing, @@ -471,14 +471,14 @@ def _codesign_everything( codesign_args, ) # Lastly sign whole bundle - root_bundle_paths = _filter_out_fast_adhoc_paths( - paths=[CodesignedPath(path=bundle_path, entitlements=entitlements)], + root_filtered_paths = _filter_out_fast_adhoc_paths( + paths=[root], identity_fingerprint=identity_fingerprint, platform=platform, fast_adhoc_signing=fast_adhoc_signing, ) _codesign_paths( - root_bundle_paths, + root_filtered_paths, identity_fingerprint, tmp_dir, codesign_command_factory, From 74c0596b06420be217735882d6c83cb8c3c8ca1f Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Fri, 22 Mar 2024 16:45:20 -0700 Subject: [PATCH 0561/1133] use `CodesignedPath` in `_dry_codesign_everything`; take 2 Summary: Same as original diff: D55016958 Reviewed By: rmaz Differential Revision: D55241473 fbshipit-source-id: 06050cddba1df11ccb8fe5eff26e80a26254e6f1 --- .../tools/code_signing/codesign_bundle.py | 39 +++++++++---------- 1 file changed, 18 insertions(+), 21 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index e1f2e31e3..83b5b9e7a 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -225,18 +225,23 @@ def codesign_bundle( prepared_entitlements_path = entitlements_path selected_identity_fingerprint = signing_context.codesign_identity + root = CodesignedPath(path=bundle_path, 
entitlements=prepared_entitlements_path) + codesigned_on_copy = [ + CodesignedPath(path=bundle_path / path, entitlements=None) + for path in codesign_on_copy_paths + ] + if codesign_configuration is CodesignConfiguration.dryRun: if codesign_tool is None: raise RuntimeError( "Expected codesign tool not to be the default one when dry run codesigning is requested." ) _dry_codesign_everything( - bundle_path=bundle_path, - codesign_on_copy_paths=codesign_on_copy_paths, + root=root, + codesign_on_copy_paths=codesigned_on_copy, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_tool=codesign_tool, - entitlements=prepared_entitlements_path, platform=platform, codesign_args=codesign_args, ) @@ -247,13 +252,8 @@ def codesign_bundle( ) _LOGGER.info(f"Fast adhoc signing enabled: {fast_adhoc_signing_enabled}") _codesign_everything( - root=CodesignedPath( - path=bundle_path, entitlements=prepared_entitlements_path - ), - codesign_on_copy_paths=[ - CodesignedPath(path=bundle_path / path, entitlements=None) - for path in codesign_on_copy_paths - ], + root=root, + codesign_on_copy_paths=codesigned_on_copy, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=DefaultCodesignCommandFactory(codesign_tool), @@ -396,28 +396,23 @@ def _read_entitlements_file(path: Optional[Path]) -> Optional[Dict[str, Any]]: def _dry_codesign_everything( - bundle_path: Path, - codesign_on_copy_paths: List[str], + root: CodesignedPath, + codesign_on_copy_paths: List[CodesignedPath], identity_fingerprint: str, tmp_dir: str, codesign_tool: Path, - entitlements: Optional[Path], platform: ApplePlatform, codesign_args: List[str], ) -> None: codesign_command_factory = DryRunCodesignCommandFactory(codesign_tool) - codesign_on_copy_abs_paths = [bundle_path / path for path in codesign_on_copy_paths] codesign_on_copy_directory_paths = [ - p for p in codesign_on_copy_abs_paths if p.is_dir() + p for p in codesign_on_copy_paths if p.path.is_dir() ] # 
First sign codesign-on-copy directory paths _codesign_paths( - paths=[ - CodesignedPath(path=p, entitlements=None) - for p in codesign_on_copy_directory_paths - ], + paths=codesign_on_copy_directory_paths, identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, @@ -428,7 +423,9 @@ def _dry_codesign_everything( # Dry codesigning creates a .plist inside every directory it signs. # That approach doesn't work for files so those files are written into .plist for root bundle. codesign_on_copy_file_paths = [ - p.relative_to(bundle_path) for p in codesign_on_copy_abs_paths if p.is_file() + p.path.relative_to(root.path) + for p in codesign_on_copy_paths + if p.path.is_file() ] codesign_command_factory.set_codesign_on_copy_file_paths( codesign_on_copy_file_paths @@ -436,7 +433,7 @@ def _dry_codesign_everything( # Lastly sign whole bundle _codesign_paths( - paths=[CodesignedPath(path=bundle_path, entitlements=entitlements)], + paths=[root], identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, From 96800c44f2d103105ef3126b324bfc79295acfb6 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Fri, 22 Mar 2024 16:45:20 -0700 Subject: [PATCH 0562/1133] simplify _spawn_codesign_process signature; take 2 Summary: Same as original diff D55016957 Reviewed By: rmaz Differential Revision: D55241472 fbshipit-source-id: 14ae519778ddffb2fc3a53d8b76ed2c5460972d8 --- prelude/apple/tools/code_signing/codesign_bundle.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 83b5b9e7a..20203606c 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -529,16 +529,15 @@ def _spawn_process( def _spawn_codesign_process( - path: Path, + path: CodesignedPath, identity_fingerprint: str, 
tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, - entitlements: Optional[Path], stack: ExitStack, codesign_args: List[str], ) -> ParallelProcess: command = codesign_command_factory.codesign_command( - path, identity_fingerprint, entitlements, codesign_args + path.path, identity_fingerprint, path.entitlements, codesign_args ) return _spawn_process(command=command, tmp_dir=tmp_dir, stack=stack) @@ -556,11 +555,10 @@ def _codesign_paths( with ExitStack() as stack: for path in paths: process = _spawn_codesign_process( - path=path.path, + path=path, identity_fingerprint=identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, - entitlements=path.entitlements, stack=stack, codesign_args=codesign_args, ) From ee6024883196a42647d75a69dea671718f12ba99 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Fri, 22 Mar 2024 16:45:20 -0700 Subject: [PATCH 0563/1133] use `CodesignPath` in `codesign_bundle`; take 2 Summary: Original diff D55016956 with applied fix for codesign binary Reviewed By: rmaz Differential Revision: D55241471 fbshipit-source-id: 56e0c78936f3ee6fb353c6a355a69c2d54c4b202 --- prelude/apple/tools/bundling/main.py | 13 +++-- .../tools/code_signing/codesign_bundle.py | 49 +++++++++---------- prelude/apple/tools/code_signing/main.py | 15 ++++-- 3 files changed, 44 insertions(+), 33 deletions(-) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index 851ad0a6d..c4652e97a 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -22,6 +22,7 @@ AdhocSigningContext, codesign_bundle, CodesignConfiguration, + CodesignedPath, signing_context_with_profile_selection, ) from apple.tools.code_signing.list_codesign_identities import ( @@ -384,12 +385,18 @@ def _main() -> None: codesign_on_copy_paths = [ i.dst for i in spec if i.codesign_on_copy ] + swift_stdlib_paths + + bundle_path = CodesignedPath(path=args.output, 
entitlements=args.entitlements) + codesigned_on_copy = [ + CodesignedPath(path=bundle_path.path / path, entitlements=None) + for path in codesign_on_copy_paths + ] + codesign_bundle( - bundle_path=args.output, + bundle_path=bundle_path, signing_context=signing_context, - entitlements_path=args.entitlements, platform=args.platform, - codesign_on_copy_paths=codesign_on_copy_paths, + codesign_on_copy_paths=codesigned_on_copy, codesign_args=args.codesign_args, codesign_tool=args.codesign_tool, codesign_configuration=args.codesign_configuration, diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 20203606c..27e4b526a 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -183,11 +183,10 @@ class CodesignConfiguration(str, Enum): def codesign_bundle( - bundle_path: Path, + bundle_path: CodesignedPath, signing_context: Union[AdhocSigningContext, SigningContextWithProfileSelection], - entitlements_path: Optional[Path], platform: ApplePlatform, - codesign_on_copy_paths: List[str], + codesign_on_copy_paths: List[CodesignedPath], codesign_args: List[str], codesign_tool: Optional[Path] = None, codesign_configuration: Optional[CodesignConfiguration] = None, @@ -203,12 +202,13 @@ def codesign_bundle( ) if selection_profile_context: - prepared_entitlements_path = _prepare_entitlements_and_info_plist( - bundle_path=bundle_path, - entitlements_path=entitlements_path, - platform=platform, - signing_context=selection_profile_context, - tmp_dir=tmp_dir, + bundle_path_with_prepared_entitlements = ( + _prepare_entitlements_and_info_plist( + bundle_path=bundle_path, + platform=platform, + signing_context=selection_profile_context, + tmp_dir=tmp_dir, + ) ) selected_identity_fingerprint = ( selection_profile_context.selected_profile_info.identity.fingerprint @@ -222,23 +222,17 @@ def codesign_bundle( raise AssertionError( "Expected no profile 
selection context in `AdhocSigningContext` when `selection_profile_context` is `None`." ) - prepared_entitlements_path = entitlements_path + bundle_path_with_prepared_entitlements = bundle_path selected_identity_fingerprint = signing_context.codesign_identity - root = CodesignedPath(path=bundle_path, entitlements=prepared_entitlements_path) - codesigned_on_copy = [ - CodesignedPath(path=bundle_path / path, entitlements=None) - for path in codesign_on_copy_paths - ] - if codesign_configuration is CodesignConfiguration.dryRun: if codesign_tool is None: raise RuntimeError( "Expected codesign tool not to be the default one when dry run codesigning is requested." ) _dry_codesign_everything( - root=root, - codesign_on_copy_paths=codesigned_on_copy, + root=bundle_path_with_prepared_entitlements, + codesign_on_copy_paths=codesign_on_copy_paths, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_tool=codesign_tool, @@ -252,8 +246,8 @@ def codesign_bundle( ) _LOGGER.info(f"Fast adhoc signing enabled: {fast_adhoc_signing_enabled}") _codesign_everything( - root=root, - codesign_on_copy_paths=codesigned_on_copy, + root=bundle_path_with_prepared_entitlements, + codesign_on_copy_paths=codesign_on_copy_paths, identity_fingerprint=selected_identity_fingerprint, tmp_dir=tmp_dir, codesign_command_factory=DefaultCodesignCommandFactory(codesign_tool), @@ -264,16 +258,15 @@ def codesign_bundle( def _prepare_entitlements_and_info_plist( - bundle_path: Path, - entitlements_path: Optional[Path], + bundle_path: CodesignedPath, platform: ApplePlatform, signing_context: SigningContextWithProfileSelection, tmp_dir: str, -) -> Path: +) -> CodesignedPath: info_plist_metadata = signing_context.info_plist_metadata selected_profile = signing_context.selected_profile_info.profile prepared_entitlements_path = prepare_code_signing_entitlements( - entitlements_path, + bundle_path.entitlements, info_plist_metadata.bundle_id, selected_profile, tmp_dir, @@ -286,13 +279,15 @@ def 
_prepare_entitlements_and_info_plist( ) os.replace( prepared_info_plist_path, - bundle_path / signing_context.info_plist_destination, + bundle_path.path / signing_context.info_plist_destination, ) shutil.copy2( selected_profile.file_path, - bundle_path / platform.embedded_provisioning_profile_path(), + bundle_path.path / platform.embedded_provisioning_profile_path(), + ) + return CodesignedPath( + path=bundle_path.path, entitlements=prepared_entitlements_path ) - return prepared_entitlements_path async def _fast_read_provisioning_profiles_async( diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index eba6fbf2a..72c63ea1c 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -15,6 +15,7 @@ from .codesign_bundle import ( AdhocSigningContext, codesign_bundle, + CodesignedPath, signing_context_with_profile_selection, ) from .list_codesign_identities import ListCodesignIdentities @@ -114,12 +115,20 @@ def _main() -> None: platform=args.platform, should_use_fast_provisioning_profile_parsing=args.fast_provisioning_profile_parsing, ) + + bundle_path = CodesignedPath( + path=args.bundle_path, entitlements=args.entitlements + ) + codesign_on_copy_paths = [ + CodesignedPath(path=bundle_path.path / path, entitlements=None) + for path in args.codesign_on_copy + ] + codesign_bundle( - bundle_path=args.bundle_path, + bundle_path=bundle_path, signing_context=signing_context, - entitlements_path=args.entitlements, platform=args.platform, - codesign_on_copy_paths=args.codesign_on_copy or [], + codesign_on_copy_paths=codesign_on_copy_paths, codesign_args=[], ) except CodeSignProvisioningError as e: From f8a78478965f5a74016200eb4b21d596648f3155 Mon Sep 17 00:00:00 2001 From: Luisa Vasquez Gomez Date: Fri, 22 Mar 2024 17:16:26 -0700 Subject: [PATCH 0564/1133] rust: Make crate root error more actionable Summary: ^ Reviewed By: iguridi Differential Revision: D55255853 fbshipit-source-id: 
5e1a3ecc61da16d2df33b2763eaec18c11b4a4e9 --- prelude/rust/build.bzl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 745f3331d..c94f41bc3 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -1116,7 +1116,9 @@ def _crate_root( if candidates.size() == 1: return candidates.list()[0] - fail("Could not infer crate_root. candidates=%s\nAdd 'crate_root = \"src/example.rs\"' to your attributes to disambiguate." % candidates.list()) + fail("Could not infer crate_root." + + "\nMake sure you have one of {} in your `srcs` attribute.".format(default_roots) + + "\nOr add 'crate_root = \"src/example.rs\"' to your attributes to disambiguate. candidates={}".format(candidates.list())) def _explain(crate_type: CrateType, link_strategy: LinkStrategy, emit: Emit) -> str: link_strategy_suffix = { From 2419137d88e4ca113bf4f6d17cc424d9f7aeb77a Mon Sep 17 00:00:00 2001 From: Wyman Zhao Date: Fri, 22 Mar 2024 18:18:03 -0700 Subject: [PATCH 0565/1133] Revert D55001128: Use `SharedLibrary` to present shlibs Differential Revision: D55001128 Original commit changeset: bff3285a5669 Original Phabricator Diff: D55001128 fbshipit-source-id: eed39aa770133da07360be12f62ad6dbd4457cfa --- .../android_binary_native_library_rules.bzl | 8 ++--- prelude/cxx/cxx.bzl | 5 ++- prelude/cxx/cxx_executable.bzl | 5 ++- prelude/cxx/cxx_library.bzl | 7 ++--- prelude/cxx/link_groups.bzl | 9 +----- prelude/cxx/omnibus.bzl | 31 +++++-------------- prelude/cxx/prebuilt_cxx_library_group.bzl | 5 ++- prelude/haskell/haskell.bzl | 14 ++++----- prelude/haskell/haskell_ghci.bzl | 4 +-- prelude/linking/link_groups.bzl | 9 ++---- prelude/linking/linkable_graph.bzl | 9 ++---- prelude/python/python_binary.bzl | 2 +- prelude/rust/rust_binary.bzl | 6 +++- prelude/rust/rust_library.bzl | 5 ++- 14 files changed, 46 insertions(+), 73 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl 
b/prelude/android/android_binary_native_library_rules.bzl index f6b71a7e6..33dc0e55a 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -1045,7 +1045,7 @@ def _shared_lib_for_prebuilt_shared( transitive_linkable_cache: dict[Label, bool], platform: [str, None] = None) -> SharedLibrary: expect( - len(node_data.shared_libs.libraries) == 1, + len(node_data.shared_libs) == 1, "unexpected shared_libs length for somerge of {} ({})".format(target, node_data.shared_libs), ) @@ -1063,9 +1063,7 @@ def _shared_lib_for_prebuilt_shared( "prebuilt shared library `{}` with exported_deps not supported by somerge".format(target), ) - shlib = node_data.shared_libs.libraries[0] - soname = shlib.soname - shlib = shlib.lib + soname, shlib = node_data.shared_libs.items()[0] output_path = _platform_output_path(soname, platform) return SharedLibrary( lib = shlib, @@ -1272,7 +1270,7 @@ def _get_merged_linkables_for_platform( expect(target_to_link_group[key] == group) node = linkable_nodes[key] - default_solibs = list([shlib.soname for shlib in node.shared_libs.libraries]) + default_solibs = list(node.shared_libs.keys()) if not default_solibs and node.preferred_linkage == Linkage("static"): default_solibs = [node.default_soname] diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index dea2b33c4..4783ab695 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -556,10 +556,9 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: )) # Propagate shared libraries up the tree. 
- shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - shared_libs, + create_shared_libraries(ctx, solibs), filter(None, [x.get(SharedLibraryInfo) for x in exported_first_order_deps]), )) @@ -604,7 +603,7 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: # If we don't have link input for this link style, we pass in `None` so # that omnibus knows to avoid it. link_infos = libraries, - shared_libs = shared_libs, + shared_libs = solibs, linker_flags = linker_flags, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, ), diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 066597c67..683f3ecfa 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -450,7 +450,10 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - shared_libs.extend(link_group_lib.shared_libs.libraries) + shared_libs.extend([ + SharedLibrary(soname = name, lib = lib, label = ctx.label) + for name, lib in link_group_lib.shared_libs.items() + ]) toolchain_info = get_cxx_toolchain_info(ctx) linker_info = toolchain_info.linker_info diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 56ad1b67c..1a8bbfaf2 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -501,7 +501,6 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc link_execution_preference = link_execution_preference, ) solib_as_dict = {library_outputs.solib[0]: library_outputs.solib[1]} if library_outputs.solib else {} - shared_libs = create_shared_libraries(ctx, solib_as_dict) for _, link_style_output in library_outputs.outputs.items(): for key in link_style_output.sub_targets.keys(): @@ -609,7 +608,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc if impl_params.generate_providers.shared_libraries: providers.append(merge_shared_libraries( ctx.actions, - shared_libs, + create_shared_libraries(ctx, solib_as_dict), filter(None, [x.get(SharedLibraryInfo) for x in non_exported_deps]) + filter(None, [x.get(SharedLibraryInfo) for x in exported_deps]), )) @@ -718,7 +717,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # that omnibus knows to avoid it. 
include_in_android_mergemap = getattr(ctx.attrs, "include_in_android_merge_map_output", True) and default_output != None, link_infos = library_outputs.link_infos, - shared_libs = shared_libs, + shared_libs = solib_as_dict, linker_flags = linker_flags, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, ), @@ -832,7 +831,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc merge_link_group_lib_info( label = ctx.label, name = link_group, - shared_libs = shared_libs, + shared_libs = solib_as_dict, shared_link_infos = library_outputs.link_infos.get(LibOutputStyle("shared_lib")), deps = exported_deps + non_exported_deps, ), diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 86661ab90..9081c44c3 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -45,10 +45,6 @@ load( "get_linkable_graph_node_map_func", "get_transitive_deps", ) -load( - "@prelude//linking:shared_libraries.bzl", - "create_shared_libraries", -) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") @@ -878,10 +874,7 @@ def create_link_groups( linked_link_groups[link_group_spec.group.name] = _LinkedLinkGroup( artifact = link_group_lib, library = None if not link_group_spec.is_shared_lib else LinkGroupLib( - shared_libs = create_shared_libraries( - ctx = ctx, - libraries = {link_group_spec.name: link_group_lib}, - ), + shared_libs = {link_group_spec.name: link_group_lib}, shared_link_infos = LinkInfos( default = wrap_link_info( link_info, diff --git a/prelude/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl index 283ad6239..15e2f52f2 100644 --- a/prelude/cxx/omnibus.bzl +++ b/prelude/cxx/omnibus.bzl @@ -37,10 +37,6 @@ load( "linkable_deps", "linkable_graph", ) -load( - "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", -) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:expect.bzl", "expect") load( @@ 
-122,7 +118,7 @@ OmnibusRootProduct = record( # The result of the omnibus link. OmnibusSharedLibraries = record( omnibus = field([CxxLinkResult, None], None), - libraries = field(list[SharedLibrary], []), + libraries = field(dict[str, LinkedObject], {}), roots = field(dict[Label, OmnibusRootProduct], {}), exclusion_roots = field(list[Label]), excluded = field(list[Label]), @@ -516,9 +512,9 @@ def _create_omnibus( root_products.values(), # ... and the shared libs from excluded nodes. [ - shared_lib.lib.output + shared_lib.output for label in spec.excluded - for shared_lib in spec.link_infos[label].shared_libs.libraries + for shared_lib in spec.link_infos[label].shared_libs.values() ], # Extract explicit global symbol names from flags in all body link args. global_symbols_link_args, @@ -689,7 +685,7 @@ def create_omnibus_libraries( # Create dummy omnibus dummy_omnibus = create_dummy_omnibus(ctx, extra_ldflags) - libraries = [] + libraries = {} root_products = {} # Link all root nodes against the dummy libomnibus lib. @@ -708,13 +704,7 @@ def create_omnibus_libraries( allow_cache_upload = True, ) if root.name != None: - libraries.append( - SharedLibrary( - soname = root.name, - lib = product.shared_library, - label = label, - ), - ) + libraries[root.name] = product.shared_library root_products[label] = product # If we have body nodes, then link them into the monolithic libomnibus.so. @@ -729,17 +719,12 @@ def create_omnibus_libraries( prefer_stripped_objects, allow_cache_upload = True, ) - libraries.append( - SharedLibrary( - soname = _omnibus_soname(ctx), - lib = omnibus.linked_object, - label = ctx.label, - ), - ) + libraries[_omnibus_soname(ctx)] = omnibus.linked_object # For all excluded nodes, just add their regular shared libs. 
for label in spec.excluded: - libraries.extend(spec.link_infos[label].shared_libs.libraries) + for name, lib in spec.link_infos[label].shared_libs.items(): + libraries[name] = lib return OmnibusSharedLibraries( omnibus = omnibus, diff --git a/prelude/cxx/prebuilt_cxx_library_group.bzl b/prelude/cxx/prebuilt_cxx_library_group.bzl index 2effc957f..2cfbdae5f 100644 --- a/prelude/cxx/prebuilt_cxx_library_group.bzl +++ b/prelude/cxx/prebuilt_cxx_library_group.bzl @@ -335,10 +335,9 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: )) # Propagate shared libraries up the tree. - shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - shared_libs, + create_shared_libraries(ctx, solibs), filter(None, [x.get(SharedLibraryInfo) for x in deps + exported_deps]), )) @@ -353,7 +352,7 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: exported_deps = exported_deps, preferred_linkage = preferred_linkage, link_infos = libraries, - shared_libs = shared_libs, + shared_libs = solibs, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, # TODO(cjhopman): this should be set to non-None default_soname = None, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 8bcd786a9..b87e09f36 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -128,6 +128,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", "SharedLibraryInfo", "create_shared_libraries", "create_shlib_symlink_tree", @@ -341,7 +342,6 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: solibs = {} for soname, lib in ctx.attrs.shared_libs.items(): solibs[soname] = LinkedObject(output = lib, unstripped_output = lib) - shared_libs = create_shared_libraries(ctx, solibs) linkable_graph = create_linkable_graph( ctx, @@ -351,7 +351,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx = 
ctx, exported_deps = ctx.attrs.deps, link_infos = {_to_lib_output_style(s): v for s, v in link_infos.items()}, - shared_libs = shared_libs, + shared_libs = solibs, default_soname = None, ), ), @@ -369,7 +369,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: cxx_merge_cpreprocessors(ctx, [own_pp_info], inherited_pp_info), merge_shared_libraries( ctx.actions, - shared_libs, + create_shared_libraries(ctx, solibs), shared_library_infos, ), merge_link_group_lib_info(deps = ctx.attrs.deps), @@ -820,7 +820,6 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: preferred_linkage, pic_behavior, ) - shared_libs = create_shared_libraries(ctx, solibs) # TODO(cjhopman): this haskell implementation does not consistently handle LibOutputStyle # and LinkStrategy as expected and it's hard to tell what the intent of the existing code is @@ -863,7 +862,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: preferred_linkage = preferred_linkage, exported_deps = ctx.attrs.deps, link_infos = {_to_lib_output_style(s): v for s, v in link_infos.items()}, - shared_libs = shared_libs, + shared_libs = solibs, # TODO(cjhopman): this should be set to non-None default_soname = None, ), @@ -909,7 +908,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: cxx_merge_cpreprocessors(ctx, pp, inherited_pp_info), merge_shared_libraries( ctx.actions, - shared_libs, + create_shared_libraries(ctx, solibs), shared_library_infos, ), haskell_haddock_lib(ctx, pkgname), @@ -1126,7 +1125,8 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - sos.extend(link_group_lib.shared_libs.libraries) + for soname, lib in link_group_lib.shared_libs.items(): + sos.append(SharedLibrary(soname = soname, lib = lib, label = ctx.label)) else: nlis = [] diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index d65330f43..c4e1bb03b 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -286,8 +286,8 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: shared_li = node.link_infos.get(output_style, None) if shared_li != None: tp_deps_shared_link_infos[node_label] = shared_li.default - for shlib in node.shared_libs.libraries: - so_symlinks[shlib.soname] = shlib.lib.output + for libname, linkObject in node.shared_libs.items(): + so_symlinks[libname] = linkObject.output # Create symlinks to the TP dependencies' SOs so_symlinks_root_path = ctx.label.name + ".so-symlinks" diff --git a/prelude/linking/link_groups.bzl b/prelude/linking/link_groups.bzl index a3fb2e1c5..6d6bdbef3 100644 --- a/prelude/linking/link_groups.bzl +++ b/prelude/linking/link_groups.bzl @@ -12,10 +12,7 @@ load( load( ":link_info.bzl", "LinkInfos", -) -load( - ":shared_libraries.bzl", - "SharedLibraries", + "LinkedObject", ) # Information about a linkable node which explicitly sets `link_group`. @@ -23,7 +20,7 @@ LinkGroupLib = record( # The label of the owning target (if any). label = field([Label, None], None), # The shared libs to package for this link group. - shared_libs = field(SharedLibraries), + shared_libs = field(dict[str, LinkedObject]), # The link info to link against this link group. 
shared_link_infos = field(LinkInfos), ) @@ -51,7 +48,7 @@ def gather_link_group_libs( def merge_link_group_lib_info( label: [Label, None] = None, name: [str, None] = None, - shared_libs: [SharedLibraries, None] = None, + shared_libs: [dict[str, LinkedObject], None] = None, shared_link_infos: [LinkInfos, None] = None, deps: list[Dependency] = [], children: list[LinkGroupLibInfo] = []) -> LinkGroupLibInfo: diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index 0f70b1d98..c47b5d035 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -20,16 +20,13 @@ load( "LinkInfo", # @unused Used as a type "LinkInfos", "LinkStrategy", + "LinkedObject", "LinkerFlags", "MergedLinkInfo", "get_lib_output_style", "get_output_styles_for_linkage", _get_link_info = "get_link_info", ) -load( - ":shared_libraries.bzl", - "SharedLibraries", -) # A provider with information used to link a rule into a shared library. # Potential omnibus roots must provide this so that omnibus can link them @@ -77,7 +74,7 @@ LinkableNode = record( # Shared libraries provided by this target. Used if this target is # excluded. - shared_libs = field(SharedLibraries, SharedLibraries(libraries = [])), + shared_libs = field(dict[str, LinkedObject], {}), # The soname this node would use in default link strategies. May be used by non-default # link strategies as a lib's soname. 
@@ -146,7 +143,7 @@ def create_linkable_node( deps: list[Dependency | LinkableGraph] = [], exported_deps: list[Dependency | LinkableGraph] = [], link_infos: dict[LibOutputStyle, LinkInfos] = {}, - shared_libs: SharedLibraries = SharedLibraries(libraries = []), + shared_libs: dict[str, LinkedObject] = {}, can_be_asset: bool = True, include_in_android_mergemap: bool = True, linker_flags: [LinkerFlags, None] = None, diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index cf4f9cab3..93436837e 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -497,7 +497,7 @@ def _convert_python_library_to_executable( dest: (omnibus_libs.roots[label].shared_library, label) for dest, (_, label) in extensions.items() } - native_libs = {shlib.soname: shlib.lib for shlib in omnibus_libs.libraries} + native_libs = omnibus_libs.libraries omnibus_providers = [] diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 2364a5b6d..72b05f635 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -40,6 +40,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", "merge_shared_libraries", "traverse_shared_library_info", ) @@ -178,7 +179,10 @@ def _rust_binary_common( # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - shared_libs.extend(link_group_lib.shared_libs.libraries) + shared_libs.extend([ + SharedLibrary(soname = name, lib = lib, label = ctx.label) + for name, lib in link_group_lib.shared_libs.items() + ]) # link groups shared libraries link args are directly added to the link command, # we don't have to add them here diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 89917b34c..885286b4b 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -851,10 +851,9 @@ def _native_providers( ) # Native shared library provider. - shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - shared_libs, + create_shared_libraries(ctx, solibs), inherited_shlibs, )) @@ -891,7 +890,7 @@ def _native_providers( deps = inherited_link_graphs, exported_deps = inherited_exported_deps, link_infos = link_infos, - shared_libs = shared_libs, + shared_libs = solibs, default_soname = shlib_name, include_in_android_mergemap = False, ), From dc821598b58d8d44205bc9e7c3698eccf239ba69 Mon Sep 17 00:00:00 2001 From: Wyman Zhao Date: Fri, 22 Mar 2024 18:18:03 -0700 Subject: [PATCH 0566/1133] Revert D54964964: Represent shared libs as a list instead of dict Differential Revision: D54964964 Original commit changeset: 03dbd1401c99 Original Phabricator Diff: D54964964 fbshipit-source-id: 6d8053c0fded1d7d1f5a5abda0ac41be0a9d6430 --- .../android_binary_native_library_rules.bzl | 5 +- prelude/android/voltron.bzl | 6 +- prelude/cxx/cxx_executable.bzl | 49 ++++---- prelude/cxx/cxx_link_utility.bzl | 23 ++-- prelude/cxx/cxx_types.bzl | 2 +- prelude/go/link.bzl | 4 +- prelude/haskell/haskell.bzl | 20 ++-- prelude/haskell/haskell_ghci.bzl | 7 +- prelude/java/java_binary.bzl | 8 +- prelude/java/java_test.bzl | 16 +-- prelude/julia/julia_binary.bzl | 16 +-- prelude/linking/shared_libraries.bzl | 105 ++++++++---------- prelude/python/python_binary.bzl | 14 +-- 
prelude/rust/build.bzl | 5 +- prelude/rust/rust_binary.bzl | 38 +++---- 15 files changed, 138 insertions(+), 180 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 33dc0e55a..e7a07bf5d 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -57,7 +57,6 @@ load( "get_strip_non_global_flags", "merge_shared_libraries", "traverse_shared_library_info", - "with_unique_sonames", ) load("@prelude//linking:strip.bzl", "strip_object") load("@prelude//linking:types.bzl", "Linkage") @@ -871,8 +870,8 @@ def get_default_shared_libs(ctx: AnalysisContext, deps: list[Dependency], shared deps = filter(None, [x.get(SharedLibraryInfo) for x in deps]), ) return { - soname: shared_lib - for soname, shared_lib in with_unique_sonames(traverse_shared_library_info(shared_library_info)).items() + so_name: shared_lib + for so_name, shared_lib in traverse_shared_library_info(shared_library_info).items() if not (shared_libraries_to_exclude and shared_libraries_to_exclude.contains(shared_lib.label.raw_target())) } diff --git a/prelude/android/voltron.bzl b/prelude/android/voltron.bzl index 288194878..d6622c261 100644 --- a/prelude/android/voltron.bzl +++ b/prelude/android/voltron.bzl @@ -68,7 +68,7 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: ctx.actions, ctx.label, [android_packageable_info], - traversed_shared_library_info, + traversed_shared_library_info.values(), ctx.attrs._android_toolchain[AndroidToolchainInfo], ctx.attrs.application_module_configs, ctx.attrs.application_module_dependencies, @@ -86,7 +86,7 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: ]).hidden(targets_to_jars_args) if ctx.attrs.should_include_libraries: - targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), shared_lib.soname], delimiter = " ") for shared_lib in 
traversed_shared_library_info] + targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), so_name], delimiter = " ") for so_name, shared_lib in traversed_shared_library_info.items()] targets_to_so_names = ctx.actions.write("targets_to_so_names.txt", targets_to_so_names_args) cmd.add([ "--targets-to-so-names", @@ -121,7 +121,7 @@ def get_target_to_module_mapping(ctx: AnalysisContext, deps_by_platform: dict[st ctx.actions, deps = filter(None, [x.get(SharedLibraryInfo) for x in deps]), ) - shared_libraries.extend(traverse_shared_library_info(shared_library_info)) + shared_libraries.extend(traverse_shared_library_info(shared_library_info).values()) cmd, output = _get_base_cmd_and_output( ctx.actions, diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 683f3ecfa..ee9c4184b 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -69,7 +69,6 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", # @unused Used as a type "merge_shared_libraries", "traverse_shared_library_info", ) @@ -178,7 +177,7 @@ CxxExecutableOutput = record( # materialized when this executable is the output of a build, not when it is # used by other rules. They become other_outputs on DefaultInfo. external_debug_info_artifacts = list[TransitiveSetArgsProjection], - shared_libs = list[SharedLibrary], + shared_libs = dict[str, LinkedObject], # All link group links that were generated in the executable. auto_link_groups = field(dict[str, LinkedObject], {}), compilation_db = CxxCompilationDbInfo, @@ -419,10 +418,11 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, sub_targets[LINK_GROUP_MAP_DATABASE_SUB_TARGET] = [get_link_group_map_json(ctx, filtered_targets)] # Set up shared libraries symlink tree only when needed - shared_libs = [] + shared_libs = {} # Add in extra, rule-specific shared libs. 
- shared_libs.extend(impl_params.extra_shared_libs) + for name, shlib in impl_params.extra_shared_libs.items(): + shared_libs[name] = shlib.lib # Only setup a shared library symlink tree when shared linkage or link_groups is used gnu_use_link_groups = cxx_is_gnu(ctx) and link_group_mappings @@ -442,18 +442,17 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, labels_to_links_map = labels_to_links_map, ) - for shlib in traverse_shared_library_info(shlib_info): - if not gnu_use_link_groups or is_link_group_shlib(shlib.label, link_group_ctx): - shared_libs.append(shlib) + def shlib_filter(_name, shared_lib): + return not gnu_use_link_groups or is_link_group_shlib(shared_lib.label, link_group_ctx) + + for name, shared_lib in traverse_shared_library_info(shlib_info, filter_func = shlib_filter).items(): + shared_libs[name] = shared_lib.lib if gnu_use_link_groups: # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. for _name, link_group_lib in link_group_libs.items(): - shared_libs.extend([ - SharedLibrary(soname = name, lib = lib, label = ctx.label) - for name, lib in link_group_lib.shared_libs.items() - ]) + shared_libs.update(link_group_lib.shared_libs) toolchain_info = get_cxx_toolchain_info(ctx) linker_info = toolchain_info.linker_info @@ -491,7 +490,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, ctx, # If shlib lib tree generation is enabled, pass in the shared libs (which # will trigger the necessary link tree and link args). 
- shared_libs if impl_params.exe_shared_libs_link_tree else [], + shared_libs if impl_params.exe_shared_libs_link_tree else {}, impl_params.executable_name, linker_info.binary_extension, link_options( @@ -531,29 +530,29 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, sub_targets["rpath-tree"] = [DefaultInfo( default_output = shared_libs_symlink_tree, other_outputs = [ - shlib.lib.output - for shlib in shared_libs + lib.output + for lib in shared_libs.values() ] + [ - shlib.lib.dwp - for shlib in shared_libs - if shlib.lib.dwp + lib.dwp + for lib in shared_libs.values() + if lib.dwp ], )] sub_targets["shared-libraries"] = [DefaultInfo( default_output = ctx.actions.write_json( binary.output.basename + ".shared-libraries.json", { - "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs], - "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs if shlib.lib.dwp], + "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, name) for name in shared_libs.keys()], + "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, name) for name, lib in shared_libs.items() if lib.dwp], "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if shared_libs_symlink_tree else [], }, ), sub_targets = { - shlib.soname: [DefaultInfo( - default_output = shlib.lib.output, - sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, + name: [DefaultInfo( + default_output = lib.output, + sub_targets = {"dwp": [DefaultInfo(default_output = lib.dwp)]} if lib.dwp else {}, )] - for shlib in shared_libs + for name, lib in shared_libs.items() }, )] if link_group_mappings: @@ -641,7 +640,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, actions = ctx.actions, children = ( 
[binary.external_debug_info] + - [s.lib.external_debug_info for s in shared_libs] + + [s.external_debug_info for s in shared_libs.values()] + impl_params.additional.static_external_debug_info ), ) @@ -699,7 +698,7 @@ _CxxLinkExecutableResult = record( def _link_into_executable( ctx: AnalysisContext, - shared_libs: list[SharedLibrary], + shared_libs: dict[str, LinkedObject], executable_name: [str, None], binary_extension: str, opts: LinkOptions) -> _CxxLinkExecutableResult: diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index 079eeab99..320339cd5 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -14,15 +14,11 @@ load( "@prelude//linking:link_info.bzl", "LinkArgs", "LinkOrdering", # @unused Used as a type + "LinkedObject", # @unused Used as a type "unpack_link_args", "unpack_link_args_filelist", ) load("@prelude//linking:lto.bzl", "LtoMode") -load( - "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", # @unused Used as a type - "create_shlib_symlink_tree", -) load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type def generates_split_debug(toolchain: CxxToolchainInfo): @@ -217,7 +213,7 @@ def executable_shared_lib_arguments( ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, output: Artifact, - shared_libs: list[SharedLibrary]) -> ExecutableSharedLibArguments: + shared_libs: dict[str, LinkedObject]) -> ExecutableSharedLibArguments: extra_link_args = [] runtime_files = [] shared_libs_symlink_tree = None @@ -226,7 +222,7 @@ def executable_shared_lib_arguments( # of a build. Do not add to runtime_files. 
external_debug_info = project_artifacts( actions = ctx.actions, - tsets = [shlib.lib.external_debug_info for shlib in shared_libs], + tsets = [shlib.external_debug_info for shlib in shared_libs.values()], ) linker_type = cxx_toolchain.linker_info.type @@ -234,17 +230,16 @@ def executable_shared_lib_arguments( if len(shared_libs) > 0: if linker_type == "windows": shared_libs_symlink_tree = [ctx.actions.symlink_file( - shlib.lib.output.basename, - shlib.lib.output, - ) for shlib in shared_libs] + shlib.output.basename, + shlib.output, + ) for _, shlib in shared_libs.items()] runtime_files.extend(shared_libs_symlink_tree) # Windows doesn't support rpath. else: - shared_libs_symlink_tree = create_shlib_symlink_tree( - actions = ctx.actions, - out = shared_libs_symlink_tree_name(output), - shared_libs = shared_libs, + shared_libs_symlink_tree = ctx.actions.symlinked_dir( + shared_libs_symlink_tree_name(output), + {name: shlib.output for name, shlib in shared_libs.items()}, ) runtime_files.append(shared_libs_symlink_tree) rpath_reference = get_rpath_origin(linker_type) diff --git a/prelude/cxx/cxx_types.bzl b/prelude/cxx/cxx_types.bzl index e9e814b32..e6ba9f3a5 100644 --- a/prelude/cxx/cxx_types.bzl +++ b/prelude/cxx/cxx_types.bzl @@ -178,7 +178,7 @@ CxxRuleConstructorParams = record( # shared libs to include in the symlink tree). extra_link_roots = field(list[LinkableProviders], []), # Additional shared libs to "package". - extra_shared_libs = field(list[SharedLibrary], []), + extra_shared_libs = field(dict[str, SharedLibrary], {}), auto_link_group_specs = field([list[LinkGroupLibSpec], None], None), link_group_info = field([LinkGroupInfo, None], None), # Whether to use pre-stripped objects when linking. 
diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index fb57250d5..600c5d51f 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -79,7 +79,9 @@ def _process_shared_dependencies( ctx.actions, deps = filter(None, map_idx(SharedLibraryInfo, deps)), ) - shared_libs = traverse_shared_library_info(shlib_info) + shared_libs = {} + for name, shared_lib in traverse_shared_library_info(shlib_info).items(): + shared_libs[name] = shared_lib.lib return executable_shared_lib_arguments( ctx, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index b87e09f36..2a105b645 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -128,10 +128,8 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", "SharedLibraryInfo", "create_shared_libraries", - "create_shlib_symlink_tree", "merge_shared_libraries", "traverse_shared_library_info", ) @@ -1022,7 +1020,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: deps = slis, ) - sos = [] + sos = {} if link_group_info != None: own_binary_link_flags = [] @@ -1117,16 +1115,15 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: labels_to_links_map = labels_to_links_map, ) - for shared_lib in traverse_shared_library_info(shlib_info): + for name, shared_lib in traverse_shared_library_info(shlib_info).items(): label = shared_lib.label if is_link_group_shlib(label, link_group_ctx): - sos.append(shared_lib) + sos[name] = shared_lib.lib # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - for soname, lib in link_group_lib.shared_libs.items(): - sos.append(SharedLibrary(soname = soname, lib = lib, label = ctx.label)) + sos.update(link_group_lib.shared_libs) else: nlis = [] @@ -1139,7 +1136,8 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: li = lib.get(MergedLinkInfo) if li != None: nlis.append(li) - sos.extend(traverse_shared_library_info(shlib_info)) + for name, shared_lib in traverse_shared_library_info(shlib_info).items(): + sos[name] = shared_lib.lib infos = get_link_args_for_strategy(ctx, nlis, to_link_strategy(link_style)) link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) @@ -1153,11 +1151,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: rpath_ref = get_rpath_origin(get_cxx_toolchain_info(ctx).linker_info.type) rpath_ldflag = "-Wl,{}/{}".format(rpath_ref, sos_dir) link.add("-optl", "-Wl,-rpath", "-optl", rpath_ldflag) - symlink_dir = create_shlib_symlink_tree( - actions = ctx.actions, - out = sos_dir, - shared_libs = sos, - ) + symlink_dir = ctx.actions.symlinked_dir(sos_dir, {n: o.output for n, o in sos.items()}) run.hidden(symlink_dir) providers = [ diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index c4e1bb03b..faea3eb2f 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -49,7 +49,6 @@ load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "traverse_shared_library_info", - "with_unique_sonames", ) load("@prelude//linking:types.bzl", "Linkage") load( @@ -481,10 +480,10 @@ def _build_preload_deps_root( if SharedLibraryInfo in preload_dep: slib_info = preload_dep[SharedLibraryInfo] - shlib = traverse_shared_library_info(slib_info) + shlib = traverse_shared_library_info(slib_info).items() - for soname, shared_lib in with_unique_sonames(shlib).items(): - preload_symlinks[soname] = shared_lib.lib.output + for shlib_name, shared_lib in shlib: + 
preload_symlinks[shlib_name] = shared_lib.lib.output # TODO(T150785851): build or get SO for direct preload_deps # TODO(T150785851): find out why the only SOs missing are the ones from diff --git a/prelude/java/java_binary.bzl b/prelude/java/java_binary.bzl index d11069b35..d80498ef9 100644 --- a/prelude/java/java_binary.bzl +++ b/prelude/java/java_binary.bzl @@ -22,7 +22,7 @@ load( "get_java_packaging_info", ) -def _generate_script(generate_wrapper: bool, native_libs: list[SharedLibrary]) -> bool: +def _generate_script(generate_wrapper: bool, native_libs: dict[str, SharedLibrary]) -> bool: # if `generate_wrapper` is set and no native libs then it should be a wrapper script as result, # otherwise fat jar will be generated (inner jar or script will be included inside a final fat jar) return generate_wrapper and len(native_libs) == 0 @@ -31,7 +31,7 @@ def _create_fat_jar( ctx: AnalysisContext, java_toolchain: JavaToolchainInfo, jars: cmd_args, - native_libs: list[SharedLibrary], + native_libs: dict[str, SharedLibrary], do_not_create_inner_jar: bool, generate_wrapper: bool) -> list[Artifact]: extension = "sh" if _generate_script(generate_wrapper, native_libs) else "jar" @@ -55,7 +55,7 @@ def _create_fat_jar( ) args += [ "--native_libs_file", - ctx.actions.write("native_libs", [cmd_args([native_lib.soname, native_lib.lib.output], delimiter = " ") for native_lib in native_libs]), + ctx.actions.write("native_libs", [cmd_args([so_name, native_lib.lib.output], delimiter = " ") for so_name, native_lib in native_libs.items()]), ] if do_not_create_inner_jar: args += [ @@ -107,7 +107,7 @@ def _create_fat_jar( outputs.append(classpath_args_output) fat_jar_cmd = cmd_args(args) - fat_jar_cmd.hidden(jars, [native_lib.lib.output for native_lib in native_libs]) + fat_jar_cmd.hidden(jars, [native_lib.lib.output for native_lib in native_libs.values()]) ctx.actions.run( fat_jar_cmd, diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index a61fc0cb7..697a0dccc 100644 
--- a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -14,13 +14,7 @@ load("@prelude//java:java_library.bzl", "build_java_library") load("@prelude//java:java_providers.bzl", "JavaLibraryInfo", "JavaPackagingInfo", "get_all_java_packaging_deps_tset") load("@prelude//java:java_toolchain.bzl", "JavaTestToolchainInfo", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") -load( - "@prelude//linking:shared_libraries.bzl", - "SharedLibraryInfo", - "create_shlib_symlink_tree", - "merge_shared_libraries", - "traverse_shared_library_info", -) +load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "merge_shared_libraries", "traverse_shared_library_info") load( "@prelude//tests:re_utils.bzl", "get_re_executors_from_props", @@ -188,10 +182,8 @@ def _get_native_libs_env(ctx: AnalysisContext) -> dict: deps = shared_library_infos, ) - cxx_library_symlink_tree = create_shlib_symlink_tree( - actions = ctx.actions, - out = "cxx_library_symlink_tree", - shared_libs = traverse_shared_library_info(shared_library_info), - ) + native_linkables = traverse_shared_library_info(shared_library_info) + cxx_library_symlink_tree_dict = {so_name: shared_lib.lib.output for so_name, shared_lib in native_linkables.items()} + cxx_library_symlink_tree = ctx.actions.symlinked_dir("cxx_library_symlink_tree", cxx_library_symlink_tree_dict) return {"BUCK_LD_SYMLINK_TREE": cxx_library_symlink_tree} diff --git a/prelude/julia/julia_binary.bzl b/prelude/julia/julia_binary.bzl index 0aeb2a016..c1cedffee 100644 --- a/prelude/julia/julia_binary.bzl +++ b/prelude/julia/julia_binary.bzl @@ -5,12 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load( - "@prelude//linking:shared_libraries.bzl", - "create_shlib_symlink_tree", - "merge_shared_libraries", - "traverse_shared_library_info", -) +load("@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info") load("@prelude//utils:utils.bzl", "flatten") load(":julia_info.bzl", "JuliaLibraryInfo", "JuliaLibraryTSet", "JuliaToolchainInfo") @@ -52,13 +47,12 @@ def build_jll_shlibs_mapping(ctx: AnalysisContext, json_info_file: Artifact): filter(None, [d.shared_library_info for d in deps]), )) - shared_libs_symlink_tree = create_shlib_symlink_tree( - actions = ctx.actions, - out = "__shared_libs_symlink_tree__", - shared_libs = shlibs, + shared_libs_symlink_tree = ctx.actions.symlinked_dir( + "__shared_libs_symlink_tree__", + {name: shlib.lib.output for name, shlib in shlibs.items()}, ) - shlib_label_to_soname = {shlib.label: shlib.soname for shlib in shlibs} + shlib_label_to_soname = {shlib.label: name for name, shlib in shlibs.items()} # iterate through all the jll libraries json_info = [] diff --git a/prelude/linking/shared_libraries.bzl b/prelude/linking/shared_libraries.bzl index a93310fa8..7d95e5345 100644 --- a/prelude/linking/shared_libraries.bzl +++ b/prelude/linking/shared_libraries.bzl @@ -19,13 +19,13 @@ SharedLibrary = record( # for downstream rules to reproduce the shared library with some modifications (for example # android relinker will link again with an added version script argument). # TODO(cjhopman): This is currently always available. - link_args = field(list[LinkArgs] | None, None), + link_args = field(list[LinkArgs] | None), # The sonames of the shared libraries that this links against. # TODO(cjhopman): This is currently always available. 
- shlib_deps = field(list[str] | None, None), - stripped_lib = field(Artifact | None, None), - can_be_asset = field(bool, False), - for_primary_apk = field(bool, False), + shlib_deps = field(list[str] | None), + stripped_lib = field([Artifact, None]), + can_be_asset = field(bool), + for_primary_apk = field(bool), soname = field(str), label = field(Label), ) @@ -35,7 +35,7 @@ SharedLibraries = record( # Since the SONAME is what the dynamic loader uses to uniquely identify # libraries, using this as the key allows easily detecting conflicts from # dependencies. - libraries = field(list[SharedLibrary]), + libraries = field(dict[str, SharedLibrary]), ) # T-set of SharedLibraries @@ -62,7 +62,7 @@ def create_shared_libraries( """ cxx_toolchain = getattr(ctx.attrs, "_cxx_toolchain", None) return SharedLibraries( - libraries = [SharedLibrary( + libraries = {name: SharedLibrary( lib = shlib, stripped_lib = strip_object( ctx, @@ -76,9 +76,41 @@ def create_shared_libraries( for_primary_apk = getattr(ctx.attrs, "used_by_wrap_script", False), label = ctx.label, soname = name, - ) for (name, shlib) in libraries.items()], + ) for (name, shlib) in libraries.items()}, ) +# We do a lot of merging library maps, so don't use O(n) type annotations +def _merge_lib_map( + # dict[str, SharedLibrary] + dest_mapping, + # [dict[str, SharedLibrary] + mapping_to_merge, + filter_func) -> None: + """ + Merges a mapping_to_merge into `dest_mapping`. Fails if different libraries + map to the same name. + """ + for (name, src) in mapping_to_merge.items(): + if filter_func != None and not filter_func(name, src): + continue + existing = dest_mapping.get(name) + if existing != None and existing.lib != src.lib: + error = ( + "Duplicate library {}! Conflicting mappings:\n" + + "{} from {}\n" + + "{} from {}" + ) + fail( + error.format( + name, + existing.lib, + existing.label, + src.lib, + src.label, + ), + ) + dest_mapping[name] = src + # Merge multiple SharedLibraryInfo. 
The value in `node` represents a set of # SharedLibraries that is provided by the target being analyzed. It's optional # because that might not always exist, e.g. a Python library can pass through @@ -99,58 +131,11 @@ def merge_shared_libraries( set = actions.tset(SharedLibrariesTSet, **kwargs) if kwargs else None return SharedLibraryInfo(set = set) -def traverse_shared_library_info(info: SharedLibraryInfo): # -> list[SharedLibrary]: - libraries = [] +def traverse_shared_library_info( + info: SharedLibraryInfo, + filter_func = None): # -> dict[str, SharedLibrary]: + libraries = {} if info.set: for libs in info.set.traverse(): - libraries.extend(libs.libraries) + _merge_lib_map(libraries, libs.libraries, filter_func) return libraries - -# Helper to merge shlibs, throwing an error if more than one have the same SONAME. -def _merge_shlibs( - shared_libs: list[SharedLibrary], - resolve_soname: typing.Callable) -> dict[str, SharedLibrary]: - merged = {} - for shlib in shared_libs: - soname = resolve_soname(shlib.soname) - existing = merged.get(soname) - if existing != None and existing.lib != shlib.lib: - error = ( - "Duplicate library {}! Conflicting mappings:\n" + - "{} from {}\n" + - "{} from {}" - ) - fail( - error.format( - shlib.soname, - existing.lib, - existing.label, - shlib.lib, - shlib.label, - ), - ) - merged[soname] = shlib - return merged - -def with_unique_sonames(shared_libs: list[SharedLibrary]) -> dict[str, SharedLibrary]: - """ - Convert a list of `SharedLibrary`s to a map of unique SONAMEs to the - corresponding `SharedLibrary`. - - Will fail if the same SONAME maps to multiple `SharedLibrary`s. - """ - return _merge_shlibs( - shared_libs = shared_libs, - resolve_soname = lambda s: s, - ) - -def create_shlib_symlink_tree(actions: AnalysisActions, out: str, shared_libs: list[SharedLibrary]): - """ - Merged shared libs into a symlink tree mapping the library's SONAME to - it's artifact. 
- """ - merged = with_unique_sonames(shared_libs = shared_libs) - return actions.symlinked_dir( - out, - {name: shlib.lib.output for name, shlib in merged.items()}, - ) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 93436837e..9d3ec9562 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -455,8 +455,8 @@ def _convert_python_library_to_executable( # Convert preloaded deps to a set of their names to be loaded by. preload_labels = {d.label: None for d in ctx.attrs.preload_deps} preload_names = { - shared_lib.soname: None - for shared_lib in library.shared_libraries() + name: None + for name, shared_lib in library.shared_libraries().items() if shared_lib.label in preload_labels } @@ -627,8 +627,8 @@ def _convert_python_library_to_executable( extra["native-executable"] = [DefaultInfo(default_output = executable_info.binary, sub_targets = executable_info.sub_targets)] # Add sub-targets for libs. - for shlib in executable_info.shared_libs: - extra[shlib.soname] = [DefaultInfo(default_output = shlib.lib.output)] + for name, lib in executable_info.shared_libs.items(): + extra[name] = [DefaultInfo(default_output = lib.output)] for name, group in executable_info.auto_link_groups.items(): extra[name] = [DefaultInfo(default_output = group.output)] @@ -645,8 +645,8 @@ def _convert_python_library_to_executable( # Put native libraries into the runtime location, as we need to unpack # potentially all of them before startup. 
native_libs = { - paths.join("runtime", "lib", shlib.soname): shlib.lib - for shlib in executable_info.shared_libs + paths.join("runtime", "lib", name): lib + for name, lib in executable_info.shared_libs.items() } preload_names = [paths.join("runtime", "lib", n) for n in preload_names] @@ -660,7 +660,7 @@ def _convert_python_library_to_executable( extra_artifacts["static_extension_finder.py"] = ctx.attrs.static_extension_finder else: - native_libs = {shared_lib.soname: shared_lib.lib for shared_lib in library.shared_libraries()} + native_libs = {name: shared_lib.lib for name, shared_lib in library.shared_libraries().items()} if dbg_source_db: extra_artifacts["dbg-db.json"] = dbg_source_db.default_outputs[0] diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index c94f41bc3..8c54768eb 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -315,13 +315,14 @@ def generate_rustdoc_test( ) # Gather and setup symlink tree of transitive shared library deps. - shared_libs = [] + shared_libs = {} if link_strategy == LinkStrategy("shared"): shlib_info = merge_shared_libraries( ctx.actions, deps = inherited_shared_libs(ctx, doc_dep_ctx), ) - shared_libs.extend(traverse_shared_library_info(shlib_info)) + for soname, shared_lib in traverse_shared_library_info(shlib_info).items(): + shared_libs[soname] = shared_lib.lib executable_args = executable_shared_lib_arguments( ctx, compile_ctx.cxx_toolchain_info, diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 72b05f635..62912c8e8 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -40,7 +40,6 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", "merge_shared_libraries", "traverse_shared_library_info", ) @@ -134,7 +133,7 @@ def _rust_binary_common( output = ctx.actions.declare_output(name) # Gather and setup symlink tree of transitive shared library deps. 
- shared_libs = [] + shared_libs = {} rust_cxx_link_group_info = None link_group_mappings = {} @@ -171,18 +170,17 @@ def _rust_binary_common( labels_to_links_map = labels_to_links_map, ) - for shlib in traverse_shared_library_info(shlib_info): - if not rust_cxx_link_group_info or is_link_group_shlib(shlib.label, link_group_ctx): - shared_libs.append(shlib) + def shlib_filter(_name, shared_lib): + return not rust_cxx_link_group_info or is_link_group_shlib(shared_lib.label, link_group_ctx) + + for soname, shared_lib in traverse_shared_library_info(shlib_info, filter_func = shlib_filter).items(): + shared_libs[soname] = shared_lib.lib if rust_cxx_link_group_info: # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. for _name, link_group_lib in link_group_libs.items(): - shared_libs.extend([ - SharedLibrary(soname = name, lib = lib, label = ctx.label) - for name, lib in link_group_lib.shared_libs.items() - ]) + shared_libs.update(link_group_lib.shared_libs) # link groups shared libraries link args are directly added to the link command, # we don't have to add them here @@ -241,17 +239,17 @@ def _rust_binary_common( default_output = ctx.actions.write_json( name + ".shared-libraries.json", { - "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs], - "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs if shlib.lib.dwp], + "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, name) for name in shared_libs.keys()], + "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, name) for name, lib in shared_libs.items() if lib.dwp], "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if executable_args.shared_libs_symlink_tree else [], }, ), sub_targets = { - shlib.soname: 
[DefaultInfo( - default_output = shlib.lib.output, - sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, + name: [DefaultInfo( + default_output = lib.output, + sub_targets = {"dwp": [DefaultInfo(default_output = lib.dwp)]} if lib.dwp else {}, )] - for shlib in shared_libs + for name, lib in shared_libs.items() }, )] @@ -259,12 +257,12 @@ def _rust_binary_common( sub_targets_for_link_strategy["rpath-tree"] = [DefaultInfo( default_output = executable_args.shared_libs_symlink_tree, other_outputs = [ - shlib.lib.output - for shlib in shared_libs + lib.output + for lib in shared_libs.values() ] + [ - shlib.lib.dwp - for shlib in shared_libs - if shlib.lib.dwp + lib.dwp + for lib in shared_libs.values() + if lib.dwp ], )] From fc7b955215a0a878df663301502bcc2f606d71bf Mon Sep 17 00:00:00 2001 From: Wyman Zhao Date: Fri, 22 Mar 2024 18:18:03 -0700 Subject: [PATCH 0567/1133] Revert D54987076: Simplify extra dep handling Differential Revision: D54987076 Original commit changeset: adde45ed7d2d Original Phabricator Diff: D54987076 fbshipit-source-id: 81da838f3a2c17375c809222b01348245ff47236 --- prelude/python/make_py_package.bzl | 38 +++++++++++++++--------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 141b70ce3..abda071b9 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -41,7 +41,7 @@ PexModules = record( # providers. 
PexProviders = record( default_output = field(Artifact), - other_outputs = list[ArgLike], + other_outputs = list[(ArgLike, str)], other_outputs_prefix = str | None, hidden_resources = list[ArgLike], sub_targets = dict[str, list[Provider]], @@ -59,7 +59,7 @@ def make_py_package_providers( def make_default_info(pex: PexProviders) -> Provider: return DefaultInfo( default_output = pex.default_output, - other_outputs = pex.other_outputs + pex.hidden_resources, + other_outputs = [a for a, _ in pex.other_outputs] + pex.hidden_resources, sub_targets = pex.sub_targets, ) @@ -208,8 +208,8 @@ def _make_py_package_impl( shared_libraries: bool, preload_libraries: cmd_args, common_modules_args: cmd_args, - dep_artifacts: list[ArgLike], - debug_artifacts: list[ArgLike], + dep_artifacts: list[(ArgLike, str)], + debug_artifacts: list[(ArgLike, str)], main: EntryPoint, hidden_resources: list[ArgLike] | None, manifest_module: ArgLike | None, @@ -275,7 +275,7 @@ def _make_py_package_impl( # For inplace builds add local artifacts to outputs so they get properly materialized runtime_files.extend(dep_artifacts) - runtime_files.append(symlink_tree_path) + runtime_files.append((symlink_tree_path, symlink_tree_path.short_path)) # For standalone builds, or builds setting make_py_package we generate args for calling make_par.py if standalone or make_py_package_cmd != None: @@ -330,12 +330,12 @@ def _make_py_package_impl( other_outputs_prefix = symlink_tree_path.short_path if symlink_tree_path != None else None, hidden_resources = hidden_resources, sub_targets = {}, - run_cmd = cmd_args(run_args).hidden(runtime_files + hidden_resources), + run_cmd = cmd_args(run_args).hidden([a for a, _ in runtime_files] + hidden_resources), ) -def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[ArgLike]) -> list[Provider]: +def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[(ArgLike, str)]) -> list[Provider]: out = ctx.actions.write_json("debuginfo.manifest.json", 
debug_artifacts) - return [DefaultInfo(default_output = out, other_outputs = debug_artifacts)] + return [DefaultInfo(default_output = out, other_outputs = [a for a, _ in debug_artifacts])] def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: dict[str, (LinkedObject, bool)]) -> cmd_args: preload_libraries_path = ctx.actions.write( @@ -390,7 +390,7 @@ def _pex_modules_common_args( pex_modules: PexModules, extra_manifests: list[ArgLike], shared_libraries: dict[str, LinkedObject], - debuginfo_files: list[Artifact]) -> (cmd_args, list[ArgLike], list[ArgLike]): + debuginfo_files: list[Artifact]) -> (cmd_args, list[(ArgLike, str)], list[(ArgLike, str)]): srcs = [] src_artifacts = [] deps = [] @@ -410,9 +410,9 @@ def _pex_modules_common_args( if extra_manifests: srcs.extend(extra_manifests) - deps.extend([a[0] for a in src_artifacts]) + deps.extend(src_artifacts) resources = pex_modules.manifests.resource_manifests() - deps.extend([a[0] for a in pex_modules.manifests.resource_artifacts_with_paths()]) + deps.extend(pex_modules.manifests.resource_artifacts_with_paths()) src_manifests_path = ctx.actions.write( "__src_manifests.txt", @@ -470,9 +470,9 @@ def _pex_modules_common_args( cmd.add(cmd_args(dwp_srcs_args, format = "@{}")) cmd.add(cmd_args(dwp_dests_path, format = "@{}")) - debug_artifacts.extend([d for d, _ in dwp]) + debug_artifacts.extend(dwp) - deps.extend([lib.output for lib in shared_libraries.values()]) + deps.extend([(lib.output, name) for name, lib in shared_libraries.items()]) external_debug_info = project_artifacts( ctx.actions, @@ -480,15 +480,15 @@ def _pex_modules_common_args( ) # HACK: external_debug_info has an empty path - debug_artifacts.extend(external_debug_info) + debug_artifacts.extend([(d, "") for d in external_debug_info]) return (cmd, deps, debug_artifacts) def _pex_modules_args( ctx: AnalysisContext, common_args: cmd_args, - dep_artifacts: list[ArgLike], - debug_artifacts: list[ArgLike], + dep_artifacts: list[(ArgLike, str)], + 
debug_artifacts: list[(ArgLike, str)], symlink_tree_path: Artifact | None, manifest_module: ArgLike | None, pex_modules: PexModules, @@ -508,7 +508,7 @@ def _pex_modules_args( if pex_modules.compile: pyc_mode = PycInvalidationMode("UNCHECKED_HASH") if symlink_tree_path == None else PycInvalidationMode("CHECKED_HASH") bytecode_manifests = pex_modules.manifests.bytecode_manifests(pyc_mode) - dep_artifacts.extend([a[0] for a in pex_modules.manifests.bytecode_artifacts_with_paths(pyc_mode)]) + dep_artifacts.extend(pex_modules.manifests.bytecode_artifacts_with_paths(pyc_mode)) bytecode_manifests_path = ctx.actions.write( "__bytecode_manifests{}.txt".format(output_suffix), @@ -525,9 +525,9 @@ def _pex_modules_args( else: # Accumulate all the artifacts we depend on. Only add them to the command # if we are not going to create symlinks. - cmd.hidden(dep_artifacts) + cmd.hidden([a for a, _ in dep_artifacts]) - cmd.hidden(debug_artifacts) + cmd.hidden([a for a, _ in debug_artifacts]) return cmd From 40feb7916d9601d207a75eceea276298054d2ab1 Mon Sep 17 00:00:00 2001 From: Wyman Zhao Date: Fri, 22 Mar 2024 18:18:03 -0700 Subject: [PATCH 0568/1133] Revert D54980209: Simplify debuginfo file handling Differential Revision: D54980209 Original commit changeset: b34989dbeaf5 Original Phabricator Diff: D54980209 fbshipit-source-id: 3dc1b444b312a959cac7d6cece1c87bf5a5e8113 --- prelude/python/make_py_package.bzl | 13 ++++++------- prelude/python/python_binary.bzl | 13 ++++++------- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index abda071b9..61db2f5cf 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -34,6 +34,7 @@ PexModules = record( manifests = field(PythonLibraryManifestsInterface), extensions = field(ManifestInfo | None, None), extra_manifests = field(ManifestInfo | None, None), + debuginfo_manifest = field(ManifestInfo | None, None), compile = 
field(bool, False), ) @@ -120,8 +121,7 @@ def make_py_package( shared_libraries: dict[str, (LinkedObject, bool)], main: EntryPoint, hidden_resources: list[ArgLike] | None, - allow_cache_upload: bool, - debuginfo_files: list[Artifact] = []) -> PexProviders: + allow_cache_upload: bool) -> PexProviders: """ Passes a standardized set of flags to a `make_py_package` binary to create a python "executable". @@ -152,7 +152,6 @@ def make_py_package( pex_modules, [startup_function] if startup_function else [], {name: lib for name, (lib, _) in shared_libraries.items()}, - debuginfo_files = debuginfo_files, ) default = _make_py_package_impl( @@ -389,8 +388,7 @@ def _pex_modules_common_args( ctx: AnalysisContext, pex_modules: PexModules, extra_manifests: list[ArgLike], - shared_libraries: dict[str, LinkedObject], - debuginfo_files: list[Artifact]) -> (cmd_args, list[(ArgLike, str)], list[(ArgLike, str)]): + shared_libraries: dict[str, LinkedObject]) -> (cmd_args, list[(ArgLike, str)], list[(ArgLike, str)]): srcs = [] src_artifacts = [] deps = [] @@ -443,10 +441,11 @@ def _pex_modules_common_args( cmd.add(cmd_args(native_library_srcs_args, format = "@{}")) cmd.add(cmd_args(native_library_dests_path, format = "@{}")) - if debuginfo_files: + if pex_modules.debuginfo_manifest: + debuginfo_files = pex_modules.debuginfo_manifest.artifacts debuginfo_srcs_path = ctx.actions.write( "__debuginfo___srcs.txt", - _srcs(debuginfo_files, format = "--debuginfo-src={}"), + _srcs([src for src, _ in debuginfo_files], format = "--debuginfo-src={}"), ) debuginfo_srcs_args = cmd_args(debuginfo_srcs_path) cmd.add(cmd_args(debuginfo_srcs_args, format = "@{}")) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 9d3ec9562..f085fc455 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -205,13 +205,13 @@ def _get_root_link_group_specs( return specs -def _split_debuginfo(ctx, data: dict[str, (typing.Any, Label | bool)]) -> (dict[str, 
(LinkedObject, Label | bool)], list[Artifact]): - debuginfo_artifacts = [] +def _split_debuginfo(ctx, data: dict[str, (typing.Any, Label | bool)]) -> (dict[str, (LinkedObject, Label | bool)], dict[str, Artifact]): + debuginfo_artifacts = {} transformed = {} for name, (artifact, extra) in data.items(): stripped_binary, debuginfo = strip_debug_with_gnu_debuglink(ctx, name, artifact.unstripped_output) transformed[name] = LinkedObject(output = stripped_binary, unstripped_output = artifact.unstripped_output, dwp = artifact.dwp), extra - debuginfo_artifacts.append(debuginfo) + debuginfo_artifacts[name + ".debuginfo"] = debuginfo return transformed, debuginfo_artifacts def _get_shared_only_groups(shared_only_libs: list[LinkableProviders]) -> list[Group]: @@ -671,6 +671,7 @@ def _convert_python_library_to_executable( extra_manifests = create_manifest_for_source_map(ctx, "extra_manifests", extra_artifacts) shared_libraries = {} + debuginfo_artifacts = {} # Create the map of native libraries to their artifacts and whether they # need to be preloaded. Note that we merge preload deps into regular deps @@ -681,17 +682,16 @@ def _convert_python_library_to_executable( # Strip native libraries and extensions and update the .gnu_debuglink references if we are extracting # debug symbols from the par - debuginfo_files = [] if ctx.attrs.strip_libpar == "extract" and package_style == PackageStyle("standalone") and cxx_is_gnu(ctx): shared_libraries, library_debuginfo = _split_debuginfo(ctx, shared_libraries) extensions, extension_debuginfo = _split_debuginfo(ctx, extensions) - debuginfo_files += library_debuginfo - debuginfo_files += extension_debuginfo + debuginfo_artifacts = library_debuginfo | extension_debuginfo # Combine sources and extensions into a map of all modules. 
pex_modules = PexModules( manifests = library.manifests(), extra_manifests = extra_manifests, + debuginfo_manifest = create_manifest_for_source_map(ctx, "debuginfo", debuginfo_artifacts) if debuginfo_artifacts else None, compile = compile, extensions = create_manifest_for_extensions( ctx, @@ -714,7 +714,6 @@ def _convert_python_library_to_executable( main, hidden_resources, allow_cache_upload, - debuginfo_files = debuginfo_files, ) pex.sub_targets.update(extra) From 533c6b9f219a3d94b3ce037177b2a912ebbf30f3 Mon Sep 17 00:00:00 2001 From: Zsolt Dollenstein Date: Fri, 22 Mar 2024 18:47:29 -0700 Subject: [PATCH 0569/1133] Back out "rpath shouldn't depend on package_style" Reviewed By: aleivag Differential Revision: D55246289 fbshipit-source-id: 3d070eeca71aa7a743e83704ba8e1659d2274891 --- prelude/python/python_binary.bzl | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index f085fc455..3e3250ee3 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -590,8 +590,13 @@ def _convert_python_library_to_executable( # Set rpaths to find 1) the shared libs dir and the 2) runtime libs dir. 
rpath_ref = get_rpath_origin(get_cxx_toolchain_info(ctx).linker_info.type) rpath_ldflag = "-Wl,-rpath,{}/".format(rpath_ref) - extra_binary_link_flags.append(rpath_ldflag + "../..") - extra_binary_link_flags.append(rpath_ldflag + "../lib") + if package_style == PackageStyle("standalone"): + extra_binary_link_flags.append(rpath_ldflag + "../..") + extra_binary_link_flags.append(rpath_ldflag + "../lib") + else: + rpath_ldflag_prefix = rpath_ldflag + "{}#link-tree".format(ctx.attrs.name) + extra_binary_link_flags.append(rpath_ldflag_prefix + "/runtime/lib") + extra_binary_link_flags.append(rpath_ldflag_prefix) impl_params = CxxRuleConstructorParams( rule_type = "python_binary", From e855ee497f09a4bf7ec4c0f79736820e71909893 Mon Sep 17 00:00:00 2001 From: Mark Bridges Date: Mon, 25 Mar 2024 06:31:26 -0700 Subject: [PATCH 0570/1133] Buck2 Mockingbird Integration Summary: Integrates mockingbird support into buck2 mockingbird_mock rule explicitly defines that a mock should be built for a target. ``` mockingbird_mock( name = "MockingBirdPlaygroundMock", module = ":MockingBirdPlayground", ) ``` Adding that as a test dep makes those mocks available to those tests. 
``` fb_apple_library( name = "MockingBirdPlayground", frameworks = [ "Foundation", ], modular = True, oncall = "wa_frameworks_and_tools_ios", test_deps = [ ":MockingBirdPlaygroundMock", "//fbobjc/VendorLib/Mockingbird:Mockingbird", ], exported_deps = [ ":MockingBirdPlaygroundDep", ], ) ``` Reviewed By: blackm00n Differential Revision: D54582830 fbshipit-source-id: 1e207c42c000d27b58c4d12569ba31657a1f919d --- prelude/apple/apple_library.bzl | 51 +++++++- .../apple/mockingbird/mockingbird_mock.bzl | 120 ++++++++++++++++++ .../apple/mockingbird/mockingbird_types.bzl | 40 ++++++ prelude/cxx/cxx_sources.bzl | 4 +- prelude/user/all.bzl | 2 + 5 files changed, 211 insertions(+), 6 deletions(-) create mode 100644 prelude/apple/mockingbird/mockingbird_mock.bzl create mode 100644 prelude/apple/mockingbird/mockingbird_types.bzl diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index 94fed9f2e..4d54585d2 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -69,6 +69,7 @@ load( ) load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") +load("@prelude//apple/mockingbird/mockingbird_types.bzl", "MockingbirdLibraryInfo", "MockingbirdLibraryInfoTSet", "MockingbirdLibraryRecord", "MockingbirdSourcesInfo", "MockingbirdTargetType") load(":apple_bundle_types.bzl", "AppleBundleLinkerMapInfo", "AppleMinDeploymentVersionInfo") load(":apple_frameworks.bzl", "get_framework_search_path_flags") load(":apple_modular_utility.bzl", "MODULE_CACHE_PATH") @@ -142,15 +143,57 @@ def apple_library_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: deps_providers, ) output = cxx_library_parameterized(ctx, constructor_params) - return output.providers + + return output.providers + [_make_mockingbird_library_info_provider(ctx)] if uses_explicit_modules(ctx): return get_swift_anonymous_targets(ctx, get_apple_library_providers) else: return get_apple_library_providers([]) +def 
_make_mockingbird_library_info_provider(ctx: AnalysisContext) -> MockingbirdLibraryInfo: + _, swift_sources = _filter_swift_srcs(ctx) + + all_deps = cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx) + deps_mockingbird_infos = filter(None, [dep.get(MockingbirdLibraryInfo) for dep in all_deps]) + + children = [] + dep_names = [] + for info in deps_mockingbird_infos: + dep_names.append(info.name) + children.append(info.tset) + + mockingbird_srcs_folder = ctx.actions.declare_output("mockingbird_srcs" + "_" + ctx.attrs.name, dir = True) + + ctx.actions.symlinked_dir( + mockingbird_srcs_folder, + {source.file.basename: source.file for source in swift_sources}, + ) + + mockingbird_record = MockingbirdLibraryRecord( + name = ctx.attrs.name, + srcs = [src.file for src in swift_sources], + dep_names = dep_names, + type = MockingbirdTargetType("library"), + src_dir = mockingbird_srcs_folder, + ) + + mockingbird_tset = ctx.actions.tset(MockingbirdLibraryInfoTSet, value = mockingbird_record, children = children) + + return MockingbirdLibraryInfo( + name = ctx.attrs.name, + tset = mockingbird_tset, + ) + def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisContext, params: AppleLibraryAdditionalParams, deps_providers: list = [], is_test_target: bool = False) -> CxxRuleConstructorParams: - cxx_srcs, swift_srcs = _filter_swift_srcs(ctx) + mockingbird_gen_sources = [] + if is_test_target: + for dep in cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx): + if MockingbirdSourcesInfo in dep: + for src in dep[MockingbirdSourcesInfo].srcs: + mockingbird_gen_sources.append(src) + + cxx_srcs, swift_srcs = _filter_swift_srcs(ctx, mockingbird_gen_sources) # First create a modulemap if necessary. This is required for importing # ObjC code in Swift so must be done before Swift compilation. 
@@ -311,10 +354,10 @@ def _get_extra_linker_flags_and_outputs( # @oss-disable: return add_extra_linker_outputs(ctx) return [], {} # @oss-enable -def _filter_swift_srcs(ctx: AnalysisContext) -> (list[CxxSrcWithFlags], list[CxxSrcWithFlags]): +def _filter_swift_srcs(ctx: AnalysisContext, additional_srcs: list = []) -> (list[CxxSrcWithFlags], list[CxxSrcWithFlags]): cxx_srcs = [] swift_srcs = [] - for s in get_srcs_with_flags(ctx): + for s in get_srcs_with_flags(ctx, additional_srcs): if s.file.extension == SWIFT_EXTENSION: swift_srcs.append(s) else: diff --git a/prelude/apple/mockingbird/mockingbird_mock.bzl b/prelude/apple/mockingbird/mockingbird_mock.bzl new file mode 100644 index 000000000..52a660ff9 --- /dev/null +++ b/prelude/apple/mockingbird/mockingbird_mock.bzl @@ -0,0 +1,120 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") +load(":mockingbird_types.bzl", "MockingbirdLibraryInfo", "MockingbirdLibraryRecord", "MockingbirdSourcesInfo") + +def _impl(ctx: AnalysisContext) -> list[Provider]: + mockingbird_info = ctx.attrs.module[MockingbirdLibraryInfo] + + json_project_description = _get_mockingbird_json_project_description(mockingbird_info) + json_project_description_output = ctx.actions.declare_output("mockingbird_project.json") + ctx.actions.write_json(json_project_description_output.as_output(), json_project_description) + + mockingbird_source = ctx.actions.declare_output(mockingbird_info.name + "Mocks.generated.swift", dir = False) + cmd = cmd_args() + + for record in mockingbird_info.tset.traverse(): + cmd.hidden(record.src_dir) + + cmd.add( + ctx.attrs._mockingbird_bin[RunInfo], + "generate", + "--target", + mockingbird_info.name, + "--project", + json_project_description_output, + "--output", + mockingbird_source.as_output(), + "--header", + "// (c) Meta Platforms, Inc. and affiliates. 
Confidential and proprietary.", + "--support", + ctx.attrs._mockingbird_support[DefaultInfo].default_outputs, + "--verbose", + "--disable-cache", + ) + ctx.actions.run( + cmd, + category = "mockingbird", + local_only = True, + ) + # TODO: T182716646 Remove local_only + + return [ + DefaultInfo(mockingbird_source), + MockingbirdSourcesInfo(srcs = [mockingbird_source]), + ] + +def _attrs(): + attribs = { + "module": attrs.dep(), + "_mockingbird_bin": attrs.exec_dep(providers = [RunInfo], default = "fbsource//fbobjc/VendorLib/Mockingbird:mockingbird-binary"), + "_mockingbird_support": attrs.dep(providers = [DefaultInfo], default = "fbsource//fbobjc/VendorLib/Mockingbird:MockingbirdSupport"), + } + return attribs + +registration_spec = RuleRegistrationSpec( + name = "mockingbird_mock", + impl = _impl, + attrs = _attrs(), +) + +# Produce JSON project description for Mockingbird codegen +# https://mockingbirdswift.com/json-project-description +# { +# "targets": [ +# { +# "name": "MyLibrary", +# "type": "library", +# "path": "/path/to/MyLibrary", +# "dependencies": [], +# "sources": [ +# "SourceFileA.swift", +# "SourceFileB.swift" +# ] +# }, +# { +# "name": "MyOtherLibrary", +# "type": "library", +# "path": "/path/to/MyOtherLibrary", +# "dependencies": [ +# "MyLibrary" +# ], +# "sources": [ +# "SourceFileA.swift", +# "SourceFileB.swift" +# ] +# }, +# { +# "name": "MyLibraryTests", +# "type": "test", +# "path": "/path/to/MyLibraryTests", +# "dependencies": [ +# "MyLibrary" +# ], +# "sources": [ +# "SourceFileA.swift", +# "SourceFileB.swift" +# ] +# } +# ] +# } +def _get_mockingbird_json_project_description(info: MockingbirdLibraryInfo) -> dict: + json = { + "targets": [_target_dict_for_mockingbird_record(record) for record in info.tset.traverse()], + } + + return json + +def _target_dict_for_mockingbird_record(record: MockingbirdLibraryRecord) -> dict: + return { + "dependencies": record.dep_names, + "name": record.name, + "path": record.src_dir, + "sources": [src.basename 
for src in record.srcs], + "type": record.type, + } diff --git a/prelude/apple/mockingbird/mockingbird_types.bzl b/prelude/apple/mockingbird/mockingbird_types.bzl new file mode 100644 index 000000000..639c763dd --- /dev/null +++ b/prelude/apple/mockingbird/mockingbird_types.bzl @@ -0,0 +1,40 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +MockingbirdLibraryInfoTSet = transitive_set() + +MockingbirdTargetType = enum("library", "test") + +MockingbirdLibraryInfo = provider( + fields = { + # The name of the target. + "name": provider_field(str), + # Contains a tset with this target's MockingbirdLibraryRecord as the value + # and all of its dependency's MockingbirdLibraryRecord in the children. + "tset": provider_field(MockingbirdLibraryInfoTSet), + }, +) + +MockingbirdLibraryRecord = record( + # The names of this target's dependencies. + dep_names = field(list[str]), + # The name of the target. + name = str, + # Swift sources in this target. + srcs = field(list[Artifact]), + # Whether this is a library or a test. + type = field(MockingbirdTargetType), + # Symlinked directory containing the source files. + src_dir = field(Artifact), +) + +MockingbirdSourcesInfo = provider( + fields = { + # Source files containing the auto generated mocks produced by mockingbird-cli. 
+ "srcs": provider_field(list[Artifact]), + }, +) diff --git a/prelude/cxx/cxx_sources.bzl b/prelude/cxx/cxx_sources.bzl index ece339680..4ff10927c 100644 --- a/prelude/cxx/cxx_sources.bzl +++ b/prelude/cxx/cxx_sources.bzl @@ -16,8 +16,8 @@ load( load(":platform.bzl", "cxx_by_platform") # The source files -def get_srcs_with_flags(ctx: AnalysisContext) -> list[CxxSrcWithFlags]: - all_srcs = ctx.attrs.srcs + flatten(cxx_by_platform(ctx, ctx.attrs.platform_srcs)) +def get_srcs_with_flags(ctx: AnalysisContext, additional_srcs: list = []) -> list[CxxSrcWithFlags]: + all_srcs = ctx.attrs.srcs + flatten(cxx_by_platform(ctx, ctx.attrs.platform_srcs)) + additional_srcs # src -> flags_hash -> flags flags_sets_by_src = {} diff --git a/prelude/user/all.bzl b/prelude/user/all.bzl index 2fd2dbb09..f9bccdf2c 100644 --- a/prelude/user/all.bzl +++ b/prelude/user/all.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//apple/mockingbird:mockingbird_mock.bzl", _mockingbird_mock_spec = "registration_spec") load("@prelude//apple/user:apple_resource_bundle.bzl", _apple_resource_bundle_spec = "registration_spec") load("@prelude//apple/user:apple_selective_debugging.bzl", _apple_selective_debugging_spec = "registration_spec") load("@prelude//apple/user:apple_simulators.bzl", _apple_simulators_spec = "registration_spec") @@ -32,6 +33,7 @@ _all_specs = [ _cxx_toolchain_override_inheriting_target_platform_spec, _apple_simulators_spec, _write_file_spec, + _mockingbird_mock_spec, ] rules = { From 74d68d822c8a46f06ecb24334f19b6467b5a718d Mon Sep 17 00:00:00 2001 From: Andres Suarez Date: Mon, 25 Mar 2024 06:38:58 -0700 Subject: [PATCH 0571/1133] Fix native-build issues in BUCK.v2/TARGETS.v2 files Summary: These are cases where after D55299581, buildifier's `native-build` rule will remove `native.`. This diff applies the same `prelude = native` trick used in D55294423 to all buck2 owned files. 
And those that are not, get suppressions (`# buildifier: disable=native-build`). Reviewed By: ndmitchell, dtolnay Differential Revision: D55300243 fbshipit-source-id: 9e8e0d0670132002de4129a26dbcea74957e698a --- prelude/android/constraints/BUCK.v2 | 16 +++++++++------- prelude/android/tools/BUCK.v2 | 28 +++++++++++++++------------- prelude/os/BUCK.v2 | 6 ++++-- prelude/third-party/hmaptool/BUCK.v2 | 6 ++++-- 4 files changed, 32 insertions(+), 24 deletions(-) diff --git a/prelude/android/constraints/BUCK.v2 b/prelude/android/constraints/BUCK.v2 index 4efea2b3e..0106b9e44 100644 --- a/prelude/android/constraints/BUCK.v2 +++ b/prelude/android/constraints/BUCK.v2 @@ -1,28 +1,30 @@ load("@prelude//android:min_sdk_version.bzl", "get_min_sdk_version_constraint_value_name", "get_min_sdk_version_range") -native.constraint_setting( +prelude = native # Avoid warnings and auto-formatters + +prelude.constraint_setting( name = "maybe_build_only_native_code", visibility = ["PUBLIC"], ) -native.constraint_value( +prelude.constraint_value( name = "build_only_native_code", constraint_setting = ":maybe_build_only_native_code", visibility = ["PUBLIC"], ) -native.constraint_setting( +prelude.constraint_setting( name = "maybe_merge_native_libraries", visibility = ["PUBLIC"], ) -native.constraint_value( +prelude.constraint_value( name = "merge_native_libraries", constraint_setting = ":maybe_merge_native_libraries", visibility = ["PUBLIC"], ) -native.filegroup( +prelude.filegroup( name = "files", srcs = glob( ["**"], @@ -30,13 +32,13 @@ native.filegroup( visibility = ["PUBLIC"], ) -native.constraint_setting( +prelude.constraint_setting( name = "min_sdk_version", visibility = ["PUBLIC"], ) [ - native.constraint_value( + prelude.constraint_value( name = get_min_sdk_version_constraint_value_name(min_sdk), constraint_setting = ":min_sdk_version", ) diff --git a/prelude/android/tools/BUCK.v2 b/prelude/android/tools/BUCK.v2 index 35e57623c..ee6ca0351 100644 --- a/prelude/android/tools/BUCK.v2 
+++ b/prelude/android/tools/BUCK.v2 @@ -1,4 +1,6 @@ -native.python_bootstrap_binary( +prelude = native # Avoid warnings and auto-formatters + +prelude.python_bootstrap_binary( name = "unpack_aar", main = "unpack_aar.py", visibility = ["PUBLIC"], @@ -8,14 +10,14 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "unpack_aar_lib", srcs = [ "unpack_aar.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "filter_dex", main = "filter_dex.py", visibility = ["PUBLIC"], @@ -24,14 +26,14 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "filter_dex_lib", srcs = [ "filter_dex.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "combine_native_library_dirs", main = "combine_native_library_dirs.py", visibility = ["PUBLIC"], @@ -40,14 +42,14 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "combine_native_library_dirs_lib", srcs = [ "combine_native_library_dirs.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "filter_prebuilt_native_library_dir", main = "filter_prebuilt_native_library_dir.py", visibility = ["PUBLIC"], @@ -56,14 +58,14 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "filter_prebuilt_native_library_dir_lib", srcs = [ "filter_prebuilt_native_library_dir.py", ], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "native_libs_as_assets_metadata", main = "native_libs_as_assets_metadata.py", visibility = ["PUBLIC"], @@ -72,20 +74,20 @@ native.python_bootstrap_binary( ], ) -native.python_bootstrap_library( +prelude.python_bootstrap_library( name = "native_libs_as_assets_metadata_lib", srcs = [ "native_libs_as_assets_metadata.py", ], ) -native.python_bootstrap_binary( 
+prelude.python_bootstrap_binary( name = "compute_merge_sequence", main = "merge_sequence.py", visibility = ["PUBLIC"], ) -native.python_bootstrap_binary( +prelude.python_bootstrap_binary( name = "filter_extra_resources", main = "filter_extra_resources.py", visibility = ["PUBLIC"], @@ -94,7 +96,7 @@ native.python_bootstrap_binary( ], ) -native.zip_file( +prelude.zip_file( name = "app_without_resources_stub", srcs = ["com/facebook/buck_generated/AppWithoutResourcesStub.java"], out = "app_without_resources_stub.src.zip", diff --git a/prelude/os/BUCK.v2 b/prelude/os/BUCK.v2 index 816fd1764..2ee85e5c9 100644 --- a/prelude/os/BUCK.v2 +++ b/prelude/os/BUCK.v2 @@ -1,3 +1,5 @@ +prelude = native # Avoid warnings and auto-formatters + # The short list of ubiquitous, mainstream operating systems: config_setting( @@ -34,12 +36,12 @@ config_setting( visibility = ["PUBLIC"], ) -native.constraint_setting( +prelude.constraint_setting( name = "maybe_building_android_binary", visibility = ["prelude//..."], ) -native.constraint_value( +prelude.constraint_value( name = "building_android_binary", constraint_setting = ":maybe_building_android_binary", visibility = ["prelude//..."], diff --git a/prelude/third-party/hmaptool/BUCK.v2 b/prelude/third-party/hmaptool/BUCK.v2 index cced36f1d..b6aa7c9cd 100644 --- a/prelude/third-party/hmaptool/BUCK.v2 +++ b/prelude/third-party/hmaptool/BUCK.v2 @@ -1,10 +1,12 @@ -native.export_file( +prelude = native # Avoid warnings and auto-formatters + +prelude.export_file( name = "_hmaptool", src = "hmaptool", mode = "reference", ) -native.command_alias( +prelude.command_alias( name = "hmaptool", exe = ":_hmaptool", visibility = ["PUBLIC"], From 078015ce1cbba10ae315a56d66534bb660592d07 Mon Sep 17 00:00:00 2001 From: Andres Suarez Date: Mon, 25 Mar 2024 06:38:58 -0700 Subject: [PATCH 0572/1133] Apply formatting to BUCK.v2 and TARGETS.v2 files Summary: This is all the formatting that turns up after D55299581. 
Reviewed By: ndmitchell, dtolnay Differential Revision: D55300244 fbshipit-source-id: dde9d14ff08185cce41dd6c2bf1bf82b1f4720fe --- prelude/android/tools/BUCK.v2 | 2 +- prelude/apple/tools/BUCK.v2 | 6 +++--- prelude/apple/tools/code_signing/BUCK.v2 | 2 +- prelude/apple/tools/info_plist_processor/BUCK.v2 | 4 ++-- prelude/cxx/dist_lto/tools/BUCK.v2 | 6 +++--- prelude/cxx/tools/BUCK.v2 | 4 ++-- prelude/java/tools/BUCK.v2 | 2 +- prelude/platforms/BUCK.v2 | 2 +- prelude/python/tools/sourcedb_merger/BUCK.v2 | 4 ++-- prelude/python_bootstrap/tools/BUCK.v2 | 2 +- 10 files changed, 17 insertions(+), 17 deletions(-) diff --git a/prelude/android/tools/BUCK.v2 b/prelude/android/tools/BUCK.v2 index ee6ca0351..24da66472 100644 --- a/prelude/android/tools/BUCK.v2 +++ b/prelude/android/tools/BUCK.v2 @@ -5,8 +5,8 @@ prelude.python_bootstrap_binary( main = "unpack_aar.py", visibility = ["PUBLIC"], deps = [ - ":unpack_aar_lib", "prelude//java/tools:utils_lib", + ":unpack_aar_lib", ], ) diff --git a/prelude/apple/tools/BUCK.v2 b/prelude/apple/tools/BUCK.v2 index b31f120b5..7c7893fa0 100644 --- a/prelude/apple/tools/BUCK.v2 +++ b/prelude/apple/tools/BUCK.v2 @@ -1,15 +1,15 @@ apple_tools( name = "apple-tools", - assemble_bundle = "prelude//apple/tools/bundling:assemble_bundle", - # @oss-disable: adhoc_codesign_tool = "prelude//apple/tools/meta_only/codesign_rust:adhoc-signer", adhoc_codesign_tool = None # @oss-enable - split_arch_combine_dsym_bundles_tool = ":split_arch_combine_dsym_bundles_tool", + # @oss-disable: adhoc_codesign_tool = "prelude//apple/tools/meta_only/codesign_rust:adhoc-signer", + assemble_bundle = "prelude//apple/tools/bundling:assemble_bundle", dry_codesign_tool = ":dry_codesign_tool", info_plist_processor = "prelude//apple/tools/info_plist_processor:tool", ipa_package_maker = ":ipa_package_maker", make_modulemap = ":make_modulemap", make_vfsoverlay = ":make_vfsoverlay", selective_debugging_scrubber = "prelude//apple/tools/selective_debugging:tool", + 
split_arch_combine_dsym_bundles_tool = ":split_arch_combine_dsym_bundles_tool", swift_objc_header_postprocess = ":swift_objc_header_postprocess", visibility = ["PUBLIC"], ) diff --git a/prelude/apple/tools/code_signing/BUCK.v2 b/prelude/apple/tools/code_signing/BUCK.v2 index ecdcd43d9..dac8cad31 100644 --- a/prelude/apple/tools/code_signing/BUCK.v2 +++ b/prelude/apple/tools/code_signing/BUCK.v2 @@ -11,11 +11,11 @@ python_library( "main.py", ], ), + visibility = ["PUBLIC"], deps = [ "prelude//apple/tools:plistlib_utils", "prelude//apple/tools/info_plist_processor:process", ], - visibility = ["PUBLIC"], ) meta_python_test( diff --git a/prelude/apple/tools/info_plist_processor/BUCK.v2 b/prelude/apple/tools/info_plist_processor/BUCK.v2 index c40563c83..1e265eec6 100644 --- a/prelude/apple/tools/info_plist_processor/BUCK.v2 +++ b/prelude/apple/tools/info_plist_processor/BUCK.v2 @@ -14,8 +14,8 @@ meta_python_test( python_library( name = "process", srcs = ["process.py"], - deps = ["prelude//apple/tools:plistlib_utils"], visibility = ["PUBLIC"], + deps = ["prelude//apple/tools:plistlib_utils"], ) meta_python_test( @@ -27,9 +27,9 @@ meta_python_test( python_binary( name = "tool", main = "main.py", + visibility = ["PUBLIC"], deps = [ ":preprocess", ":process", ], - visibility = ["PUBLIC"], ) diff --git a/prelude/cxx/dist_lto/tools/BUCK.v2 b/prelude/cxx/dist_lto/tools/BUCK.v2 index 3abffa282..bc5f1502d 100644 --- a/prelude/cxx/dist_lto/tools/BUCK.v2 +++ b/prelude/cxx/dist_lto/tools/BUCK.v2 @@ -28,17 +28,17 @@ prelude.python_bootstrap_binary( dist_lto_tools( name = "dist_lto_tools", - planner = ":dist_lto_planner", + copy = ":dist_lto_copy", opt = ":dist_lto_opt", + planner = ":dist_lto_planner", prepare = ":dist_lto_prepare", - copy = ":dist_lto_copy", visibility = ["PUBLIC"], ) prelude.python_test( name = "test_dist_lto_opt", srcs = [ - "tests/test_dist_lto_opt.py", "dist_lto_opt.py", + "tests/test_dist_lto_opt.py", ], ) diff --git a/prelude/cxx/tools/BUCK.v2 
b/prelude/cxx/tools/BUCK.v2 index 774d71717..ddbfe74d6 100644 --- a/prelude/cxx/tools/BUCK.v2 +++ b/prelude/cxx/tools/BUCK.v2 @@ -26,19 +26,19 @@ prelude.python_bootstrap_binary( prelude.python_bootstrap_binary( name = "dep_file_processor", main = "dep_file_processor.py", + visibility = ["PUBLIC"], deps = [ ":dep_file_processors", ], - visibility = ["PUBLIC"], ) prelude.python_bootstrap_library( name = "dep_file_processors", srcs = [ + "dep_file_utils.py", "makefile_to_dep_file.py", "show_headers_to_dep_file.py", "show_includes_to_dep_file.py", - "dep_file_utils.py", ], visibility = ["PUBLIC"], ) diff --git a/prelude/java/tools/BUCK.v2 b/prelude/java/tools/BUCK.v2 index ac3c111fc..dfd5b7f23 100644 --- a/prelude/java/tools/BUCK.v2 +++ b/prelude/java/tools/BUCK.v2 @@ -77,7 +77,7 @@ prelude.python_bootstrap_library( ], visibility = [ "prelude//android/tools/...", - "prelude//kotlin/tools/...", "prelude//java/tools/...", + "prelude//kotlin/tools/...", ], ) diff --git a/prelude/platforms/BUCK.v2 b/prelude/platforms/BUCK.v2 index d10b161f8..ae439808a 100644 --- a/prelude/platforms/BUCK.v2 +++ b/prelude/platforms/BUCK.v2 @@ -68,8 +68,8 @@ prelude.constraint_value( # execution configuration, but that's not implemented yet. 
export_file( name = "fat_platform_incompatible", - # @oss-disable: src = "TARGETS.v2", src = "BUCK", # @oss-enable + # @oss-disable: src = "TARGETS.v2", target_compatible_with = select({ ":fat_platform_enabled": ["config//:none"], "DEFAULT": [], diff --git a/prelude/python/tools/sourcedb_merger/BUCK.v2 b/prelude/python/tools/sourcedb_merger/BUCK.v2 index 7f090f90e..552d4adef 100644 --- a/prelude/python/tools/sourcedb_merger/BUCK.v2 +++ b/prelude/python/tools/sourcedb_merger/BUCK.v2 @@ -13,19 +13,19 @@ prelude.python_bootstrap_library( prelude.python_bootstrap_binary( name = "merge", main = "merge.py", + visibility = ["PUBLIC"], deps = [ ":library", ], - visibility = ["PUBLIC"], ) prelude.python_bootstrap_binary( name = "legacy_merge", main = "legacy_merge.py", + visibility = ["PUBLIC"], deps = [ ":library", ], - visibility = ["PUBLIC"], ) # Run the test suite with this command: diff --git a/prelude/python_bootstrap/tools/BUCK.v2 b/prelude/python_bootstrap/tools/BUCK.v2 index b895ef1a5..59345391e 100644 --- a/prelude/python_bootstrap/tools/BUCK.v2 +++ b/prelude/python_bootstrap/tools/BUCK.v2 @@ -3,6 +3,6 @@ prelude = native prelude.sh_binary( name = "win_python_wrapper", main = "win_python_wrapper.bat", - visibility = ["PUBLIC"], target_compatible_with = ["config//os:windows"], + visibility = ["PUBLIC"], ) From 270f3eb3469caae8faf804adc8764c2d1d19bfff Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Mon, 25 Mar 2024 07:13:18 -0700 Subject: [PATCH 0573/1133] Define build modes for the apple ecosystem Summary: To begin to consolidate our use of apple platforms that combine sdks and build modes, we need to establish a source of truth. In this diff, we define build modes for internal and external use. In the next diffs, I'll be replacing existing uses in the code base with these. 
Reviewed By: rmaz Differential Revision: D55152125 fbshipit-source-id: e351aeaa225ed528eb682744a64c992b9362c1f9 --- prelude/platforms/apple/build_mode.bzl | 29 ++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 prelude/platforms/apple/build_mode.bzl diff --git a/prelude/platforms/apple/build_mode.bzl b/prelude/platforms/apple/build_mode.bzl new file mode 100644 index 000000000..fcebd2154 --- /dev/null +++ b/prelude/platforms/apple/build_mode.bzl @@ -0,0 +1,29 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# @oss-enable BUILD_MODE_DEBUG = "debug" +# @oss-enable BUILD_MODE_PROFILE = "profile" +# @oss-enable BUILD_MODE_RELEASE = "release" +# @oss-disable: BUILD_MODE_LOCAL = "local" +# @oss-disable: BUILD_MODE_MASTER = "master" +# @oss-disable: BUILD_MODE_RELEASE_CANDIDATE = "rc" +# @oss-disable: BUILD_MODE_PRODUCTION = "production" +# @oss-disable: BUILD_MODE_PROFILE = "profile" + +# @oss-enable APPLE_BUILD_MODES = [BUILD_MODE_DEBUG, BUILD_MODE_PROFILE, BUILD_MODE_RELEASE] +# @oss-disable: APPLE_BUILD_MODES = [BUILD_MODE_LOCAL, BUILD_MODE_MASTER, BUILD_MODE_RELEASE_CANDIDATE, BUILD_MODE_PRODUCTION, BUILD_MODE_PROFILE] + +BUILD_MODE = struct( + # @oss-enable DEBUG: BUILD_MODE_DEBUG + # @oss-enable PROFILE: BUILD_MODE_PROFILE, + # @oss-enable RELEASE: BUILD_MODE_RELEASE, + # @oss-disable: LOCAL = BUILD_MODE_LOCAL, + # @oss-disable: MASTER = BUILD_MODE_MASTER, + # @oss-disable: RELEASE_CANDIDATE = BUILD_MODE_RELEASE_CANDIDATE, + # @oss-disable: PRODUCTION = BUILD_MODE_PRODUCTION, + # @oss-disable: PROFILE = BUILD_MODE_PROFILE, +) From 3e75b654e8f415c33003b7cf8970204c0f2ab6fc Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Mon, 25 Mar 2024 07:13:18 -0700 Subject: [PATCH 
0574/1133] Make the build mode references a bit more dynamic Summary: We don't open source `ovr_config`. Instead, we have some basic config available in prelude for use. In order to start setting up some open source friendly apple_platforms, make where we define the build mode config constraints dynamic by using a derived path and constants. Reviewed By: rmaz Differential Revision: D55152122 fbshipit-source-id: 1430ac366759067c98026226c022268d9b9d5183 --- prelude/platforms/apple/build_mode.bzl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/prelude/platforms/apple/build_mode.bzl b/prelude/platforms/apple/build_mode.bzl index fcebd2154..a88dd4cce 100644 --- a/prelude/platforms/apple/build_mode.bzl +++ b/prelude/platforms/apple/build_mode.bzl @@ -27,3 +27,6 @@ BUILD_MODE = struct( # @oss-disable: PRODUCTION = BUILD_MODE_PRODUCTION, # @oss-disable: PROFILE = BUILD_MODE_PROFILE, ) + +# @oss-enable CONSTRAINT_PACKAGE = "prelude//platforms/apple/constraints" +# @oss-disable: CONSTRAINT_PACKAGE = "ovr_config//build_mode/apple/constraints" From 5f7d96959c9069029d4adefa722acc6ad266468c Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Mon, 25 Mar 2024 07:13:18 -0700 Subject: [PATCH 0575/1133] Extract build mode config_setting and constraint definition into functions Summary: By using functions and then powering them via the established build modes, we can have more of a source of truth. Until now, we were relying on there being a matching constraint/config_setting for the build modes. 
Reviewed By: rmaz Differential Revision: D55152123 fbshipit-source-id: c8307c849a07768b02976782a50317107a119865 --- prelude/platforms/apple/BUCK | 3 +++ prelude/platforms/apple/build_mode.bzl | 23 +++++++++++++++++++++++ prelude/platforms/apple/constraints/BUCK | 6 ++++++ 3 files changed, 32 insertions(+) create mode 100644 prelude/platforms/apple/BUCK create mode 100644 prelude/platforms/apple/constraints/BUCK diff --git a/prelude/platforms/apple/BUCK b/prelude/platforms/apple/BUCK new file mode 100644 index 000000000..83f7fa856 --- /dev/null +++ b/prelude/platforms/apple/BUCK @@ -0,0 +1,3 @@ +load("@prelude//platforms/apple:build_mode.bzl", "config_settings") + +config_settings(config_setting_rule = config_setting) diff --git a/prelude/platforms/apple/build_mode.bzl b/prelude/platforms/apple/build_mode.bzl index a88dd4cce..b086b8484 100644 --- a/prelude/platforms/apple/build_mode.bzl +++ b/prelude/platforms/apple/build_mode.bzl @@ -30,3 +30,26 @@ BUILD_MODE = struct( # @oss-enable CONSTRAINT_PACKAGE = "prelude//platforms/apple/constraints" # @oss-disable: CONSTRAINT_PACKAGE = "ovr_config//build_mode/apple/constraints" + +def config_settings(config_setting_rule): + for mode in APPLE_BUILD_MODES: + config_setting_rule( + name = mode, + constraint_values = [ + "{}:{}".format(CONSTRAINT_PACKAGE, mode), + ], + visibility = ["PUBLIC"], + ) + +def constraints(constraint_setting_rule, constraint_value_rule): + constraint_setting_rule( + name = "build_mode", + visibility = ["PUBLIC"], + ) + + for mode in APPLE_BUILD_MODES: + constraint_value_rule( + name = mode, + constraint_setting = ":build_mode", + visibility = ["PUBLIC"], + ) diff --git a/prelude/platforms/apple/constraints/BUCK b/prelude/platforms/apple/constraints/BUCK new file mode 100644 index 000000000..9c2001679 --- /dev/null +++ b/prelude/platforms/apple/constraints/BUCK @@ -0,0 +1,6 @@ +load("@prelude//platforms/apple:build_mode.bzl", "constraints") + +constraints( + constraint_setting_rule = 
constraint_setting, + constraint_value_rule = constraint_value, +) From 799cc34e2eac4949fbe08f2319ce0da321b597a1 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Mon, 25 Mar 2024 08:15:06 -0700 Subject: [PATCH 0576/1133] Run flowtype_ota_safety_target rules from the root dir Summary: These rules produce a `sources` entry that contains `buck-out` paths that need (I guess) to be relative to the project root. Therefore, run the `genrule` from the project root so that we get the correct output. Reviewed By: GijsWeterings Differential Revision: D55312880 fbshipit-source-id: e4fba2ad478c9e18b48710e0fbd839962162c9af --- prelude/genrule.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/genrule.bzl b/prelude/genrule.bzl index 1ccc48d52..5018f50a7 100644 --- a/prelude/genrule.bzl +++ b/prelude/genrule.bzl @@ -39,6 +39,7 @@ _BUILD_ROOT_LABELS = {label: True for label in [ "app_modules_genrule", # produces JSON containing file paths that are read from the root dir. "android_langpack_strings", # produces JSON containing file paths that are read from the root dir. "windows_long_path_issue", # Windows: relative path length exceeds PATH_MAX, program cannot access file + "flowtype_ota_safety_target", # produces JSON containing file paths that are project-relative ]} # In Buck1 the SRCS environment variable is only set if the substring SRCS is on the command line. From 52b834a45fdf6dbb234d3b2b542c6c9d68fbf74a Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Mon, 25 Mar 2024 08:27:24 -0700 Subject: [PATCH 0577/1133] add cxx_use_shlib_intfs_mode Summary: Add a helper method to verify the shared library interface mode without having to unpack the LinkerInfo. 
Reviewed By: milend Differential Revision: D55223158 fbshipit-source-id: 36b392d5a8c28963c6dd506cebc283e61d76b3a3 --- prelude/apple/swift/swift_compilation.bzl | 6 ++---- prelude/cxx/cxx_library_utility.bzl | 6 ++++++ 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index ae845beef..735b720ac 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -21,8 +21,7 @@ load( "@prelude//cxx:compile.bzl", "CxxSrcWithFlags", # @unused Used as a type ) -load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") -load("@prelude//cxx:cxx_library_utility.bzl", "cxx_use_shlib_intfs") +load("@prelude//cxx:cxx_library_utility.bzl", "cxx_use_shlib_intfs_mode") load("@prelude//cxx:cxx_toolchain_types.bzl", "ShlibInterfacesMode") load("@prelude//cxx:headers.bzl", "CHeader") load( @@ -262,8 +261,7 @@ def compile_swift( output_swiftmodule = ctx.actions.declare_output(module_name + SWIFTMODULE_EXTENSION) output_tbd = None - if cxx_use_shlib_intfs(ctx) and \ - get_cxx_toolchain_info(ctx).linker_info.shlib_interfaces == ShlibInterfacesMode("stub_from_headers"): + if cxx_use_shlib_intfs_mode(ctx, ShlibInterfacesMode("stub_from_headers")): output_tbd = ctx.actions.declare_output(module_name + ".tbd") if toolchain.can_toolchain_emit_obj_c_header_textually: diff --git a/prelude/cxx/cxx_library_utility.bzl b/prelude/cxx/cxx_library_utility.bzl index 4d81d5c70..201798d6b 100644 --- a/prelude/cxx/cxx_library_utility.bzl +++ b/prelude/cxx/cxx_library_utility.bzl @@ -161,6 +161,12 @@ def cxx_use_shlib_intfs(ctx: AnalysisContext) -> bool: linker_info = get_cxx_toolchain_info(ctx).linker_info return linker_info.shlib_interfaces != ShlibInterfacesMode("disabled") +def cxx_use_shlib_intfs_mode(ctx: AnalysisContext, mode: ShlibInterfacesMode) -> bool: + """ + Verify we are using a specific shared library interface mode. 
+ """ + return cxx_use_shlib_intfs(ctx) and get_cxx_toolchain_info(ctx).linker_info.shlib_interfaces == mode + def cxx_platform_supported(ctx: AnalysisContext) -> bool: """ Return whether this rule's `supported_platforms_regex` matches the current From 8a40f6b99a99368403cc6e41239418f76c1b14d3 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Mon, 25 Mar 2024 08:27:24 -0700 Subject: [PATCH 0578/1133] add shared_library_interface_target Summary: Add the optional `shared_library_interface_target` cxx library constructor parameter and set it for `apple_library`. This will be used as part of tbd generation. Reviewed By: milend Differential Revision: D55223159 fbshipit-source-id: bb98ea6f26941cb7527798489929d3d3e29f4e51 --- prelude/apple/apple_library.bzl | 3 ++- prelude/cxx/cxx_types.bzl | 2 ++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index 4d54585d2..6cd725cfa 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -74,7 +74,7 @@ load(":apple_bundle_types.bzl", "AppleBundleLinkerMapInfo", "AppleMinDeploymentV load(":apple_frameworks.bzl", "get_framework_search_path_flags") load(":apple_modular_utility.bzl", "MODULE_CACHE_PATH") load(":apple_target_sdk_version.bzl", "get_min_deployment_version_for_node", "get_min_deployment_version_target_linker_flags", "get_min_deployment_version_target_preprocessor_flags") -load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_apple_stripped_attr_value_with_default_fallback", "get_module_name") +load(":apple_utility.bzl", "get_apple_cxx_headers_layout", "get_apple_stripped_attr_value_with_default_fallback", "get_module_name", "get_versioned_target_triple") load( ":debug.bzl", "AppleDebuggableInfo", @@ -334,6 +334,7 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte ), output_style_sub_targets_and_providers_factory = _get_link_style_sub_targets_and_providers, 
shared_library_flags = params.shared_library_flags, + shared_library_interface_target = get_versioned_target_triple(ctx), # apple_library's 'stripped' arg only applies to shared subtargets, or, # targets with 'preferred_linkage = "shared"' strip_executable = get_apple_stripped_attr_value_with_default_fallback(ctx), diff --git a/prelude/cxx/cxx_types.bzl b/prelude/cxx/cxx_types.bzl index e6ba9f3a5..a87eab24a 100644 --- a/prelude/cxx/cxx_types.bzl +++ b/prelude/cxx/cxx_types.bzl @@ -192,4 +192,6 @@ CxxRuleConstructorParams = record( extra_linker_outputs_factory = field(typing.Callable, lambda _context: ([], {})), # Whether to allow cache uploads for locally-linked executables. exe_allow_cache_upload = field(bool, False), + # The target triple to use when generating shared library interfaces + shared_library_interface_target = field([str, None], None), ) From 3dd76637e176c50ddded5477694b866ef052a5b1 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Mon, 25 Mar 2024 08:27:24 -0700 Subject: [PATCH 0579/1133] add tbd subtarget to cxx_library Summary: When setting the shared library interface mode to `stub_from_headers` invoke the shared library interface producer to emit tbd files based on the targets headers. 
Reviewed By: blackm00n Differential Revision: D55217383 fbshipit-source-id: de9250aeab32d337aad75ef79a82d63517575f82 --- prelude/apple/swift/swift_compilation.bzl | 2 +- prelude/cxx/cxx_library.bzl | 17 +++++++ prelude/cxx/shared_library_interface.bzl | 54 +++++++++++++++++++++++ 3 files changed, 72 insertions(+), 1 deletion(-) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 735b720ac..4066e9cee 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -262,7 +262,7 @@ def compile_swift( output_tbd = None if cxx_use_shlib_intfs_mode(ctx, ShlibInterfacesMode("stub_from_headers")): - output_tbd = ctx.actions.declare_output(module_name + ".tbd") + output_tbd = ctx.actions.declare_output("__tbd__/" + module_name + "-swift.tbd") if toolchain.can_toolchain_emit_obj_c_header_textually: _compile_swiftmodule(ctx, toolchain, shared_flags, srcs, output_swiftmodule, output_header, output_tbd) diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 1a8bbfaf2..e2e8e46f7 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -35,6 +35,7 @@ load( "@prelude//apple/swift:swift_runtime.bzl", "create_swift_runtime_linkable", ) +load("@prelude//cxx:headers.bzl", "cxx_attr_exported_headers") load( "@prelude//ide_integrations:xcode.bzl", "XCODE_DATA_SUB_TARGET", @@ -145,6 +146,7 @@ load( "cxx_objects_sub_targets", "cxx_platform_supported", "cxx_use_shlib_intfs", + "cxx_use_shlib_intfs_mode", ) load(":cxx_toolchain_types.bzl", "ShlibInterfacesMode", "is_bitcode_format") load( @@ -195,6 +197,7 @@ load( ) load( ":shared_library_interface.bzl", + "create_tbd", "shared_library_interface", ) @@ -435,6 +438,20 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc comp_db_info = make_compilation_db_info(compiled_srcs.compile_cmds.comp_db_compile_cmds, get_cxx_toolchain_info(ctx), get_cxx_platform_info(ctx)) 
providers.append(comp_db_info) + # TBD generation is done per-target for stub_from_headers mode and collected at link time. + if cxx_use_shlib_intfs_mode(ctx, ShlibInterfacesMode("stub_from_headers")): + if impl_params.shared_library_interface_target == None: + fail("tbd generation requires setting the cxx constructor param 'shared_library_interface_target'") + + tbd = create_tbd( + ctx, + cxx_attr_exported_headers(ctx, impl_params.headers_layout), + own_exported_preprocessor_info, + inherited_exported_preprocessor_infos, + impl_params.shared_library_interface_target, + ) + sub_targets["tbd"] = [tbd] + # Link Groups link_group = get_link_group(ctx) link_group_info = get_link_group_info(ctx) diff --git a/prelude/cxx/shared_library_interface.bzl b/prelude/cxx/shared_library_interface.bzl index 3ac819b04..782d37038 100644 --- a/prelude/cxx/shared_library_interface.bzl +++ b/prelude/cxx/shared_library_interface.bzl @@ -6,8 +6,10 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") +load("@prelude//cxx:preprocessor.bzl", "CPreprocessor", "CPreprocessorInfo") load(":cxx_context.bzl", "get_cxx_toolchain_info") load(":cxx_toolchain_types.bzl", "CxxToolchainInfo") +load(":headers.bzl", "CHeader") def _shared_library_interface( ctx: AnalysisContext, @@ -80,3 +82,55 @@ def shared_library_interface( shared_lib = shared_lib, identifier = shared_lib.short_path, ) + +def create_tbd(ctx: AnalysisContext, exported_headers: list[CHeader], exported_preprocessor: CPreprocessor, transitive_preprocessor: list[CPreprocessorInfo], target: str) -> DefaultInfo: + # Use the c++ compiler to correctly generate c++ symbols. + compiler_info = get_cxx_toolchain_info(ctx).cxx_compiler_info + + # Collect the exported headers for this library and create a filelist for them. + # The exported headers are possibly hidden behind a modulemap, + # so cannot be fetched directly from exported_preprocessor. 
+ filelist_headers = [] + for h in exported_headers: + filelist_headers.append({ + "path": h.artifact, + "type": "public", + }) + filelist_contents = { + "headers": filelist_headers, + "version": "2", + } + filelist = ctx.actions.write_json( + paths.join("__tbd__", ctx.attrs.name + "_exported_headers.json"), + filelist_contents, + with_inputs = True, + ) + + # Run the shlib interface tool with the filelist and required args + tbd_file = ctx.actions.declare_output( + paths.join("__tbd__", ctx.attrs.name + ".tbd"), + ) + args = cmd_args(get_cxx_toolchain_info(ctx).linker_info.mk_shlib_intf[RunInfo]) + args.add([ + "installapi", + cmd_args(filelist, format = "--filelist={}"), + "-o", + tbd_file.as_output(), + "-ObjC++", + "--target=" + target, + "-install_name", + ctx.attrs.name, + ]) + args.add(cmd_args(compiler_info.preprocessor_flags, prepend = "-Xparser")) + args.add(cmd_args(compiler_info.compiler_flags, prepend = "-Xparser")) + args.add(cmd_args(exported_preprocessor.relative_args.args, prepend = "-Xparser")) + for ppinfo in transitive_preprocessor: + args.add(cmd_args(ppinfo.set.project_as_args("args"), prepend = "-Xparser")) + + ctx.actions.run( + args, + category = "generate_tbd", + identifier = ctx.attrs.name, + ) + + return DefaultInfo(default_output = tbd_file) From 639fcc7d11568d63e3193551d3e85935ed1902e4 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Mon, 25 Mar 2024 09:49:38 -0700 Subject: [PATCH 0580/1133] use separate entitlements for codesign on copy paths Summary: Use separate entitlements for codesign on copy items if provided. Also amend incremental state accordingly and logic to default to non-incremental bundling (if entitlements changed for codesign on copy item). 
Reviewed By: milend Differential Revision: D55123413 fbshipit-source-id: 4b5d41ecc47675baab3ab1815c5d13fd6cbf1292 --- .../apple/tools/bundling/incremental_state.py | 43 ++++++++-- .../tools/bundling/incremental_state_test.py | 12 ++- .../apple/tools/bundling/incremental_utils.py | 50 +++++++++--- .../tools/bundling/incremental_utils_test.py | 80 ++++++++++++++++--- prelude/apple/tools/bundling/main.py | 38 +++++++-- .../newer_version_incremental_state.json | 2 +- .../valid_incremental_state.json | 12 ++- 7 files changed, 199 insertions(+), 38 deletions(-) diff --git a/prelude/apple/tools/bundling/incremental_state.py b/prelude/apple/tools/bundling/incremental_state.py index 4588e8adc..ab0303591 100644 --- a/prelude/apple/tools/bundling/incremental_state.py +++ b/prelude/apple/tools/bundling/incremental_state.py @@ -7,6 +7,8 @@ # pyre-strict +from __future__ import annotations + import json from dataclasses import dataclass from io import TextIOBase @@ -15,7 +17,7 @@ from apple.tools.code_signing.codesign_bundle import CodesignConfiguration -_VERSION = 3 +_VERSION = 4 @dataclass @@ -35,6 +37,21 @@ class IncrementalStateItem: """ +@dataclass +class CodesignedOnCopy: + path: Path + """ + Path relative to bundle root which needs to be codesigned + """ + entitlements_digest: Optional[str] + """ + Digest of entitlements used when the given path is codesigned on copy + """ + + def __hash__(self: CodesignedOnCopy) -> int: + return hash((self.path, self.entitlements_digest)) + + @dataclass class IncrementalState: """ @@ -44,7 +61,7 @@ class IncrementalState: items: List[IncrementalStateItem] codesigned: bool codesign_configuration: CodesignConfiguration - codesign_on_copy_paths: List[Path] + codesigned_on_copy: List[CodesignedOnCopy] codesign_identity: Optional[str] swift_stdlib_paths: List[Path] version: int = _VERSION @@ -59,7 +76,7 @@ def default(self, o: object) -> object: "codesign_configuration": ( o.codesign_configuration.value if o.codesign_configuration else None 
), - "codesign_on_copy_paths": [str(p) for p in o.codesign_on_copy_paths], + "codesigned_on_copy": [self.default(i) for i in o.codesigned_on_copy], "codesign_identity": o.codesign_identity, "swift_stdlib_paths": [str(p) for p in o.swift_stdlib_paths], "version": o.version, @@ -74,15 +91,21 @@ def default(self, o: object) -> object: if o.resolved_symlink is not None: result["resolved_symlink"] = str(o.resolved_symlink) return result + elif isinstance(o, CodesignedOnCopy): + result = { + "path": str(o.path), + } + if o.entitlements_digest is not None: + result["entitlements_digest"] = str(o.entitlements_digest) + return result else: return super().default(o) -def _object_hook(dict: Dict[str, Any]) -> Union[IncrementalState, IncrementalStateItem]: +def _object_hook( + dict: Dict[str, Any] +) -> Union[IncrementalState, IncrementalStateItem, CodesignedOnCopy]: if "version" in dict: - dict["codesign_on_copy_paths"] = [ - Path(p) for p in dict.pop("codesign_on_copy_paths") - ] codesign_configuration = dict.pop("codesign_configuration") dict["codesign_configuration"] = ( CodesignConfiguration(codesign_configuration) @@ -91,7 +114,7 @@ def _object_hook(dict: Dict[str, Any]) -> Union[IncrementalState, IncrementalSta ) dict["swift_stdlib_paths"] = [Path(p) for p in dict.pop("swift_stdlib_paths")] return IncrementalState(**dict) - else: + elif "destination_relative_to_bundle" in dict: dict["source"] = Path(dict.pop("source")) dict["destination_relative_to_bundle"] = Path( dict.pop("destination_relative_to_bundle") @@ -100,6 +123,10 @@ def _object_hook(dict: Dict[str, Any]) -> Union[IncrementalState, IncrementalSta resolved_symlink = dict.pop("resolved_symlink", None) dict["resolved_symlink"] = Path(resolved_symlink) if resolved_symlink else None return IncrementalStateItem(**dict) + else: + dict["path"] = Path(dict.pop("path")) + dict["entitlements_digest"] = dict.pop("entitlements_digest", None) + return CodesignedOnCopy(**dict) def parse_incremental_state(data: TextIOBase) 
-> IncrementalState: diff --git a/prelude/apple/tools/bundling/incremental_state_test.py b/prelude/apple/tools/bundling/incremental_state_test.py index 3f5597750..5e1e3bb70 100644 --- a/prelude/apple/tools/bundling/incremental_state_test.py +++ b/prelude/apple/tools/bundling/incremental_state_test.py @@ -12,6 +12,7 @@ import pkg_resources from .incremental_state import ( + CodesignedOnCopy, IncrementalState, IncrementalStateItem, parse_incremental_state, @@ -47,7 +48,16 @@ def test_valid_state_is_parsed_successfully(self): ], codesigned=True, codesign_configuration=None, - codesign_on_copy_paths=[Path("Resources/bar.txt")], + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("Resources/bar.txt"), + entitlements_digest=None, + ), + CodesignedOnCopy( + path=Path("Resources/baz.txt"), + entitlements_digest="abc", + ), + ], codesign_identity="Johny Appleseed", swift_stdlib_paths=[Path("Frameworks/libswiftCore.dylib")], ) diff --git a/prelude/apple/tools/bundling/incremental_utils.py b/prelude/apple/tools/bundling/incremental_utils.py index 38f7251e1..0017dc123 100644 --- a/prelude/apple/tools/bundling/incremental_utils.py +++ b/prelude/apple/tools/bundling/incremental_utils.py @@ -10,10 +10,10 @@ import logging import os from pathlib import Path -from typing import Dict, List, Set, Tuple +from typing import Dict, List, Optional, Set, Tuple from .assemble_bundle_types import BundleSpecItem, IncrementalContext -from .incremental_state import IncrementalStateItem +from .incremental_state import CodesignedOnCopy, IncrementalStateItem FILES_TO_BE_IGNORED: Set[str] = { # Storage of Finder settings, which shouldn't be added when enumerating files from sources @@ -64,29 +64,44 @@ def should_assemble_incrementally( # If there is an artifact that was code signed on copy in previous run which is # present in current run and not code signed on copy, we should perform # non-incremental run for simplicity and correctness reasons. 
- current_codesigned_on_copy_paths = {Path(i.dst) for i in spec if i.codesign_on_copy} + current_codesigned_on_copy_items = { + codesigned_on_copy_item( + path=Path(i.dst), + entitlements=( + Path(i.codesign_entitlements) if i.codesign_entitlements else None + ), + incremental_context=incremental_context, + ) + for i in spec + if i.codesign_on_copy + } + codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build = _codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build( - set(previous_run_state.codesign_on_copy_paths), + previous_run_state.codesigned_on_copy, {Path(i.dst) for i in spec}, ) codesign_on_copy_paths_are_compatible = codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build.issubset( - current_codesigned_on_copy_paths + current_codesigned_on_copy_items ) if not codesign_on_copy_paths_are_compatible: logging.getLogger(__name__).info( - f"Decided not to assemble incrementally — there is at least one artifact `{list(codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build - current_codesigned_on_copy_paths)[0]}` that was code signed on copy in previous build which is present in current run and not code signed on copy." + f"Decided not to assemble incrementally — there is at least one artifact `{list(codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build - current_codesigned_on_copy_items)[0]}` that was code signed on copy in previous build which is present in current run and not code signed on copy (or codesigned but with a different set of entitlements)." 
) return codesign_on_copy_paths_are_compatible def _codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build( - previously_codesigned_on_copy_paths: Set[Path], + previously_codesigned_on_copy: List[CodesignedOnCopy], all_input_files: Set[Path], -) -> Set[Path]: +) -> Set[CodesignedOnCopy]: all_input_files_and_directories = all_input_files | { i for file in all_input_files for i in file.parents } - return previously_codesigned_on_copy_paths & all_input_files_and_directories + return { + i + for i in previously_codesigned_on_copy + if i.path in all_input_files_and_directories + } def _get_new_digest(action_metadata: Dict[Path, str], path: Path) -> str: @@ -167,3 +182,20 @@ def _list_directory_deterministically(directory: Path) -> List[Path]: # Sort in order for walk to be deterministic. dir_names.sort() return result + + +def codesigned_on_copy_item( + path: Path, entitlements: Optional[Path], incremental_context: IncrementalContext +) -> CodesignedOnCopy: + if entitlements is not None: + digest = incremental_context.metadata.get(entitlements) + if digest is None: + raise RuntimeError( + f"Expected digest for entitlements file path `{entitlements}` to be present in action metadata." 
+ ) + else: + digest = None + return CodesignedOnCopy( + path=path, + entitlements_digest=digest, + ) diff --git a/prelude/apple/tools/bundling/incremental_utils_test.py b/prelude/apple/tools/bundling/incremental_utils_test.py index de2f48f28..2edb0483a 100644 --- a/prelude/apple/tools/bundling/incremental_utils_test.py +++ b/prelude/apple/tools/bundling/incremental_utils_test.py @@ -14,7 +14,7 @@ from apple.tools.code_signing.codesign_bundle import CodesignConfiguration from .assemble_bundle_types import BundleSpecItem -from .incremental_state import IncrementalState, IncrementalStateItem +from .incremental_state import CodesignedOnCopy, IncrementalState, IncrementalStateItem from .incremental_utils import ( calculate_incremental_state, IncrementalContext, @@ -77,7 +77,7 @@ def test_run_incrementally_when_previous_build_not_codesigned(self): ], codesigned=False, codesign_configuration=None, - codesign_on_copy_paths=[], + codesigned_on_copy=[], codesign_identity=None, swift_stdlib_paths=[], ), @@ -110,7 +110,7 @@ def test_not_run_incrementally_when_previous_build_codesigned_and_current_is_not ], codesigned=True, codesign_configuration=None, - codesign_on_copy_paths=[], + codesigned_on_copy=[], codesign_identity=None, swift_stdlib_paths=[], ), @@ -146,7 +146,7 @@ def test_not_run_incrementally_when_previous_build_codesigned_with_different_ide ], codesigned=True, codesign_configuration=None, - codesign_on_copy_paths=[], + codesigned_on_copy=[], codesign_identity="old_identity", swift_stdlib_paths=[], ), @@ -172,9 +172,19 @@ def test_run_incrementally_when_codesign_on_copy_paths_match(self): dst="bar", codesign_on_copy=True, ), + BundleSpecItem( + src="src/baz", + dst="baz", + codesign_on_copy=True, + codesign_entitlements="entitlements.plist", + ), ] incremental_context = IncrementalContext( - metadata={Path("src/foo"): "digest"}, + metadata={ + Path("src/foo"): "digest", + Path("src/baz"): "digest2", + Path("entitlements.plist"): "entitlements_digest", + }, 
state=IncrementalState( items=[ IncrementalStateItem( @@ -182,11 +192,22 @@ def test_run_incrementally_when_codesign_on_copy_paths_match(self): destination_relative_to_bundle=Path("foo"), digest="digest", resolved_symlink=None, - ) + ), + IncrementalStateItem( + source=Path("src/baz"), + destination_relative_to_bundle=Path("baz"), + digest="digest2", + resolved_symlink=None, + ), ], codesigned=True, codesign_configuration=None, - codesign_on_copy_paths=[Path("foo")], + codesigned_on_copy=[ + CodesignedOnCopy(path=Path("foo"), entitlements_digest=None), + CodesignedOnCopy( + path=Path("baz"), entitlements_digest="entitlements_digest" + ), + ], codesign_identity="same_identity", swift_stdlib_paths=[], ), @@ -219,7 +240,46 @@ def test_not_run_incrementally_when_codesign_on_copy_paths_mismatch(self): codesigned=True, codesign_configuration=None, # but it was codesigned in old build - codesign_on_copy_paths=[Path("foo")], + codesigned_on_copy=[ + CodesignedOnCopy(path=Path("foo"), entitlements_digest=None) + ], + codesign_identity="same_identity", + swift_stdlib_paths=[], + ), + codesigned=True, + codesign_configuration=None, + codesign_identity="same_identity", + ) + self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + + def test_not_run_incrementally_when_codesign_on_copy_entitlements_mismatch(self): + spec = [ + BundleSpecItem( + src="src/foo", + dst="foo", + codesign_on_copy=True, + codesign_entitlements="baz/entitlements.plist", + ) + ] + incremental_context = IncrementalContext( + metadata={ + Path("src/foo"): "digest", + Path("baz/entitlements.plist"): "new_digest", + }, + state=IncrementalState( + items=[ + IncrementalStateItem( + source=Path("src/foo"), + destination_relative_to_bundle=Path("foo"), + digest="digest", + resolved_symlink=None, + ) + ], + codesigned=True, + codesign_configuration=None, + codesigned_on_copy=[ + CodesignedOnCopy(path=Path("foo"), entitlements_digest="old_digest") + ], codesign_identity="same_identity", 
swift_stdlib_paths=[], ), @@ -251,7 +311,9 @@ def test_not_run_incrementally_when_codesign_configurations_mismatch(self): codesigned=True, # Dry codesigned in old build codesign_configuration=CodesignConfiguration.dryRun, - codesign_on_copy_paths=[Path("foo")], + codesigned_on_copy=[ + CodesignedOnCopy(path=Path("foo"), entitlements_digest=None) + ], codesign_identity="same_identity", swift_stdlib_paths=[], ), diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index c4652e97a..da57f7037 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -37,11 +37,13 @@ from .assemble_bundle import assemble_bundle from .assemble_bundle_types import BundleSpecItem, IncrementalContext from .incremental_state import ( + CodesignedOnCopy, IncrementalState, IncrementalStateItem, IncrementalStateJSONEncoder, parse_incremental_state, ) +from .incremental_utils import codesigned_on_copy_item from .swift_support import run_swift_stdlib_tool, SwiftSupportArguments @@ -382,27 +384,37 @@ def _main() -> None: raise RuntimeError( "Expected signing context to be created before bundling is done if codesign is requested." 
) - codesign_on_copy_paths = [ - i.dst for i in spec if i.codesign_on_copy - ] + swift_stdlib_paths bundle_path = CodesignedPath(path=args.output, entitlements=args.entitlements) - codesigned_on_copy = [ + codesign_on_copy_paths = [ + CodesignedPath( + path=bundle_path.path / i.dst, + entitlements=( + Path(i.codesign_entitlements) if i.codesign_entitlements else None + ), + ) + for i in spec + if i.codesign_on_copy + ] + [ CodesignedPath(path=bundle_path.path / path, entitlements=None) - for path in codesign_on_copy_paths + for path in swift_stdlib_paths ] codesign_bundle( bundle_path=bundle_path, signing_context=signing_context, platform=args.platform, - codesign_on_copy_paths=codesigned_on_copy, + codesign_on_copy_paths=codesign_on_copy_paths, codesign_args=args.codesign_args, codesign_tool=args.codesign_tool, codesign_configuration=args.codesign_configuration, ) if incremental_state: + if incremental_context is None: + raise RuntimeError( + "Expected incremental context to be present when incremental state is non-null." 
+ ) _write_incremental_state( spec=spec, items=incremental_state, @@ -411,6 +423,7 @@ def _main() -> None: codesign_configuration=args.codesign_configuration, selected_codesign_identity=selected_identity_argument, swift_stdlib_paths=swift_stdlib_paths, + incremental_context=incremental_context, ) if profiling_enabled: @@ -516,12 +529,23 @@ def _write_incremental_state( codesign_configuration: CodesignConfiguration, selected_codesign_identity: Optional[str], swift_stdlib_paths: List[Path], + incremental_context: IncrementalContext, ) -> None: state = IncrementalState( items, codesigned=codesigned, codesign_configuration=codesign_configuration, - codesign_on_copy_paths=[Path(i.dst) for i in spec if i.codesign_on_copy], + codesigned_on_copy=[ + codesigned_on_copy_item( + path=Path(i.dst), + entitlements=( + Path(i.codesign_entitlements) if i.codesign_entitlements else None + ), + incremental_context=incremental_context, + ) + for i in spec + if i.codesign_on_copy + ], codesign_identity=selected_codesign_identity, swift_stdlib_paths=swift_stdlib_paths, ) diff --git a/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json b/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json index 7e4b6d236..a78e806cb 100644 --- a/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json +++ b/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json @@ -1,5 +1,5 @@ { - "version": 4, + "version": 5, "data": { "something": [] } diff --git a/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json b/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json index dd43ce55d..a5c287421 100644 --- a/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json +++ b/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json @@ -18,12 +18,18 @@ ], "codesign_configuration": null, "codesigned": true, - "codesign_on_copy_paths": [ - 
"Resources/bar.txt" + "codesigned_on_copy": [ + { + "path": "Resources/bar.txt" + }, + { + "path": "Resources/baz.txt", + "entitlements_digest": "abc" + } ], "codesign_identity": "Johny Appleseed", "swift_stdlib_paths": [ "Frameworks/libswiftCore.dylib" ], - "version": 3 + "version": 4 } From 9468f09866fc6ee2e6b18dc82ac2f31d669802ca Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Mon, 25 Mar 2024 13:39:09 -0700 Subject: [PATCH 0581/1133] Make sure android_apk materializes materialized_artifacts Summary: This was accidentally removed in D55224732 Reviewed By: jiawei-lyu Differential Revision: D55318529 fbshipit-source-id: 1ce436ae627dcb171f961f919187b1410c23dbae --- prelude/android/android_apk.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/android/android_apk.bzl b/prelude/android/android_apk.bzl index 5a1f996f1..afc461e5b 100644 --- a/prelude/android/android_apk.bzl +++ b/prelude/android/android_apk.bzl @@ -71,7 +71,7 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: r_dot_java_packages = set([info.specified_r_dot_java_package for info in resources_info.unfiltered_resource_infos if info.specified_r_dot_java_package]), shared_libraries = set(native_library_info.shared_libraries), ), - DefaultInfo(default_output = default_output, other_outputs = install_info.files.values(), sub_targets = sub_targets | class_to_srcs_subtargets), + DefaultInfo(default_output = default_output, other_outputs = install_info.files.values() + android_binary_info.materialized_artifacts, sub_targets = sub_targets | class_to_srcs_subtargets), install_info, TemplatePlaceholderInfo( keyed_variables = { From fbd24b872718101d14f36ac904557ce7af0b1e07 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Mon, 25 Mar 2024 13:39:09 -0700 Subject: [PATCH 0582/1133] Always materialize proguard_artifacts Summary: Proguard artifacts are interesting information about the build. 
In particular, Sandcastle wants the `mapping.txt` file and `usage.txt` file: https://www.internalfb.com/code/www/[32d32732b7e8]/flib/mobile/builds/MobileBuildsUtils.php?lines=26-27 Reviewed By: jiawei-lyu Differential Revision: D55317885 fbshipit-source-id: 4114dd7cca4147ece8db2eeb4639b6479d20d92f --- prelude/android/android_binary.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/android/android_binary.bzl b/prelude/android/android_binary.bzl index f83d25a55..7bc22cc38 100644 --- a/prelude/android/android_binary.bzl +++ b/prelude/android/android_binary.bzl @@ -137,6 +137,7 @@ def get_binary_info(ctx: AnalysisContext, use_proto_format: bool) -> AndroidBina resources_info.proguard_config_file, [no_dx[DefaultInfo].default_outputs[0] for no_dx in ctx.attrs.no_dx if len(no_dx[DefaultInfo].default_outputs) == 1], ) + materialized_artifacts.extend(proguard_output.proguard_artifacts) jars_to_owners = proguard_output.jars_to_owners dir_srcs = {artifact.basename: artifact for artifact in proguard_output.proguard_artifacts} for i, hidden_artifact in enumerate(proguard_output.proguard_hidden_artifacts): From 488b6ff9d51fbbf1b17a8aaa79122e02eb10bf47 Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Mon, 25 Mar 2024 13:48:11 -0700 Subject: [PATCH 0583/1133] Fix the oss-enable for apple platforms Summary: Taking a look at other uses of `oss-enable`, it seems a colon is required. Reviewed By: christolliday Differential Revision: D55330849 fbshipit-source-id: 7d31ca3eb8e8b27d1fdf4072d1d4efebdd8d9948 --- prelude/platforms/apple/build_mode.bzl | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/prelude/platforms/apple/build_mode.bzl b/prelude/platforms/apple/build_mode.bzl index b086b8484..9d8b20dba 100644 --- a/prelude/platforms/apple/build_mode.bzl +++ b/prelude/platforms/apple/build_mode.bzl @@ -5,22 +5,22 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-# @oss-enable BUILD_MODE_DEBUG = "debug" -# @oss-enable BUILD_MODE_PROFILE = "profile" -# @oss-enable BUILD_MODE_RELEASE = "release" +BUILD_MODE_DEBUG = "debug" # @oss-enable +BUILD_MODE_PROFILE = "profile" # @oss-enable +BUILD_MODE_RELEASE = "release" # @oss-enable # @oss-disable: BUILD_MODE_LOCAL = "local" # @oss-disable: BUILD_MODE_MASTER = "master" # @oss-disable: BUILD_MODE_RELEASE_CANDIDATE = "rc" # @oss-disable: BUILD_MODE_PRODUCTION = "production" # @oss-disable: BUILD_MODE_PROFILE = "profile" -# @oss-enable APPLE_BUILD_MODES = [BUILD_MODE_DEBUG, BUILD_MODE_PROFILE, BUILD_MODE_RELEASE] +APPLE_BUILD_MODES = [BUILD_MODE_DEBUG, BUILD_MODE_PROFILE, BUILD_MODE_RELEASE] # @oss-enable # @oss-disable: APPLE_BUILD_MODES = [BUILD_MODE_LOCAL, BUILD_MODE_MASTER, BUILD_MODE_RELEASE_CANDIDATE, BUILD_MODE_PRODUCTION, BUILD_MODE_PROFILE] BUILD_MODE = struct( - # @oss-enable DEBUG: BUILD_MODE_DEBUG - # @oss-enable PROFILE: BUILD_MODE_PROFILE, - # @oss-enable RELEASE: BUILD_MODE_RELEASE, + DEBUG = BUILD_MODE_DEBUG, # @oss-enable + PROFILE = BUILD_MODE_PROFILE, # @oss-enable + RELEASE = BUILD_MODE_RELEASE, # @oss-enable # @oss-disable: LOCAL = BUILD_MODE_LOCAL, # @oss-disable: MASTER = BUILD_MODE_MASTER, # @oss-disable: RELEASE_CANDIDATE = BUILD_MODE_RELEASE_CANDIDATE, @@ -28,7 +28,7 @@ BUILD_MODE = struct( # @oss-disable: PROFILE = BUILD_MODE_PROFILE, ) -# @oss-enable CONSTRAINT_PACKAGE = "prelude//platforms/apple/constraints" +CONSTRAINT_PACKAGE = "prelude//platforms/apple/constraints" # @oss-enable # @oss-disable: CONSTRAINT_PACKAGE = "ovr_config//build_mode/apple/constraints" def config_settings(config_setting_rule): From b2e76d94adbefc4c703375bf4689400090d7f958 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Mon, 25 Mar 2024 13:59:18 -0700 Subject: [PATCH 0584/1133] Include minimal fbcode build info in wheel METADATA Summary: When building via `fbpkg.builder()`, this will bake in the fbpkg name/version into the `.whl`s METADATA, which can be useful for auditing. 
Reviewed By: manav-a Differential Revision: D55263717 fbshipit-source-id: 43e1f37c205f3724118d40d36d35f178d22215c3 --- prelude/python/python_wheel.bzl | 8 ++++++++ prelude/python/tools/wheel.py | 31 ++++++++++++++++++++++++------- 2 files changed, 32 insertions(+), 7 deletions(-) diff --git a/prelude/python/python_wheel.bzl b/prelude/python/python_wheel.bzl index b3cdea59b..aa4b396ee 100644 --- a/prelude/python/python_wheel.bzl +++ b/prelude/python/python_wheel.bzl @@ -76,6 +76,9 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: cmd.add("--name={}".format(ctx.attrs.dist or ctx.attrs.name)) cmd.add("--version={}".format(ctx.attrs.version)) + for key, val in ctx.attrs.extra_metadata.items(): + cmd.add("--metadata={}:{}".format(key, val)) + srcs = [] extensions = {} for dep in ctx.attrs.libraries: @@ -166,6 +169,11 @@ python_wheel = rule( "ovr_config//third-party/python/constraints:3.9": "py3.9", }), ), + extra_metadata = attrs.dict( + key = attrs.string(), + value = attrs.string(), + default = {}, + ), abi = attrs.string(default = "none"), platform = attrs.string( default = select({ diff --git a/prelude/python/tools/wheel.py b/prelude/python/tools/wheel.py index 6e825b6ae..a4aae31fe 100644 --- a/prelude/python/tools/wheel.py +++ b/prelude/python/tools/wheel.py @@ -12,17 +12,29 @@ import sys import zipfile from types import TracebackType -from typing import List, Optional, Set, Type +from typing import Dict, List, Optional, Set, Type # pyre-fixme[24]: Generic type `AbstractContextManager` expects 1 type parameter. 
class WheelBuilder(contextlib.AbstractContextManager): - def __init__(self, *, name: str, version: str, output: str) -> None: + def __init__( + self, + *, + name: str, + version: str, + output: str, + metadata: Optional[Dict[str, str]] = None, + ) -> None: self._name = name self._version = version self._record: list[str] = [] self._outf = zipfile.ZipFile(output, mode="w") + self._metadata: Dict[str, str] = {} + self._metadata["Name"] = name + self._metadata["Version"] = version + if metadata is not None: + self._metadata.update(metadata) def write(self, dst: str, src: str) -> None: self._record.append(dst) @@ -41,10 +53,9 @@ def _write_record(self) -> None: def close(self) -> None: self.writestr( f"{self._name}-{self._version}.dist-info/METADATA", - f"""\ -Name: {self._name} -Version: {self._version} -""", + "".join( + ["{}: {}\n".format(key, val) for key, val in self._metadata.items()] + ), ) self.writestr( f"{self._name}-{self._version}.dist-info/WHEEL", @@ -71,6 +82,7 @@ def main(argv: List[str]) -> None: parser.add_argument("--name", required=True) parser.add_argument("--version", required=True) parser.add_argument("--srcs", action="append", default=[]) + parser.add_argument("--metadata", action="append", default=[]) args = parser.parse_args(argv[1:]) pkgs: Set[str] = set() @@ -82,7 +94,12 @@ def _add_pkg(pkg: str) -> None: if parent: _add_pkg(parent) - with WheelBuilder(name=args.name, version=args.version, output=args.output) as whl: + with WheelBuilder( + name=args.name, + version=args.version, + output=args.output, + metadata=dict([m.split(":", 1) for m in args.metadata]), + ) as whl: for src in args.srcs: with open(src) as f: manifest = json.load(f) From 79dada96487494b90008873fbc8b076fe2594986 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Mon, 25 Mar 2024 15:30:39 -0700 Subject: [PATCH 0585/1133] Support `entry_points` Summary: As per https://setuptools.pypa.io/en/latest/userguide/entry_point.html, support generating `entry_points.txt` in the 
`.whl`. As per https://packaging.python.org/en/latest/specifications/entry-points/#file-format, this file is just a INI file, so add a new `entry_points` param to accept it as a dict of dicts. Reviewed By: kunalb, manav-a Differential Revision: D55318448 fbshipit-source-id: 014b88e7f2141e3255838180578c05dfdeda4d0f --- prelude/python/python_wheel.bzl | 11 +++++++++++ prelude/python/tools/wheel.py | 29 +++++++++++++++++++++++++---- 2 files changed, 36 insertions(+), 4 deletions(-) diff --git a/prelude/python/python_wheel.bzl b/prelude/python/python_wheel.bzl index aa4b396ee..635e990bd 100644 --- a/prelude/python/python_wheel.bzl +++ b/prelude/python/python_wheel.bzl @@ -76,6 +76,9 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: cmd.add("--name={}".format(ctx.attrs.dist or ctx.attrs.name)) cmd.add("--version={}".format(ctx.attrs.version)) + if ctx.attrs.entry_points: + cmd.add("--entry-points={}".format(json.encode(ctx.attrs.entry_points))) + for key, val in ctx.attrs.extra_metadata.items(): cmd.add("--metadata={}:{}".format(key, val)) @@ -169,6 +172,14 @@ python_wheel = rule( "ovr_config//third-party/python/constraints:3.9": "py3.9", }), ), + entry_points = attrs.dict( + key = attrs.string(), + value = attrs.dict( + key = attrs.string(), + value = attrs.string(), + ), + default = {}, + ), extra_metadata = attrs.dict( key = attrs.string(), value = attrs.string(), diff --git a/prelude/python/tools/wheel.py b/prelude/python/tools/wheel.py index a4aae31fe..10fef3c7b 100644 --- a/prelude/python/tools/wheel.py +++ b/prelude/python/tools/wheel.py @@ -6,13 +6,15 @@ # of this source tree. import argparse +import configparser import contextlib +import io import json import os import sys import zipfile from types import TracebackType -from typing import Dict, List, Optional, Set, Type +from typing import cast, Dict, List, Optional, Set, Type # pyre-fixme[24]: Generic type `AbstractContextManager` expects 1 type parameter. 
@@ -24,18 +26,23 @@ def __init__( name: str, version: str, output: str, + entry_points: Optional[Dict[str, str]] = None, metadata: Optional[Dict[str, str]] = None, ) -> None: self._name = name self._version = version self._record: list[str] = [] self._outf = zipfile.ZipFile(output, mode="w") + self._entry_points: Optional[Dict[str, str]] = entry_points self._metadata: Dict[str, str] = {} self._metadata["Name"] = name self._metadata["Version"] = version if metadata is not None: self._metadata.update(metadata) + def _dist_info(self, *path: str) -> str: + return os.path.join(f"{self._name}-{self._version}.dist-info", *path) + def write(self, dst: str, src: str) -> None: self._record.append(dst) self._outf.write(filename=src, arcname=dst) @@ -45,25 +52,35 @@ def writestr(self, dst: str, contents: str) -> None: self._outf.writestr(zinfo_or_arcname=dst, data=contents) def _write_record(self) -> None: - record = f"{self._name}-{self._version}.dist-info/RECORD" + record = self._dist_info("RECORD") self._outf.writestr( record, "".join(["{},,\n".format(f) for f in (self._record + [record])]) ) def close(self) -> None: self.writestr( - f"{self._name}-{self._version}.dist-info/METADATA", + self._dist_info("METADATA"), "".join( ["{}: {}\n".format(key, val) for key, val in self._metadata.items()] ), ) self.writestr( - f"{self._name}-{self._version}.dist-info/WHEEL", + self._dist_info("WHEEL"), """\ Wheel-Version: 1.0 """, ) + # Write entry points. 
+ if self._entry_points is not None: + config = configparser.ConfigParser() + config.read_dict(cast(Dict[str, Dict[str, str]], self._entry_points)) + with io.TextIOWrapper( + self._outf.open(self._dist_info("entry_points.txt"), mode="w"), + encoding="utf-8", + ) as f: + config.write(f) + self._write_record() self._outf.close() @@ -81,6 +98,7 @@ def main(argv: List[str]) -> None: parser.add_argument("--output", required=True) parser.add_argument("--name", required=True) parser.add_argument("--version", required=True) + parser.add_argument("--entry-points", default=None) parser.add_argument("--srcs", action="append", default=[]) parser.add_argument("--metadata", action="append", default=[]) args = parser.parse_args(argv[1:]) @@ -98,6 +116,9 @@ def _add_pkg(pkg: str) -> None: name=args.name, version=args.version, output=args.output, + entry_points=( + json.loads(args.entry_points) if args.entry_points is not None else None + ), metadata=dict([m.split(":", 1) for m in args.metadata]), ) as whl: for src in args.srcs: From 1c63e991d69653f5550de56735902ed9aa3f7229 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Tue, 26 Mar 2024 03:58:11 -0700 Subject: [PATCH 0586/1133] Collect source artifacts into provider Summary: First step is to collect source artifacts into provider so we could analyse them later during link group analysis. 
Reviewed By: VladimirMakaev Differential Revision: D54896901 fbshipit-source-id: e2898084221544b880ab4c114e2b22f4bb3550d2 --- prelude/linking/linkable_graph.bzl | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index c47b5d035..a4c01d7fe 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -7,6 +7,7 @@ load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo") +load("@prelude//cxx:platform.bzl", "cxx_by_platform") load("@prelude//linking:types.bzl", "Linkage") load("@prelude//python:python.bzl", "PythonLibraryInfo") load("@prelude//utils:expect.bzl", "expect") @@ -14,6 +15,10 @@ load( "@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", ) +load( + "@prelude//utils:utils.bzl", + "flatten", +) load( ":link_info.bzl", "LibOutputStyle", @@ -47,6 +52,8 @@ LinkableRootInfo = provider( ############################################################################### _DisallowConstruction = record() +_TUPLE_TYPE = type(()) +_TargetSourceType = [Artifact, str, _TUPLE_TYPE] LinkableNode = record( # Attribute labels on the target. @@ -84,6 +91,9 @@ LinkableNode = record( # as an asset in android apks. can_be_asset = field(bool), + # Collected target sources from the target. 
+ srcs = field(list[_TargetSourceType]), + # Whether the node should appear in the android mergemap (which provides information about the original # soname->final merged lib mapping) include_in_android_mergemap = field(bool), @@ -135,6 +145,14 @@ def _get_required_outputs_for_linkage(linkage: Linkage) -> list[LibOutputStyle]: return get_output_styles_for_linkage(linkage) +def _get_target_sources(ctx: AnalysisContext) -> list[_TargetSourceType]: + srcs = [] + if hasattr(ctx.attrs, "srcs"): + srcs.extend(ctx.attrs.srcs) + if hasattr(ctx.attrs, "platform_srcs"): + srcs.extend(flatten(cxx_by_platform(ctx, ctx.attrs.platform_srcs))) + return srcs + def create_linkable_node( ctx: AnalysisContext, default_soname: str | None, @@ -164,6 +182,7 @@ def create_linkable_node( link_infos = link_infos, shared_libs = shared_libs, can_be_asset = can_be_asset, + srcs = _get_target_sources(ctx), include_in_android_mergemap = include_in_android_mergemap, default_soname = default_soname, linker_flags = linker_flags, From d591f9f261db24d4a76e5899b5339a47bbe6657c Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Tue, 26 Mar 2024 03:58:11 -0700 Subject: [PATCH 0587/1133] Refactor out public link group nodes to reuse later Summary: Just moving code around for reusability Reviewed By: VladimirMakaev Differential Revision: D55074002 fbshipit-source-id: e1dfbd6e3aa6f54c157ab217de4512a38566dc5c --- prelude/cxx/cxx_executable.bzl | 11 +++++++++++ prelude/cxx/cxx_library.bzl | 1 + prelude/cxx/link_groups.bzl | 5 ++++- prelude/haskell/haskell.bzl | 1 + prelude/rust/link_info.bzl | 2 ++ 5 files changed, 19 insertions(+), 1 deletion(-) diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index ee9c4184b..96cd016fb 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -135,6 +135,7 @@ load( "get_link_group", "get_link_group_map_json", "get_link_group_preferred_linkage", + "get_public_link_group_nodes", "get_transitive_deps_matching_labels", 
"is_link_group_shlib", ) @@ -313,6 +314,13 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # If we're using auto-link-groups, where we generate the link group links # in the prelude, the link group map will give us the link group libs. # Otherwise, pull them from the `LinkGroupLibInfo` provider from out deps. + + public_link_group_nodes = get_public_link_group_nodes( + linkable_graph_node_map, + link_group_mappings, + exec_dep_roots + link_group_extra_link_roots, + link_group, + ) if impl_params.auto_link_group_specs != None: linked_link_groups = create_link_groups( ctx = ctx, @@ -328,6 +336,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, prefer_stripped_objects = impl_params.prefer_stripped_objects, anonymous = ctx.attrs.anonymous_link_groups, allow_cache_upload = impl_params.exe_allow_cache_upload, + public_nodes = public_link_group_nodes, ) for name, linked_link_group in linked_link_groups.libs.items(): auto_link_groups[name] = linked_link_group.artifact @@ -349,6 +358,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # scenarios for which we need to propagate up link info and simplify this logic. For now # base which links to use based on whether link groups are defined. 
labels_to_links_map = get_filtered_labels_to_links_map( + public_link_group_nodes, linkable_graph_node_map, link_group, link_groups, @@ -378,6 +388,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # if a cpp_unittest is part of the link group, we need to traverse through all deps # from the root again to ensure we link in gtest deps labels_to_links_map = labels_to_links_map | get_filtered_labels_to_links_map( + public_link_group_nodes, linkable_graph_node_map, None, link_groups, diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index e2e8e46f7..5c9933ed7 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -1193,6 +1193,7 @@ def _get_shared_library_links( link_strategy = LinkStrategy("static_pic") link_strategy = process_link_strategy_for_pic_behavior(link_strategy, pic_behavior) filtered_labels_to_links_map = get_filtered_labels_to_links_map( + None, linkable_graph_node_map_func(), link_group, {}, diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 9081c44c3..917bc14fc 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -296,6 +296,7 @@ def _transitively_update_shared_linkage( ) def get_filtered_labels_to_links_map( + public_nodes: [set_record, None], # buildifier: disable=unused-variable linkable_graph_node_map: dict[Label, LinkableNode], link_group: [str, None], link_groups: dict[str, Group], @@ -641,6 +642,7 @@ def _create_link_group( # Add roots... filtered_labels_to_links_map = get_filtered_labels_to_links_map( + public_nodes, linkable_graph_node_map, spec.group.name, link_groups, @@ -793,6 +795,7 @@ def create_link_groups( link_group_preferred_linkage: dict[Label, Linkage] = {}, link_group_mappings: [dict[Label, str], None] = None, anonymous: bool = False, + public_nodes: [set_record, None] = None, allow_cache_upload = False) -> _LinkedLinkGroups: # Generate stubs first, so that subsequent links can link against them. 
link_group_shared_links = {} @@ -816,7 +819,7 @@ def create_link_groups( undefined_symfiles = [] global_symfiles = [] - public_nodes = get_public_link_group_nodes( + public_nodes = public_nodes or get_public_link_group_nodes( linkable_graph_node_map, link_group_mappings, executable_deps + other_roots, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 2a105b645..07361e213 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -1064,6 +1064,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: ) labels_to_links_map = get_filtered_labels_to_links_map( + public_nodes = None, linkable_graph_node_map = linkable_graph_node_map, link_group = None, link_groups = link_group_info.groups, diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 737d3d14b..779d1f546 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -448,6 +448,8 @@ def inherited_rust_cxx_link_group_info( link_group_libs[name] = linked_link_group.library labels_to_links_map = get_filtered_labels_to_links_map( + # TODO(patskovn): catch duplicated files in link groups for rust. + None, linkable_graph_node_map, link_group, link_groups, From 0eab6bdee4ae220ddd0b624e3741cd13c4ac3134 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Tue, 26 Mar 2024 03:58:11 -0700 Subject: [PATCH 0588/1133] Prohibit file duplications within link group Summary: Duplicated symbols are huge pain of link groups. People copy-paste code and, to repeat themselves less, people reuse files between targets. That is somewhat just more fancy way to copy paste. Reusing files works for `dev` and `opt` modes, but unfortunately, there is no way to make it work for link groups. With these changes we giving user actionable message to fix file duplication within link groups. > Q: Doesn't linker catch this? Yes, linker catches this. But a lot of people struggle with reading and fixing linker errors. 
This change gives them understandable and actionable error. > Q: Will that catch all symbols duplication? No. If people literally copy-pasted code between targets and included it into different files, we still gonna see linker errors. We need to address this somehow else. Reviewed By: VladimirMakaev Differential Revision: D54896900 fbshipit-source-id: bef85cad8e9333391a70e627b80c3039bca8f405 --- prelude/cxx/link_groups.bzl | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 917bc14fc..7e8e00a97 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -296,7 +296,7 @@ def _transitively_update_shared_linkage( ) def get_filtered_labels_to_links_map( - public_nodes: [set_record, None], # buildifier: disable=unused-variable + public_nodes: [set_record, None], linkable_graph_node_map: dict[Label, LinkableNode], link_group: [str, None], link_groups: dict[str, Group], @@ -370,6 +370,7 @@ def get_filtered_labels_to_links_map( # already. This avoids use adding the same link group lib multiple times, # for each of the possible multiple nodes that maps to it. link_group_added = {} + group_srcs = {} def add_link(target: Label, output_style: LibOutputStyle): linkable_map[target] = LinkGroupLinkInfo( @@ -379,8 +380,26 @@ def get_filtered_labels_to_links_map( def add_link_group(target: Label, target_group: str): # If we've already added this link group to the link line, we're done. - if target_group in link_group_added: - return + + if public_nodes and public_nodes.contains(target): + if target_group not in group_srcs: + group_srcs[target_group] = {} + target_group_srcs = group_srcs[target_group] + for src in linkable_graph_node_map[target].srcs: + if not isinstance(src, Artifact): + # "src" is either source file or source file with list of compilation flags. 
+ # We do not handle the case where we have compilation flags attached to source files + # because it we don't know is link gonna fail or not. So we let user deal with linker errors if there are any. + continue + + previous_target = target_group_srcs.get(src, None) + if previous_target: + fail("'{}' artifact included multiple times into '{}' link group. From '{}:{}' and '{}:{}'".format(src, target_group, target.package, target.name, previous_target.package, previous_target.name)) + else: + target_group_srcs[src] = target + + if target_group in link_group_added: + return # In some flows, we may not have access to the actual link group lib # in our dep tree (e.g. https://fburl.com/code/pddmkptb), so just bail From 3e9a7d581b56be95b1d6f396ba400dd9536ba39c Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Tue, 26 Mar 2024 03:58:11 -0700 Subject: [PATCH 0589/1133] Handle duplicated artifacts in link groups for rust Reviewed By: dtolnay Differential Revision: D55069759 fbshipit-source-id: d0db00dda8dd296f6a59f9823f3c402bbb55505e --- prelude/rust/link_info.bzl | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 779d1f546..f9740a710 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -31,6 +31,7 @@ load( "get_link_group", "get_link_group_info", "get_link_group_preferred_linkage", + "get_public_link_group_nodes", ) load( "@prelude//cxx:link_groups_types.bzl", @@ -424,6 +425,13 @@ def inherited_rust_cxx_link_group_info( # handle labels that are mutated by version alias executable_deps.append(g.nodes.value.label) + public_link_group_nodes = get_public_link_group_nodes( + linkable_graph_node_map, + link_group_mappings, + executable_deps, + link_group, + ) + linked_link_groups = create_link_groups( ctx = ctx, link_groups = link_groups, @@ -437,6 +445,7 @@ def inherited_rust_cxx_link_group_info( other_roots = [], prefer_stripped_objects = False, # Does Rust ever 
use stripped objects? anonymous = ctx.attrs.anonymous_link_groups, + public_nodes = public_link_group_nodes, ) auto_link_groups = {} @@ -448,8 +457,7 @@ def inherited_rust_cxx_link_group_info( link_group_libs[name] = linked_link_group.library labels_to_links_map = get_filtered_labels_to_links_map( - # TODO(patskovn): catch duplicated files in link groups for rust. - None, + public_link_group_nodes, linkable_graph_node_map, link_group, link_groups, From f2811a0d80fc5377bba7f979d8a4679b82dcd90a Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Tue, 26 Mar 2024 03:58:11 -0700 Subject: [PATCH 0590/1133] Removed duplication Summary: Now we using public nodes for early duplicated symbols failure cleaning up link groups internals Reviewed By: VladimirMakaev Differential Revision: D55074000 fbshipit-source-id: 7687e0d58763a326341758011a45190c0c612833 --- prelude/cxx/cxx_executable.bzl | 1 - prelude/cxx/link_groups.bzl | 10 +--------- prelude/haskell/haskell.bzl | 13 +++++++++++-- prelude/rust/link_info.bzl | 1 - 4 files changed, 12 insertions(+), 13 deletions(-) diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 96cd016fb..2c829ef2e 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -330,7 +330,6 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, executable_deps = exec_dep_roots, linker_flags = own_link_flags, link_group_specs = impl_params.auto_link_group_specs, - root_link_group = link_group, linkable_graph_node_map = linkable_graph_node_map, other_roots = link_group_extra_link_roots, prefer_stripped_objects = impl_params.prefer_stripped_objects, diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 7e8e00a97..b85d3d12d 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -803,18 +803,17 @@ def _symbol_flags_for_link_groups( def create_link_groups( ctx: AnalysisContext, + public_nodes: set_record, link_groups: dict[str, Group] 
= {}, link_group_specs: list[LinkGroupLibSpec] = [], executable_deps: list[Label] = [], other_roots: list[Label] = [], - root_link_group: [str, None] = None, linker_flags: list[typing.Any] = [], prefer_stripped_objects: bool = False, linkable_graph_node_map: dict[Label, LinkableNode] = {}, link_group_preferred_linkage: dict[Label, Linkage] = {}, link_group_mappings: [dict[Label, str], None] = None, anonymous: bool = False, - public_nodes: [set_record, None] = None, allow_cache_upload = False) -> _LinkedLinkGroups: # Generate stubs first, so that subsequent links can link against them. link_group_shared_links = {} @@ -838,13 +837,6 @@ def create_link_groups( undefined_symfiles = [] global_symfiles = [] - public_nodes = public_nodes or get_public_link_group_nodes( - linkable_graph_node_map, - link_group_mappings, - executable_deps + other_roots, - root_link_group, - ) - for link_group_spec in specs: # NOTE(agallagher): It might make sense to move this down to be # done when we generated the links for the executable, so we can diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 07361e213..c85e4c563 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -39,6 +39,7 @@ load( "get_filtered_links", "get_link_group_info", "get_link_group_preferred_linkage", + "get_public_link_group_nodes", "get_transitive_deps_matching_labels", "is_link_group_shlib", ) @@ -1034,14 +1035,22 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: # in the prelude, the link group map will give us the link group libs. # Otherwise, pull them from the `LinkGroupLibInfo` provider from out deps. 
auto_link_group_specs = get_auto_link_group_specs(ctx, link_group_info) + executable_deps = [d.linkable_graph.nodes.value.label for d in link_deps if d.linkable_graph != None] + public_nodes = get_public_link_group_nodes( + linkable_graph_node_map, + link_group_info.mappings, + executable_deps, + None, + ) if auto_link_group_specs != None: linked_link_groups = create_link_groups( ctx = ctx, link_group_mappings = link_group_info.mappings, link_group_preferred_linkage = link_group_preferred_linkage, - executable_deps = [d.linkable_graph.nodes.value.label for d in link_deps if d.linkable_graph != None], + executable_deps = executable_deps, link_group_specs = auto_link_group_specs, linkable_graph_node_map = linkable_graph_node_map, + public_nodes = public_nodes, ) for name, linked_link_group in linked_link_groups.libs.items(): auto_link_groups[name] = linked_link_group.artifact @@ -1064,7 +1073,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: ) labels_to_links_map = get_filtered_labels_to_links_map( - public_nodes = None, + public_nodes = public_nodes, linkable_graph_node_map = linkable_graph_node_map, link_group = None, link_groups = link_group_info.groups, diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index f9740a710..85ae1cc5c 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -440,7 +440,6 @@ def inherited_rust_cxx_link_group_info( executable_deps = executable_deps, linker_flags = [], link_group_specs = auto_link_group_specs, - root_link_group = link_group, linkable_graph_node_map = linkable_graph_node_map, other_roots = [], prefer_stripped_objects = False, # Does Rust ever use stripped objects? 
From da327621d8d92fa7319e04ab868c981c69a6498b Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Tue, 26 Mar 2024 04:27:22 -0700 Subject: [PATCH 0591/1133] Speed up filter_srcs action Summary: Benchmark `time buck2 build --local-only --no-remote-cache fbcode//third-party-go/vendor/...` Before: 22m:29s After: 3m:17s Changes - use `BUCK_SCRATCH_PATH` as `GOCACHE` directory - return from `go list` only json fields those we actually use Reviewed By: leoleovich Differential Revision: D55339442 fbshipit-source-id: 0a309ace5d078d8b1f8f3a7ee3be04c6dc0d3ec9 --- prelude/go/tools/filter_srcs.py | 42 ++++++++++++++++----------------- 1 file changed, 20 insertions(+), 22 deletions(-) diff --git a/prelude/go/tools/filter_srcs.py b/prelude/go/tools/filter_srcs.py index 94e87fbde..c551b3344 100755 --- a/prelude/go/tools/filter_srcs.py +++ b/prelude/go/tools/filter_srcs.py @@ -23,7 +23,6 @@ import os import subprocess import sys -import tempfile from pathlib import Path @@ -42,27 +41,26 @@ def main(argv): goroot = os.path.realpath(goroot) # Run `go list` to filter input sources by build pragmas. 
- with tempfile.TemporaryDirectory() as go_cache_dir: - out = subprocess.check_output( - [ - "env", - "-i", - "GOROOT={}".format(goroot), - "GOARCH={}".format(os.environ.get("GOARCH", "")), - "GOOS={}".format(os.environ.get("GOOS", "")), - "CGO_ENABLED={}".format(os.environ.get("CGO_ENABLED", "0")), - "GO111MODULE=off", - "GOCACHE=" + go_cache_dir, - args.go.resolve(), - "list", - "-e", - "-json", - "-tags", - args.tags, - ".", - ], - cwd=args.srcdir, - ).decode("utf-8") + out = subprocess.check_output( + [ + "env", + "-i", + "GOROOT={}".format(goroot), + "GOARCH={}".format(os.environ.get("GOARCH", "")), + "GOOS={}".format(os.environ.get("GOOS", "")), + "CGO_ENABLED={}".format(os.environ.get("CGO_ENABLED", "0")), + "GO111MODULE=off", + "GOCACHE=" + os.path.realpath(os.environ.get("BUCK_SCRATCH_PATH")), + args.go.resolve(), + "list", + "-e", + "-json=GoFiles,SFiles,TestGoFiles,XTestGoFiles,EmbedFiles", + "-tags", + args.tags, + ".", + ], + cwd=args.srcdir, + ).decode("utf-8") # Parse JSON output and print out sources. 
idx = 0 From 22ea792338754db17d55e4bbea939298839554a0 Mon Sep 17 00:00:00 2001 From: Wyman Zhao Date: Tue, 26 Mar 2024 04:53:36 -0700 Subject: [PATCH 0592/1133] Revert D55074000: Removed duplication Differential Revision: D55074000 Original commit changeset: 7687e0d58763 Original Phabricator Diff: D55074000 fbshipit-source-id: 2863c37813f0806c67ecd7a96eb3ea45c88dfee4 --- prelude/cxx/cxx_executable.bzl | 1 + prelude/cxx/link_groups.bzl | 10 +++++++++- prelude/haskell/haskell.bzl | 13 ++----------- prelude/rust/link_info.bzl | 1 + 4 files changed, 13 insertions(+), 12 deletions(-) diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 2c829ef2e..96cd016fb 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -330,6 +330,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, executable_deps = exec_dep_roots, linker_flags = own_link_flags, link_group_specs = impl_params.auto_link_group_specs, + root_link_group = link_group, linkable_graph_node_map = linkable_graph_node_map, other_roots = link_group_extra_link_roots, prefer_stripped_objects = impl_params.prefer_stripped_objects, diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index b85d3d12d..7e8e00a97 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -803,17 +803,18 @@ def _symbol_flags_for_link_groups( def create_link_groups( ctx: AnalysisContext, - public_nodes: set_record, link_groups: dict[str, Group] = {}, link_group_specs: list[LinkGroupLibSpec] = [], executable_deps: list[Label] = [], other_roots: list[Label] = [], + root_link_group: [str, None] = None, linker_flags: list[typing.Any] = [], prefer_stripped_objects: bool = False, linkable_graph_node_map: dict[Label, LinkableNode] = {}, link_group_preferred_linkage: dict[Label, Linkage] = {}, link_group_mappings: [dict[Label, str], None] = None, anonymous: bool = False, + public_nodes: [set_record, None] = None, 
allow_cache_upload = False) -> _LinkedLinkGroups: # Generate stubs first, so that subsequent links can link against them. link_group_shared_links = {} @@ -837,6 +838,13 @@ def create_link_groups( undefined_symfiles = [] global_symfiles = [] + public_nodes = public_nodes or get_public_link_group_nodes( + linkable_graph_node_map, + link_group_mappings, + executable_deps + other_roots, + root_link_group, + ) + for link_group_spec in specs: # NOTE(agallagher): It might make sense to move this down to be # done when we generated the links for the executable, so we can diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index c85e4c563..07361e213 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -39,7 +39,6 @@ load( "get_filtered_links", "get_link_group_info", "get_link_group_preferred_linkage", - "get_public_link_group_nodes", "get_transitive_deps_matching_labels", "is_link_group_shlib", ) @@ -1035,22 +1034,14 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: # in the prelude, the link group map will give us the link group libs. # Otherwise, pull them from the `LinkGroupLibInfo` provider from out deps. 
auto_link_group_specs = get_auto_link_group_specs(ctx, link_group_info) - executable_deps = [d.linkable_graph.nodes.value.label for d in link_deps if d.linkable_graph != None] - public_nodes = get_public_link_group_nodes( - linkable_graph_node_map, - link_group_info.mappings, - executable_deps, - None, - ) if auto_link_group_specs != None: linked_link_groups = create_link_groups( ctx = ctx, link_group_mappings = link_group_info.mappings, link_group_preferred_linkage = link_group_preferred_linkage, - executable_deps = executable_deps, + executable_deps = [d.linkable_graph.nodes.value.label for d in link_deps if d.linkable_graph != None], link_group_specs = auto_link_group_specs, linkable_graph_node_map = linkable_graph_node_map, - public_nodes = public_nodes, ) for name, linked_link_group in linked_link_groups.libs.items(): auto_link_groups[name] = linked_link_group.artifact @@ -1073,7 +1064,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: ) labels_to_links_map = get_filtered_labels_to_links_map( - public_nodes = public_nodes, + public_nodes = None, linkable_graph_node_map = linkable_graph_node_map, link_group = None, link_groups = link_group_info.groups, diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 85ae1cc5c..f9740a710 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -440,6 +440,7 @@ def inherited_rust_cxx_link_group_info( executable_deps = executable_deps, linker_flags = [], link_group_specs = auto_link_group_specs, + root_link_group = link_group, linkable_graph_node_map = linkable_graph_node_map, other_roots = [], prefer_stripped_objects = False, # Does Rust ever use stripped objects? 
From 75759073b5a3bb42a03aba5c8a6c6d5d936fa197 Mon Sep 17 00:00:00 2001 From: Wyman Zhao Date: Tue, 26 Mar 2024 04:53:36 -0700 Subject: [PATCH 0593/1133] Revert D55069759: Handle duplicated artifacts in link groups for rust Differential Revision: D55069759 Original commit changeset: d0db00dda8dd Original Phabricator Diff: D55069759 fbshipit-source-id: 92d438a9310ec53bb4c89cd557401e7d88fbacd5 --- prelude/rust/link_info.bzl | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index f9740a710..779d1f546 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -31,7 +31,6 @@ load( "get_link_group", "get_link_group_info", "get_link_group_preferred_linkage", - "get_public_link_group_nodes", ) load( "@prelude//cxx:link_groups_types.bzl", @@ -425,13 +424,6 @@ def inherited_rust_cxx_link_group_info( # handle labels that are mutated by version alias executable_deps.append(g.nodes.value.label) - public_link_group_nodes = get_public_link_group_nodes( - linkable_graph_node_map, - link_group_mappings, - executable_deps, - link_group, - ) - linked_link_groups = create_link_groups( ctx = ctx, link_groups = link_groups, @@ -445,7 +437,6 @@ def inherited_rust_cxx_link_group_info( other_roots = [], prefer_stripped_objects = False, # Does Rust ever use stripped objects? anonymous = ctx.attrs.anonymous_link_groups, - public_nodes = public_link_group_nodes, ) auto_link_groups = {} @@ -457,7 +448,8 @@ def inherited_rust_cxx_link_group_info( link_group_libs[name] = linked_link_group.library labels_to_links_map = get_filtered_labels_to_links_map( - public_link_group_nodes, + # TODO(patskovn): catch duplicated files in link groups for rust. 
+ None, linkable_graph_node_map, link_group, link_groups, From 9eb36a537128524cf18ba710de64a288442cfdfe Mon Sep 17 00:00:00 2001 From: Wyman Zhao Date: Tue, 26 Mar 2024 04:53:36 -0700 Subject: [PATCH 0594/1133] Revert D54896900: Prohibit file duplications within link group Differential Revision: D54896900 Original commit changeset: bef85cad8e93 Original Phabricator Diff: D54896900 fbshipit-source-id: 4044be81a9cdc2b9f795099fefdd53d04700aea2 --- prelude/cxx/link_groups.bzl | 25 +++---------------------- 1 file changed, 3 insertions(+), 22 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 7e8e00a97..917bc14fc 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -296,7 +296,7 @@ def _transitively_update_shared_linkage( ) def get_filtered_labels_to_links_map( - public_nodes: [set_record, None], + public_nodes: [set_record, None], # buildifier: disable=unused-variable linkable_graph_node_map: dict[Label, LinkableNode], link_group: [str, None], link_groups: dict[str, Group], @@ -370,7 +370,6 @@ def get_filtered_labels_to_links_map( # already. This avoids use adding the same link group lib multiple times, # for each of the possible multiple nodes that maps to it. link_group_added = {} - group_srcs = {} def add_link(target: Label, output_style: LibOutputStyle): linkable_map[target] = LinkGroupLinkInfo( @@ -380,26 +379,8 @@ def get_filtered_labels_to_links_map( def add_link_group(target: Label, target_group: str): # If we've already added this link group to the link line, we're done. - - if public_nodes and public_nodes.contains(target): - if target_group not in group_srcs: - group_srcs[target_group] = {} - target_group_srcs = group_srcs[target_group] - for src in linkable_graph_node_map[target].srcs: - if not isinstance(src, Artifact): - # "src" is either source file or source file with list of compilation flags. 
- # We do not handle the case where we have compilation flags attached to source files - # because it we don't know is link gonna fail or not. So we let user deal with linker errors if there are any. - continue - - previous_target = target_group_srcs.get(src, None) - if previous_target: - fail("'{}' artifact included multiple times into '{}' link group. From '{}:{}' and '{}:{}'".format(src, target_group, target.package, target.name, previous_target.package, previous_target.name)) - else: - target_group_srcs[src] = target - - if target_group in link_group_added: - return + if target_group in link_group_added: + return # In some flows, we may not have access to the actual link group lib # in our dep tree (e.g. https://fburl.com/code/pddmkptb), so just bail From d9cf9a8af99868f2e856c19b5c1a5b38adc011c6 Mon Sep 17 00:00:00 2001 From: Wyman Zhao Date: Tue, 26 Mar 2024 04:53:36 -0700 Subject: [PATCH 0595/1133] Revert D55074002: Refactor out public link group nodes to reuse later Differential Revision: D55074002 Original commit changeset: e1dfbd6e3aa6 Original Phabricator Diff: D55074002 fbshipit-source-id: 593dc794fdb35b0f4cc73cbd03ef5dd7cf2f7300 --- prelude/cxx/cxx_executable.bzl | 11 ----------- prelude/cxx/cxx_library.bzl | 1 - prelude/cxx/link_groups.bzl | 5 +---- prelude/haskell/haskell.bzl | 1 - prelude/rust/link_info.bzl | 2 -- 5 files changed, 1 insertion(+), 19 deletions(-) diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 96cd016fb..ee9c4184b 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -135,7 +135,6 @@ load( "get_link_group", "get_link_group_map_json", "get_link_group_preferred_linkage", - "get_public_link_group_nodes", "get_transitive_deps_matching_labels", "is_link_group_shlib", ) @@ -314,13 +313,6 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # If we're using auto-link-groups, where we generate the link group links # in the prelude, the link group 
map will give us the link group libs. # Otherwise, pull them from the `LinkGroupLibInfo` provider from out deps. - - public_link_group_nodes = get_public_link_group_nodes( - linkable_graph_node_map, - link_group_mappings, - exec_dep_roots + link_group_extra_link_roots, - link_group, - ) if impl_params.auto_link_group_specs != None: linked_link_groups = create_link_groups( ctx = ctx, @@ -336,7 +328,6 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, prefer_stripped_objects = impl_params.prefer_stripped_objects, anonymous = ctx.attrs.anonymous_link_groups, allow_cache_upload = impl_params.exe_allow_cache_upload, - public_nodes = public_link_group_nodes, ) for name, linked_link_group in linked_link_groups.libs.items(): auto_link_groups[name] = linked_link_group.artifact @@ -358,7 +349,6 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # scenarios for which we need to propagate up link info and simplify this logic. For now # base which links to use based on whether link groups are defined. 
labels_to_links_map = get_filtered_labels_to_links_map( - public_link_group_nodes, linkable_graph_node_map, link_group, link_groups, @@ -388,7 +378,6 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # if a cpp_unittest is part of the link group, we need to traverse through all deps # from the root again to ensure we link in gtest deps labels_to_links_map = labels_to_links_map | get_filtered_labels_to_links_map( - public_link_group_nodes, linkable_graph_node_map, None, link_groups, diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 5c9933ed7..e2e8e46f7 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -1193,7 +1193,6 @@ def _get_shared_library_links( link_strategy = LinkStrategy("static_pic") link_strategy = process_link_strategy_for_pic_behavior(link_strategy, pic_behavior) filtered_labels_to_links_map = get_filtered_labels_to_links_map( - None, linkable_graph_node_map_func(), link_group, {}, diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 917bc14fc..9081c44c3 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -296,7 +296,6 @@ def _transitively_update_shared_linkage( ) def get_filtered_labels_to_links_map( - public_nodes: [set_record, None], # buildifier: disable=unused-variable linkable_graph_node_map: dict[Label, LinkableNode], link_group: [str, None], link_groups: dict[str, Group], @@ -642,7 +641,6 @@ def _create_link_group( # Add roots... filtered_labels_to_links_map = get_filtered_labels_to_links_map( - public_nodes, linkable_graph_node_map, spec.group.name, link_groups, @@ -795,7 +793,6 @@ def create_link_groups( link_group_preferred_linkage: dict[Label, Linkage] = {}, link_group_mappings: [dict[Label, str], None] = None, anonymous: bool = False, - public_nodes: [set_record, None] = None, allow_cache_upload = False) -> _LinkedLinkGroups: # Generate stubs first, so that subsequent links can link against them. 
link_group_shared_links = {} @@ -819,7 +816,7 @@ def create_link_groups( undefined_symfiles = [] global_symfiles = [] - public_nodes = public_nodes or get_public_link_group_nodes( + public_nodes = get_public_link_group_nodes( linkable_graph_node_map, link_group_mappings, executable_deps + other_roots, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 07361e213..2a105b645 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -1064,7 +1064,6 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: ) labels_to_links_map = get_filtered_labels_to_links_map( - public_nodes = None, linkable_graph_node_map = linkable_graph_node_map, link_group = None, link_groups = link_group_info.groups, diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 779d1f546..737d3d14b 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -448,8 +448,6 @@ def inherited_rust_cxx_link_group_info( link_group_libs[name] = linked_link_group.library labels_to_links_map = get_filtered_labels_to_links_map( - # TODO(patskovn): catch duplicated files in link groups for rust. 
- None, linkable_graph_node_map, link_group, link_groups, From 08107bfcba9b840e9d72de9e4fb73ba59c94a858 Mon Sep 17 00:00:00 2001 From: Wyman Zhao Date: Tue, 26 Mar 2024 04:53:36 -0700 Subject: [PATCH 0596/1133] Revert D54896901: Collect source artifacts into provider Differential Revision: D54896901 Original commit changeset: e28980842215 Original Phabricator Diff: D54896901 fbshipit-source-id: fd5d448b41f180237571dbaa2093f9a3fbbdd864 --- prelude/linking/linkable_graph.bzl | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index a4c01d7fe..c47b5d035 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -7,7 +7,6 @@ load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo") -load("@prelude//cxx:platform.bzl", "cxx_by_platform") load("@prelude//linking:types.bzl", "Linkage") load("@prelude//python:python.bzl", "PythonLibraryInfo") load("@prelude//utils:expect.bzl", "expect") @@ -15,10 +14,6 @@ load( "@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", ) -load( - "@prelude//utils:utils.bzl", - "flatten", -) load( ":link_info.bzl", "LibOutputStyle", @@ -52,8 +47,6 @@ LinkableRootInfo = provider( ############################################################################### _DisallowConstruction = record() -_TUPLE_TYPE = type(()) -_TargetSourceType = [Artifact, str, _TUPLE_TYPE] LinkableNode = record( # Attribute labels on the target. @@ -91,9 +84,6 @@ LinkableNode = record( # as an asset in android apks. can_be_asset = field(bool), - # Collected target sources from the target. 
- srcs = field(list[_TargetSourceType]), - # Whether the node should appear in the android mergemap (which provides information about the original # soname->final merged lib mapping) include_in_android_mergemap = field(bool), @@ -145,14 +135,6 @@ def _get_required_outputs_for_linkage(linkage: Linkage) -> list[LibOutputStyle]: return get_output_styles_for_linkage(linkage) -def _get_target_sources(ctx: AnalysisContext) -> list[_TargetSourceType]: - srcs = [] - if hasattr(ctx.attrs, "srcs"): - srcs.extend(ctx.attrs.srcs) - if hasattr(ctx.attrs, "platform_srcs"): - srcs.extend(flatten(cxx_by_platform(ctx, ctx.attrs.platform_srcs))) - return srcs - def create_linkable_node( ctx: AnalysisContext, default_soname: str | None, @@ -182,7 +164,6 @@ def create_linkable_node( link_infos = link_infos, shared_libs = shared_libs, can_be_asset = can_be_asset, - srcs = _get_target_sources(ctx), include_in_android_mergemap = include_in_android_mergemap, default_soname = default_soname, linker_flags = linker_flags, From f1511baf237a8734f58bdcd186956c6627d7d0b6 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Tue, 26 Mar 2024 08:16:12 -0700 Subject: [PATCH 0597/1133] Collect source artifacts into provider Summary: First step is to collect source artifacts into provider so we could analyse them later during link group analysis. 
Reviewed By: VladimirMakaev Differential Revision: D55368289 fbshipit-source-id: e0fe44db96ab10956763224fee010db4a88fec10 --- prelude/linking/linkable_graph.bzl | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index c47b5d035..a4c01d7fe 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -7,6 +7,7 @@ load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") load("@prelude//cxx:headers.bzl", "CPrecompiledHeaderInfo") +load("@prelude//cxx:platform.bzl", "cxx_by_platform") load("@prelude//linking:types.bzl", "Linkage") load("@prelude//python:python.bzl", "PythonLibraryInfo") load("@prelude//utils:expect.bzl", "expect") @@ -14,6 +15,10 @@ load( "@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", ) +load( + "@prelude//utils:utils.bzl", + "flatten", +) load( ":link_info.bzl", "LibOutputStyle", @@ -47,6 +52,8 @@ LinkableRootInfo = provider( ############################################################################### _DisallowConstruction = record() +_TUPLE_TYPE = type(()) +_TargetSourceType = [Artifact, str, _TUPLE_TYPE] LinkableNode = record( # Attribute labels on the target. @@ -84,6 +91,9 @@ LinkableNode = record( # as an asset in android apks. can_be_asset = field(bool), + # Collected target sources from the target. 
+ srcs = field(list[_TargetSourceType]), + # Whether the node should appear in the android mergemap (which provides information about the original # soname->final merged lib mapping) include_in_android_mergemap = field(bool), @@ -135,6 +145,14 @@ def _get_required_outputs_for_linkage(linkage: Linkage) -> list[LibOutputStyle]: return get_output_styles_for_linkage(linkage) +def _get_target_sources(ctx: AnalysisContext) -> list[_TargetSourceType]: + srcs = [] + if hasattr(ctx.attrs, "srcs"): + srcs.extend(ctx.attrs.srcs) + if hasattr(ctx.attrs, "platform_srcs"): + srcs.extend(flatten(cxx_by_platform(ctx, ctx.attrs.platform_srcs))) + return srcs + def create_linkable_node( ctx: AnalysisContext, default_soname: str | None, @@ -164,6 +182,7 @@ def create_linkable_node( link_infos = link_infos, shared_libs = shared_libs, can_be_asset = can_be_asset, + srcs = _get_target_sources(ctx), include_in_android_mergemap = include_in_android_mergemap, default_soname = default_soname, linker_flags = linker_flags, From ef5c48330b6ff8c245db82f5969698e1e9b10c78 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Tue, 26 Mar 2024 08:16:12 -0700 Subject: [PATCH 0598/1133] Refactor out public link group nodes to reuse later Summary: Just moving code around for reusability Reviewed By: VladimirMakaev Differential Revision: D55368288 fbshipit-source-id: 6698dd0736d453e9d9983e7ff893f6ec1c923a91 --- prelude/cxx/cxx_executable.bzl | 11 +++++++++++ prelude/cxx/cxx_library.bzl | 1 + prelude/cxx/link_groups.bzl | 5 ++++- prelude/haskell/haskell.bzl | 1 + prelude/rust/link_info.bzl | 2 ++ 5 files changed, 19 insertions(+), 1 deletion(-) diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index ee9c4184b..96cd016fb 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -135,6 +135,7 @@ load( "get_link_group", "get_link_group_map_json", "get_link_group_preferred_linkage", + "get_public_link_group_nodes", "get_transitive_deps_matching_labels", 
"is_link_group_shlib", ) @@ -313,6 +314,13 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # If we're using auto-link-groups, where we generate the link group links # in the prelude, the link group map will give us the link group libs. # Otherwise, pull them from the `LinkGroupLibInfo` provider from out deps. + + public_link_group_nodes = get_public_link_group_nodes( + linkable_graph_node_map, + link_group_mappings, + exec_dep_roots + link_group_extra_link_roots, + link_group, + ) if impl_params.auto_link_group_specs != None: linked_link_groups = create_link_groups( ctx = ctx, @@ -328,6 +336,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, prefer_stripped_objects = impl_params.prefer_stripped_objects, anonymous = ctx.attrs.anonymous_link_groups, allow_cache_upload = impl_params.exe_allow_cache_upload, + public_nodes = public_link_group_nodes, ) for name, linked_link_group in linked_link_groups.libs.items(): auto_link_groups[name] = linked_link_group.artifact @@ -349,6 +358,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # scenarios for which we need to propagate up link info and simplify this logic. For now # base which links to use based on whether link groups are defined. 
labels_to_links_map = get_filtered_labels_to_links_map( + public_link_group_nodes, linkable_graph_node_map, link_group, link_groups, @@ -378,6 +388,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # if a cpp_unittest is part of the link group, we need to traverse through all deps # from the root again to ensure we link in gtest deps labels_to_links_map = labels_to_links_map | get_filtered_labels_to_links_map( + public_link_group_nodes, linkable_graph_node_map, None, link_groups, diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index e2e8e46f7..5c9933ed7 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -1193,6 +1193,7 @@ def _get_shared_library_links( link_strategy = LinkStrategy("static_pic") link_strategy = process_link_strategy_for_pic_behavior(link_strategy, pic_behavior) filtered_labels_to_links_map = get_filtered_labels_to_links_map( + None, linkable_graph_node_map_func(), link_group, {}, diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 9081c44c3..917bc14fc 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -296,6 +296,7 @@ def _transitively_update_shared_linkage( ) def get_filtered_labels_to_links_map( + public_nodes: [set_record, None], # buildifier: disable=unused-variable linkable_graph_node_map: dict[Label, LinkableNode], link_group: [str, None], link_groups: dict[str, Group], @@ -641,6 +642,7 @@ def _create_link_group( # Add roots... filtered_labels_to_links_map = get_filtered_labels_to_links_map( + public_nodes, linkable_graph_node_map, spec.group.name, link_groups, @@ -793,6 +795,7 @@ def create_link_groups( link_group_preferred_linkage: dict[Label, Linkage] = {}, link_group_mappings: [dict[Label, str], None] = None, anonymous: bool = False, + public_nodes: [set_record, None] = None, allow_cache_upload = False) -> _LinkedLinkGroups: # Generate stubs first, so that subsequent links can link against them. 
link_group_shared_links = {} @@ -816,7 +819,7 @@ def create_link_groups( undefined_symfiles = [] global_symfiles = [] - public_nodes = get_public_link_group_nodes( + public_nodes = public_nodes or get_public_link_group_nodes( linkable_graph_node_map, link_group_mappings, executable_deps + other_roots, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 2a105b645..07361e213 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -1064,6 +1064,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: ) labels_to_links_map = get_filtered_labels_to_links_map( + public_nodes = None, linkable_graph_node_map = linkable_graph_node_map, link_group = None, link_groups = link_group_info.groups, diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 737d3d14b..779d1f546 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -448,6 +448,8 @@ def inherited_rust_cxx_link_group_info( link_group_libs[name] = linked_link_group.library labels_to_links_map = get_filtered_labels_to_links_map( + # TODO(patskovn): catch duplicated files in link groups for rust. + None, linkable_graph_node_map, link_group, link_groups, From cf4f6be9bf476c83b65d229429191c4524ae8682 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Tue, 26 Mar 2024 08:16:12 -0700 Subject: [PATCH 0599/1133] Prohibit file duplications within link group Summary: Duplicated symbols are huge pain of link groups. People copy-paste code and, to repeat themselves less, people reuse files between targets. That is somewhat just more fancy way to copy paste. Reusing files works for `dev` and `opt` modes, but unfortunately, there is no way to make it work for link groups. With these changes we giving user actionable message to fix file duplication within link groups. > Q: Doesn't linker catch this? Yes, linker catches this. But a lot of people struggle with reading and fixing linker errors. 
This change gives them understandable and actionable error. > Q: Will that catch all symbols duplication? No. If people literally copy-pasted code between targets and included it into different files, we still gonna see linker errors. We need to address this somehow else. Reviewed By: VladimirMakaev Differential Revision: D55368291 fbshipit-source-id: 49061fab42e56f9a12660a9e75d7143cb5c7930c --- prelude/cxx/link_groups.bzl | 25 ++++++++++++++++++++++--- 1 file changed, 22 insertions(+), 3 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 917bc14fc..7e8e00a97 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -296,7 +296,7 @@ def _transitively_update_shared_linkage( ) def get_filtered_labels_to_links_map( - public_nodes: [set_record, None], # buildifier: disable=unused-variable + public_nodes: [set_record, None], linkable_graph_node_map: dict[Label, LinkableNode], link_group: [str, None], link_groups: dict[str, Group], @@ -370,6 +370,7 @@ def get_filtered_labels_to_links_map( # already. This avoids use adding the same link group lib multiple times, # for each of the possible multiple nodes that maps to it. link_group_added = {} + group_srcs = {} def add_link(target: Label, output_style: LibOutputStyle): linkable_map[target] = LinkGroupLinkInfo( @@ -379,8 +380,26 @@ def get_filtered_labels_to_links_map( def add_link_group(target: Label, target_group: str): # If we've already added this link group to the link line, we're done. - if target_group in link_group_added: - return + + if public_nodes and public_nodes.contains(target): + if target_group not in group_srcs: + group_srcs[target_group] = {} + target_group_srcs = group_srcs[target_group] + for src in linkable_graph_node_map[target].srcs: + if not isinstance(src, Artifact): + # "src" is either source file or source file with list of compilation flags. 
+ # We do not handle the case where we have compilation flags attached to source files + # because it we don't know is link gonna fail or not. So we let user deal with linker errors if there are any. + continue + + previous_target = target_group_srcs.get(src, None) + if previous_target: + fail("'{}' artifact included multiple times into '{}' link group. From '{}:{}' and '{}:{}'".format(src, target_group, target.package, target.name, previous_target.package, previous_target.name)) + else: + target_group_srcs[src] = target + + if target_group in link_group_added: + return # In some flows, we may not have access to the actual link group lib # in our dep tree (e.g. https://fburl.com/code/pddmkptb), so just bail From 124250a4a89efd44b9cc06cf9905f19e3ecf1e87 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Tue, 26 Mar 2024 08:16:12 -0700 Subject: [PATCH 0600/1133] Handle duplicated artifacts in link groups for rust Reviewed By: fgasperij Differential Revision: D55368292 fbshipit-source-id: bf907e3330c7023199106fcfe43d797cd88b4755 --- prelude/rust/link_info.bzl | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 779d1f546..f9740a710 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -31,6 +31,7 @@ load( "get_link_group", "get_link_group_info", "get_link_group_preferred_linkage", + "get_public_link_group_nodes", ) load( "@prelude//cxx:link_groups_types.bzl", @@ -424,6 +425,13 @@ def inherited_rust_cxx_link_group_info( # handle labels that are mutated by version alias executable_deps.append(g.nodes.value.label) + public_link_group_nodes = get_public_link_group_nodes( + linkable_graph_node_map, + link_group_mappings, + executable_deps, + link_group, + ) + linked_link_groups = create_link_groups( ctx = ctx, link_groups = link_groups, @@ -437,6 +445,7 @@ def inherited_rust_cxx_link_group_info( other_roots = [], prefer_stripped_objects = False, # Does Rust ever 
use stripped objects? anonymous = ctx.attrs.anonymous_link_groups, + public_nodes = public_link_group_nodes, ) auto_link_groups = {} @@ -448,8 +457,7 @@ def inherited_rust_cxx_link_group_info( link_group_libs[name] = linked_link_group.library labels_to_links_map = get_filtered_labels_to_links_map( - # TODO(patskovn): catch duplicated files in link groups for rust. - None, + public_link_group_nodes, linkable_graph_node_map, link_group, link_groups, From 9824e23efaddeeaf8855d583156a1340f7536456 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Tue, 26 Mar 2024 08:16:12 -0700 Subject: [PATCH 0601/1133] Removed duplication Summary: Now we using public nodes for early duplicated symbols failure cleaning up link groups internals Reviewed By: fgasperij Differential Revision: D55368290 fbshipit-source-id: 3e66c7fdbda5d6091fdd463351ee99830440e76c --- prelude/cxx/cxx_executable.bzl | 1 - prelude/cxx/link_groups.bzl | 10 +--------- prelude/haskell/haskell.bzl | 13 +++++++++++-- prelude/rust/link_info.bzl | 1 - 4 files changed, 12 insertions(+), 13 deletions(-) diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 96cd016fb..2c829ef2e 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -330,7 +330,6 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, executable_deps = exec_dep_roots, linker_flags = own_link_flags, link_group_specs = impl_params.auto_link_group_specs, - root_link_group = link_group, linkable_graph_node_map = linkable_graph_node_map, other_roots = link_group_extra_link_roots, prefer_stripped_objects = impl_params.prefer_stripped_objects, diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 7e8e00a97..b85d3d12d 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -803,18 +803,17 @@ def _symbol_flags_for_link_groups( def create_link_groups( ctx: AnalysisContext, + public_nodes: set_record, link_groups: dict[str, Group] = 
{}, link_group_specs: list[LinkGroupLibSpec] = [], executable_deps: list[Label] = [], other_roots: list[Label] = [], - root_link_group: [str, None] = None, linker_flags: list[typing.Any] = [], prefer_stripped_objects: bool = False, linkable_graph_node_map: dict[Label, LinkableNode] = {}, link_group_preferred_linkage: dict[Label, Linkage] = {}, link_group_mappings: [dict[Label, str], None] = None, anonymous: bool = False, - public_nodes: [set_record, None] = None, allow_cache_upload = False) -> _LinkedLinkGroups: # Generate stubs first, so that subsequent links can link against them. link_group_shared_links = {} @@ -838,13 +837,6 @@ def create_link_groups( undefined_symfiles = [] global_symfiles = [] - public_nodes = public_nodes or get_public_link_group_nodes( - linkable_graph_node_map, - link_group_mappings, - executable_deps + other_roots, - root_link_group, - ) - for link_group_spec in specs: # NOTE(agallagher): It might make sense to move this down to be # done when we generated the links for the executable, so we can diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 07361e213..c85e4c563 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -39,6 +39,7 @@ load( "get_filtered_links", "get_link_group_info", "get_link_group_preferred_linkage", + "get_public_link_group_nodes", "get_transitive_deps_matching_labels", "is_link_group_shlib", ) @@ -1034,14 +1035,22 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: # in the prelude, the link group map will give us the link group libs. # Otherwise, pull them from the `LinkGroupLibInfo` provider from out deps. 
auto_link_group_specs = get_auto_link_group_specs(ctx, link_group_info) + executable_deps = [d.linkable_graph.nodes.value.label for d in link_deps if d.linkable_graph != None] + public_nodes = get_public_link_group_nodes( + linkable_graph_node_map, + link_group_info.mappings, + executable_deps, + None, + ) if auto_link_group_specs != None: linked_link_groups = create_link_groups( ctx = ctx, link_group_mappings = link_group_info.mappings, link_group_preferred_linkage = link_group_preferred_linkage, - executable_deps = [d.linkable_graph.nodes.value.label for d in link_deps if d.linkable_graph != None], + executable_deps = executable_deps, link_group_specs = auto_link_group_specs, linkable_graph_node_map = linkable_graph_node_map, + public_nodes = public_nodes, ) for name, linked_link_group in linked_link_groups.libs.items(): auto_link_groups[name] = linked_link_group.artifact @@ -1064,7 +1073,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: ) labels_to_links_map = get_filtered_labels_to_links_map( - public_nodes = None, + public_nodes = public_nodes, linkable_graph_node_map = linkable_graph_node_map, link_group = None, link_groups = link_group_info.groups, diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index f9740a710..85ae1cc5c 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -440,7 +440,6 @@ def inherited_rust_cxx_link_group_info( executable_deps = executable_deps, linker_flags = [], link_group_specs = auto_link_group_specs, - root_link_group = link_group, linkable_graph_node_map = linkable_graph_node_map, other_roots = [], prefer_stripped_objects = False, # Does Rust ever use stripped objects? From 709dce4b200b38df91f2753a873b0bdb722a0f12 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Tue, 26 Mar 2024 08:16:12 -0700 Subject: [PATCH 0602/1133] Made additional analysis opt-in Summary: Not all link groups require this check. 
We can only enable it for broadly scoped folders like "whole root package" that is very dynamic in nature. Reviewed By: VladimirMakaev Differential Revision: D55368257 fbshipit-source-id: e7bdf0cea52de067720edb6d45ca3a4d7283b468 --- prelude/cxx/groups.bzl | 2 ++ prelude/cxx/groups_types.bzl | 4 ++++ prelude/cxx/link_groups.bzl | 4 ++-- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index 616a1a449..17dea083a 100644 --- a/prelude/cxx/groups.bzl +++ b/prelude/cxx/groups.bzl @@ -44,6 +44,7 @@ _VALID_ATTRS = [ "discard_group", "linker_flags", "requires_root_node_exists", + "prohibit_file_duplicates", ] # Creates a group from an existing group, overwriting any properties provided @@ -81,6 +82,7 @@ def parse_groups_definitions( discard_group = attrs.get("discard_group", False), linker_flags = attrs.get("linker_flags", []), requires_root_node_exists = attrs.get("requires_root_node_exists", True), + prohibit_file_duplicates = attrs.get("prohibit_file_duplicates", False), ) parsed_mappings = [] diff --git a/prelude/cxx/groups_types.bzl b/prelude/cxx/groups_types.bzl index a9fafd8ce..6e0a86c74 100644 --- a/prelude/cxx/groups_types.bzl +++ b/prelude/cxx/groups_types.bzl @@ -81,6 +81,10 @@ GroupAttrs = record( # Requires root nodes in specs to always exist in dependency graph. # Otherwise fails. requires_root_node_exists = field(bool, True), + # For certain wide-scale generic link groups we want to enable + # initial duplicate analysis. This is useful for detecting dduplicated symbols problem early + # for automatoc link groups that we not aware about (e.g. 
evicting whole root package folder into link group) + prohibit_file_duplicates = field(bool, False), ) # Types of group traversal diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index b85d3d12d..be5e5e75c 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -381,7 +381,7 @@ def get_filtered_labels_to_links_map( def add_link_group(target: Label, target_group: str): # If we've already added this link group to the link line, we're done. - if public_nodes and public_nodes.contains(target): + if link_groups[target_group].attrs.prohibit_file_duplicates and public_nodes and public_nodes.contains(target): if target_group not in group_srcs: group_srcs[target_group] = {} target_group_srcs = group_srcs[target_group] @@ -393,7 +393,7 @@ def get_filtered_labels_to_links_map( continue previous_target = target_group_srcs.get(src, None) - if previous_target: + if previous_target and previous_target != target: fail("'{}' artifact included multiple times into '{}' link group. From '{}:{}' and '{}:{}'".format(src, target_group, target.package, target.name, previous_target.package, previous_target.name)) else: target_group_srcs[src] = target From 75989539942449bd51a78de1c2c9642659dc41d5 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Tue, 26 Mar 2024 08:56:55 -0700 Subject: [PATCH 0603/1133] Use dynamic_output for file filtering Summary: In this diff the package building procedure has been completely redesigned. - All the actions are now written in Starlark and Python used only as as a simple wrapper. - This gives us no features at the moment, but enables further improvements of CGo features, such as full build tags support. - Also it makes code more compact and easy for understanding. 
It works almost as fast as before, but more important I've found a way to speed the whole process 7x during this refactoring D55339442 Reviewed By: awalterschulze Differential Revision: D53947546 fbshipit-source-id: c732b884c201c81ba23262a7d4ee9e38bc363faa --- prelude/go/cgo_library.bzl | 17 +- prelude/go/compile.bzl | 139 ----------- prelude/go/go_binary.bzl | 10 +- prelude/go/go_exported_library.bzl | 10 +- prelude/go/go_library.bzl | 17 +- prelude/go/go_test.bzl | 18 +- prelude/go/package_builder.bzl | 345 ++++++++++++++++++++++++++++ prelude/go/packages.bzl | 2 +- prelude/go/toolchain.bzl | 4 +- prelude/go/tools/BUCK.v2 | 24 +- prelude/go/tools/compile_wrapper.py | 139 ----------- prelude/go/tools/cover_srcs.py | 82 ------- prelude/go/tools/filter_srcs.py | 92 -------- prelude/go/tools/go_list_wrapper.py | 40 ++++ prelude/toolchains/go.bzl | 8 +- 15 files changed, 435 insertions(+), 512 deletions(-) create mode 100644 prelude/go/package_builder.bzl delete mode 100755 prelude/go/tools/compile_wrapper.py delete mode 100644 prelude/go/tools/cover_srcs.py delete mode 100755 prelude/go/tools/filter_srcs.py create mode 100644 prelude/go/tools/go_list_wrapper.py diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 1499821c7..7902c8b51 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -48,9 +48,10 @@ load( "@prelude//utils:utils.bzl", "map_idx", ) -load(":compile.bzl", "GoPkgCompileInfo", "compile", "get_filtered_srcs", "get_inherited_compile_pkgs") +load(":compile.bzl", "GoPkgCompileInfo", "get_inherited_compile_pkgs") load(":coverage.bzl", "GoCoverageMode") load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs") +load(":package_builder.bzl", "build_package") load(":packages.bzl", "GoPkg", "go_attr_pkg_name", "merge_pkgs") load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") @@ -218,29 +219,27 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: compiled_objects = 
c_compile_cmds.pic.objects - # Merge all sources together to pass to the Go compile step. - all_srcs = cmd_args(go_srcs + compiled_objects) - if ctx.attrs.go_srcs: - all_srcs.add(get_filtered_srcs(ctx, ctx.attrs.go_srcs, ctx.attrs.package_root)) - shared = ctx.attrs._compile_shared race = ctx.attrs._race coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None # Build Go library. - compiled_pkg = compile( + compiled_pkg = build_package( ctx, pkg_name, - all_srcs, + ctx.attrs.go_srcs, + package_root = ctx.attrs.package_root, deps = ctx.attrs.deps + ctx.attrs.exported_deps, + compiled_objects = compiled_objects, + extra_go_files = go_srcs, shared = shared, race = race, coverage_mode = coverage_mode, + embedcfg = ctx.attrs.embedcfg, ) # Temporarily hack, it seems like we can update record, so create new one compiled_pkg = GoPkg( - cgo = True, pkg = compiled_pkg.pkg, coverage_vars = compiled_pkg.coverage_vars, ) diff --git a/prelude/go/compile.bzl b/prelude/go/compile.bzl index cd56bec3d..e3038756c 100644 --- a/prelude/go/compile.bzl +++ b/prelude/go/compile.bzl @@ -6,19 +6,11 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") - -# @unused this comment is to make the linter happy. The linter thinks -# GoCoverageMode is unused despite it being used in the function signature of -# multiple functions. -load(":coverage.bzl", "GoCoverageMode", "cover_srcs") load( ":packages.bzl", "GoPkg", # @Unused used as type - "make_importcfg", "merge_pkgs", - "pkg_artifacts", ) -load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") # Provider wrapping packages used for compiling. 
GoPkgCompileInfo = provider(fields = { @@ -39,35 +31,6 @@ GoTestInfo = provider( def get_inherited_compile_pkgs(deps: list[Dependency]) -> dict[str, GoPkg]: return merge_pkgs([d[GoPkgCompileInfo].pkgs for d in deps if GoPkgCompileInfo in d]) -def get_filtered_srcs(ctx: AnalysisContext, srcs: list[Artifact], package_root: str | None, tests: bool = False, force_disable_cgo: bool = False) -> cmd_args: - """ - Filter the input sources based on build pragma - """ - - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - - package_root = package_root if package_root != None else infer_package_root(srcs) - - # Delegate to `go list` to filter out srcs with incompatible `// +build` - # pragmas. - filtered_srcs = ctx.actions.declare_output("__filtered_srcs__.txt") - srcs_dir = ctx.actions.symlinked_dir( - "__srcs__", - {paths.relativize(src.short_path, package_root): src for src in srcs}, - ) - filter_cmd = get_toolchain_cmd_args(go_toolchain, go_root = True, force_disable_cgo = force_disable_cgo) - filter_cmd.add(go_toolchain.filter_srcs) - filter_cmd.add(cmd_args(go_toolchain.go, format = "--go={}")) - if tests: - filter_cmd.add("--tests") - filter_cmd.add(cmd_args(",".join(go_toolchain.tags + ctx.attrs._tags), format = "--tags={}")) - filter_cmd.add(cmd_args(filtered_srcs.as_output(), format = "--output={}")) - filter_cmd.add(srcs_dir) - ctx.actions.run(filter_cmd, category = "go_filter_srcs") - - # Add filtered srcs to compile command. 
- return cmd_args(filtered_srcs, format = "@{}").hidden(srcs).hidden(srcs_dir) - def infer_package_root(srcs: list[Artifact]) -> str: go_sources = [s for s in srcs if s.extension == ".go"] if len(go_sources) == 0: @@ -80,105 +43,3 @@ def infer_package_root(srcs: list[Artifact]) -> str: )) return dir_set.keys()[0] - -def _assemble_cmd( - ctx: AnalysisContext, - pkg_name: str, - flags: list[str] = [], - shared: bool = False) -> cmd_args: - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - cmd = cmd_args() - cmd.add(go_toolchain.assembler) - cmd.add(go_toolchain.assembler_flags) - cmd.add(flags) - cmd.add("-p", pkg_name) - if shared: - cmd.add("-shared") - - return cmd - -def _compile_cmd( - ctx: AnalysisContext, - pkg_name: str, - pkgs: dict[str, Artifact] = {}, - deps: list[Dependency] = [], - flags: list[str] = [], - shared: bool = False, - race: bool = False) -> cmd_args: - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - - cmd = cmd_args() - cmd.add(go_toolchain.compiler) - cmd.add(go_toolchain.compiler_flags) - cmd.add("-p", pkg_name) - cmd.add("-pack") - cmd.add("-nolocalimports") - cmd.add(flags) - cmd.add("-buildid=") - - # Add shared/static flags. - if shared: - cmd.add("-shared") - - if race: - cmd.add("-race") - - # Add Go pkgs inherited from deps to compiler search path. 
- all_pkgs = merge_pkgs([ - pkgs, - pkg_artifacts(get_inherited_compile_pkgs(deps)), - ]) - - importcfg = make_importcfg(ctx, pkg_name, all_pkgs, with_importmap = True) - - cmd.add("-importcfg", importcfg) - - return cmd - -def compile( - ctx: AnalysisContext, - pkg_name: str, - srcs: cmd_args, - pkgs: dict[str, Artifact] = {}, - deps: list[Dependency] = [], - compile_flags: list[str] = [], - assemble_flags: list[str] = [], - shared: bool = False, - race: bool = False, - coverage_mode: GoCoverageMode | None = None) -> GoPkg: - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - output = ctx.actions.declare_output(paths.basename(pkg_name) + ".a") - - cmd = get_toolchain_cmd_args(go_toolchain) - cmd.add(go_toolchain.compile_wrapper) - cmd.add(cmd_args(output.as_output(), format = "--output={}")) - cmd.add(cmd_args(_compile_cmd(ctx, pkg_name, pkgs, deps, compile_flags, shared = shared, race = race), format = "--compiler={}")) - cmd.add(cmd_args(_assemble_cmd(ctx, pkg_name, assemble_flags, shared = shared), format = "--assembler={}")) - cmd.add(cmd_args(go_toolchain.packer, format = "--packer={}")) - if ctx.attrs.embedcfg: - cmd.add(cmd_args(ctx.attrs.embedcfg, format = "--embedcfg={}")) - - argsfile = ctx.actions.declare_output(pkg_name + ".go.argsfile") - - coverage_vars = None - if coverage_mode != None: - if race and coverage_mode != GoCoverageMode("atomic"): - fail("`coverage_mode` must be `atomic` when `race=True`") - cov_res = cover_srcs(ctx, pkg_name, coverage_mode, srcs, shared) - srcs = cov_res.srcs - coverage_vars = cov_res.variables - - srcs_args = cmd_args(srcs) - ctx.actions.write(argsfile.as_output(), srcs_args, allow_args = True) - - cmd.add(cmd_args(argsfile, format = "@{}").hidden([srcs_args])) - - identifier = paths.basename(pkg_name) - if shared: - identifier += "[shared]" - if coverage_mode: - identifier += "[coverage_" + coverage_mode.value + "]" - - ctx.actions.run(cmd, category = "go_compile", identifier = identifier) - - return GoPkg(pkg 
= output, coverage_vars = coverage_vars) diff --git a/prelude/go/go_binary.bzl b/prelude/go/go_binary.bzl index ca87cc90c..72977df5c 100644 --- a/prelude/go/go_binary.bzl +++ b/prelude/go/go_binary.bzl @@ -16,17 +16,19 @@ load( "map_val", "value_or", ) -load(":compile.bzl", "compile", "get_filtered_srcs") load(":link.bzl", "link") +load(":package_builder.bzl", "build_package") def go_binary_impl(ctx: AnalysisContext) -> list[Provider]: - lib = compile( + lib = build_package( ctx, "main", - get_filtered_srcs(ctx, ctx.attrs.srcs, ctx.attrs.package_root), + ctx.attrs.srcs, + package_root = ctx.attrs.package_root, deps = ctx.attrs.deps, - compile_flags = ctx.attrs.compiler_flags, + compiler_flags = ctx.attrs.compiler_flags, race = ctx.attrs._race, + embedcfg = ctx.attrs.embedcfg, ) (bin, runtime_files, external_debug_info) = link( ctx, diff --git a/prelude/go/go_exported_library.bzl b/prelude/go/go_exported_library.bzl index 0142b5777..71e4ff4ba 100644 --- a/prelude/go/go_exported_library.bzl +++ b/prelude/go/go_exported_library.bzl @@ -14,18 +14,20 @@ load( "map_val", "value_or", ) -load(":compile.bzl", "compile", "get_filtered_srcs") load(":link.bzl", "GoBuildMode", "link") +load(":package_builder.bzl", "build_package") def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: - lib = compile( + lib = build_package( ctx, "main", - get_filtered_srcs(ctx, ctx.attrs.srcs, ctx.attrs.package_root), + ctx.attrs.srcs, + package_root = ctx.attrs.package_root, deps = ctx.attrs.deps, - compile_flags = ctx.attrs.compiler_flags, + compiler_flags = ctx.attrs.compiler_flags, shared = True, race = ctx.attrs._race, + embedcfg = ctx.attrs.embedcfg, ) (bin, runtime_files, _external_debug_info) = link( ctx, diff --git a/prelude/go/go_library.bzl b/prelude/go/go_library.bzl index 62040c7da..6d0e40aa5 100644 --- a/prelude/go/go_library.bzl +++ b/prelude/go/go_library.bzl @@ -23,9 +23,10 @@ load( "@prelude//utils:utils.bzl", "map_idx", ) -load(":compile.bzl", 
"GoPkgCompileInfo", "GoTestInfo", "compile", "get_filtered_srcs", "get_inherited_compile_pkgs") +load(":compile.bzl", "GoPkgCompileInfo", "GoTestInfo", "get_inherited_compile_pkgs") load(":coverage.bzl", "GoCoverageMode") load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs") +load(":package_builder.bzl", "build_package") load(":packages.bzl", "go_attr_pkg_name", "merge_pkgs") def go_library_impl(ctx: AnalysisContext) -> list[Provider]: @@ -35,22 +36,24 @@ def go_library_impl(ctx: AnalysisContext) -> list[Provider]: if ctx.attrs.srcs: pkg_name = go_attr_pkg_name(ctx) - # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. - srcs = get_filtered_srcs(ctx, ctx.attrs.srcs, ctx.attrs.package_root, force_disable_cgo = True) shared = ctx.attrs._compile_shared race = ctx.attrs._race coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None - pkg = compile( + pkg = build_package( ctx, pkg_name, - srcs = srcs, + srcs = ctx.attrs.srcs, + package_root = ctx.attrs.package_root, deps = ctx.attrs.deps + ctx.attrs.exported_deps, - compile_flags = ctx.attrs.compiler_flags, - assemble_flags = ctx.attrs.assembler_flags, + compiler_flags = ctx.attrs.compiler_flags, + assembler_flags = ctx.attrs.assembler_flags, shared = shared, race = race, coverage_mode = coverage_mode, + embedcfg = ctx.attrs.embedcfg, + # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. 
+ force_disable_cgo = True, ) default_output = pkg.pkg diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index a01a985d0..de3da9394 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -19,9 +19,10 @@ load( "value_or", ) load("@prelude//test/inject_test_run_info.bzl", "inject_test_run_info") -load(":compile.bzl", "GoTestInfo", "compile", "get_filtered_srcs", "get_inherited_compile_pkgs") +load(":compile.bzl", "GoTestInfo", "get_inherited_compile_pkgs") load(":coverage.bzl", "GoCoverageMode") load(":link.bzl", "link") +load(":package_builder.bzl", "build_package") load(":packages.bzl", "go_attr_pkg_name") def _gen_test_main( @@ -67,8 +68,6 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: # TODO: should we assert that pkg_name != None here? pkg_name = lib.pkg_name - srcs = get_filtered_srcs(ctx, srcs, ctx.attrs.package_root, tests = True) - # If coverage is enabled for this test, we need to preprocess the sources # with the Go cover tool. coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None @@ -76,15 +75,18 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: pkgs = {} # Compile all tests into a package. - tests = compile( + tests = build_package( ctx, pkg_name, - srcs, + srcs = srcs, + package_root = ctx.attrs.package_root, deps = deps, pkgs = pkgs, - compile_flags = ctx.attrs.compiler_flags, + compiler_flags = ctx.attrs.compiler_flags, coverage_mode = coverage_mode, race = ctx.attrs._race, + embedcfg = ctx.attrs.embedcfg, + tests = True, ) if coverage_mode != None: @@ -100,8 +102,8 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: # Generate a main function which runs the tests and build that into another # package. 
- gen_main = _gen_test_main(ctx, pkg_name, coverage_mode, coverage_vars, srcs) - main = compile(ctx, "main", cmd_args(gen_main), pkgs = pkgs, coverage_mode = coverage_mode, race = ctx.attrs._race) + gen_main = _gen_test_main(ctx, pkg_name, coverage_mode, coverage_vars, tests.srcs_list) + main = build_package(ctx, "main", [gen_main], package_root = "", pkgs = pkgs, coverage_mode = coverage_mode, race = ctx.attrs._race) # Link the above into a Go binary. (bin, runtime_files, external_debug_info) = link( diff --git a/prelude/go/package_builder.bzl b/prelude/go/package_builder.bzl new file mode 100644 index 000000000..e0440d918 --- /dev/null +++ b/prelude/go/package_builder.bzl @@ -0,0 +1,345 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//:paths.bzl", "paths") +load("@prelude//utils:utils.bzl", "dedupe_by_value") +load(":compile.bzl", "get_inherited_compile_pkgs", "infer_package_root") +load( + ":coverage.bzl", + "GoCoverageMode", # @Unused used as type +) +load(":packages.bzl", "GoPkg", "make_importcfg", "merge_pkgs", "pkg_artifacts") +load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") + +def build_package( + ctx: AnalysisContext, + pkg_name: str, + srcs: list[Artifact], + package_root: str | None, + pkgs: dict[str, Artifact] = {}, + deps: list[Dependency] = [], + compiled_objects: list[Artifact] = [], + # hack: extra go files will bypass filtration to enable cgo_library migration + extra_go_files: list[Artifact] = [], + compiler_flags: list[str] = [], + assembler_flags: list[str] = [], + shared: bool = False, + race: bool = False, + coverage_mode: GoCoverageMode | None = None, + embedcfg: Artifact | None = None, + tests: bool = False, + force_disable_cgo: bool = False) -> GoPkg: + if race and coverage_mode not in [None, GoCoverageMode("atomic")]: + fail("`coverage_mode` must be `atomic` when `race=True`") + + out = ctx.actions.declare_output(paths.basename(pkg_name) + ".a") + + srcs = dedupe_by_value(srcs) + + has_go_files = False + for src in (srcs + extra_go_files): + if src.extension == ".go": + has_go_files = True + break + + if not has_go_files: + return GoPkg( + pkg = ctx.actions.write(out.as_output(), ""), + coverage_vars = cmd_args(), + srcs_list = cmd_args(), + ) + + package_root = package_root if package_root != None else infer_package_root(srcs) + + go_list_out = _go_list(ctx, pkg_name, srcs, package_root, force_disable_cgo) + + srcs_list_argsfile = ctx.actions.declare_output(paths.basename(pkg_name) + "_srcs_list.argsfile") + coverage_vars_argsfile = ctx.actions.declare_output(paths.basename(pkg_name) + "_coverage_vars.argsfile") + dynamic_outputs = [out, srcs_list_argsfile, coverage_vars_argsfile] + + all_pkgs = merge_pkgs([ + pkgs, + 
pkg_artifacts(get_inherited_compile_pkgs(deps)), + ]) + importcfg = make_importcfg(ctx, pkg_name, all_pkgs, with_importmap = True) + + def f(ctx: AnalysisContext, artifacts, outputs, go_list_out = go_list_out): + go_list = _parse_go_list_out(srcs, package_root, artifacts[go_list_out]) + + symabis = _symabis(ctx, pkg_name, go_list.s_files, assembler_flags, shared) + + go_files = go_list.go_files + extra_go_files + + src_list_for_argsfile = go_files + (go_list.test_go_files + go_list.x_test_go_files if tests else []) + ctx.actions.write(outputs[srcs_list_argsfile], cmd_args(src_list_for_argsfile, "")) + + covered_go_files, coverage_vars_out = _cover(ctx, pkg_name, go_files, coverage_mode) + ctx.actions.write(outputs[coverage_vars_argsfile], coverage_vars_out) + + go_files_to_compile = covered_go_files + ((go_list.test_go_files + go_list.x_test_go_files) if tests else []) + go_a_file, asmhdr = _compile(ctx, pkg_name, go_files_to_compile, importcfg, compiler_flags, shared, race, embedcfg, go_list.embed_files, symabis, len(go_list.s_files) > 0) + + asm_o_files = _asssembly(ctx, pkg_name, go_list.s_files, asmhdr, assembler_flags, shared) + + pkg_file = _pack(ctx, pkg_name, go_a_file, compiled_objects + asm_o_files) + + ctx.actions.copy_file(outputs[out], pkg_file) + + ctx.actions.dynamic_output(dynamic = [go_list_out], inputs = [], outputs = dynamic_outputs, f = f) + + return GoPkg( + pkg = out, + coverage_vars = cmd_args(coverage_vars_argsfile, format = "@{}"), + srcs_list = cmd_args(srcs_list_argsfile, format = "@{}").hidden(srcs), + ) + +def _go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_root: str, force_disable_cgo: bool): + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env_args = get_toolchain_cmd_args(go_toolchain, go_root = True, force_disable_cgo = force_disable_cgo) + go_list_out = ctx.actions.declare_output(paths.basename(pkg_name) + "_go_list.json") + + # Create file structure that `go list` can recognize + # Use 
copied_dir, because embed doesn't work with symlinks + srcs_dir = ctx.actions.copied_dir( + "__{}_srcs_dir__".format(paths.basename(pkg_name)), + {src.short_path.removeprefix(package_root).lstrip("/"): src for src in srcs}, + ) + tags = go_toolchain.tags + ctx.attrs._tags + go_list_args = [ + env_args, + "GO111MODULE=off", + go_toolchain.go_list_wrapper, + "-e", + ["--go", go_toolchain.go], + ["--workdir", srcs_dir], + ["--output", go_list_out.as_output()], + "-json=GoFiles,CgoFiles,SFiles,TestGoFiles,XTestGoFiles,EmbedFiles", + ["-tags", ",".join(tags) if tags else []], + ".", + ] + + identifier = paths.basename(pkg_name) + ctx.actions.run(go_list_args, category = "go_list", identifier = identifier) + + return go_list_out + +GoListOut = record( + go_files = field(list[Artifact], default = []), + cgo_files = field(list[Artifact], default = []), + s_files = field(list[Artifact], default = []), + test_go_files = field(list[Artifact], default = []), + x_test_go_files = field(list[Artifact], default = []), + embed_files = field(list[Artifact], default = []), +) + +def _parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out) -> GoListOut: + go_list = go_list_out.read_json() + go_files, cgo_files, s_files, test_go_files, x_test_go_files, embed_files = [], [], [], [], [], [] + + for src in srcs: + # remove package_root prefix from src artifact path to match `go list` output format + src_path = src.short_path.removeprefix(package_root).lstrip("/") + if src_path in go_list.get("GoFiles", []): + go_files.append(src) + if src_path in go_list.get("CgoFiles", []): + cgo_files.append(src) + if src_path in go_list.get("SFiles", []): + s_files.append(src) + if src_path in go_list.get("TestGoFiles", []): + test_go_files.append(src) + if src_path in go_list.get("XTestGoFiles", []): + x_test_go_files.append(src) + if _any_starts_with(go_list.get("EmbedFiles", []), src_path): + embed_files.append(src) + + return GoListOut( + go_files = go_files, + cgo_files = 
cgo_files, + s_files = s_files, + test_go_files = test_go_files, + x_test_go_files = x_test_go_files, + embed_files = embed_files, + ) + +def _any_starts_with(files: list[str], path: str): + for file in files: + if paths.starts_with(file, path): + return True + + return False + +def _compile( + ctx: AnalysisContext, + pkg_name: str, + go_srcs: list[Artifact], + importcfg: cmd_args, + compiler_flags: list[str], + shared: bool, + race: bool, + embedcfg: Artifact | None = None, + embed_files: list[Artifact] = [], + symabis: Artifact | None = None, + gen_asmhdr: bool = False) -> (Artifact, Artifact | None): + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + + env_args = get_toolchain_cmd_args(go_toolchain, go_root = True) + out = ctx.actions.declare_output("go_compile_out.a") + + if len(go_srcs) == 0: + ctx.actions.write(out.as_output(), "") + return out, None + + asmhdr = ctx.actions.declare_output("__asmhdr__/go_asm.h") if gen_asmhdr else None + + compile_cmd = cmd_args([ + env_args, + go_toolchain.compiler, + go_toolchain.compiler_flags, + compiler_flags, + "-buildid=", + "-nolocalimports", + ["-p", pkg_name], + ["-importcfg", importcfg], + ["-o", out.as_output()], + ["-race"] if race else [], + ["-shared"] if shared else [], + ["-embedcfg", embedcfg] if embedcfg else [], + ["-symabis", symabis] if symabis else [], + ["-asmhdr", asmhdr.as_output()] if asmhdr else [], + go_srcs, + ]) + compile_cmd.hidden(embed_files) # files and directories should be available for embedding + + identifier = paths.basename(pkg_name) + ctx.actions.run(compile_cmd, category = "go_compile", identifier = identifier) + + return (out, asmhdr) + +def _symabis(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], assembler_flags: list[str], shared: bool) -> Artifact | None: + if len(s_files) == 0: + return None + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env_args = get_toolchain_cmd_args(go_toolchain, go_root = True) + + # we have to supply "go_asm.h" with 
any content to make asm tool happy + # its content doesn't matter if -gensymabis provided + # https://github.com/golang/go/blob/3f8f929d60a90c4e4e2b07c8d1972166c1a783b1/src/cmd/go/internal/work/gc.go#L441-L443 + fake_asmhdr = ctx.actions.write("__fake_asmhdr__/go_asm.h", "") + symabis = ctx.actions.declare_output("symabis") + asm_cmd = [ + env_args, + go_toolchain.assembler, + go_toolchain.assembler_flags, + assembler_flags, + _asm_args(ctx, pkg_name, shared), + "-gensymabis", + ["-o", symabis.as_output()], + ["-I", cmd_args(fake_asmhdr).parent(1)], + s_files, + ] + + identifier = paths.basename(pkg_name) + ctx.actions.run(asm_cmd, category = "go_symabis", identifier = identifier) + + return symabis + +def _asssembly(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], asmhdr: Artifact | None, assembler_flags: list[str], shared: bool) -> list[Artifact]: + if len(s_files) == 0: + return [] + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env_args = get_toolchain_cmd_args(go_toolchain, go_root = True) + + o_files = [] + identifier = paths.basename(pkg_name) + for s_file in s_files: + o_file = ctx.actions.declare_output(s_file.short_path + ".o") + o_files.append(o_file) + + asm_cmd = [ + env_args, + go_toolchain.assembler, + go_toolchain.assembler_flags, + assembler_flags, + _asm_args(ctx, pkg_name, shared), + ["-o", o_file.as_output()], + ["-I", cmd_args(asmhdr).parent(1)] if asmhdr else [], # can it actually be None? 
+ s_file, + ] + + ctx.actions.run(asm_cmd, category = "go_assembly", identifier = identifier + "/" + s_file.short_path) + + return o_files + +def _pack(ctx: AnalysisContext, pkg_name: str, a_file: Artifact, o_files: list[Artifact]) -> Artifact: + if len(o_files) == 0: + # no need to repack .a file, if there are no .o files + return a_file + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env_args = get_toolchain_cmd_args(go_toolchain, go_root = True) + + pkg_file = ctx.actions.declare_output("pkg.a") + + pack_cmd = [ + env_args, + go_toolchain.packer, + "c", + pkg_file.as_output(), + a_file, + o_files, + ] + + identifier = paths.basename(pkg_name) + ctx.actions.run(pack_cmd, category = "go_pack", identifier = identifier) + + return pkg_file + +def _asm_args(ctx: AnalysisContext, pkg_name: str, shared: bool): + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + return [ + ["-p", pkg_name], + ["-I", cmd_args(go_toolchain.env_go_root).absolute_suffix("/pkg/include")], + ["-D", "GOOS_" + go_toolchain.env_go_os] if go_toolchain.env_go_os else [], + ["-D", "GOARCH_" + go_toolchain.env_go_arch] if go_toolchain.env_go_arch else [], + ["-shared"] if shared else [], + ] + +def _cover(ctx: AnalysisContext, pkg_name: str, go_files: list[Artifact], coverage_mode: GoCoverageMode | None) -> (list[Artifact], str | cmd_args): + if coverage_mode == None: + return go_files, "" + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env_args = get_toolchain_cmd_args(go_toolchain, go_root = True) + covered_files = [] + coverage_vars = {} + for go_file in go_files: + covered_file = ctx.actions.declare_output("with_coverage", go_file.short_path) + covered_files.append(covered_file) + + var = "Var_" + sha256(pkg_name + "::" + go_file.short_path) + coverage_vars[var] = go_file.short_path + + cover_cmd = [ + env_args, + go_toolchain.cover, + ["-mode", coverage_mode.value], + ["-var", var], + ["-o", covered_file.as_output()], + go_file, + ] + + 
ctx.actions.run(cover_cmd, category = "go_cover", identifier = paths.basename(pkg_name) + "/" + go_file.short_path) + + coverage_vars_out = "" + if len(coverage_vars) > 0: + # convert coverage_vars to argsfile for compatibility with python implementation + cover_pkg = "{}:{}".format(pkg_name, ",".join(["{}={}".format(var, name) for var, name in coverage_vars.items()])) + coverage_vars_out = cmd_args("--cover-pkgs", cover_pkg) + + return covered_files, coverage_vars_out diff --git a/prelude/go/packages.bzl b/prelude/go/packages.bzl index a07ba3f4f..5732dfcb4 100644 --- a/prelude/go/packages.bzl +++ b/prelude/go/packages.bzl @@ -9,9 +9,9 @@ load("@prelude//go:toolchain.bzl", "GoToolchainInfo") load("@prelude//utils:utils.bzl", "value_or") GoPkg = record( - cgo = field(bool, default = False), pkg = field(Artifact), coverage_vars = field(cmd_args | None, default = None), + srcs_list = field(cmd_args | None, default = None), ) GoStdlib = provider( diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index 978b5e2ad..c454b7af1 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -16,13 +16,12 @@ GoToolchainInfo = provider( "cgo": provider_field(RunInfo | None, default = None), "cgo_wrapper": provider_field(RunInfo), "gen_stdlib_importcfg": provider_field(RunInfo), + "go_list_wrapper": provider_field(RunInfo), "go_wrapper": provider_field(RunInfo), - "compile_wrapper": provider_field(RunInfo), "compiler": provider_field(RunInfo), "compiler_flags": provider_field(typing.Any, default = None), "concat_files": provider_field(RunInfo), "cover": provider_field(RunInfo), - "cover_srcs": provider_field(RunInfo), "cxx_toolchain_for_linking": provider_field(CxxToolchainInfo | None, default = None), "env_go_arch": provider_field(typing.Any, default = None), "env_go_os": provider_field(typing.Any, default = None), @@ -30,7 +29,6 @@ GoToolchainInfo = provider( "env_go_root": provider_field(typing.Any, default = None), "env_go_debug": 
provider_field(dict[str, str], default = {}), "external_linker_flags": provider_field(typing.Any, default = None), - "filter_srcs": provider_field(RunInfo), "go": provider_field(RunInfo), "linker": provider_field(RunInfo), "linker_flags": provider_field(typing.Any, default = None), diff --git a/prelude/go/tools/BUCK.v2 b/prelude/go/tools/BUCK.v2 index 92b006f5b..6d6a5bfdc 100644 --- a/prelude/go/tools/BUCK.v2 +++ b/prelude/go/tools/BUCK.v2 @@ -1,29 +1,11 @@ prelude = native -prelude.python_bootstrap_binary( - name = "compile_wrapper", - main = "compile_wrapper.py", - visibility = ["PUBLIC"], -) - prelude.python_bootstrap_binary( name = "concat_files", main = "concat_files.py", visibility = ["PUBLIC"], ) -prelude.python_bootstrap_binary( - name = "cover_srcs", - main = "cover_srcs.py", - visibility = ["PUBLIC"], -) - -prelude.python_bootstrap_binary( - name = "filter_srcs", - main = "filter_srcs.py", - visibility = ["PUBLIC"], -) - prelude.python_bootstrap_binary( name = "cgo_wrapper", main = "cgo_wrapper.py", @@ -42,6 +24,12 @@ prelude.python_bootstrap_binary( visibility = ["PUBLIC"], ) +prelude.python_bootstrap_binary( + name = "go_list_wrapper", + main = "go_list_wrapper.py", + visibility = ["PUBLIC"], +) + prelude.go_binary( name = "testmaingen", srcs = [ diff --git a/prelude/go/tools/compile_wrapper.py b/prelude/go/tools/compile_wrapper.py deleted file mode 100755 index b6b743e04..000000000 --- a/prelude/go/tools/compile_wrapper.py +++ /dev/null @@ -1,139 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -""" -Compile the given Go sources into a Go package. 
- -Example: - - $ ./compile_wrapper.py \ - --compiler compile \ - --assembler assemble \ - --output srcs.txt src/dir/ - -""" - -# pyre-unsafe - -import argparse -import contextlib -import os -import subprocess -import sys -import tempfile -from pathlib import Path -from typing import List - - -def _call_or_exit(cmd: List[str]): - ret = subprocess.call(cmd) - if ret != 0: - sys.exit(ret) - - -def _compile(compile_prefix: List[str], output: Path, srcs: List[Path]): - cmd = [] - cmd.extend(compile_prefix) - cmd.append("-trimpath={}".format(os.getcwd())) - cmd.append("-o") - cmd.append(output) - cmd.extend(srcs) - _call_or_exit(cmd) - - -def _pack(pack_prefix: List[str], output: Path, items: List[Path]): - cmd = [] - cmd.extend(pack_prefix) - cmd.append("r") - cmd.append(output) - cmd.extend(items) - _call_or_exit(cmd) - - -def main(argv): - parser = argparse.ArgumentParser(fromfile_prefix_chars="@") - parser.add_argument("--compiler", action="append", default=[]) - parser.add_argument("--assembler", action="append", default=[]) - parser.add_argument("--packer", action="append", default=[]) - parser.add_argument("--embedcfg", type=Path, default=None) - parser.add_argument("--output", required=True, type=Path) - parser.add_argument("srcs", type=Path, nargs="*") - args = parser.parse_args(argv[1:]) - - # If there's no srcs, just leave an empty file. 
- if not args.srcs: - args.output.touch() - return - - # go:embed does not parse symlinks, so following the links to the real paths - real_srcs = [s.resolve() for s in args.srcs] - - go_files = [s for s in real_srcs if s.suffix == ".go"] - s_files = [s for s in real_srcs if s.suffix == ".s"] - o_files = [s for s in real_srcs if s.suffix in (".o", ".obj")] - - with contextlib.ExitStack() as stack: - - asmhdr_dir = None - - assemble_prefix = [] - assemble_prefix.extend(args.assembler) - - if go_files: - compile_prefix = [] - compile_prefix.extend(args.compiler) - - # If we have assembly files, generate the symabi file to compile - # against, and the asm header. - if s_files: - asmhdr_dir = stack.push(tempfile.TemporaryDirectory()) - - asmhdr = Path(asmhdr_dir.name) / "go_asm.h" - asmhdr.touch() - compile_prefix.extend(["-asmhdr", asmhdr]) - assemble_prefix.extend(["-I", asmhdr_dir.name]) - assemble_prefix.extend( - ["-I", os.path.join(os.environ["GOROOT"], "pkg", "include")] - ) - assemble_prefix.extend(["-D", f"GOOS_{os.environ['GOOS']}"]) - assemble_prefix.extend(["-D", f"GOARCH_{os.environ['GOARCH']}"]) - if "GOAMD64" in os.environ and os.environ["GOARCH"] == "amd64": - assemble_prefix.extend(["-D", f"GOAMD64_{os.environ['GOAMD64']}"]) - - # Note: at this point go_asm.h is empty, but that's OK. As per the Go compiler: - # https://github.com/golang/go/blob/3f8f929d60a90c4e4e2b07c8d1972166c1a783b1/src/cmd/go/internal/work/gc.go#L441-L443 - symabis = args.output.with_suffix(".symabis") - _compile(assemble_prefix + ["-gensymabis"], symabis, s_files) - compile_prefix.extend(["-symabis", symabis]) - - if args.embedcfg is not None: - compile_prefix.extend( - [ - "-embedcfg", - args.embedcfg, - ] - ) - - # This will create go_asm.h - _compile(compile_prefix, args.output, go_files) - - else: - args.output.touch() - - # If there are assembly files, assemble them to an object and add into the - # output archive. 
- for s_file in s_files: - s_object = args.output.with_name(s_file.name).with_suffix(".o") - _compile(assemble_prefix, s_object, [s_file]) - o_files.append(s_object) - - if o_files: - _pack(args.packer, args.output, o_files) - - -sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/cover_srcs.py b/prelude/go/tools/cover_srcs.py deleted file mode 100644 index 1dabf647a..000000000 --- a/prelude/go/tools/cover_srcs.py +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -""" -Run `go cover` on non-`_test.go` input sources. -""" - -# pyre-unsafe - -import argparse -import hashlib -import subprocess -import sys -from pathlib import Path - - -def _var(pkg_name, src): - return "Var_" + hashlib.md5(f"{pkg_name}::{src}".encode("utf-8")).hexdigest() - - -def main(argv): - parser = argparse.ArgumentParser(fromfile_prefix_chars="@") - parser.add_argument("--cover", type=Path, required=True) - parser.add_argument("--pkg-name", type=str, required=True) - parser.add_argument("--coverage-mode", type=str, required=True) - parser.add_argument("--covered-srcs-dir", type=Path, required=True) - parser.add_argument("--out-srcs-argsfile", type=Path, required=True) - parser.add_argument("--coverage-var-argsfile", type=Path, required=True) - parser.add_argument("srcs", nargs="*", type=Path) - args = parser.parse_args(argv[1:]) - - out_srcs = [] - coverage_vars = {} - - args.covered_srcs_dir.mkdir(parents=True) - - for src in args.srcs: - # don't cover test files or non-go files (e.g. 
assembly files) - if src.name.endswith("_test.go") or not src.name.endswith(".go"): - out_srcs.append(src) - else: - var = _var(args.pkg_name, src) - covered_src = args.covered_srcs_dir / src - covered_src.parent.mkdir(parents=True, exist_ok=True) - subprocess.check_call( - [ - args.cover, - "-mode", - args.coverage_mode, - "-var", - var, - "-o", - covered_src, - src, - ] - ) - # we need just the source name for the --cover-pkgs argument - coverage_vars[var] = src.name - out_srcs.append(covered_src) - - with open(args.out_srcs_argsfile, mode="w") as f: - for src in out_srcs: - print(src, file=f) - - with open(args.coverage_var_argsfile, mode="w") as f: - if coverage_vars: - print("--cover-pkgs", file=f) - print( - "{}:{}".format( - args.pkg_name, - ",".join([f"{var}={name}" for var, name in coverage_vars.items()]), - ), - file=f, - ) - - -sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/filter_srcs.py b/prelude/go/tools/filter_srcs.py deleted file mode 100755 index c551b3344..000000000 --- a/prelude/go/tools/filter_srcs.py +++ /dev/null @@ -1,92 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -""" -Run on a directory of Go source files and print out a list of srcs that should -be compiled. 
- -Example: - - $ ./filter_srcs.py --output srcs.txt src/dir/ - -""" - -# pyre-unsafe - -import argparse -import json -import os -import subprocess -import sys -from pathlib import Path - - -def main(argv): - parser = argparse.ArgumentParser() - parser.add_argument("--go", default="go", type=Path) - parser.add_argument("--tests", action="store_true") - parser.add_argument("--tags", default="") - parser.add_argument("--output", type=argparse.FileType("w"), default=sys.stdout) - parser.add_argument("srcdir", type=Path) - args = parser.parse_args(argv[1:]) - - # Compute absolute paths for GOROOT, to enable `go list` to use `compile/asm/etc` - goroot = os.environ.get("GOROOT", "") - if goroot: - goroot = os.path.realpath(goroot) - - # Run `go list` to filter input sources by build pragmas. - out = subprocess.check_output( - [ - "env", - "-i", - "GOROOT={}".format(goroot), - "GOARCH={}".format(os.environ.get("GOARCH", "")), - "GOOS={}".format(os.environ.get("GOOS", "")), - "CGO_ENABLED={}".format(os.environ.get("CGO_ENABLED", "0")), - "GO111MODULE=off", - "GOCACHE=" + os.path.realpath(os.environ.get("BUCK_SCRATCH_PATH")), - args.go.resolve(), - "list", - "-e", - "-json=GoFiles,SFiles,TestGoFiles,XTestGoFiles,EmbedFiles", - "-tags", - args.tags, - ".", - ], - cwd=args.srcdir, - ).decode("utf-8") - - # Parse JSON output and print out sources. - idx = 0 - decoder = json.JSONDecoder() - while idx < len(out) - 1: - # The raw_decode method fails if there are any leading spaces, e.g. " {}" fails - # so manually trim the prefix of the string - if out[idx].isspace(): - idx += 1 - continue - - obj, idx = decoder.raw_decode(out, idx) - types = ["GoFiles", "EmbedFiles"] - if args.tests: - types.extend(["TestGoFiles", "XTestGoFiles"]) - else: - types.extend(["SFiles"]) - for typ in types: - for src in obj.get(typ, []): - # Absolute path to the source file. - src = (args.srcdir / src).resolve() - # Relativize to the CWD. 
- src = src.relative_to(os.getcwd()) - print(src, file=args.output) - - args.output.close() - - -sys.exit(main(sys.argv)) diff --git a/prelude/go/tools/go_list_wrapper.py b/prelude/go/tools/go_list_wrapper.py new file mode 100644 index 000000000..895f3185b --- /dev/null +++ b/prelude/go/tools/go_list_wrapper.py @@ -0,0 +1,40 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + + +import argparse +import os +import subprocess +import sys +from pathlib import Path + + +def main(argv): + parser = argparse.ArgumentParser() + parser.add_argument("--go", default="go", type=Path) + parser.add_argument("--workdir", type=Path) + parser.add_argument("--output", type=argparse.FileType("w"), default=sys.stdout) + parsed, unknown = parser.parse_known_args(argv[1:]) + + env = os.environ.copy() + # Make paths absolute, otherwise go build will fail. 
+ if "GOROOT" in env: + env["GOROOT"] = os.path.realpath(env["GOROOT"]) + + env["GOCACHE"] = os.path.realpath(env["BUCK_SCRATCH_PATH"]) + + retcode = subprocess.call( + [parsed.go.resolve(), "list"] + unknown, + env=env, + stdout=parsed.output, + cwd=parsed.workdir, + ) + parsed.output.close() + return retcode + + +sys.exit(main(sys.argv)) diff --git a/prelude/toolchains/go.bzl b/prelude/toolchains/go.bzl index 176203a63..cf741b17e 100644 --- a/prelude/toolchains/go.bzl +++ b/prelude/toolchains/go.bzl @@ -36,18 +36,16 @@ def _system_go_toolchain_impl(ctx): assembler = RunInfo(cmd_script(ctx, "asm", cmd_args(go, "tool", "asm"), script_os)), cgo = RunInfo(cmd_script(ctx, "cgo", cmd_args(go, "tool", "cgo"), script_os)), cgo_wrapper = ctx.attrs.cgo_wrapper[RunInfo], - compile_wrapper = ctx.attrs.compile_wrapper[RunInfo], concat_files = ctx.attrs.concat_files[RunInfo], compiler = RunInfo(cmd_script(ctx, "compile", cmd_args(go, "tool", "compile"), script_os)), cover = RunInfo(cmd_script(ctx, "cover", cmd_args(go, "tool", "cover"), script_os)), - cover_srcs = ctx.attrs.cover_srcs[RunInfo], cxx_toolchain_for_linking = None, env_go_arch = go_arch, env_go_os = go_os, external_linker_flags = [], - filter_srcs = ctx.attrs.filter_srcs[RunInfo], gen_stdlib_importcfg = ctx.attrs.gen_stdlib_importcfg[RunInfo], go = RunInfo(cmd_script(ctx, "go", cmd_args(go), script_os)), + go_list_wrapper = ctx.attrs.go_list_wrapper[RunInfo], go_wrapper = ctx.attrs.go_wrapper[RunInfo], linker = RunInfo(cmd_script(ctx, "link", cmd_args(go, "tool", "link"), script_os)), packer = RunInfo(cmd_script(ctx, "pack", cmd_args(go, "tool", "pack"), script_os)), @@ -67,11 +65,9 @@ system_go_toolchain = rule( )""", attrs = { "cgo_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:cgo_wrapper")), - "compile_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:compile_wrapper")), "concat_files": attrs.default_only(attrs.dep(providers = 
[RunInfo], default = "prelude//go/tools:concat_files")), - "cover_srcs": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:cover_srcs")), - "filter_srcs": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:filter_srcs")), "gen_stdlib_importcfg": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:gen_stdlib_importcfg")), + "go_list_wrapper": attrs.exec_dep(providers = [RunInfo], default = "prelude//go/tools:go_list_wrapper"), "go_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:go_wrapper")), }, is_toolchain_rule = True, From 6ef4b84168e30c96bcb9aa33c43103035beb03bb Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Tue, 26 Mar 2024 10:59:58 -0700 Subject: [PATCH 0604/1133] Fixed for fbobjc link groups Summary: Link groups in fbobjc are structured differently. Need to handle empty case Reviewed By: VladimirMakaev Differential Revision: D55376037 fbshipit-source-id: 2d3a1c3f4cf60c198b0db78713274ccf5b97093a --- prelude/cxx/link_groups.bzl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index be5e5e75c..e7720b326 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -381,7 +381,8 @@ def get_filtered_labels_to_links_map( def add_link_group(target: Label, target_group: str): # If we've already added this link group to the link line, we're done. 
- if link_groups[target_group].attrs.prohibit_file_duplicates and public_nodes and public_nodes.contains(target): + link_group_spec = link_groups.get(target_group, None) + if link_group_spec and link_group_spec.attrs.prohibit_file_duplicates and public_nodes and public_nodes.contains(target): if target_group not in group_srcs: group_srcs[target_group] = {} target_group_srcs = group_srcs[target_group] From f31b91a659c06863a0647d317aaeca336b546c72 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 26 Mar 2024 11:21:16 -0700 Subject: [PATCH 0605/1133] fix codesign binary Summary: when no codesign on copy argument provided Differential Revision: D55373624 fbshipit-source-id: 63d6109b51bf41b4b67dbf28e12197f313460cb1 --- prelude/apple/tools/code_signing/main.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index 72c63ea1c..a6c62da82 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -119,10 +119,14 @@ def _main() -> None: bundle_path = CodesignedPath( path=args.bundle_path, entitlements=args.entitlements ) - codesign_on_copy_paths = [ - CodesignedPath(path=bundle_path.path / path, entitlements=None) - for path in args.codesign_on_copy - ] + codesign_on_copy_paths = ( + [ + CodesignedPath(path=bundle_path.path / path, entitlements=None) + for path in args.codesign_on_copy + ] + if args.codesign_on_copy + else [] + ) codesign_bundle( bundle_path=bundle_path, From 08efe655433b6504234f6ea2f39883713cfa5b2a Mon Sep 17 00:00:00 2001 From: Richard Barnes Date: Wed, 27 Mar 2024 02:23:23 -0700 Subject: [PATCH 0606/1133] remove{suffix,prefix} replaces {l,r}strip in buck2/prelude/matlab/matlab_program.bzl +1 Summary: `x.lstrip("string")` is equivalent to `re.sub(r"[string]", "", x)`. This means that `"fbcode/fbcode_file".lstrip("fbcode/")` returns `_file`. This can easily be an unintended behaviour! 
Instead, as of Python 3.10, we should use `string.removeprefix` or `string.removesuffix`. This diff makes one or more such fixes that we believe are likely to be bugs. Please double-check before approving. Reviewed By: krallin Differential Revision: D55386316 fbshipit-source-id: 1c38b3aeeedd4af3d7bc78112089592e2cf02d4d --- prelude/matlab/matlab_program.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/matlab/matlab_program.bzl b/prelude/matlab/matlab_program.bzl index 42b150e61..5f4c1a03f 100644 --- a/prelude/matlab/matlab_program.bzl +++ b/prelude/matlab/matlab_program.bzl @@ -14,7 +14,7 @@ def matlab_program_impl(ctx: AnalysisContext) -> list[Provider]: cmd.add( "-batch", cmd_args( - ctx.attrs.main.basename.rstrip(".m"), + ctx.attrs.main.basename.removesuffix(".m"), quote = "shell", ), ) From 2fc5046135dd7b177a1bc3841a385d1923d3cbf2 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Wed, 27 Mar 2024 02:36:46 -0700 Subject: [PATCH 0607/1133] Add preprocess_java_classes_input_dir sub_target Reviewed By: jiawei-lyu Differential Revision: D55366980 fbshipit-source-id: 3690d16eac6a070b3afa697c0931501fcb815ee1 --- prelude/android/android_binary.bzl | 2 +- prelude/android/preprocess_java_classes.bzl | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/prelude/android/android_binary.bzl b/prelude/android/android_binary.bzl index 7bc22cc38..d22c5a5b0 100644 --- a/prelude/android/android_binary.bzl +++ b/prelude/android/android_binary.bzl @@ -126,7 +126,7 @@ def get_binary_info(ctx: AnalysisContext, use_proto_format: bool) -> AndroidBina else: jars_to_owners = {packaging_dep.jar: packaging_dep.jar.owner.raw_target() for packaging_dep in dex_java_packaging_deps} if ctx.attrs.preprocess_java_classes_bash: - jars_to_owners, materialized_artifacts_dir = get_preprocessed_java_classes(ctx, jars_to_owners) + jars_to_owners, materialized_artifacts_dir = get_preprocessed_java_classes(enhancement_ctx, jars_to_owners) if 
materialized_artifacts_dir: materialized_artifacts.append(materialized_artifacts_dir) if has_proguard_config: diff --git a/prelude/android/preprocess_java_classes.bzl b/prelude/android/preprocess_java_classes.bzl index 5a94c0a80..e35ed44a0 100644 --- a/prelude/android/preprocess_java_classes.bzl +++ b/prelude/android/preprocess_java_classes.bzl @@ -6,14 +6,17 @@ # of this source tree. load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//android:util.bzl", "EnhancementContext") load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") load("@prelude//utils:expect.bzl", "expect") -def get_preprocessed_java_classes(ctx: AnalysisContext, input_jars = {"artifact": "target_label"}) -> (dict[Artifact, TargetLabel], [Artifact, None]): +def get_preprocessed_java_classes(enhance_ctx: EnhancementContext, input_jars: dict[Artifact, TargetLabel]) -> (dict[Artifact, TargetLabel], [Artifact, None]): if not input_jars: return {}, None + ctx = enhance_ctx.ctx + input_srcs = {} output_jars_to_owners = {} output_dir = ctx.actions.declare_output("preprocessed_java_classes/output_dir") @@ -60,4 +63,6 @@ def get_preprocessed_java_classes(ctx: AnalysisContext, input_jars = {"artifact" ctx.actions.run(preprocess_cmd, env = env, category = "preprocess_java_classes") + enhance_ctx.debug_output("preprocess_java_classes_input_dir", input_dir) + return output_jars_to_owners, materialized_artifacts_dir From 61d4ca668697496d499973175ff0c238e1053c00 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Wed, 27 Mar 2024 05:39:45 -0700 Subject: [PATCH 0608/1133] dynamic_output(outputs=[.as_output()]) Summary: Following diff D55389251 prohibits artifact (which is not marked as output artifact) in `dynamic_output` call. 
Reviewed By: podtserkovskiy Differential Revision: D55390967 fbshipit-source-id: f3890c8b7685db8955680b0cfef84652b1418c67 --- prelude/go/package_builder.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/go/package_builder.bzl b/prelude/go/package_builder.bzl index e0440d918..c633046de 100644 --- a/prelude/go/package_builder.bzl +++ b/prelude/go/package_builder.bzl @@ -89,7 +89,7 @@ def build_package( ctx.actions.copy_file(outputs[out], pkg_file) - ctx.actions.dynamic_output(dynamic = [go_list_out], inputs = [], outputs = dynamic_outputs, f = f) + ctx.actions.dynamic_output(dynamic = [go_list_out], inputs = [], outputs = [o.as_output() for o in dynamic_outputs], f = f) return GoPkg( pkg = out, From 8d301e7ab227beb254d52bc7aaae61c7629150cc Mon Sep 17 00:00:00 2001 From: Leo Chashnikov Date: Wed, 27 Mar 2024 06:34:47 -0700 Subject: [PATCH 0609/1133] merge _dry_run_code_signing and _fast_adhoc_signing_enabled in apple_bundle_config Summary: There are 2 mutually-exclusive parameters in apple_bundle_config: `_dry_run_code_signing` and `_fast_adhoc_signing_enabled`. As only one of them will be effective during build time, we don't want users to be able to pass both and represent invalid state (thus we'll fail if both are enabled). Bundle building will fail in case new values of enum are introduced but handling not implemented explicitly. 
Reviewed By: blackm00n Differential Revision: D55372023 fbshipit-source-id: d34486b91e32152277c5a9930a02a8ffbf9acfc1 --- prelude/apple/apple_bundle_config.bzl | 22 ++++++++++++++++++---- prelude/apple/apple_bundle_part.bzl | 11 +++++++---- prelude/apple/apple_code_signing_types.bzl | 6 ++++++ prelude/apple/apple_rules_impl_utility.bzl | 5 ++--- 4 files changed, 33 insertions(+), 11 deletions(-) diff --git a/prelude/apple/apple_bundle_config.bzl b/prelude/apple/apple_bundle_config.bzl index 47376252f..7a91e9ff2 100644 --- a/prelude/apple/apple_bundle_config.bzl +++ b/prelude/apple/apple_bundle_config.bzl @@ -5,24 +5,38 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load(":apple_code_signing_types.bzl", "CodeSignConfiguration") + def _maybe_get_bool(config: str, default: [None, bool]) -> [None, bool]: result = read_root_config("apple", config, None) if result == None: return default return result.lower() == "true" +def _get_code_signing_configuration() -> str: + is_dry_run = _maybe_get_bool("dry_run_code_signing", False) + + # This is a kill switch for the feature, it can also be disabled by setting + # `apple.fast_adhoc_signing_enabled=false` in a global buckconfig file. 
+ is_fast_adhoc_signing_enabled = _maybe_get_bool("fast_adhoc_signing_enabled", True) + if is_dry_run and is_fast_adhoc_signing_enabled: + fail("Cannot enable both dry-run and fast-adhoc code signing") + if is_dry_run: + return CodeSignConfiguration("dry-run").value + elif is_fast_adhoc_signing_enabled: + return CodeSignConfiguration("fast-adhoc").value + else: + return CodeSignConfiguration("none").value + def apple_bundle_config() -> dict[str, typing.Any]: return { "_bundling_cache_buster": read_root_config("apple", "bundling_cache_buster", None), "_bundling_log_file_enabled": _maybe_get_bool("bundling_log_file_enabled", True), "_bundling_log_file_level": read_root_config("apple", "bundling_log_file_level", None), + "_code_signing_configuration": _get_code_signing_configuration(), "_codesign_type": read_root_config("apple", "codesign_type_override", None), "_compile_resources_locally_override": _maybe_get_bool("compile_resources_locally_override", None), - "_dry_run_code_signing": _maybe_get_bool("dry_run_code_signing", False), "_embed_provisioning_profile_when_adhoc_code_signing": _maybe_get_bool("embed_provisioning_profile_when_adhoc_code_signing", None), - # This is a kill switch for the feature, it can also be disabled by setting - # `apple.fast_adhoc_signing_enabled=false` in a global buckconfig file. 
- "_fast_adhoc_signing_enabled": _maybe_get_bool("fast_adhoc_signing_enabled", True), "_fast_provisioning_profile_parsing_enabled": _maybe_get_bool("fast_provisioning_profile_parsing_enabled", False), "_incremental_bundling_enabled": _maybe_get_bool("incremental_bundling_enabled", True), "_info_plist_identify_build_system_default": _maybe_get_bool("info_plist_identify_build_system", True), diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index 9f2a26628..a6ee6cbfa 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -9,7 +9,7 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//utils:expect.bzl", "expect") load(":apple_bundle_destination.bzl", "AppleBundleDestination", "bundle_relative_path_for_destination") load(":apple_bundle_utility.bzl", "get_extension_attr", "get_product_name") -load(":apple_code_signing_types.bzl", "CodeSignType") +load(":apple_code_signing_types.bzl", "CodeSignConfiguration", "CodeSignType") load(":apple_entitlements.bzl", "get_entitlements_codesign_args", "should_include_entitlements") load(":apple_sdk.bzl", "get_apple_sdk_name") load(":apple_sdk_metadata.bzl", "get_apple_sdk_metadata_for_sdk_name") @@ -64,13 +64,16 @@ def assemble_bundle( codesign_args = [] codesign_tool = ctx.attrs._apple_toolchain[AppleToolchainInfo].codesign - if ctx.attrs._dry_run_code_signing: + code_signing_configuration = CodeSignConfiguration(ctx.attrs._code_signing_configuration) + if code_signing_configuration == CodeSignConfiguration("dry-run"): codesign_configuration_args = ["--codesign-configuration", "dry-run"] codesign_tool = tools.dry_codesign_tool - elif ctx.attrs._fast_adhoc_signing_enabled: + elif code_signing_configuration == CodeSignConfiguration("fast-adhoc"): codesign_configuration_args = ["--codesign-configuration", "fast-adhoc"] - else: + elif code_signing_configuration == CodeSignConfiguration("none"): codesign_configuration_args = [] + else: + fail("Code signing 
configuration `{}` not supported".format(code_signing_configuration)) codesign_required = codesign_type.value in ["distribution", "adhoc"] swift_support_required = swift_stdlib_args and (not ctx.attrs.skip_copying_swift_stdlib) and should_copy_swift_stdlib(bundle.extension) diff --git a/prelude/apple/apple_code_signing_types.bzl b/prelude/apple/apple_code_signing_types.bzl index 555a04f8a..b63f98ffb 100644 --- a/prelude/apple/apple_code_signing_types.bzl +++ b/prelude/apple/apple_code_signing_types.bzl @@ -15,3 +15,9 @@ CodeSignType = enum( "adhoc", "distribution", ) + +CodeSignConfiguration = enum( + "dry-run", + "fast-adhoc", + "none", +) diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/prelude/apple/apple_rules_impl_utility.bzl index 6a03ae599..49a04bfe4 100644 --- a/prelude/apple/apple_rules_impl_utility.bzl +++ b/prelude/apple/apple_rules_impl_utility.bzl @@ -7,7 +7,7 @@ load("@prelude//apple:apple_bundle_attrs.bzl", "get_apple_info_plist_build_system_identification_attrs") load("@prelude//apple:apple_bundle_types.bzl", "AppleBundleResourceInfo", "AppleBundleTypeAttributeType") -load("@prelude//apple:apple_code_signing_types.bzl", "CodeSignType") +load("@prelude//apple:apple_code_signing_types.bzl", "CodeSignConfiguration", "CodeSignType") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") load("@prelude//apple:resource_groups.bzl", "RESOURCE_GROUP_MAP_ATTR") load("@prelude//apple/swift:swift_incremental_support.bzl", "SwiftCompilationMode") @@ -60,10 +60,9 @@ def _apple_bundle_like_common_attrs(): "_bundling_cache_buster": attrs.option(attrs.string(), default = None), "_bundling_log_file_enabled": attrs.bool(default = False), "_bundling_log_file_level": attrs.option(attrs.string(), default = None), + "_code_signing_configuration": attrs.option(attrs.enum(CodeSignConfiguration.values()), default = None), "_codesign_type": attrs.option(attrs.enum(CodeSignType.values()), default = None), 
"_compile_resources_locally_override": attrs.option(attrs.bool(), default = None), - "_dry_run_code_signing": attrs.bool(default = False), - "_fast_adhoc_signing_enabled": attrs.bool(default = False), "_fast_provisioning_profile_parsing_enabled": attrs.bool(default = False), "_incremental_bundling_enabled": attrs.bool(default = False), "_profile_bundling_enabled": attrs.bool(default = False), From 24161f175c9b2bd4257ffb89965544f09096cf8e Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Wed, 27 Mar 2024 07:49:33 -0700 Subject: [PATCH 0610/1133] remove failure when both dry-run and fast-adhoc are enabled Summary: just fallback to dry-run Reviewed By: RayanRal, milend Differential Revision: D55423635 fbshipit-source-id: fe97a043bf06828ac96fd6c3622ed375bb4f303a --- prelude/apple/apple_bundle_config.bzl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/prelude/apple/apple_bundle_config.bzl b/prelude/apple/apple_bundle_config.bzl index 7a91e9ff2..3e5909d2c 100644 --- a/prelude/apple/apple_bundle_config.bzl +++ b/prelude/apple/apple_bundle_config.bzl @@ -19,8 +19,7 @@ def _get_code_signing_configuration() -> str: # This is a kill switch for the feature, it can also be disabled by setting # `apple.fast_adhoc_signing_enabled=false` in a global buckconfig file. 
is_fast_adhoc_signing_enabled = _maybe_get_bool("fast_adhoc_signing_enabled", True) - if is_dry_run and is_fast_adhoc_signing_enabled: - fail("Cannot enable both dry-run and fast-adhoc code signing") + if is_dry_run: return CodeSignConfiguration("dry-run").value elif is_fast_adhoc_signing_enabled: From d733f06675bd0d2a4019542805a0f4588a174132 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Wed, 27 Mar 2024 09:41:30 -0700 Subject: [PATCH 0611/1133] Fixed incorrect indentation in python Reviewed By: VladimirMakaev Differential Revision: D55420457 fbshipit-source-id: a20267c90ecb9286ccb47fba66889081d5d0375f --- prelude/cxx/link_groups.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index e7720b326..baee7ae53 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -399,8 +399,8 @@ def get_filtered_labels_to_links_map( else: target_group_srcs[src] = target - if target_group in link_group_added: - return + if target_group in link_group_added: + return # In some flows, we may not have access to the actual link group lib # in our dep tree (e.g. 
https://fburl.com/code/pddmkptb), so just bail From 5bedbbdbf01cdf61422b97e57912991d97f513a8 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 27 Mar 2024 10:23:35 -0700 Subject: [PATCH 0612/1133] Put transitive deps of rustdoc tests under their original crate name Summary: In rustdoc tests, the same dynamically named crate can be both a transitive dependency and a **renamed** direct dependency, as is the case with `//common/rust/multiplatform/smc:smc`'s dependency on `//common/smc/if:if-rust` here: https://www.internalfb.com/code/fbsource/[c2fff4ca7ebb971cacac4ea8c47469e2275d0737]/fbcode/common/rust/multiplatform/smc/TARGETS?lines=6%2C9-11%2C28%2C34%2C37-38 https://www.internalfb.com/code/fbsource/[c2fff4ca7ebb971cacac4ea8c47469e2275d0737]/fbcode/common/smc/if/TARGETS?lines=8-9%2C51%2C53-54%2C94-95%2C97-98 For the purpose of laying out transitive dependencies, these local renames (`named_deps`) are *never* relevant, but this was being handled incorrectly. This diff fixes transitive dependencies of rustdoc tests to be placed according to the correct real crate name, which may not be known at analysis time. Reviewed By: JakobDegen Differential Revision: D55397344 fbshipit-source-id: 92d726bd43ad349d033d09e16ce7f36265a165f4 --- prelude/rust/build.bzl | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 8c54768eb..721e9c0f8 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -705,10 +705,11 @@ def dependency_args( args.add(extern_arg(dep.flags, crate, artifact)) crate_targets.append((crate, dep.label)) - # Because deps of this *target* can also be transitive deps of this compiler - # invocation, pass the artifact through `-L` unconditionally for doc tests. + # Because deps of this *target* can also be transitive deps of this + # compiler invocation, pass the artifact (under its original crate name) + # through `-L` unconditionally for doc tests. 
if is_rustdoc_test: - transitive_deps[artifact] = crate + transitive_deps[artifact] = dep.info.crate # Unwanted transitive_deps have already been excluded transitive_deps.update(transitive_artifacts) From 85f9a06516e909a8421ef6d3672df25e1c004181 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 27 Mar 2024 10:23:59 -0700 Subject: [PATCH 0613/1133] Bring dependency_args function documentation up to date Summary: The original documentation, referring to a "third" return value and an "is_check" argument, does not pertain to the current signature. The third return value went away years ago in {D33930730}. The is_check argument went away in {D54976703}. Reviewed By: JakobDegen Differential Revision: D55397343 fbshipit-source-id: 6ebae413f35216599febe47e35e93b79693064d8 --- prelude/rust/build.bzl | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 721e9c0f8..c73935da8 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -667,10 +667,14 @@ def rust_compile( # --extern = for direct dependencies # -Ldependency= for transitive dependencies # For native dependencies, we use -Clink-arg=@argsfile -# Second element of result tuple is a list of files/directories that should be present for executable to be run successfully -# Third return is the mapping from crate names back to targets (needed so that a deps linter knows what deps need fixing) # -# The `compile_ctx` may be omitted if `is_check` is `True` and there are no dependencies with dynamic crate names +# Second element of returned tuple is a mapping from crate names back to target +# label, needed for applying autofixes for rustc's unused_crate_dependencies +# lint by tracing Rust crate names in the compiler diagnostic back to which +# dependency entry in the BUCK file needs to be removed. +# +# The `compile_ctx` may be omitted if there are no dependencies with dynamic +# crate names. 
def dependency_args( ctx: AnalysisContext, compile_ctx: CompileContext | None, From c72097f2ab490e215abcd639ee45a23a5b1f7223 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Wed, 27 Mar 2024 10:24:14 -0700 Subject: [PATCH 0614/1133] fix `_fast_adhoc_signing_enabled` usage Summary: missed in D55372023 Reviewed By: Nero5023 Differential Revision: D55429600 fbshipit-source-id: df085a4a62ff3394586af76a11e5aa5c0e582cea --- prelude/apple/apple_bundle.bzl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/prelude/apple/apple_bundle.bzl b/prelude/apple/apple_bundle.bzl index b461efff0..db379855c 100644 --- a/prelude/apple/apple_bundle.bzl +++ b/prelude/apple/apple_bundle.bzl @@ -60,6 +60,7 @@ load( "AppleBundleTypeWatchApp", ) load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_default_binary_dep", "get_flattened_binary_deps", "get_product_name") +load(":apple_code_signing_types.bzl", "CodeSignConfiguration") load(":apple_dsym.bzl", "DSYM_INFO_SUBTARGET", "DSYM_SUBTARGET", "get_apple_dsym", "get_apple_dsym_ext", "get_apple_dsym_info") load(":apple_sdk.bzl", "get_apple_sdk_name") load(":apple_universal_binaries.bzl", "create_universal_binary") @@ -129,7 +130,8 @@ def _get_bundle_dsym_name(ctx: AnalysisContext) -> str: def _scrub_binary(ctx, binary: Artifact, binary_execution_preference_info: None | LinkExecutionPreferenceInfo) -> Artifact: # If fast adhoc code signing is enabled, we need to resign the binary as it won't be signed later. - if ctx.attrs._fast_adhoc_signing_enabled: + code_signing_configuration = CodeSignConfiguration(ctx.attrs._code_signing_configuration) + if code_signing_configuration == CodeSignConfiguration("fast-adhoc"): apple_tools = ctx.attrs._apple_tools[AppleToolsInfo] adhoc_codesign_tool = apple_tools.adhoc_codesign_tool else: From ad817516ed188a8b4254ceda0a7e4e0d827a1083 Mon Sep 17 00:00:00 2001 From: Dustin Shahidehpour Date: Wed, 27 Mar 2024 10:38:14 -0700 Subject: [PATCH 0615/1133] add intersect. 
Summary: RFC: https://fb.workplace.com/groups/3645748045672638/posts/3827886047458836/ This adds a new traversal type for `link_groups` known as `intersect`. Given N roots, this will match on targets that present in the graph of *all* the roots (and pass the filters). This will be used by our production iOS apps so that they can declare how many of our dylibs are structures: shared containers of code for various binaries that are sent to apple (main app, extensions, app clips, etc.) Currently, this is done by using a two-pass system to our builds that is bane of our existence. You have to run `arc dylibs enable ` which runs a dep-query, often taking minutes and writing the output to a BZL file which declared in a new cell `fbobjc/dylibs`. If that cell is present, buried in our macros is logic which **rewrites the deps field of our targets (YUCK)** based on the BZL file. With this addition, we are one step closer to getting rid of all of that. Reviewed By: rmaz Differential Revision: D54809113 fbshipit-source-id: 715075327f1f8c19acd499fe75fb99ad8d5e6cac --- prelude/apple/user/resource_group_map.bzl | 13 +++--- prelude/cxx/groups.bzl | 56 +++++++++++++++++++++-- prelude/cxx/groups_types.bzl | 3 ++ prelude/cxx/link_groups_types.bzl | 2 +- prelude/cxx/user/link_group_map.bzl | 47 +++++-------------- 5 files changed, 74 insertions(+), 47 deletions(-) diff --git a/prelude/apple/user/resource_group_map.bzl b/prelude/apple/user/resource_group_map.bzl index d9bee033d..5c5c315ec 100644 --- a/prelude/apple/user/resource_group_map.bzl +++ b/prelude/apple/user/resource_group_map.bzl @@ -16,6 +16,7 @@ load( "@prelude//cxx:groups.bzl", "compute_mappings", "create_group", + "get_roots_from_mapping", "make_info_subtarget_providers", "parse_groups_definitions", ) @@ -25,17 +26,17 @@ load( "Traversal", ) load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") +load("@prelude//utils:utils.bzl", "flatten") def _impl(ctx: AnalysisContext) -> list[Provider]: resource_groups = 
parse_groups_definitions(ctx.attrs.map, lambda root: root.label) - # Extract deps from the roots via the raw attrs, as `parse_groups_definitions` - # parses them as labels. - resource_groups_deps = [ - mapping[0] + resource_groups_deps = flatten([ + get_roots_from_mapping(mapping) for entry in ctx.attrs.map for mapping in entry[1] - ] + ]) + resource_graph = create_resource_graph( ctx = ctx, labels = [], @@ -104,7 +105,7 @@ registration_spec = RuleRegistrationSpec( attrs.string(), attrs.list( attrs.tuple( - attrs.dep(), + attrs.one_of(attrs.dep(), attrs.list(attrs.dep())), attrs.enum(Traversal.values()), attrs.option(attrs.string()), ), diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index 17dea083a..f81b60ee4 100644 --- a/prelude/cxx/groups.bzl +++ b/prelude/cxx/groups.bzl @@ -22,7 +22,6 @@ load( ) load( "@prelude//utils:utils.bzl", - "map_val", "value_or", ) load( @@ -47,6 +46,26 @@ _VALID_ATTRS = [ "prohibit_file_duplicates", ] +# Traversal types in this list will only assign the node +# to a target (as opposed to the transitive deps of the node's tree). +_TRAVERSALS_TO_ASSIGN_NODE = [ + Traversal("node"), + Traversal("subfolders"), + # TODO (dust): Possible perf optimization: + # When intersecting configured targets, it's not possible to intersect + # a parent without also intersecting it's children. + # + # As a result, there's a possible perf optimization to assign 'tree' + # to intersected targets instead, and leverage that to avoid traversing + # the entire tree of every root. + # + # For example: + # If iterating the tree of 'root2' we find a node which + # was also present in 'root1', we can skip traversing the subtree + # because it's evitable that everything is going to match there too. 
+ Traversal("intersect"), +] + # Creates a group from an existing group, overwriting any properties provided def create_group( group: Group, @@ -61,6 +80,10 @@ def create_group( definition_type = value_or(definition_type, group.definition_type), ) +def get_roots_from_mapping(mapping): + deps = mapping[0] if type(mapping[0]) == "list" else [mapping[0]] + return filter(None, deps) + def parse_groups_definitions( map: list, # Function to parse a root label from the input type, allowing different @@ -89,11 +112,17 @@ def parse_groups_definitions( for entry in mappings: traversal = _parse_traversal_from_mapping(entry[1]) mapping = GroupMapping( - roots = filter(None, [map_val(parse_root, entry[0])]), + roots = filter(None, [parse_root(root) for root in get_roots_from_mapping(entry)]), traversal = traversal, filters = _parse_filter_from_mapping(entry[2]), preferred_linkage = Linkage(entry[3]) if len(entry) > 3 and entry[3] else None, ) + num_roots = len(mapping.roots) if mapping.roots else 0 + if num_roots > 1 and mapping.traversal != Traversal("intersect"): + fail("Invariant. A link_group mapping with traversal type: {} can only have 1 root node. {} found.".format(mapping.traversal, mapping.roots)) + elif mapping.traversal == Traversal("intersect") and num_roots < 2: + fail("Invariant. A link_group mapping with traversal type 'intersect' must have at least 2 root nodes. 
{} found.".format(mapping.roots)) + parsed_mappings.append(mapping) group = Group( @@ -113,6 +142,8 @@ def _parse_traversal_from_mapping(entry: str) -> Traversal: return Traversal("node") elif entry == "subfolders": return Traversal("subfolders") + elif entry == "intersect": + return Traversal("intersect") else: fail("Unrecognized group traversal type: " + entry) @@ -187,7 +218,8 @@ def _find_targets_in_mapping( if not mapping.filters: if not mapping.roots: fail("no filter or explicit root given: {}", mapping) - return mapping.roots + elif mapping.traversal != Traversal("intersect"): + return mapping.roots # Else find all dependencies that match the filter. matching_targets = {} @@ -228,6 +260,22 @@ def _find_targets_in_mapping( if not mapping.roots: for node in graph_map: find_matching_targets(node) + elif mapping.traversal == Traversal("intersect"): + intersected_targets = None + for root in mapping.roots: + # This is a captured variable inside `find_matching_targets`. + # We reset it for each root we visit so that we don't have results + # from other roots. + matching_targets = {} + breadth_first_traversal_by(graph_map, [root], find_matching_targets) + if intersected_targets == None: + intersected_targets = {target: True for target in matching_targets} + else: + # filter the list of intersected targets to only include targets also seen + # in the last passthrough. 
+ intersected_targets = {target: True for target in matching_targets if target in intersected_targets} + + return intersected_targets.keys() else: breadth_first_traversal_by(graph_map, mapping.roots, find_matching_targets) @@ -269,7 +317,7 @@ def _update_target_to_group_mapping( graph_node = graph_map[node] return graph_node.deps + graph_node.exported_deps - if mapping.traversal == Traversal("node") or mapping.traversal == Traversal("subfolders"): + if mapping.traversal in _TRAVERSALS_TO_ASSIGN_NODE: assign_target_to_group(target = target, node_traversal = True) else: # tree breadth_first_traversal_by(graph_map, [target], transitively_add_targets_to_group_mapping) diff --git a/prelude/cxx/groups_types.bzl b/prelude/cxx/groups_types.bzl index 6e0a86c74..24807564c 100644 --- a/prelude/cxx/groups_types.bzl +++ b/prelude/cxx/groups_types.bzl @@ -25,6 +25,9 @@ Traversal = enum( "node", # Uses pattern and separates all targets by full folder path. "subfolders", + # Includes targets found in the transitive deps of *all* roots. + # Filters for these mappings will be applied to the intersected deps. 
+ "intersect", ) # Optional type of filtering diff --git a/prelude/cxx/link_groups_types.bzl b/prelude/cxx/link_groups_types.bzl index 2ecf90729..a02ced1d3 100644 --- a/prelude/cxx/link_groups_types.bzl +++ b/prelude/cxx/link_groups_types.bzl @@ -33,7 +33,7 @@ def link_group_inlined_map_attr(root_attr): # a single mapping attrs.tuple( # root node - root_attr, + attrs.one_of(root_attr, attrs.list(root_attr)), # traversal attrs.enum(Traversal.values()), # filters, either `None`, a single filter, or a list of filters diff --git a/prelude/cxx/user/link_group_map.bzl b/prelude/cxx/user/link_group_map.bzl index 20597750c..d57d1e01e 100644 --- a/prelude/cxx/user/link_group_map.bzl +++ b/prelude/cxx/user/link_group_map.bzl @@ -7,10 +7,10 @@ load( "@prelude//cxx:groups.bzl", + "get_roots_from_mapping", "make_info_subtarget_providers", "parse_groups_definitions", ) -load("@prelude//cxx:groups_types.bzl", "Traversal") load( "@prelude//cxx:link_groups.bzl", "build_link_group_info", @@ -37,45 +37,18 @@ load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", ) -load("@prelude//linking:types.bzl", "Linkage") load("@prelude//user:rule_spec.bzl", "RuleRegistrationSpec") - -def _v1_attrs(attrs_root): - return attrs.list( - attrs.tuple( - # name - attrs.string(), - # list of mappings - attrs.list( - # a single mapping - attrs.tuple( - # root node - attrs_root, - # traversal - attrs.enum(Traversal.values()), - # filters, either `None`, a single filter, or a list of filters - # (which must all match). 
- attrs.option(attrs.one_of(attrs.list(attrs.string()), attrs.string())), - # linkage - attrs.option(attrs.enum(Linkage.values())), - ), - ), - # attributes - attrs.option( - attrs.dict(key = attrs.string(), value = attrs.any(), sorted = False), - ), - ), - ) +load("@prelude//utils:utils.bzl", "flatten") LINK_GROUP_MAP_ATTR = attrs.option( attrs.one_of( attrs.dep(providers = [LinkGroupInfo]), - _v1_attrs( + link_group_inlined_map_attr( # Inlined `link_group_map` will parse roots as `label`s, to avoid # bloating deps w/ unrelated mappings (e.g. it's common to use # a default mapping for all rules, which would otherwise add # unrelated deps to them). - attrs_root = attrs.option(attrs.label()), + root_attr = attrs.option(attrs.label()), ), ), default = None, @@ -84,13 +57,15 @@ LINK_GROUP_MAP_ATTR = attrs.option( def _impl(ctx: AnalysisContext) -> list[Provider]: # Extract graphs from the roots via the raw attrs, as `parse_groups_definitions` # parses them as labels. + + deps = flatten([ + get_roots_from_mapping(mapping) + for entry in ctx.attrs.map + for mapping in entry[1] + ]) linkable_graph = create_linkable_graph( ctx, - deps = [ - mapping[0][LinkableGraph] - for entry in ctx.attrs.map - for mapping in entry[1] - ], + deps = [dep[LinkableGraph] for dep in deps], ) link_groups = parse_groups_definitions(ctx.attrs.map, lambda root: root.label) link_group_info = build_link_group_info(linkable_graph, link_groups) From b742cbed97ec1eee0191d1229b61afc9182304aa Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Wed, 27 Mar 2024 11:26:41 -0700 Subject: [PATCH 0616/1133] Modular assets for APKs Summary: This puts assets into their Voltron module within an APK, i.e. into `/assets` instead of `assets`. 
Reviewed By: bchang7 Differential Revision: D55366638 fbshipit-source-id: da2ca63c6d3b95e307d47f31a20fa7258503e47d --- .../android_binary_resources_rules.bzl | 20 ++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl index 297d7defa..2c9369349 100644 --- a/prelude/android/android_binary_resources_rules.bzl +++ b/prelude/android/android_binary_resources_rules.bzl @@ -553,6 +553,10 @@ def _merge_assets( not (is_exopackaged_enabled_for_resources and is_bundle_build), "Cannot use exopackage-for-resources with AAB builds.", ) + expect( + not (is_exopackaged_enabled_for_resources and apk_module_graph_file), + "Cannot use exopackage-for-resources with Voltron builds.", + ) asset_resource_infos = [resource_info for resource_info in resource_infos if resource_info.assets] if not asset_resource_infos and not cxx_resources: return base_apk, None, None, None @@ -578,9 +582,14 @@ def _merge_assets( return merge_assets_cmd, merged_assets_output_hash - # For Voltron AAB builds, we need to put assets into a separate "APK" for each module. - if is_bundle_build and apk_module_graph_file: - module_assets_apks_dir = ctx.actions.declare_output("module_assets_apks") + if apk_module_graph_file: + declared_outputs = [merged_assets_output] + if is_bundle_build: + # For Voltron AAB builds, we need to put assets into a separate "APK" for each module. 
+ module_assets_apks_dir = ctx.actions.declare_output("module_assets_apks") + declared_outputs.append(module_assets_apks_dir) + else: + module_assets_apks_dir = None def merge_assets_modular(ctx: AnalysisContext, artifacts, outputs): apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) @@ -594,7 +603,8 @@ def _merge_assets( merge_assets_cmd, _ = get_common_merge_assets_cmd(ctx, outputs[merged_assets_output]) - merge_assets_cmd.add(["--module-assets-apks-dir", outputs[module_assets_apks_dir].as_output()]) + if is_bundle_build: + merge_assets_cmd.add(["--module-assets-apks-dir", outputs[module_assets_apks_dir].as_output()]) assets_dirs_file = ctx.actions.write_json("assets_dirs.json", module_to_assets_dirs) merge_assets_cmd.add(["--assets-dirs", assets_dirs_file]) @@ -605,7 +615,7 @@ def _merge_assets( ctx.actions.dynamic_output( dynamic = [apk_module_graph_file], inputs = [], - outputs = [module_assets_apks_dir, merged_assets_output], + outputs = declared_outputs, f = merge_assets_modular, ) From 4c78043c01d1d36f2a19fedd0643f170287b5b4b Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Wed, 27 Mar 2024 13:12:03 -0700 Subject: [PATCH 0617/1133] Revert D55366638: Modular assets for APKs Differential Revision: D55366638 Original commit changeset: da2ca63c6d3b Original Phabricator Diff: D55366638 fbshipit-source-id: ee93df8f173e286d6d9183da06d29b56118cc51d --- .../android_binary_resources_rules.bzl | 20 +++++-------------- 1 file changed, 5 insertions(+), 15 deletions(-) diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl index 2c9369349..297d7defa 100644 --- a/prelude/android/android_binary_resources_rules.bzl +++ b/prelude/android/android_binary_resources_rules.bzl @@ -553,10 +553,6 @@ def _merge_assets( not (is_exopackaged_enabled_for_resources and is_bundle_build), "Cannot use exopackage-for-resources with AAB builds.", ) - expect( - not 
(is_exopackaged_enabled_for_resources and apk_module_graph_file), - "Cannot use exopackage-for-resources with Voltron builds.", - ) asset_resource_infos = [resource_info for resource_info in resource_infos if resource_info.assets] if not asset_resource_infos and not cxx_resources: return base_apk, None, None, None @@ -582,14 +578,9 @@ def _merge_assets( return merge_assets_cmd, merged_assets_output_hash - if apk_module_graph_file: - declared_outputs = [merged_assets_output] - if is_bundle_build: - # For Voltron AAB builds, we need to put assets into a separate "APK" for each module. - module_assets_apks_dir = ctx.actions.declare_output("module_assets_apks") - declared_outputs.append(module_assets_apks_dir) - else: - module_assets_apks_dir = None + # For Voltron AAB builds, we need to put assets into a separate "APK" for each module. + if is_bundle_build and apk_module_graph_file: + module_assets_apks_dir = ctx.actions.declare_output("module_assets_apks") def merge_assets_modular(ctx: AnalysisContext, artifacts, outputs): apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) @@ -603,8 +594,7 @@ def _merge_assets( merge_assets_cmd, _ = get_common_merge_assets_cmd(ctx, outputs[merged_assets_output]) - if is_bundle_build: - merge_assets_cmd.add(["--module-assets-apks-dir", outputs[module_assets_apks_dir].as_output()]) + merge_assets_cmd.add(["--module-assets-apks-dir", outputs[module_assets_apks_dir].as_output()]) assets_dirs_file = ctx.actions.write_json("assets_dirs.json", module_to_assets_dirs) merge_assets_cmd.add(["--assets-dirs", assets_dirs_file]) @@ -615,7 +605,7 @@ def _merge_assets( ctx.actions.dynamic_output( dynamic = [apk_module_graph_file], inputs = [], - outputs = declared_outputs, + outputs = [module_assets_apks_dir, merged_assets_output], f = merge_assets_modular, ) From b8bcd3bf75e50bf3f476c6f3dc201b4259b09d0d Mon Sep 17 00:00:00 2001 From: Cesar Valdez Date: Wed, 27 Mar 2024 13:26:50 -0700 Subject: [PATCH 0618/1133] 
Add mapc to Apple toolchain Summary: # Context It looks like the `xcmappingmodel` type is not supported by the current apple resource rules, Xcode processes these files with a tool called `mapc` P1200549260. # This Diff Add mapc to Apple toolchain as preparation to add support for core data mapping models (xcmappingmodel). Reviewed By: narissiam Differential Revision: D55286884 fbshipit-source-id: b51e71513cafba5bf40853efd17548b4f56298b7 --- prelude/apple/apple_resource_types.bzl | 2 +- prelude/apple/apple_rules_impl.bzl | 3 ++- prelude/apple/apple_toolchain.bzl | 1 + prelude/apple/apple_toolchain_types.bzl | 1 + prelude/apple/user/apple_toolchain_override.bzl | 1 + prelude/decls/ios_rules.bzl | 1 + 6 files changed, 7 insertions(+), 2 deletions(-) diff --git a/prelude/apple/apple_resource_types.bzl b/prelude/apple/apple_resource_types.bzl index cbe3d2e3c..45fa6b8f0 100644 --- a/prelude/apple/apple_resource_types.bzl +++ b/prelude/apple/apple_resource_types.bzl @@ -31,7 +31,7 @@ AppleResourceSpec = record( codesign_entitlements = field([Artifact, None], None), ) -# Used when invoking `ibtool`, `actool` and `momc` +# Used when invoking `ibtool`, `actool`, `mapc` and `momc` AppleResourceProcessingOptions = record( prefer_local = field(bool, False), allow_cache_upload = field(bool, False), diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 8298fbba2..d51cb972c 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -186,7 +186,7 @@ extra_attributes = { "codesign": attrs.exec_dep(providers = [RunInfo]), "codesign_allocate": attrs.exec_dep(providers = [RunInfo]), "codesign_identities_command": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), - # Controls invocations of `ibtool`, `actool` and `momc` + # Controls invocations of `ibtool`, `actool` `mapc`and `momc` "compile_resources_locally": attrs.bool(default = False), "copy_scene_kit_assets": attrs.exec_dep(providers = 
[RunInfo]), "cxx_toolchain": attrs.toolchain_dep(), @@ -197,6 +197,7 @@ extra_attributes = { "installer": attrs.default_only(attrs.label(default = "buck//src/com/facebook/buck/installer/apple:apple_installer")), "libtool": attrs.exec_dep(providers = [RunInfo]), "lipo": attrs.exec_dep(providers = [RunInfo]), + "mapc": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "min_version": attrs.option(attrs.string(), default = None), "momc": attrs.exec_dep(providers = [RunInfo]), "objdump": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), diff --git a/prelude/apple/apple_toolchain.bzl b/prelude/apple/apple_toolchain.bzl index 3e0802db4..196146bef 100644 --- a/prelude/apple/apple_toolchain.bzl +++ b/prelude/apple/apple_toolchain.bzl @@ -31,6 +31,7 @@ def apple_toolchain_impl(ctx: AnalysisContext) -> list[Provider]: installer = ctx.attrs.installer, libtool = ctx.attrs.libtool[RunInfo], lipo = ctx.attrs.lipo[RunInfo], + mapc = ctx.attrs.mapc[RunInfo] if ctx.attrs.mapc else None, min_version = ctx.attrs.min_version, momc = ctx.attrs.momc[RunInfo], objdump = ctx.attrs.objdump[RunInfo] if ctx.attrs.objdump else None, diff --git a/prelude/apple/apple_toolchain_types.bzl b/prelude/apple/apple_toolchain_types.bzl index d94c9676b..fb9c6544e 100644 --- a/prelude/apple/apple_toolchain_types.bzl +++ b/prelude/apple/apple_toolchain_types.bzl @@ -24,6 +24,7 @@ AppleToolchainInfo = provider( "installer": provider_field(typing.Any, default = None), # label "libtool": provider_field(typing.Any, default = None), # "RunInfo" "lipo": provider_field(typing.Any, default = None), # "RunInfo" + "mapc": provider_field(typing.Any, default = None), # "RunInfo" "min_version": provider_field(typing.Any, default = None), # [None, str] "momc": provider_field(typing.Any, default = None), # "RunInfo" "objdump": provider_field(RunInfo | None, default = None), diff --git a/prelude/apple/user/apple_toolchain_override.bzl b/prelude/apple/user/apple_toolchain_override.bzl 
index fce7dbfa7..a7a226909 100644 --- a/prelude/apple/user/apple_toolchain_override.bzl +++ b/prelude/apple/user/apple_toolchain_override.bzl @@ -31,6 +31,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: libtool = base.libtool, lipo = base.lipo, min_version = base.min_version, + mapc = base.mapc, momc = base.momc, objdump = base.objdump, odrcov = base.odrcov, diff --git a/prelude/decls/ios_rules.bzl b/prelude/decls/ios_rules.bzl index 16f37e13a..5c8df4f76 100644 --- a/prelude/decls/ios_rules.bzl +++ b/prelude/decls/ios_rules.bzl @@ -818,6 +818,7 @@ apple_toolchain = prelude_rule( "libtool": attrs.source(), "licenses": attrs.list(attrs.source(), default = []), "lipo": attrs.source(), + "mapc": attrs.option(attrs.source(), default = None), "min_version": attrs.string(default = ""), "momc": attrs.source(), "platform_path": attrs.source(), From 64a05b3d14839ce3de6ee6ce267d45df37ef9310 Mon Sep 17 00:00:00 2001 From: Cesar Valdez Date: Wed, 27 Mar 2024 13:26:50 -0700 Subject: [PATCH 0619/1133] Add support for xcmappingmodel Summary: # Context It looks like the `xcmappingmodel` type is not supported by the current apple resource rules, Xcode processes these files with a tool called `mapc` P1200549260. # This Diff Extend apple_core_data rule functionality to support core data mapping models (xcmappingmodel). Reviewed By: blackm00n Differential Revision: D55350927 fbshipit-source-id: 345c631cbf96ab6dbd116efaa3efc6e4ebfa2b00 --- prelude/apple/apple_core_data.bzl | 31 +++++++++++++++++++++---------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/prelude/apple/apple_core_data.bzl b/prelude/apple/apple_core_data.bzl index 97a5916a4..c2112d0bb 100644 --- a/prelude/apple/apple_core_data.bzl +++ b/prelude/apple/apple_core_data.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//:paths.bzl", "paths") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") load(":apple_bundle_utility.bzl", "get_bundle_min_target_version", "get_bundle_resource_processing_options") load(":apple_core_data_types.bzl", "AppleCoreDataSpec") @@ -31,11 +32,12 @@ def compile_apple_core_data(ctx: AnalysisContext, specs: list[AppleCoreDataSpec] output = ctx.actions.declare_output("AppleCoreDataCompiled") - # Aggregate all the coredata momc commands together - momc_commands = [] + # Aggregate all the coredata momc and mapc commands together + tool_commands = [] for spec in specs: - momc_command = _get_momc_command(ctx, spec, product_name, cmd_args("$TMPDIR")) - momc_commands.append(momc_command) + tool, output_path = _get_model_args(ctx, spec) + tool_command = _get_tool_command(ctx, spec, product_name, tool, output_path) + tool_commands.append(tool_command) # Sandboxing and fs isolation on RE machines results in Xcode tools failing # when those are working in freshly created directories in buck-out. @@ -43,23 +45,32 @@ def compile_apple_core_data(ctx: AnalysisContext, specs: list[AppleCoreDataSpec] # As a workaround create a directory in tmp, use it for Xcode tools, then # copy the result to buck-out. 
wrapper_script, _ = ctx.actions.write( - "momc_wrapper.sh", + "tool_wrapper.sh", [ cmd_args("set -euo pipefail"), cmd_args('export TMPDIR="$(mktemp -d)"'), - cmd_args(momc_commands), + cmd_args(tool_commands), cmd_args(output, format = 'mkdir -p {} && cp -r "$TMPDIR"/ {}'), ], allow_args = True, ) - combined_command = cmd_args(["/bin/sh", wrapper_script]).hidden(momc_commands + [output.as_output()]) + combined_command = cmd_args(["/bin/sh", wrapper_script]).hidden(tool_commands + [output.as_output()]) processing_options = get_bundle_resource_processing_options(ctx) ctx.actions.run(combined_command, prefer_local = processing_options.prefer_local, allow_cache_upload = processing_options.allow_cache_upload, category = "apple_core_data") return output -def _get_momc_command(ctx: AnalysisContext, core_data_spec: AppleCoreDataSpec, product_name: str, output_directory: cmd_args) -> cmd_args: +def _get_model_args(ctx: AnalysisContext, core_data_spec: AppleCoreDataSpec): + toolchain = ctx.attrs._apple_toolchain[AppleToolchainInfo] + + if core_data_spec.path.extension == ".xcmappingmodel": + filename = paths.replace_extension(core_data_spec.path.basename, ".cdm") + return toolchain.mapc, cmd_args("$TMPDIR/" + filename) + else: + return toolchain.momc, cmd_args("$TMPDIR") + +def _get_tool_command(ctx: AnalysisContext, core_data_spec: AppleCoreDataSpec, product_name: str, tool: RunInfo, output: cmd_args) -> cmd_args: return cmd_args([ - ctx.attrs._apple_toolchain[AppleToolchainInfo].momc, + tool, "--sdkroot", ctx.attrs._apple_toolchain[AppleToolchainInfo].sdk_path, "--" + get_apple_sdk_name(ctx) + "-deployment-target", @@ -67,5 +78,5 @@ def _get_momc_command(ctx: AnalysisContext, core_data_spec: AppleCoreDataSpec, p "--module", core_data_spec.module if core_data_spec.module else product_name, core_data_spec.path, - output_directory, + output, ], delimiter = " ") From 9a3fb6659fa8a04b7f186f5df23d19e26be438a2 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Wed, 27 Mar 2024 
13:32:49 -0700 Subject: [PATCH 0620/1133] dynamic_output(outputs=[.as_output()]) Summary: Following diff D55389251 requires output artifact as parameter to `.dynamic_output` call. Reviewed By: TheGeorge Differential Revision: D55390974 fbshipit-source-id: d479df9a5f94fc0284ed7047f26a5c55509f5079 --- prelude/erlang/erlang_build.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/erlang/erlang_build.bzl b/prelude/erlang/erlang_build.bzl index f616bf163..7cef3a8a3 100644 --- a/prelude/erlang/erlang_build.bzl +++ b/prelude/erlang/erlang_build.bzl @@ -504,7 +504,7 @@ def _build_erl( always_print_stderr = True, ) - ctx.actions.dynamic_output(dynamic = [final_dep_file], inputs = [src], outputs = [output], f = dynamic_lambda) + ctx.actions.dynamic_output(dynamic = [final_dep_file], inputs = [src], outputs = [output.as_output()], f = dynamic_lambda) return None def _build_edoc( From 9fb287ae1640d248ab63cd72d985cf315b9ad85a Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Wed, 27 Mar 2024 15:17:31 -0700 Subject: [PATCH 0621/1133] Eliminate rustc_check_flags in favor of extra_rustc_flags MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: None of the current usages of `rustc_check_flags` was used in such a way that flags only being passed to `check` builds made sense. They were all just using it as a poor substitute for `extra_rustc_flags`, i.e. add flags without erasing all the existing flags configured by the toolchain's `rustc_flags`. The semantics of `rustc_check_flags` is confusing to begin with. Those flags are not passed just to `…[check]` builds. They are passed also for dependencies during pipelined non-check builds, because those also involve building metadata. This diff deletes support for `rustc_check_flags` in `RustToolchainInfo` and switches existing uses to `extra_rustc_flags`. 
Reviewed By: zertosh, diliop Differential Revision: D55438348 fbshipit-source-id: 221e1b6944774eebbeebac6f25882c9024b7b579 --- prelude/decls/rust_rules.bzl | 8 ++------ prelude/rust/build.bzl | 1 - prelude/rust/rust_toolchain.bzl | 2 -- prelude/toolchains/rust.bzl | 2 -- 4 files changed, 2 insertions(+), 11 deletions(-) diff --git a/prelude/decls/rust_rules.bzl b/prelude/decls/rust_rules.bzl index 32854fb27..51b988074 100644 --- a/prelude/decls/rust_rules.bzl +++ b/prelude/decls/rust_rules.bzl @@ -121,9 +121,7 @@ rust_binary = prelude_rule( If you invoke a build with the `check` flavor, then Buck will invoke rustc to check the code (typecheck, produce warnings, etc), but won't generate an executable code. When applied to binaries it produces no output; for libraries it produces metadata for - consumers of the library. When building with `check`, extra compiler flags from - the `rust.rustc_check_flags` are added to the compiler's command line options, - to allow for extra warnings, etc. + consumers of the library. Note: Buck is currently tested with (and therefore supports) version 1.32.0 of Rust. @@ -195,9 +193,7 @@ rust_library = prelude_rule( If you invoke a build with the `check` flavor, then Buck will invoke rustc to check the code (typecheck, produce warnings, etc), but won't generate an executable code. When applied to binaries it produces no output; for libraries it produces metadata for - consumers of the library. When building with `check`, extra compiler flags from - the `rust.rustc_check_flags` are added to the compiler's command line options, - to allow for extra warnings, etc. + consumers of the library. Note: Buck is currently tested with (and therefore supports) version 1.32.0 of Rust. 
diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index c73935da8..f62642885 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -1002,7 +1002,6 @@ def _compute_common_args( compile_ctx.sysroot_args, ["-Cpanic=abort", "-Zpanic-abort-tests=yes"] if toolchain_info.panic_runtime == PanicRuntime("abort") else [], _rustc_flags(toolchain_info.rustc_flags), - _rustc_flags(toolchain_info.rustc_check_flags) if is_check else [], _rustc_flags(toolchain_info.rustc_coverage_flags) if ctx.attrs.coverage else [], _rustc_flags(ctx.attrs.rustc_flags), _rustc_flags(toolchain_info.extra_rustc_flags), diff --git a/prelude/rust/rust_toolchain.bzl b/prelude/rust/rust_toolchain.bzl index e2d226e25..5f980f789 100644 --- a/prelude/rust/rust_toolchain.bzl +++ b/prelude/rust/rust_toolchain.bzl @@ -45,8 +45,6 @@ rust_toolchain_attrs = { "extra_rustc_flags": provider_field(list[typing.Any], default = []), # Extra flags when building binaries "rustc_binary_flags": provider_field(list[typing.Any], default = []), - # Extra flags for doing check builds - "rustc_check_flags": provider_field(list[typing.Any], default = []), # Extra flags for doing building tests "rustc_test_flags": provider_field(list[typing.Any], default = []), # Extra flags when coverage is enabled for a target diff --git a/prelude/toolchains/rust.bzl b/prelude/toolchains/rust.bzl index 8a5135963..7e3ea7b9e 100644 --- a/prelude/toolchains/rust.bzl +++ b/prelude/toolchains/rust.bzl @@ -52,7 +52,6 @@ def _system_rust_toolchain_impl(ctx): report_unused_deps = ctx.attrs.report_unused_deps, rustc_action = ctx.attrs.rustc_action[RunInfo], rustc_binary_flags = ctx.attrs.rustc_binary_flags, - rustc_check_flags = ctx.attrs.rustc_check_flags, rustc_flags = ctx.attrs.rustc_flags, rustc_target_triple = ctx.attrs.rustc_target_triple, rustc_test_flags = ctx.attrs.rustc_test_flags, @@ -77,7 +76,6 @@ system_rust_toolchain = rule( "pipelined": attrs.bool(default = True), "report_unused_deps": attrs.bool(default = False), 
"rustc_binary_flags": attrs.list(attrs.string(), default = []), - "rustc_check_flags": attrs.list(attrs.string(), default = []), "rustc_flags": attrs.list(attrs.string(), default = []), "rustc_target_triple": attrs.string(default = _DEFAULT_TRIPLE), "rustc_test_flags": attrs.list(attrs.string(), default = []), From cde15440167f4095ac62efc907aec562e9e94073 Mon Sep 17 00:00:00 2001 From: Ben Chang Date: Thu, 28 Mar 2024 14:22:10 -0700 Subject: [PATCH 0622/1133] Back out "Revert D55366638: [buck2] Modular assets for APKs" Summary: relanding diff - see original diff for more details. v1 of this diff is a clean backout with no changes. v2 makes minor tweaks to fix previous land issues. ============ Original commit changeset: ee93df8f173e Original Phabricator Diff: D55366638 Reviewed By: IanChilds Differential Revision: D55450456 fbshipit-source-id: 0bb87fb74d99c9568567181f888ec75976b69551 --- .../android_binary_resources_rules.bzl | 20 ++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl index 297d7defa..2c9369349 100644 --- a/prelude/android/android_binary_resources_rules.bzl +++ b/prelude/android/android_binary_resources_rules.bzl @@ -553,6 +553,10 @@ def _merge_assets( not (is_exopackaged_enabled_for_resources and is_bundle_build), "Cannot use exopackage-for-resources with AAB builds.", ) + expect( + not (is_exopackaged_enabled_for_resources and apk_module_graph_file), + "Cannot use exopackage-for-resources with Voltron builds.", + ) asset_resource_infos = [resource_info for resource_info in resource_infos if resource_info.assets] if not asset_resource_infos and not cxx_resources: return base_apk, None, None, None @@ -578,9 +582,14 @@ def _merge_assets( return merge_assets_cmd, merged_assets_output_hash - # For Voltron AAB builds, we need to put assets into a separate "APK" for each module. 
- if is_bundle_build and apk_module_graph_file: - module_assets_apks_dir = ctx.actions.declare_output("module_assets_apks") + if apk_module_graph_file: + declared_outputs = [merged_assets_output] + if is_bundle_build: + # For Voltron AAB builds, we need to put assets into a separate "APK" for each module. + module_assets_apks_dir = ctx.actions.declare_output("module_assets_apks") + declared_outputs.append(module_assets_apks_dir) + else: + module_assets_apks_dir = None def merge_assets_modular(ctx: AnalysisContext, artifacts, outputs): apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) @@ -594,7 +603,8 @@ def _merge_assets( merge_assets_cmd, _ = get_common_merge_assets_cmd(ctx, outputs[merged_assets_output]) - merge_assets_cmd.add(["--module-assets-apks-dir", outputs[module_assets_apks_dir].as_output()]) + if is_bundle_build: + merge_assets_cmd.add(["--module-assets-apks-dir", outputs[module_assets_apks_dir].as_output()]) assets_dirs_file = ctx.actions.write_json("assets_dirs.json", module_to_assets_dirs) merge_assets_cmd.add(["--assets-dirs", assets_dirs_file]) @@ -605,7 +615,7 @@ def _merge_assets( ctx.actions.dynamic_output( dynamic = [apk_module_graph_file], inputs = [], - outputs = [module_assets_apks_dir, merged_assets_output], + outputs = declared_outputs, f = merge_assets_modular, ) From 1163cccb5880472d6e697eb3e85617f231d51c47 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Thu, 28 Mar 2024 15:37:53 -0700 Subject: [PATCH 0623/1133] Use some pure code instead of cmd_args.add Reviewed By: IanChilds Differential Revision: D55230561 fbshipit-source-id: 83e1fe2c0d708ea651c5002322fa68ce3fac3a38 --- prelude/android/android_resource.bzl | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/prelude/android/android_resource.bzl b/prelude/android/android_resource.bzl index e51a21cfe..ac29f75df 100644 --- a/prelude/android/android_resource.bzl +++ b/prelude/android/android_resource.bzl @@ 
-82,16 +82,16 @@ def aapt2_compile( android_toolchain: AndroidToolchainInfo, skip_crunch_pngs: bool = False, identifier: [str, None] = None) -> Artifact: - aapt2_command = cmd_args(android_toolchain.aapt2) - aapt2_command.add("compile") - aapt2_command.add("--legacy") + aapt2_command = [cmd_args(android_toolchain.aapt2)] + aapt2_command.append("compile") + aapt2_command.append("--legacy") if skip_crunch_pngs: - aapt2_command.add("--no-crunch") - aapt2_command.add(["--dir", resources_dir]) + aapt2_command.append("--no-crunch") + aapt2_command.extend(["--dir", resources_dir]) aapt2_output = ctx.actions.declare_output("{}_resources.flata".format(identifier) if identifier else "resources.flata") - aapt2_command.add("-o", aapt2_output.as_output()) + aapt2_command.extend(["-o", aapt2_output.as_output()]) - ctx.actions.run(aapt2_command, category = "aapt2_compile", identifier = identifier) + ctx.actions.run(cmd_args(aapt2_command), category = "aapt2_compile", identifier = identifier) return aapt2_output @@ -104,9 +104,13 @@ def _get_package(ctx: AnalysisContext, package: [str, None], manifest: [Artifact def extract_package_from_manifest(ctx: AnalysisContext, manifest: Artifact) -> Artifact: r_dot_java_package = ctx.actions.declare_output(JAVA_PACKAGE_FILENAME) - extract_package_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].manifest_utils[RunInfo]) - extract_package_cmd.add(["--manifest-path", manifest]) - extract_package_cmd.add(["--package-output", r_dot_java_package.as_output()]) + extract_package_cmd = cmd_args( + ctx.attrs._android_toolchain[AndroidToolchainInfo].manifest_utils[RunInfo], + "--manifest-path", + manifest, + "--package-output", + r_dot_java_package.as_output(), + ) ctx.actions.run(extract_package_cmd, category = "android_extract_package") From dcb82199a8eddd473023a2f458b9a0cbf708f7ee Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Thu, 28 Mar 2024 15:37:53 -0700 Subject: [PATCH 0624/1133] Use some pure code instead of cmd_args.add 
Reviewed By: IanChilds Differential Revision: D55230558 fbshipit-source-id: 475f76a3055ae8572d92ef035ffddbcfd84657af --- prelude/debugging/inspect_dbg_exec.bzl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/prelude/debugging/inspect_dbg_exec.bzl b/prelude/debugging/inspect_dbg_exec.bzl index 416f5b486..379f27c38 100644 --- a/prelude/debugging/inspect_dbg_exec.bzl +++ b/prelude/debugging/inspect_dbg_exec.bzl @@ -18,8 +18,7 @@ def inspect_dbg_exec(ctx: bxl.Context, actions: AnalysisActions, target: bxl.Con providers = ctx.analysis(fbsource_alias_target).providers() fdb_helper = providers[RunInfo] fdb_helper_out = actions.declare_output("fdb_helper.json") - cmd = cmd_args(fdb_helper) - cmd.add(settings.args) + cmd = cmd_args(fdb_helper, settings.args) actions.run(cmd, category = "fdb_helper", env = {"FDB_OUTPUT_FILE": fdb_helper_out.as_output()}, local_only = True) result = actions.declare_output("final_out.json") From 9fbd8097e1a6a60d3606a92f93c82e3a24adbcc9 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Thu, 28 Mar 2024 15:37:53 -0700 Subject: [PATCH 0625/1133] Use some pure code instead of cmd_args.add Reviewed By: IanChilds Differential Revision: D55230595 fbshipit-source-id: 74d1ee812b38a4ae00b25bf93011e9e5e1632de6 --- prelude/python_bootstrap/python_bootstrap.bzl | 21 +++++++++++-------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/prelude/python_bootstrap/python_bootstrap.bzl b/prelude/python_bootstrap/python_bootstrap.bzl index b4c0d021b..ef628945d 100644 --- a/prelude/python_bootstrap/python_bootstrap.bzl +++ b/prelude/python_bootstrap/python_bootstrap.bzl @@ -43,15 +43,18 @@ def python_bootstrap_binary_impl(ctx: AnalysisContext) -> list[Provider]: interpreter = ctx.attrs._python_bootstrap_toolchain[PythonBootstrapToolchainInfo].interpreter - run_args = cmd_args() if ctx.attrs._win_python_wrapper != None: - run_args.add(ctx.attrs._win_python_wrapper[RunInfo]) - run_args.add(run_tree) - run_args.add(interpreter) - 
run_args.add(output) + run_args = cmd_args( + ctx.attrs._win_python_wrapper[RunInfo], + run_tree, + interpreter, + output, + ) else: - run_args.add("/usr/bin/env") - run_args.add(cmd_args(run_tree, format = "PYTHONPATH={}")) - run_args.add(interpreter) - run_args.add(output) + run_args = cmd_args( + "/usr/bin/env", + cmd_args(run_tree, format = "PYTHONPATH={}"), + interpreter, + output, + ) return [DefaultInfo(default_output = output), RunInfo(args = run_args)] From 558a1978733536648b86586a96f4689bd808a26c Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Thu, 28 Mar 2024 15:37:53 -0700 Subject: [PATCH 0626/1133] Use some pure code instead of cmd_args.add Reviewed By: IanChilds Differential Revision: D55233350 fbshipit-source-id: 63cfb2c0edd62475a5d30262cc5616c15a7265e2 --- prelude/js/js_bundle.bzl | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/prelude/js/js_bundle.bzl b/prelude/js/js_bundle.bzl index 2b25374ee..e90621731 100644 --- a/prelude/js/js_bundle.bzl +++ b/prelude/js/js_bundle.bzl @@ -49,10 +49,11 @@ def _build_dependencies_file( command_args_files = [command_args_file], identifier = transform_profile, category = "dependencies", - hidden_artifacts = [cmd_args([ + hidden_artifacts = [cmd_args( dependencies_file.as_output(), extra_data_args, - ]).add(transitive_js_library_outputs)], + transitive_js_library_outputs, + )], ) return dependencies_file @@ -110,13 +111,14 @@ def _build_js_bundle( command_args_files = [command_args_file], identifier = base_dir, category = job_args["command"], - hidden_artifacts = [cmd_args([ + hidden_artifacts = [cmd_args( bundle_dir_output.as_output(), assets_dir.as_output(), misc_dir_path.as_output(), source_map.as_output(), extra_data_args, - ]).add(transitive_js_library_outputs)], + transitive_js_library_outputs, + )], ) return JsBundleInfo( From f6a8702417ff29b7663b781dcc3ae3483152ddee Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Thu, 28 Mar 2024 15:37:53 -0700 Subject: [PATCH 0627/1133] 
Use some pure code instead of cmd_args.add Reviewed By: iguridi Differential Revision: D55233503 fbshipit-source-id: 5dd3506807a75f28bfc9ed886dca9fd2ce337a9f --- prelude/apple/apple_universal_binaries.bzl | 16 ++++++++-------- prelude/apple/apple_utility.bzl | 6 +++--- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/prelude/apple/apple_universal_binaries.bzl b/prelude/apple/apple_universal_binaries.bzl index f5c3fbe6e..fdcb6c78e 100644 --- a/prelude/apple/apple_universal_binaries.bzl +++ b/prelude/apple/apple_universal_binaries.bzl @@ -18,13 +18,13 @@ def create_universal_binary( dsym_bundle_name: [str, None], split_arch_dsym: bool) -> AppleBundleBinaryOutput: binary_output = ctx.actions.declare_output("UniversalBinary" if binary_name == None else binary_name, dir = False) - lipo_cmd = cmd_args([ctx.attrs._apple_toolchain[AppleToolchainInfo].lipo]) + lipo_cmd = [ctx.attrs._apple_toolchain[AppleToolchainInfo].lipo] for (_, binary) in binary_deps.items(): - lipo_cmd.add(cmd_args(binary[DefaultInfo].default_outputs[0])) + lipo_cmd.append(cmd_args(binary[DefaultInfo].default_outputs[0])) - lipo_cmd.add(["-create", "-output", binary_output.as_output()]) - ctx.actions.run(lipo_cmd, category = "lipo") + lipo_cmd.extend(["-create", "-output", binary_output.as_output()]) + ctx.actions.run(cmd_args(lipo_cmd), category = "lipo") # Universal binaries can be created out of plain `cxx_binary()` / `cxx_library()` # which lack the `AppleDebuggableInfo` provider. 
@@ -34,12 +34,12 @@ def create_universal_binary( dsym_output = None if split_arch_dsym and contains_full_debuggable_info: dsym_output = ctx.actions.declare_output("UniversalBinary.dSYM" if dsym_bundle_name == None else dsym_bundle_name, dir = True) - dsym_combine_cmd = cmd_args([ctx.attrs._apple_tools[AppleToolsInfo].split_arch_combine_dsym_bundles_tool]) + dsym_combine_cmd = [ctx.attrs._apple_tools[AppleToolsInfo].split_arch_combine_dsym_bundles_tool] for (arch, binary) in binary_deps.items(): - dsym_combine_cmd.add(["--dsym-bundle", cmd_args(binary.get(AppleDebuggableInfo).dsyms[0]), "--arch", arch]) - dsym_combine_cmd.add(["--output", dsym_output.as_output()]) - ctx.actions.run(dsym_combine_cmd, category = "universal_binaries_dsym") + dsym_combine_cmd.extend(["--dsym-bundle", cmd_args(binary.get(AppleDebuggableInfo).dsyms[0]), "--arch", arch]) + dsym_combine_cmd.extend(["--output", dsym_output.as_output()]) + ctx.actions.run(cmd_args(dsym_combine_cmd), category = "universal_binaries_dsym") all_debug_info_tsets = [] if contains_full_debuggable_info: diff --git a/prelude/apple/apple_utility.bzl b/prelude/apple/apple_utility.bzl index 529793638..72fb011ec 100644 --- a/prelude/apple/apple_utility.bzl +++ b/prelude/apple/apple_utility.bzl @@ -73,15 +73,15 @@ def expand_relative_prefixed_sdk_path( "$RESOURCEDIR": swift_resource_dir, "$SDKROOT": sdk_path, } - expanded_cmd = cmd_args() + expanded_cmd = [] for (path_variable, path_value) in path_expansion_map.items(): if path_to_expand.startswith(path_variable): path = path_to_expand[len(path_variable):] if path.find("$") == 0: fail("Failed to expand framework path: {}".format(path)) - expanded_cmd.add(cmd_args([path_value, path], delimiter = "")) + expanded_cmd.append(cmd_args([path_value, path], delimiter = "")) - return expanded_cmd + return cmd_args(expanded_cmd) def get_disable_pch_validation_flags() -> list[str]: """ From 2eb745fbbf964263faa4fe3c7987a3f7bc500636 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: 
Thu, 28 Mar 2024 15:37:53 -0700 Subject: [PATCH 0628/1133] Use some pure code instead of cmd_args.add Reviewed By: iguridi Differential Revision: D55233540 fbshipit-source-id: 0853ce95a7f81d064d120d971c3eaa5aefbc8914 --- prelude/csharp/csharp.bzl | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/prelude/csharp/csharp.bzl b/prelude/csharp/csharp.bzl index 7651b5763..53ddfb7c3 100644 --- a/prelude/csharp/csharp.bzl +++ b/prelude/csharp/csharp.bzl @@ -19,14 +19,14 @@ def csharp_library_impl(ctx: AnalysisContext) -> list[Provider]: library = ctx.actions.declare_output(dll_name) # Create a command invoking a wrapper script that calls csc.exe to compile the .dll. - cmd = cmd_args(toolchain.csc) + cmd = [toolchain.csc] # Add caller specified compiler flags. - cmd.add(ctx.attrs.compiler_flags) + cmd.append(ctx.attrs.compiler_flags) # Set the output target as a .NET library. - cmd.add("/target:library") - cmd.add(cmd_args( + cmd.append("/target:library") + cmd.append(cmd_args( library.as_output(), format = "/out:{}", )) @@ -34,29 +34,29 @@ def csharp_library_impl(ctx: AnalysisContext) -> list[Provider]: # Don't include any default .NET framework assemblies like "mscorlib" or "System" unless # explicitly requested with `/reference:{}`. This flag also stops injection of other # default compiler flags. - cmd.add("/noconfig") + cmd.append("/noconfig") # Don't reference mscorlib.dll unless asked for. This is required for targets that target # embedded platforms such as Silverlight or WASM. (Originally for Buck1 compatibility.) - cmd.add("/nostdlib") + cmd.append("/nostdlib") # Don't search any paths for .NET libraries unless explicitly referenced with `/lib:{}`. - cmd.add("/nosdkpath") + cmd.append("/nosdkpath") # Let csc know the directory path where it can find system assemblies. This is the path # that is searched by `/reference:{libname}` if `libname` is just a DLL name. 
- cmd.add(cmd_args(toolchain.framework_dirs[ctx.attrs.framework_ver], format = "/lib:{}")) + cmd.append(cmd_args(toolchain.framework_dirs[ctx.attrs.framework_ver], format = "/lib:{}")) # Add a `/reference:{name}` argument for each dependency. # Buck target refs should be absolute paths and system assemblies just the DLL name. child_deps = generate_target_tset_children(ctx.attrs.deps, ctx) deps_tset = ctx.actions.tset(DllDepTSet, children = child_deps) - cmd.add(deps_tset.project_as_args("reference")) + cmd.append(deps_tset.project_as_args("reference")) # Specify the C# source code files that should be compiled into this target. # NOTE: This must happen after /out and /target! - cmd.add(ctx.attrs.srcs) + cmd.append(ctx.attrs.srcs) # Run the C# compiler to produce the output artifact. ctx.actions.run(cmd, category = "csharp_compile") From e89db9de28df8973273fc24efacd9d1cfcdd9bcc Mon Sep 17 00:00:00 2001 From: Emerson Ford Date: Thu, 28 Mar 2024 22:47:35 -0700 Subject: [PATCH 0629/1133] expose Clippy configuration rule and provider Summary: expose a ClippyConfiguration provider to be used for the `clippy.json` subtarget of Rust library/binary/tests. This is made a unique provider (instead of using `DefaultInfo`) to protect against the use of `clippy.toml` files that may, for example, have not been merged with the toolchain defined `clippy.toml` file. 
Reviewed By: JakobDegen Differential Revision: D55386480 fbshipit-source-id: 4698d5b3b5fbeeeaca058ec69484a6bd98853191 --- prelude/decls/rust_rules.bzl | 2 + prelude/rust/build.bzl | 12 +++++- prelude/rust/clippy_configuration.bzl | 58 +++++++++++++++++++++++++++ 3 files changed, 70 insertions(+), 2 deletions(-) create mode 100644 prelude/rust/clippy_configuration.bzl diff --git a/prelude/decls/rust_rules.bzl b/prelude/decls/rust_rules.bzl index 51b988074..306d09816 100644 --- a/prelude/decls/rust_rules.bzl +++ b/prelude/decls/rust_rules.bzl @@ -7,6 +7,7 @@ load("@prelude//cxx:link_groups_types.bzl", "LINK_GROUP_MAP_ATTR") load("@prelude//linking:types.bzl", "Linkage") +load("@prelude//rust:clippy_configuration.bzl", "ClippyConfiguration") load("@prelude//rust:link_info.bzl", "RustProcMacroPlugin") load("@prelude//rust:rust_binary.bzl", "rust_binary_impl", "rust_test_impl") load("@prelude//rust:rust_library.bzl", "prebuilt_rust_library_impl", "rust_library_impl") @@ -70,6 +71,7 @@ prebuilt_rust_library = prelude_rule( def _rust_common_attributes(is_binary: bool): return { + "clippy_configuration": attrs.option(attrs.dep(providers = [ClippyConfiguration]), default = None), "contacts": attrs.list(attrs.string(), default = []), "coverage": attrs.bool(default = False), "default_host_platform": attrs.option(attrs.configuration_label(), default = None), diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index f62642885..dcd173c22 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -54,6 +54,7 @@ load( "dep_metadata_of_emit", "output_filename", ) +load(":clippy_configuration.bzl", "ClippyConfiguration") load( ":context.bzl", "CommonArgsInfo", @@ -550,7 +551,14 @@ def rust_compile( params = params, ) clippy_env = clippy_emit_op.env - if toolchain_info.clippy_toml: + + clippy_toml = None + if ctx.attrs.clippy_configuration: + clippy_toml = ctx.attrs.clippy_configuration[ClippyConfiguration].clippy_toml + elif toolchain_info.clippy_toml: + 
clippy_toml = toolchain_info.clippy_toml + + if clippy_toml: # Clippy wants to be given a path to a directory containing a # clippy.toml (or .clippy.toml). Our buckconfig accepts an arbitrary # label like //path/to:my-clippy.toml which may not have the @@ -558,7 +566,7 @@ def rust_compile( # symlinks the requested configuration file under the required name. clippy_conf_dir = ctx.actions.symlinked_dir( common_args.subdir + "-clippy-configuration", - {"clippy.toml": toolchain_info.clippy_toml}, + {"clippy.toml": clippy_toml}, ) clippy_env["CLIPPY_CONF_DIR"] = clippy_conf_dir clippy_invoke = _rustc_invoke( diff --git a/prelude/rust/clippy_configuration.bzl b/prelude/rust/clippy_configuration.bzl new file mode 100644 index 000000000..74f0c3f6d --- /dev/null +++ b/prelude/rust/clippy_configuration.bzl @@ -0,0 +1,58 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//rust:rust_toolchain.bzl", "RustToolchainInfo") +load("@prelude//decls/toolchains_common.bzl", "toolchains_common") + +# Configurations for Clippy runs. 
+ClippyConfiguration = provider( + fields = { + "clippy_toml": provider_field(Artifact), + }, +) + +def _clippy_configuration_impl(ctx: AnalysisContext) -> list[Provider]: + toolchain_ctx = ctx.attrs._rust_toolchain[RustToolchainInfo] + toolchain_clippy_toml = toolchain_ctx.clippy_toml + + if not toolchain_clippy_toml: + clippy_toml = ctx.attrs.clippy_toml_src + else: + toml_merge_tool = ctx.attrs.toml_merge_tool + + clippy_toml = ctx.actions.declare_output("clippy.toml") + ctx.actions.run([ + toml_merge_tool[RunInfo], + cmd_args(clippy_toml.as_output(), format = "--output={}"), + cmd_args(toolchain_clippy_toml, format = "--file={}"), + cmd_args(ctx.attrs.clippy_toml_src, format = "--file={}"), + ], category = "clippy_toml_merge") + + return [ + DefaultInfo( + default_output = clippy_toml, + ), + ClippyConfiguration( + clippy_toml = clippy_toml, + ), + ] + +# Generate a Clippy configuration that is merged with the toolchain specified +# Clippy configuration (if defined). +clippy_configuration = rule(impl = _clippy_configuration_impl, attrs = { + "clippy_toml_src": attrs.source(), + # TODO(emersonford): figure out how to store this in `_rust_toolchain` + # without causing a circular dependency on the toolchain target when + # `toml_merge_tool` is a `rust_binary`. + # + # Tool used to recursively merge multiple TOML files, e.g. for merging + # clippy.toml files. Must support taking multiple `--file ` flags + # as source files to merge and `--output ` flag to write the + # merged TOML table to. 
+ "toml_merge_tool": attrs.exec_dep(providers = [RunInfo]), + "_rust_toolchain": toolchains_common.rust(), +}) From 17cacb1342c3764b0a16ad30d9b8b4672771ec76 Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 03:12:53 -0700 Subject: [PATCH 0630/1133] Extract platform constants into prelude Reviewed By: rmaz Differential Revision: D55339758 fbshipit-source-id: ddd00317217ceaea89c7cceb0a2a2d05604d1095 --- prelude/platforms/apple/constants.bzl | 102 ++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) create mode 100644 prelude/platforms/apple/constants.bzl diff --git a/prelude/platforms/apple/constants.bzl b/prelude/platforms/apple/constants.bzl new file mode 100644 index 000000000..327ff3a2e --- /dev/null +++ b/prelude/platforms/apple/constants.bzl @@ -0,0 +1,102 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# These are identifiers used in defining Apple platforms for configuring apple_* rules. 
+ +# Apple SDK Definitions +IOS = "ios" + +WATCHOS = "watchos" + +MACOSX = "macosx" + +APPLETVOS = "appletvos" + +VISIONOS = "visionos" + +# iOS Platforms/Flavors + +IPHONEOS_ARM64 = "iphoneos-arm64" + +IPHONEOS_ARMV7 = "iphoneos-armv7" + +IPHONESIMULATOR_ARM64 = "iphonesimulator-arm64" + +IPHONESIMULATOR_I386 = "iphonesimulator-i386" + +IPHONESIMULATOR_X86_64 = "iphonesimulator-x86_64" + +# Mac Catalyst Platforms/Flavors + +MACCATALYST_ARM64 = "maccatalyst-arm64" + +MACCATALYST_X86_64 = "maccatalyst-x86_64" + +# Mac OS X Platforms/Flavors + +MACOS_ARM64 = "macosx-arm64" + +MACOS_X86_64 = "macosx-x86_64" + +MACOS_UNIVERSAL = "macosx-universal" + +# Watch OS Platforms/Flavors + +WATCHOS_ARM64_32 = "watchos-arm64_32" + +WATCHOS_ARMV7K = "watchos-armv7k" + +WATCHSIMULATOR_ARM64 = "watchsimulator-arm64" + +WATCHSIMULATOR_I386 = "watchsimulator-i386" + +WATCHSIMULATOR_X86_64 = "watchsimulator-x86_64" + +# Vision OS Platforms/Flavors +VISIONOS_ARM64 = "visionos-arm64" + +VISIONSIMULATOR_ARM64 = "visionsimulator-arm64" + +apple_sdks = struct( + IOS = IOS, + WATCHOS = WATCHOS, + MACOSX = MACOSX, + APPLETVOS = APPLETVOS, + VISIONOS = VISIONOS, +) + +ios_platforms = struct( + IPHONEOS_ARM64 = IPHONEOS_ARM64, + IPHONEOS_ARMV7 = IPHONEOS_ARMV7, + IPHONESIMULATOR_ARM64 = IPHONESIMULATOR_ARM64, + IPHONESIMULATOR_I386 = IPHONESIMULATOR_I386, + IPHONESIMULATOR_X86_64 = IPHONESIMULATOR_X86_64, +) + +mac_catalyst_platforms = struct( + MACCATALYST_ARM64 = MACCATALYST_ARM64, + MACCATALYST_X86_64 = MACCATALYST_X86_64, +) + +mac_platforms = struct( + MACOS_ARM64 = MACOS_ARM64, + MACOS_X86_64 = MACOS_X86_64, + MACOS_UNIVERSAL = MACOS_UNIVERSAL, +) + +watch_platforms = struct( + WATCHOS_ARM64_32 = WATCHOS_ARM64_32, + WATCHOS_ARMV7K = WATCHOS_ARMV7K, + WATCHSIMULATOR_ARM64 = WATCHSIMULATOR_ARM64, + WATCHSIMULATOR_I386 = WATCHSIMULATOR_I386, + WATCHSIMULATOR_X86_64 = WATCHSIMULATOR_X86_64, +) + +vision_platforms = struct( + VISIONOS_ARM64 = VISIONOS_ARM64, + VISIONSIMULATOR_ARM64 = 
VISIONSIMULATOR_ARM64, +) From e2a6f2c1a40fd7addfdb5a2f39a54e73bfd511f0 Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 03:12:53 -0700 Subject: [PATCH 0631/1133] Define AppleTV constants Reviewed By: blackm00n, rmaz Differential Revision: D55391924 fbshipit-source-id: be3df37c5a30b96c7004397c1597b4d993280590 --- prelude/platforms/apple/constants.bzl | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/prelude/platforms/apple/constants.bzl b/prelude/platforms/apple/constants.bzl index 327ff3a2e..455686b86 100644 --- a/prelude/platforms/apple/constants.bzl +++ b/prelude/platforms/apple/constants.bzl @@ -8,16 +8,24 @@ # These are identifiers used in defining Apple platforms for configuring apple_* rules. # Apple SDK Definitions -IOS = "ios" +APPLETVOS = "appletvos" -WATCHOS = "watchos" +IOS = "ios" MACOSX = "macosx" -APPLETVOS = "appletvos" +WATCHOS = "watchos" VISIONOS = "visionos" +# Apple TV Platforms/Flavors + +APPLETVOS_ARM64 = "appletvos-arm64" + +APPLETVSIMULATOR_ARM64 = "appletvsimulator-arm64" + +APPLETVSIMULATOR_X86_64 = "appletvsimulator-x86_64" + # iOS Platforms/Flavors IPHONEOS_ARM64 = "iphoneos-arm64" @@ -69,6 +77,12 @@ apple_sdks = struct( VISIONOS = VISIONOS, ) +appletv_platforms = struct( + APPLETVOS_ARM64 = APPLETVOS_ARM64, + APPLETVSIMULATOR_ARM64 = APPLETVSIMULATOR_ARM64, + APPLETVSIMULATOR_X86_64 = APPLETVSIMULATOR_X86_64, +) + ios_platforms = struct( IPHONEOS_ARM64 = IPHONEOS_ARM64, IPHONEOS_ARMV7 = IPHONEOS_ARMV7, From 33bd2db6c55980174a5eb69462c6c1de9b9c5494 Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 03:12:53 -0700 Subject: [PATCH 0632/1133] Extract apple platforms map Reviewed By: blackm00n, rmaz Differential Revision: D55391923 fbshipit-source-id: abbd6028400a5099b46b64d4a6de50c8214f9d7d --- prelude/platforms/apple/platforms_map.bzl | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 prelude/platforms/apple/platforms_map.bzl diff --git 
a/prelude/platforms/apple/platforms_map.bzl b/prelude/platforms/apple/platforms_map.bzl new file mode 100644 index 000000000..784302175 --- /dev/null +++ b/prelude/platforms/apple/platforms_map.bzl @@ -0,0 +1,11 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# @oss-disable: load("@prelude//platforms/apple/meta_only:platforms_map.bzl", _APPLE_PLATFORMS_MAP = "APPLE_PLATFORMS_MAP") + +APPLE_PLATFORMS_MAP = [] # TODO: Define OSS platforms map # @oss-enable +# @oss-disable: APPLE_PLATFORMS_MAP = _APPLE_PLATFORMS_MAP From 03cf4c09ab7326c65ffaa66f093f7a0ace3e06be Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 03:12:53 -0700 Subject: [PATCH 0633/1133] Extract out apple platform lookup functions Reviewed By: blackm00n, rmaz Differential Revision: D55391925 fbshipit-source-id: c10a4ec42ae925b6471b4438ba36b9af1bfddcf1 --- prelude/platforms/apple/platforms.bzl | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 prelude/platforms/apple/platforms.bzl diff --git a/prelude/platforms/apple/platforms.bzl b/prelude/platforms/apple/platforms.bzl new file mode 100644 index 000000000..6ebc9a439 --- /dev/null +++ b/prelude/platforms/apple/platforms.bzl @@ -0,0 +1,22 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//platforms/apple:platforms_map.bzl", "APPLE_PLATFORMS_MAP") + +def get_base_target_platform_for_platform(sdk_arch) -> [str, None]: + data = APPLE_PLATFORMS_MAP.get(sdk_arch) + if data != None: + return data.base_target_platform + + return None + +def get_default_target_platform_for_platform(sdk_arch) -> [str, None]: + data = APPLE_PLATFORMS_MAP.get(sdk_arch) + if data != None: + return data.target_platform + + return None From b4555202b68d4f48237af47d2ceb123d8377cc22 Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 03:12:53 -0700 Subject: [PATCH 0634/1133] Pass the `platform` rule to helper functions Reviewed By: blackm00n, rmaz Differential Revision: D55429393 fbshipit-source-id: caa5287a6954e52a0d75f64ac622e20dd11af7d9 --- prelude/platforms/apple/build_mode.bzl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prelude/platforms/apple/build_mode.bzl b/prelude/platforms/apple/build_mode.bzl index 9d8b20dba..f2e2223a0 100644 --- a/prelude/platforms/apple/build_mode.bzl +++ b/prelude/platforms/apple/build_mode.bzl @@ -31,6 +31,7 @@ BUILD_MODE = struct( CONSTRAINT_PACKAGE = "prelude//platforms/apple/constraints" # @oss-enable # @oss-disable: CONSTRAINT_PACKAGE = "ovr_config//build_mode/apple/constraints" +# TODO: Drop providing the rule when we're not longer attempting to support buck1. def config_settings(config_setting_rule): for mode in APPLE_BUILD_MODES: config_setting_rule( @@ -41,6 +42,7 @@ def config_settings(config_setting_rule): visibility = ["PUBLIC"], ) +# TODO: Drop providing the rule when we're not longer attempting to support buck1. 
def constraints(constraint_setting_rule, constraint_value_rule): constraint_setting_rule( name = "build_mode", From 91e433671330420d9912b6af0f09752bf36a9a27 Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 03:12:53 -0700 Subject: [PATCH 0635/1133] Move apple_platforms.bzl logic to prelude Reviewed By: rmaz Differential Revision: D55391922 fbshipit-source-id: 5112a81ad606dbc158e203479c17f30cc5560e79 --- prelude/platforms/apple/base.bzl | 103 +++++++++++++++++++++++++++++++ 1 file changed, 103 insertions(+) create mode 100644 prelude/platforms/apple/base.bzl diff --git a/prelude/platforms/apple/base.bzl b/prelude/platforms/apple/base.bzl new file mode 100644 index 000000000..6454c184d --- /dev/null +++ b/prelude/platforms/apple/base.bzl @@ -0,0 +1,103 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# @oss-disable: load("@fbsource//tools/build_defs/apple:build_mode_defs.bzl", _get_build_mode = "build_mode") +# @oss-disable: load("@fbsource//tools/build_defs/buck2:is_buck2.bzl", "is_buck2") + +load("@prelude//:is_buck2.bzl", "is_buck2") # @oss-enable +load("@prelude//platforms/apple:build_mode.bzl", "BUILD_MODE", "CONSTRAINT_PACKAGE") +load( + "@prelude//platforms/apple:constants.bzl", + "ios_platforms", + "mac_catalyst_platforms", + "mac_platforms", + "watch_platforms", +) + +# Local/debug constraints to add for build modes used by other rule platforms (ex: rust). +_LOCAL_CONSTRAINTS = [ + # @oss-disable: "ovr_config//build_mode/constraints:debug", +] + +# Non-local/release constraints to add for build modes used by other rule platforms (ex: rust). 
+_NON_LOCAL_CONSTRAINTS = [ + # @oss-disable: "ovr_config//build_mode/constraints:release", +] + +BUILD_MODE_TO_CONSTRAINTS_MAP = { + BUILD_MODE.LOCAL: ["{}:local".format(CONSTRAINT_PACKAGE)] + _LOCAL_CONSTRAINTS, + BUILD_MODE.MASTER: ["{}:master".format(CONSTRAINT_PACKAGE)] + _NON_LOCAL_CONSTRAINTS, + BUILD_MODE.PRODUCTION: ["{}:production".format(CONSTRAINT_PACKAGE)] + _NON_LOCAL_CONSTRAINTS, + BUILD_MODE.PROFILE: ["{}:profile".format(CONSTRAINT_PACKAGE)] + _NON_LOCAL_CONSTRAINTS, + BUILD_MODE.RELEASE_CANDIDATE: ["{}:rc".format(CONSTRAINT_PACKAGE)] + _NON_LOCAL_CONSTRAINTS, +} + +_MOBILE_PLATFORMS = [ + ios_platforms.IPHONEOS_ARM64, + ios_platforms.IPHONEOS_ARMV7, + ios_platforms.IPHONESIMULATOR_ARM64, + ios_platforms.IPHONESIMULATOR_I386, + ios_platforms.IPHONESIMULATOR_X86_64, + watch_platforms.WATCHOS_ARM64_32, + watch_platforms.WATCHOS_ARMV7K, + watch_platforms.WATCHSIMULATOR_ARM64, + watch_platforms.WATCHSIMULATOR_I386, + watch_platforms.WATCHSIMULATOR_X86_64, +] + +_MAC_PLATFORMS = [ + mac_platforms.MACOS_ARM64, + mac_platforms.MACOS_X86_64, + mac_platforms.MACOS_UNIVERSAL, + mac_catalyst_platforms.MACCATALYST_ARM64, + mac_catalyst_platforms.MACCATALYST_X86_64, +] + +# TODO: Drop the platform_rule when we're no longer attempting to support buck1. +def apple_generated_platforms(name, constraint_values, deps, platform_rule, platform = None): + # By convention, the cxx.default_platform is typically the same as the platform being defined. + # This is not the case for all watch platforms, so provide an override.
+ platform = platform if platform else name + if is_mobile_platform(platform) or is_buck2_mac_platform(platform): + for build_mode, build_mode_deps in BUILD_MODE_TO_CONSTRAINTS_MAP.items(): + platform_rule( + name = _get_generated_name(name, platform, build_mode), + constraint_values = constraint_values + build_mode_deps, + visibility = ["PUBLIC"], + deps = deps, + ) + + # Create a platform without the build mode to support backwards compatibility of hardcoded platforms + # and with buck1 cxx platform setup. + # TODO(chatatap): Look to remove all hardcoded references and get rid of these + platform_rule( + name = name, + constraint_values = constraint_values, + visibility = ["PUBLIC"], + deps = deps, + ) + +def apple_build_mode_backed_platform(name, platform, build_mode = None): + build_mode = _get_build_mode() if build_mode == None else build_mode + return _get_generated_name(name, platform, build_mode) + +def is_mobile_platform(platform): + # These builds modes are primarily used in mobile code. MacOS builds in fbcode/arvr use different + # modes to represent dev/opt variants. 
+ return platform in _MOBILE_PLATFORMS + +def is_buck2_mac_platform(platform): + return is_buck2() and platform in _MAC_PLATFORMS + +def _get_generated_name(name, platform, build_mode): + if is_mobile_platform(platform) or is_buck2_mac_platform(platform): + return "{}-{}".format(name, build_mode) + else: + return name + +def _get_build_mode(): # @oss-enable + return None # TODO: Implement OSS version # @oss-enable From f83c370114f7cc7b6e1d464fc49fef1afdd120d9 Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 03:12:53 -0700 Subject: [PATCH 0636/1133] Move _apple_platforms attr to its own file Reviewed By: blackm00n, rmaz Differential Revision: D55429398 fbshipit-source-id: d1e366d7b697e374df0524caea032b32d07523ff --- prelude/platforms/apple/attr.bzl | 15 +++++++++++++++ prelude/platforms/apple/constants.bzl | 2 ++ prelude/rules.bzl | 6 ++---- 3 files changed, 19 insertions(+), 4 deletions(-) create mode 100644 prelude/platforms/apple/attr.bzl diff --git a/prelude/platforms/apple/attr.bzl b/prelude/platforms/apple/attr.bzl new file mode 100644 index 000000000..1ca4e368e --- /dev/null +++ b/prelude/platforms/apple/attr.bzl @@ -0,0 +1,15 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//platforms/apple:constants.bzl", "APPLE_PLATFORMS_KEY") + +def add_apple_platforms_attr(attributes): + # Add _apple_platforms to all rules so that we may query the target platform to use until we support configuration + # modifiers and can use them to set the configuration to use for operations. + # Map of string identifier to platform.
+ attributes[APPLE_PLATFORMS_KEY] = attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}) + return attributes diff --git a/prelude/platforms/apple/constants.bzl b/prelude/platforms/apple/constants.bzl index 455686b86..8b14b0406 100644 --- a/prelude/platforms/apple/constants.bzl +++ b/prelude/platforms/apple/constants.bzl @@ -7,6 +7,8 @@ # These are identifiers used in defining Apple platforms for configuring apple_* rules. +APPLE_PLATFORMS_KEY = "_apple_platforms" + # Apple SDK Definitions APPLETVOS = "appletvos" diff --git a/prelude/rules.bzl b/prelude/rules.bzl index 64ab5e5f3..36b5910aa 100644 --- a/prelude/rules.bzl +++ b/prelude/rules.bzl @@ -9,6 +9,7 @@ load("@prelude//:buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "B load("@prelude//configurations:rules.bzl", _config_implemented_rules = "implemented_rules") load("@prelude//decls/common.bzl", "prelude_rule") load("@prelude//is_full_meta_repo.bzl", "is_full_meta_repo") +load("@prelude//platforms/apple/attr.bzl", "add_apple_platforms_attr") # Combine the attributes we generate, we the custom implementations we have. load("@prelude//rules_impl.bzl", "extra_attributes", "extra_implemented_rules", "rule_decl_records", "toolchain_rule_names", "transitions") @@ -51,10 +52,7 @@ def _mk_rule(rule_spec: typing.Any, extra_attrs: dict[str, typing.Any] = dict(), # copy so we don't try change the passed in object attributes["_cxx_toolchain_target_configuration"] = attrs.dep(default = "prelude//platforms:fat_platform_incompatible") - # Add _apple_platforms to all rules so that we may query the target platform to use until we support configuration - # modifiers and can use them to set the configuration to use for operations. - # Map of string identifer to platform. 
- attributes["_apple_platforms"] = attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}) + attributes = add_apple_platforms_attr(attributes) extra_args = dict(kwargs) cfg = transitions.get(name) From 1110de5a587431f417ed3465e758349751190392 Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 03:12:53 -0700 Subject: [PATCH 0637/1133] Consolidate the get_build_mode function Reviewed By: blackm00n, rmaz Differential Revision: D55429395 fbshipit-source-id: 920a4a3c15af31d0ad38f7b0e99ed9f364f95fb2 --- prelude/platforms/apple/base.bzl | 14 ++++++++------ prelude/platforms/apple/build_mode.bzl | 3 +++ 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/prelude/platforms/apple/base.bzl b/prelude/platforms/apple/base.bzl index 6454c184d..e1960832f 100644 --- a/prelude/platforms/apple/base.bzl +++ b/prelude/platforms/apple/base.bzl @@ -5,11 +5,16 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-# @oss-disable: load("@fbsource//tools/build_defs/apple:build_mode_defs.bzl", _get_build_mode = "build_mode") +# @oss-disable: load("@fbsource//tools/build_defs/apple:build_mode_defs.bzl", get_build_mode = "build_mode") # @oss-disable: load("@fbsource//tools/build_defs/buck2:is_buck2.bzl", "is_buck2") load("@prelude//:is_buck2.bzl", "is_buck2") # @oss-enable -load("@prelude//platforms/apple:build_mode.bzl", "BUILD_MODE", "CONSTRAINT_PACKAGE") +load( + "@prelude//platforms/apple:build_mode.bzl", + "BUILD_MODE", + "CONSTRAINT_PACKAGE", + "get_build_mode", # @oss-enable +) load( "@prelude//platforms/apple:constants.bzl", "ios_platforms", @@ -82,7 +87,7 @@ def apple_generated_platforms(name, constraint_values, deps, platform_rule, plat ) def apple_build_mode_backed_platform(name, platform, build_mode = None): - build_mode = _get_build_mode() if build_mode == None else build_mode + build_mode = get_build_mode() if build_mode == None else build_mode return _get_generated_name(name, platform, build_mode) def is_mobile_platform(platform): @@ -98,6 +103,3 @@ def _get_generated_name(name, platform, build_mode): return "{}-{}".format(name, build_mode) else: return name - -def _get_build_mode(): # @oss-enable - return None # TODO: Implement OSS version # @oss-enable diff --git a/prelude/platforms/apple/build_mode.bzl b/prelude/platforms/apple/build_mode.bzl index f2e2223a0..04b3512d6 100644 --- a/prelude/platforms/apple/build_mode.bzl +++ b/prelude/platforms/apple/build_mode.bzl @@ -55,3 +55,6 @@ def constraints(constraint_setting_rule, constraint_value_rule): constraint_setting = ":build_mode", visibility = ["PUBLIC"], ) + +def get_build_mode(): + return None # TODO: Implement OSS version From eaf37f44051fde9258f50206f28327b3795ea26e Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 03:12:53 -0700 Subject: [PATCH 0638/1133] Move the apple_target_platforms logic to prelude Reviewed By: blackm00n Differential Revision: D55429394 fbshipit-source-id: 
8fdbbbd5ac36969e45bb0fd8ab682bab336fc285 --- prelude/platforms/apple/platforms.bzl | 214 +++++++++++++++++++++++++- 1 file changed, 210 insertions(+), 4 deletions(-) diff --git a/prelude/platforms/apple/platforms.bzl b/prelude/platforms/apple/platforms.bzl index 6ebc9a439..22def2b1d 100644 --- a/prelude/platforms/apple/platforms.bzl +++ b/prelude/platforms/apple/platforms.bzl @@ -5,18 +5,224 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@fbsource//tools/build_defs:buckconfig.bzl", "read") +# @oss-disable: load("@fbsource//tools/build_defs/apple:build_mode_defs.bzl", get_build_mode = "build_mode") +load("@fbsource//tools/build_defs/buck2:is_buck2.bzl", "is_buck2") +load("@prelude//platforms/apple:base.bzl", "BUILD_MODE_TO_CONSTRAINTS_MAP", "apple_build_mode_backed_platform", "is_buck2_mac_platform", "is_mobile_platform") +load( + "@prelude//platforms/apple:build_mode.bzl", + "APPLE_BUILD_MODES", + "BUILD_MODE_LOCAL", + "get_build_mode" # @oss-enable +) +load( + "@prelude//platforms/apple:constants.bzl", + "APPLE_PLATFORMS_KEY", + "ios_platforms", + "mac_catalyst_platforms", + "mac_platforms", +) load("@prelude//platforms/apple:platforms_map.bzl", "APPLE_PLATFORMS_MAP") -def get_base_target_platform_for_platform(sdk_arch) -> [str, None]: +_SUPPORTED_IOS_PLATFORMS = [ + ios_platforms.IPHONEOS_ARM64, + ios_platforms.IPHONESIMULATOR_ARM64, + ios_platforms.IPHONESIMULATOR_X86_64, +] + +_SUPPORTED_MACOS_PLATFORMS = [ + mac_platforms.MACOS_ARM64, + mac_platforms.MACOS_X86_64, +] + +_SUPPORTED_MAC_CATALYST_PLATFORMS = [ + mac_catalyst_platforms.MACCATALYST_ARM64, + mac_catalyst_platforms.MACCATALYST_X86_64, +] + +_ANALYSIS_CONSTRAINTS = ["ovr_config//bitcode/constraints:bitcode"] +_DEFAULT_ANALYSIS_IOS_PLATFORM = ios_platforms.IPHONEOS_ARM64 +_DEFAULT_ANALYSIS_MACOS_PLATFORM = mac_platforms.MACOS_X86_64 + +DEFAULT_SUPPORTED_CXX_PLATFORMS = _SUPPORTED_IOS_PLATFORMS + +def apple_target_platforms( + base_name, + 
platform_rule, + constraint_values = None, # Constraint values added to all generated platforms + visibility = None, + deps = None, + cxx_platforms_constraint_values = None, # Must be a map of a supported cxx platform to a list of constraint values + build_mode_constraint_values = None, # Must be a map of a supported build mode to a list of constraint values + supported_cxx_platforms = DEFAULT_SUPPORTED_CXX_PLATFORMS, # Cxx platforms to generate platforms for + supported_build_modes = APPLE_BUILD_MODES): # Build modes to generate platforms for + """ Define architecture and sdk specific platforms alongside the base platform. """ + + # Form defaults + constraint_values = constraint_values or [] + cxx_platforms_constraint_values = cxx_platforms_constraint_values or {} + build_mode_constraint_values = build_mode_constraint_values or {} + visibility = visibility or ["PUBLIC"] + deps = deps or [] + + _validate_cxx_platforms_constraint_values(base_name, cxx_platforms_constraint_values, supported_cxx_platforms) + _validate_build_mode_constraint_values(base_name, build_mode_constraint_values, supported_build_modes) + + # Define the generated platforms + for platform in supported_cxx_platforms: + platform_dep = get_default_target_platform_for_platform(platform) + cxx_platform_constraints = cxx_platforms_constraint_values.get(platform, []) + if is_mobile_platform(platform) or is_buck2_mac_platform(platform): + for build_mode in supported_build_modes: + build_mode_constraints = build_mode_constraint_values.get(build_mode, []) + BUILD_MODE_TO_CONSTRAINTS_MAP.get(build_mode) + _define_platform( + base_name, + platform, + build_mode, + constraint_values + cxx_platform_constraints + build_mode_constraints, + visibility, + deps + [platform_dep], + platform_rule, + ) + else: + _define_platform( + base_name, + platform, + None, + constraint_values + cxx_platform_constraints, + visibility, + deps + [platform_dep], + platform_rule, + ) + + # Define the base platform in case it is 
needed (example: to be a dep of another platform) + platform_rule( + name = base_name, + constraint_values = constraint_values, + visibility = visibility, + deps = deps, + ) + + analysis_platform = _get_analysis_platform_for_supported_platforms(supported_cxx_platforms) + analysis_platform_dep = get_default_target_platform_for_platform(analysis_platform) + analysis_platform_build_mode_constraints = build_mode_constraint_values.get(BUILD_MODE_LOCAL, []) + + platform_rule( + name = base_name + "-analysis", + constraint_values = constraint_values + analysis_platform_build_mode_constraints + _ANALYSIS_CONSTRAINTS, + visibility = ["PUBLIC"], + deps = deps + [analysis_platform_dep], + ) + +def config_backed_apple_target_platform(target_platform = None, platform = None, build_mode = None): + platform = _get_default_platform() if platform == None else platform + build_mode = get_build_mode() if build_mode == None else build_mode + if target_platform == None: + return get_default_target_platform_for_platform(platform) + + return _get_generated_name(target_platform, platform, build_mode) + +def get_default_target_platform_for_platform(sdk_arch) -> [str, None]: data = APPLE_PLATFORMS_MAP.get(sdk_arch) if data != None: - return data.base_target_platform + return data.target_platform return None -def get_default_target_platform_for_platform(sdk_arch) -> [str, None]: +def set_apple_platforms(platform, base_config_backed_target_platform, kwargs): + def get_supported_platforms(): + if not is_buck2(): + return None + if platform in _SUPPORTED_IOS_PLATFORMS: + return _SUPPORTED_IOS_PLATFORMS + elif platform in _SUPPORTED_MACOS_PLATFORMS: + return _SUPPORTED_MACOS_PLATFORMS + elif platform in _SUPPORTED_MAC_CATALYST_PLATFORMS: + return _SUPPORTED_MAC_CATALYST_PLATFORMS + else: + return None + + supported_platforms = get_supported_platforms() + if not supported_platforms: + return kwargs + + # If we've already defined the apple platforms, we can avoid having to process them again. 
+ if APPLE_PLATFORMS_KEY in kwargs: + return kwargs + + apple_platforms = {} + for platform in supported_platforms: + for build_mode in APPLE_BUILD_MODES: + identifier = "{}-{}".format(platform, build_mode) + if base_config_backed_target_platform: + apple_platforms[identifier] = config_backed_apple_target_platform(base_config_backed_target_platform, platform, build_mode) + else: + base_target_platform = _get_base_target_platform_for_platform(platform) + if not base_target_platform: + fail("A valid base target platform is required!") + apple_platforms[identifier] = apple_build_mode_backed_platform(base_target_platform, platform, build_mode) + + kwargs[APPLE_PLATFORMS_KEY] = apple_platforms + + return kwargs + +def _get_generated_name(base_name, platform, build_mode): + platform_and_build_mode_name = apple_build_mode_backed_platform(platform, platform, build_mode) + return "{}-{}".format(base_name, platform_and_build_mode_name) + +def _get_default_platform(): + platform = read("cxx", "default_platform") + return platform if platform != None else ios_platforms.IPHONESIMULATOR_X86_64 + +def _define_platform(base_name, platform, build_mode, constraint_values, visibility, deps, platform_rule): + # @lint-ignore BUCKLINT - We set the visibility to PUBLIC directly and can bypass fb_native + platform_rule( + name = _get_generated_name(base_name, platform, build_mode), + constraint_values = constraint_values, + visibility = visibility, + deps = deps, + ) + +def _get_base_target_platform_for_platform(sdk_arch) -> [str, None]: data = APPLE_PLATFORMS_MAP.get(sdk_arch) if data != None: - return data.target_platform + return data.base_target_platform return None + +def _get_analysis_platform_for_supported_platforms(supported_cxx_platforms): + # For determining the platform deps to use for the base platform, we inspect the supported + # cxx platforms, giving precedence to iOS platforms. 
+ for platform in _SUPPORTED_IOS_PLATFORMS: + if platform in supported_cxx_platforms: + return _DEFAULT_ANALYSIS_IOS_PLATFORM + + for platform in _SUPPORTED_MACOS_PLATFORMS: + if platform in supported_cxx_platforms: + return _DEFAULT_ANALYSIS_MACOS_PLATFORM + + return _DEFAULT_ANALYSIS_IOS_PLATFORM + +def _validate_cxx_platforms_constraint_values(base_name, cxx_platforms_constraint_values, supported_cxx_platforms): + if type(cxx_platforms_constraint_values) != type({}): + fail("cxx_platforms_constraint_values must be a map of platform to constraint values!") + for platform, platform_values in cxx_platforms_constraint_values.items(): + if platform not in supported_cxx_platforms: + fail("\n\nProviding platform constraints for an unsupported platform!\nBase platform: {}\nCXX Platform: {} with values {}\nSupported platforms: {}\n".format( + base_name, + platform, + platform_values, + ", ".join(supported_cxx_platforms), + )) + +def _validate_build_mode_constraint_values(base_name, build_mode_constraint_values, supported_build_modes): + if type(build_mode_constraint_values) != type({}): + fail("build_mode_constraint_values must be a map of build mode to constraint values!") + for build_mode, build_mode_values in build_mode_constraint_values.items(): + if build_mode not in supported_build_modes: + fail("\n\nProviding build mode constraints for an unsupported build mode!\nBase platform: {}\nBuild mode: {} with values {}\nSupported build modes: {}\n".format( + base_name, + build_mode, + build_mode_values, + ", ".join(supported_build_modes), + )) From 3cfd02a497e2efbbaa292eca93a47eeb1541e11e Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 03:12:53 -0700 Subject: [PATCH 0639/1133] Extract the default target mapping used in fbsource Summary: # This Stack Right now, our platform configuration layer is baked deeply into the FB macro layer. 
We're not ready to move to configuration modifiers yet, and for use in WhatsApp/OpenSource, or even our basic e2e tests, we need better ability to configure for different sdk/arch/build_mode combinations. We also want a central point that when we move to modifiers, it's clearer what can be cleaned up, and integrations can be better designed. As such, we're going to introduce an `apple_native` layer, which ensures that `apple_*` targets have appropriate default target platforms and `_apple_platforms` set for build, and discovery in project generation. # This diff As this diff says, when dealing with setting defaults, we need the default mapping. To ensure one source of truth, extract this into prelude, and make it available in fbsource. Reviewed By: rmaz Differential Revision: D55445131 fbshipit-source-id: 4308116bb9ce8993c591ded6f3cd9e82417897aa --- prelude/platforms/apple/constants.bzl | 2 ++ prelude/platforms/apple/platforms_map.bzl | 7 +++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/prelude/platforms/apple/constants.bzl b/prelude/platforms/apple/constants.bzl index 8b14b0406..88bec5da6 100644 --- a/prelude/platforms/apple/constants.bzl +++ b/prelude/platforms/apple/constants.bzl @@ -9,6 +9,8 @@ APPLE_PLATFORMS_KEY = "_apple_platforms" +APPLE = "Apple" + # Apple SDK Definitions APPLETVOS = "appletvos" diff --git a/prelude/platforms/apple/platforms_map.bzl b/prelude/platforms/apple/platforms_map.bzl index 784302175..9e15a662c 100644 --- a/prelude/platforms/apple/platforms_map.bzl +++ b/prelude/platforms/apple/platforms_map.bzl @@ -5,7 +5,10 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree.
-# @oss-disable: load("@prelude//platforms/apple/meta_only:platforms_map.bzl", _APPLE_PLATFORMS_MAP = "APPLE_PLATFORMS_MAP") +# @oss-disable: load("@prelude//platforms/apple/meta_only:platforms_map.bzl", _APPLE_PLATFORMS_MAP = "APPLE_PLATFORMS_MAP", _APPLE_SDK_DEFAULT_PLATFORM_MAP = "APPLE_SDK_DEFAULT_PLATFORM_MAP") -APPLE_PLATFORMS_MAP = [] # TODO: Define OSS platforms map # @oss-enable +APPLE_PLATFORMS_MAP = {} # TODO: Define OSS platforms map # @oss-enable # @oss-disable: APPLE_PLATFORMS_MAP = _APPLE_PLATFORMS_MAP + +APPLE_SDK_DEFAULT_PLATFORM_MAP = {} # @oss-enable +# @oss-disable: APPLE_SDK_DEFAULT_PLATFORM_MAP = _APPLE_SDK_DEFAULT_PLATFORM_MAP From 0aaac377d646347785c8a86573f131217d07515e Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 03:12:53 -0700 Subject: [PATCH 0640/1133] Define the apple_native layer Summary: # This Stack Right now, our platform configuration layer is baked deeply into the FB macro layer. We're not ready to move to configuration modifiers yet, and for use in WhatsApp/OpenSource, or even our basic e2e tests, we need better ability to configure for different sdk/arch/build_mode combinations. We also want a central point that when we move to modifiers, it's clearer what can be cleaned up, and integrations can be better designed. As such, we're going to introduce an `apple_native` layer, which ensures that `apple_*` targets have appropriate default target platforms and `_apple_platforms` set for build, and discovery in project generation. # This diff We now have everything we need to define the `apple_native` layer which sets the configuration we need on apple targets.
Reviewed By: blackm00n, rmaz Differential Revision: D55445133 fbshipit-source-id: fb8321e9eb848ac24b69e32e929c2ae495f1b0a4 --- prelude/apple/apple_native.bzl | 91 ++++++++++++++++++++++++++++++++++ 1 file changed, 91 insertions(+) create mode 100644 prelude/apple/apple_native.bzl diff --git a/prelude/apple/apple_native.bzl b/prelude/apple/apple_native.bzl new file mode 100644 index 000000000..169371183 --- /dev/null +++ b/prelude/apple/apple_native.bzl @@ -0,0 +1,91 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +load("@prelude//:prelude.bzl", "native") +load( + "@prelude//platforms/apple:constants.bzl", + "APPLE", +) +load("@prelude//platforms/apple:platforms.bzl", "config_backed_apple_target_platform", "get_default_target_platform_for_platform", "set_apple_platforms") +load("@prelude//platforms/apple/platforms_map.bzl", "APPLE_SDK_DEFAULT_PLATFORM_MAP") +load("@prelude//utils/buckconfig.bzl", "read") + +def _apple_library(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_library(**kwargs) + +def _apple_asset_catalog(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_asset_catalog(**kwargs) + +def _apple_binary(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_binary(**kwargs) + +def _apple_bundle(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_bundle(**kwargs) + +def _apple_watchos_bundle(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_watchos_bundle(**kwargs) + +def _apple_package(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_package(**kwargs) + +def _apple_resource(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_resource(**kwargs) + +def _apple_test(**kwargs): + kwargs = 
_update_platforms(**kwargs) + native.apple_test(**kwargs) + +def _apple_xcuitest(**kwargs): + kwargs = _update_platforms(**kwargs) + native.apple_xcuitest(**kwargs) + +def _update_platforms(**kwargs): + platform = _get_default_platform() + + default_target_platform = kwargs.pop("default_target_platform", None) + base_config_backed_target_platform = kwargs.pop("config_backed_target_platform", None) + + if default_target_platform != None and base_config_backed_target_platform != None: + name = kwargs.get("name", "UNKNOWN_TARGET") + fail("{} has both a default_target_platform and a config_backed_target_platform, which is not allowed".format(name)) + + if base_config_backed_target_platform != None: + default_target_platform = config_backed_apple_target_platform(base_config_backed_target_platform, platform) + elif default_target_platform == None: + default_target_platform = get_default_target_platform_for_platform(platform) + + if default_target_platform != None: + kwargs["default_target_platform"] = default_target_platform + + kwargs = set_apple_platforms(platform, base_config_backed_target_platform, kwargs) + + return kwargs + +def _get_default_platform(): + config_platform = read("cxx", "default_platform") + if config_platform != None: + return config_platform + return APPLE_SDK_DEFAULT_PLATFORM_MAP.get(APPLE) + +apple_native = struct( + apple_asset_catalog = _apple_asset_catalog, + apple_binary = _apple_binary, + apple_bundle = _apple_bundle, + apple_watchos_bundle = _apple_watchos_bundle, + apple_library = _apple_library, + apple_package = _apple_package, + apple_resource = _apple_resource, + apple_test = _apple_test, + apple_xcuitest = _apple_xcuitest, +) From bc72b4737ff1671a35288dfe15c8f9fa1e531c2d Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 03:19:55 -0700 Subject: [PATCH 0641/1133] Use relative paths for inputs when using core data tools Summary: When WhatsApp is trying to use `mapc` on `xcmappingmodel` files, it's erroring unless a
relative path to the input is used. Update the `cmd_arg` to make a relative call, adding the input as a hidden input (otherwise just the path is captured). Differential Revision: D55491512 fbshipit-source-id: ce7ef176c372a9a577a128f99d80bdf631007eec --- prelude/apple/apple_core_data.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/apple/apple_core_data.bzl b/prelude/apple/apple_core_data.bzl index c2112d0bb..9d3847442 100644 --- a/prelude/apple/apple_core_data.bzl +++ b/prelude/apple/apple_core_data.bzl @@ -77,6 +77,6 @@ def _get_tool_command(ctx: AnalysisContext, core_data_spec: AppleCoreDataSpec, p get_bundle_min_target_version(ctx, ctx.attrs.binary), "--module", core_data_spec.module if core_data_spec.module else product_name, - core_data_spec.path, + cmd_args(core_data_spec.path, format = "./{}"), output, - ], delimiter = " ") + ], delimiter = " ").hidden(core_data_spec.path) From 544fdf49f95469e0e6c26302264e9a3283524b28 Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Fri, 29 Mar 2024 06:46:46 -0700 Subject: [PATCH 0642/1133] Inline the _apple_platforms attr Reviewed By: milend Differential Revision: D55522928 fbshipit-source-id: 11f66bced44b64e7b818c2ff214977164f77a60e --- prelude/rules.bzl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/prelude/rules.bzl b/prelude/rules.bzl index 36b5910aa..64ab5e5f3 100644 --- a/prelude/rules.bzl +++ b/prelude/rules.bzl @@ -9,7 +9,6 @@ load("@prelude//:buck2_compatibility.bzl", "BUCK2_COMPATIBILITY_ATTRIB_NAME", "B load("@prelude//configurations:rules.bzl", _config_implemented_rules = "implemented_rules") load("@prelude//decls/common.bzl", "prelude_rule") load("@prelude//is_full_meta_repo.bzl", "is_full_meta_repo") -load("@prelude//platforms/apple/attr.bzl", "add_apple_platforms_attr") # Combine the attributes we generate, we the custom implementations we have. 
load("@prelude//rules_impl.bzl", "extra_attributes", "extra_implemented_rules", "rule_decl_records", "toolchain_rule_names", "transitions") @@ -52,7 +51,10 @@ def _mk_rule(rule_spec: typing.Any, extra_attrs: dict[str, typing.Any] = dict(), # copy so we don't try change the passed in object attributes["_cxx_toolchain_target_configuration"] = attrs.dep(default = "prelude//platforms:fat_platform_incompatible") - attributes = add_apple_platforms_attr(attributes) + # Add _apple_platforms to all rules so that we may query the target platform to use until we support configuration + # modifiers and can use them to set the configuration to use for operations. + # Map of string identifer to platform. + attributes["_apple_platforms"] = attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}) extra_args = dict(kwargs) cfg = transitions.get(name) From 7e38e21613dd557cdbdff36be245479789ccb82c Mon Sep 17 00:00:00 2001 From: George Giorgidze Date: Fri, 29 Mar 2024 15:44:00 -0700 Subject: [PATCH 0643/1133] Fix "OSError: [WinError 6] The handle is invalid" failures Summary: # Context Attempt to fix: https://fb.workplace.com/groups/buck2windows/permalink/932995478572365/ Similar to this change from scottcao D55267096 Reviewed By: krallin Differential Revision: D55515411 fbshipit-source-id: 51e22a511f8a3c922019ca1ecf140d1d3c1ca491 --- prelude/cxx/tools/makefile_to_dep_file.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/cxx/tools/makefile_to_dep_file.py b/prelude/cxx/tools/makefile_to_dep_file.py index ec173cac3..1049a7876 100755 --- a/prelude/cxx/tools/makefile_to_dep_file.py +++ b/prelude/cxx/tools/makefile_to_dep_file.py @@ -88,7 +88,7 @@ def process_dep_file(args): Expects the src dep file to be the first argument, dst dep file to be the second argument, and the command to follow. 
""" - ret = subprocess.call(args[2:]) + ret = subprocess.call(args[2:], stdin=subprocess.DEVNULL) if ret == 0: rewrite_dep_file(args[0], args[1]) sys.exit(ret) From 9e1c55b4baeba536fa92c7d6daf25ad0ff1f507d Mon Sep 17 00:00:00 2001 From: Taras Tsugrii Date: Sat, 30 Mar 2024 09:44:04 -0700 Subject: [PATCH 0644/1133] Simplify jvm_args check. Summary: it's more concise and idiomatic Reviewed By: hick209 Differential Revision: D55537495 fbshipit-source-id: c7817b730b89ae235b62e78660cd853ebdf037ec --- prelude/java/java_test.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index 697a0dccc..2a18e2294 100644 --- a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -50,7 +50,7 @@ def build_junit_test( java = ctx.attrs.java[RunInfo] if ctx.attrs.java else ctx.attrs._java_toolchain[JavaToolchainInfo].java_for_tests cmd = [java] + extra_cmds + ctx.attrs.vm_args + ["-XX:-MaxFDLimit"] - if java_test_toolchain.jvm_args != None and len(java_test_toolchain.jvm_args) > 0: + if java_test_toolchain.jvm_args: cmd.extend(java_test_toolchain.jvm_args) classpath = [] From c16357a49a3c713b5fc61cdeab7d100f46a2a7b6 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Sat, 30 Mar 2024 18:02:46 -0700 Subject: [PATCH 0645/1133] Fix type of _TargetSourceType in linking Summary: `type(())` is string, and we are getting rid of strings as types. 
Reviewed By: JakobDegen Differential Revision: D55562934 fbshipit-source-id: 6b4dc3b8e77c684e633735c692de2e7c02523138 --- prelude/linking/linkable_graph.bzl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index a4c01d7fe..74531b2d8 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -52,8 +52,7 @@ LinkableRootInfo = provider( ############################################################################### _DisallowConstruction = record() -_TUPLE_TYPE = type(()) -_TargetSourceType = [Artifact, str, _TUPLE_TYPE] +_TargetSourceType = Artifact | str | tuple LinkableNode = record( # Attribute labels on the target. From 7d32ea51717d5b032d67d77d2998c61a040708ac Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Sat, 30 Mar 2024 20:45:40 -0700 Subject: [PATCH 0646/1133] -> Artifact | None Summary: `[A, B]` is deprecated syntax for `A | B`. But we cannot codemod it for every type because of autodeps who interpreters code with Python. 
For `Artifact` this is fine because it is declared as `MagicMock`: https://www.internalfb.com/code/fbsource/[8f43ad93855724d6d080b0620f05f026209abb5e]/fbcode/tools/build/buck/parser.py?lines=399 Reviewed By: JakobDegen Differential Revision: D55563840 fbshipit-source-id: 379bc6da558852ee40a71bfb9da5ad329e4c3703 --- prelude/android/aapt2_link.bzl | 2 +- .../android_binary_native_library_rules.bzl | 8 ++-- .../android_binary_resources_rules.bzl | 22 ++++----- prelude/android/android_build_config.bzl | 4 +- prelude/android/android_bundle.bzl | 2 +- prelude/android/android_library.bzl | 4 +- prelude/android/android_providers.bzl | 18 ++++---- prelude/android/android_resource.bzl | 4 +- prelude/android/dex_rules.bzl | 18 ++++---- prelude/android/preprocess_java_classes.bzl | 2 +- prelude/android/proguard.bzl | 10 ++-- prelude/android/r_dot_java.bzl | 12 ++--- prelude/android/voltron.bzl | 2 +- prelude/apple/apple_bundle_part.bzl | 2 +- prelude/apple/apple_bundle_resources.bzl | 2 +- prelude/apple/apple_code_signing_types.bzl | 2 +- prelude/apple/apple_core_data.bzl | 2 +- prelude/apple/apple_entitlements.bzl | 2 +- prelude/apple/apple_info_plist.bzl | 6 +-- prelude/apple/apple_resource_types.bzl | 2 +- prelude/apple/apple_test.bzl | 6 +-- prelude/apple/modulemap.bzl | 4 +- prelude/apple/scene_kit_assets.bzl | 2 +- prelude/apple/swift/swift_compilation.bzl | 10 ++-- prelude/cxx/compile.bzl | 6 +-- prelude/cxx/cxx_executable.bzl | 4 +- prelude/cxx/cxx_library.bzl | 8 ++-- prelude/cxx/cxx_link_utility.bzl | 4 +- prelude/cxx/dist_lto/dist_lto.bzl | 2 +- prelude/cxx/headers.bzl | 6 +-- prelude/cxx/link.bzl | 4 +- prelude/cxx/link_types.bzl | 4 +- prelude/cxx/linker.bzl | 2 +- prelude/cxx/preprocessor.bzl | 8 ++-- prelude/debugging/types.bzl | 2 +- prelude/haskell/haskell.bzl | 2 +- prelude/haskell/haskell_ghci.bzl | 8 ++-- prelude/ide_integrations/xcode.bzl | 2 +- prelude/java/class_to_srcs.bzl | 10 ++-- prelude/java/java_library.bzl | 14 +++--- 
prelude/java/java_providers.bzl | 46 +++++++++---------- prelude/java/javacd_jar_creator.bzl | 10 ++-- prelude/js/js_providers.bzl | 2 +- prelude/jvm/cd_jar_creator_util.bzl | 16 +++---- prelude/kotlin/kotlin_library.bzl | 2 +- prelude/kotlin/kotlincd_jar_creator.bzl | 6 +-- prelude/linking/link_info.bzl | 24 +++++----- prelude/linking/shared_libraries.bzl | 2 +- prelude/rust/build.bzl | 6 +-- prelude/rust/context.bzl | 2 +- prelude/rust/failure_filter.bzl | 2 +- prelude/rust/link_info.bzl | 4 +- prelude/rust/outputs.bzl | 6 +-- 53 files changed, 181 insertions(+), 181 deletions(-) diff --git a/prelude/android/aapt2_link.bzl b/prelude/android/aapt2_link.bzl index a21c592f0..91ac3af02 100644 --- a/prelude/android/aapt2_link.bzl +++ b/prelude/android/aapt2_link.bzl @@ -23,7 +23,7 @@ def get_aapt2_link( no_resource_removal: bool, should_keep_raw_values: bool, package_id_offset: int, - resource_stable_ids: [Artifact, None], + resource_stable_ids: Artifact | None, preferred_density: [str, None], filter_locales: bool, locales: list[str], diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index e7a07bf5d..751f9cca0 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -95,7 +95,7 @@ def get_android_binary_native_library_info( enhance_ctx: EnhancementContext, android_packageable_info: AndroidPackageableInfo, deps_by_platform: dict[str, list[Dependency]], - apk_module_graph_file: [Artifact, None] = None, + apk_module_graph_file: Artifact | None = None, prebuilt_native_library_dirs_to_exclude: [set_type, None] = None, shared_libraries_to_exclude: [set_type, None] = None) -> AndroidBinaryNativeLibsInfo: ctx = enhance_ctx.ctx @@ -526,8 +526,8 @@ _NativeLibsAndAssetsInfo = record( native_libs = Artifact, native_libs_metadata = Artifact, native_libs_always_in_primary_apk = Artifact, - native_lib_assets_for_primary_apk = 
[Artifact, None], - stripped_native_linkable_assets_for_primary_apk = [Artifact, None], + native_lib_assets_for_primary_apk = Artifact | None, + stripped_native_linkable_assets_for_primary_apk = Artifact | None, root_module_metadata_assets = Artifact, root_module_compressed_lib_assets = Artifact, non_root_module_metadata_assets = Artifact, @@ -736,7 +736,7 @@ def _filter_prebuilt_native_library_dir( _StrippedNativeLinkables = record( linkables = Artifact, linkables_always_in_primary_apk = Artifact, - linkable_assets_for_primary_apk = [Artifact, None], + linkable_assets_for_primary_apk = Artifact | None, linkable_module_assets_map = dict[str, Artifact], ) diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl index 2c9369349..4bded64c8 100644 --- a/prelude/android/android_binary_resources_rules.bzl +++ b/prelude/android/android_binary_resources_rules.bzl @@ -24,8 +24,8 @@ load("@prelude//decls/android_rules.bzl", "RType") _FilteredResourcesOutput = record( resource_infos = list[AndroidResourceInfo], voltron_res = list[Artifact], - override_symbols = [Artifact, None], - string_files_list = [Artifact, None], + override_symbols = Artifact | None, + string_files_list = Artifact | None, string_files_res_dirs = list[Artifact], ) @@ -36,7 +36,7 @@ def get_android_binary_resources_info( java_packaging_deps: list[JavaPackagingDep], use_proto_format: bool, referenced_resources_lists: list[Artifact], - apk_module_graph_file: [Artifact, None] = None, + apk_module_graph_file: Artifact | None = None, manifest_entries: dict = {}, resource_infos_to_exclude: [set_type, None] = None, r_dot_java_packages_to_exclude: [list[str], None] = [], @@ -367,7 +367,7 @@ def _maybe_generate_string_source_map( should_build_source_string_map: bool, res_dirs: list[Artifact], android_toolchain: AndroidToolchainInfo, - is_voltron_string_source_map: bool = False) -> [Artifact, None]: + is_voltron_string_source_map: bool = False) -> Artifact 
| None: if not should_build_source_string_map or len(res_dirs) == 0: return None @@ -391,10 +391,10 @@ def _maybe_generate_string_source_map( def _maybe_package_strings_as_assets( ctx: AnalysisContext, - string_files_list: [Artifact, None], + string_files_list: Artifact | None, string_files_res_dirs: list[Artifact], r_dot_txt: Artifact, - android_toolchain: AndroidToolchainInfo) -> [Artifact, None]: + android_toolchain: AndroidToolchainInfo) -> Artifact | None: resource_compression_mode = getattr(ctx.attrs, "resource_compression", "disabled") is_store_strings_as_assets = _is_store_strings_as_assets(resource_compression_mode) expect(is_store_strings_as_assets == (string_files_list != None)) @@ -481,7 +481,7 @@ def get_manifest( def _get_module_manifests( ctx: AnalysisContext, manifest_entries: dict, - apk_module_graph_file: [Artifact, None], + apk_module_graph_file: Artifact | None, use_proto_format: bool, primary_resources_apk: Artifact) -> list[Artifact]: if not apk_module_graph_file: @@ -546,9 +546,9 @@ def _merge_assets( is_exopackaged_enabled_for_resources: bool, base_apk: Artifact, resource_infos: list[AndroidResourceInfo], - cxx_resources: [Artifact, None], + cxx_resources: Artifact | None, is_bundle_build: bool, - apk_module_graph_file: [Artifact, None]) -> (Artifact, [Artifact, None], [Artifact, None], [Artifact, None]): + apk_module_graph_file: Artifact | None) -> (Artifact, Artifact | None, Artifact | None, Artifact | None): expect( not (is_exopackaged_enabled_for_resources and is_bundle_build), "Cannot use exopackage-for-resources with AAB builds.", @@ -565,7 +565,7 @@ def _merge_assets( def get_common_merge_assets_cmd( ctx: AnalysisContext, - output_apk: Artifact) -> (cmd_args, [Artifact, None]): + output_apk: Artifact) -> (cmd_args, Artifact | None): merge_assets_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].merge_assets[RunInfo]) merge_assets_cmd.add(["--output-apk", output_apk.as_output()]) @@ -657,7 +657,7 @@ def 
get_effective_banned_duplicate_resource_types( else: fail("Unrecognized duplicate_resource_behavior: {}".format(duplicate_resource_behavior)) -def get_cxx_resources(ctx: AnalysisContext, deps: list[Dependency], dir_name: str = "cxx_resources_dir") -> [Artifact, None]: +def get_cxx_resources(ctx: AnalysisContext, deps: list[Dependency], dir_name: str = "cxx_resources_dir") -> Artifact | None: cxx_resources = gather_resources( label = ctx.label, resources = {}, diff --git a/prelude/android/android_build_config.bzl b/prelude/android/android_build_config.bzl index 452b3132c..95ab62c55 100644 --- a/prelude/android/android_build_config.bzl +++ b/prelude/android/android_build_config.bzl @@ -54,7 +54,7 @@ def generate_android_build_config( java_package: str, use_constant_expressions: bool, default_values: list[BuildConfigField], - values_file: [Artifact, None]) -> (JavaLibraryInfo, JavaPackagingInfo, Artifact): + values_file: Artifact | None) -> (JavaLibraryInfo, JavaPackagingInfo, Artifact): build_config_dot_java = _generate_build_config_dot_java(ctx, source, java_package, use_constant_expressions, default_values, values_file) compiled_build_config_dot_java = _compile_and_package_build_config_dot_java(ctx, java_package, build_config_dot_java) @@ -76,7 +76,7 @@ def _generate_build_config_dot_java( java_package: str, use_constant_expressions: bool, default_values: list[BuildConfigField], - values_file: [Artifact, None]) -> Artifact: + values_file: Artifact | None) -> Artifact: generate_build_config_cmd = cmd_args(ctx.attrs._android_toolchain[AndroidToolchainInfo].generate_build_config[RunInfo]) generate_build_config_cmd.add([ "--source", diff --git a/prelude/android/android_bundle.bzl b/prelude/android/android_bundle.bzl index 849f242c2..6484af2d5 100644 --- a/prelude/android/android_bundle.bzl +++ b/prelude/android/android_bundle.bzl @@ -44,7 +44,7 @@ def build_bundle( dex_files_info: DexFilesInfo, native_library_info: AndroidBinaryNativeLibsInfo, resources_info: 
AndroidBinaryResourcesInfo, - bundle_config: [Artifact, None], + bundle_config: Artifact | None, validation_deps_outputs: [list[Artifact], None] = None) -> Artifact: output_bundle = actions.declare_output("{}.aab".format(label.name)) diff --git a/prelude/android/android_library.bzl b/prelude/android/android_library.bzl index 18f2e501a..0196ad51d 100644 --- a/prelude/android/android_library.bzl +++ b/prelude/android/android_library.bzl @@ -58,7 +58,7 @@ def android_library_impl(ctx: AnalysisContext) -> list[Provider]: def build_android_library( ctx: AnalysisContext, - r_dot_java: [Artifact, None] = None, + r_dot_java: Artifact | None = None, extra_sub_targets = {}, validation_deps_outputs: [list[Artifact], None] = None) -> (JavaProviders, [AndroidLibraryIntellijInfo, None]): bootclasspath_entries = [] + ctx.attrs._android_toolchain[AndroidToolchainInfo].android_bootclasspath @@ -92,7 +92,7 @@ def build_android_library( ), android_library_intellij_info def _get_dummy_r_dot_java( - ctx: AnalysisContext) -> ([Artifact, None], [AndroidLibraryIntellijInfo, None]): + ctx: AnalysisContext) -> (Artifact | None, [AndroidLibraryIntellijInfo, None]): android_resources = dedupe([resource for resource in filter(None, [ x.get(AndroidResourceInfo) for x in ctx.attrs.deps + ctx.attrs.provided_deps + (getattr(ctx.attrs, "provided_deps_query", []) or []) diff --git a/prelude/android/android_providers.bzl b/prelude/android/android_providers.bzl index 774bb24df..7359eea5b 100644 --- a/prelude/android/android_providers.bzl +++ b/prelude/android/android_providers.bzl @@ -34,8 +34,8 @@ ExopackageNativeInfo = record( ) ExopackageResourcesInfo = record( - assets = [Artifact, None], - assets_hash = [Artifact, None], + assets = Artifact | None, + assets_hash = Artifact | None, res = Artifact, res_hash = Artifact, ) @@ -66,9 +66,9 @@ AndroidBinaryResourcesInfo = record( # per-module manifests (packaged as assets) module_manifests = list[Artifact], # per-module assets APKs (for .aabs only) - 
module_assets = [Artifact, None], + module_assets = Artifact | None, # zip containing any strings packaged as assets - packaged_string_assets = [Artifact, None], + packaged_string_assets = Artifact | None, # "APK" containing resources to be used by the Android binary primary_resources_apk = Artifact, # proguard config needed to retain used resources @@ -76,9 +76,9 @@ AndroidBinaryResourcesInfo = record( # R.java jars containing all the linked resources r_dot_java_infos = list[RDotJavaInfo], # directory containing filtered string resources files - string_source_map = [Artifact, None], + string_source_map = Artifact | None, # directory containing filtered string resources files for Voltron language packs - voltron_string_source_map = [Artifact, None], + voltron_string_source_map = Artifact | None, # list of jars that could contain resources that should be packaged into the APK jar_files_that_may_contain_resources = list[Artifact], # The resource infos that are used in this APK @@ -219,11 +219,11 @@ ExportedAndroidResourceInfo = provider( DexFilesInfo = record( primary_dex = Artifact, - primary_dex_class_names = [Artifact, None], + primary_dex_class_names = Artifact | None, root_module_secondary_dex_dirs = list[Artifact], non_root_module_secondary_dex_dirs = list[Artifact], secondary_dex_exopackage_info = [ExopackageDexInfo, None], - proguard_text_files_path = [Artifact, None], + proguard_text_files_path = Artifact | None, ) ExopackageInfo = record( @@ -246,7 +246,7 @@ def merge_android_packageable_info( actions: AnalysisActions, deps: list[Dependency], build_config_info: [AndroidBuildConfigInfo, None] = None, - manifest: [Artifact, None] = None, + manifest: Artifact | None = None, prebuilt_native_library_dir: [PrebuiltNativeLibraryDir, None] = None, resource_info: [AndroidResourceInfo, None] = None, for_primary_apk: bool = False) -> AndroidPackageableInfo: diff --git a/prelude/android/android_resource.bzl b/prelude/android/android_resource.bzl index 
ac29f75df..440c31951 100644 --- a/prelude/android/android_resource.bzl +++ b/prelude/android/android_resource.bzl @@ -12,7 +12,7 @@ load(":android_toolchain.bzl", "AndroidToolchainInfo") JAVA_PACKAGE_FILENAME = "java_package.txt" -def _convert_to_artifact_dir(ctx: AnalysisContext, attr: [Dependency, dict, Artifact, None], attr_name: str) -> [Artifact, None]: +def _convert_to_artifact_dir(ctx: AnalysisContext, attr: [Dependency, dict, Artifact, None], attr_name: str) -> Artifact | None: if isinstance(attr, Dependency): expect(len(attr[DefaultInfo].default_outputs) == 1, "Expect one default output from build dep of attr {}!".format(attr_name)) return attr[DefaultInfo].default_outputs[0] @@ -95,7 +95,7 @@ def aapt2_compile( return aapt2_output -def _get_package(ctx: AnalysisContext, package: [str, None], manifest: [Artifact, None]) -> Artifact: +def _get_package(ctx: AnalysisContext, package: [str, None], manifest: Artifact | None) -> Artifact: if package: return ctx.actions.write(JAVA_PACKAGE_FILENAME, package) else: diff --git a/prelude/android/dex_rules.bzl b/prelude/android/dex_rules.bzl index 02184f174..d66acc92b 100644 --- a/prelude/android/dex_rules.bzl +++ b/prelude/android/dex_rules.bzl @@ -126,10 +126,10 @@ def get_multi_dex( android_toolchain: AndroidToolchainInfo, java_library_jars_to_owners: dict[Artifact, TargetLabel], primary_dex_patterns: list[str], - proguard_configuration_output_file: [Artifact, None] = None, - proguard_mapping_output_file: [Artifact, None] = None, + proguard_configuration_output_file: Artifact | None = None, + proguard_mapping_output_file: Artifact | None = None, is_optimized: bool = False, - apk_module_graph_file: [Artifact, None] = None) -> DexFilesInfo: + apk_module_graph_file: Artifact | None = None) -> DexFilesInfo: expect( not _is_exopackage_enabled_for_secondary_dex(ctx), "secondary dex exopackage can only be enabled on pre-dexed builds!", @@ -238,8 +238,8 @@ def _get_primary_dex_and_secondary_dex_jars( jars: list[Artifact], 
java_library_jars_to_owners: dict[Artifact, TargetLabel], primary_dex_patterns_file: Artifact, - proguard_configuration_output_file: [Artifact, None], - proguard_mapping_output_file: [Artifact, None], + proguard_configuration_output_file: Artifact | None, + proguard_mapping_output_file: Artifact | None, android_toolchain: AndroidToolchainInfo) -> (list[Artifact], list[Artifact]): primary_dex_jars = [] secondary_dex_jars = [] @@ -321,7 +321,7 @@ DexInputsWithClassNamesAndWeightEstimatesFile = record( SecondaryDexMetadataConfig = record( secondary_dex_compression = str, secondary_dex_metadata_path = [str, None], - secondary_dex_metadata_file = [Artifact, None], + secondary_dex_metadata_file = Artifact | None, secondary_dex_metadata_line = Artifact, secondary_dex_canary_class_name = str, ) @@ -398,7 +398,7 @@ def merge_to_split_dex( android_toolchain: AndroidToolchainInfo, pre_dexed_libs: list[DexLibraryInfo], split_dex_merge_config: SplitDexMergeConfig, - apk_module_graph_file: [Artifact, None] = None) -> DexFilesInfo: + apk_module_graph_file: Artifact | None = None) -> DexFilesInfo: is_exopackage_enabled_for_secondary_dex = _is_exopackage_enabled_for_secondary_dex(ctx) if is_exopackage_enabled_for_secondary_dex: expect( @@ -592,8 +592,8 @@ def _merge_dexes( output_dex_file: Artifact, pre_dexed_artifacts: list[Artifact], pre_dexed_artifacts_file: Artifact, - class_names_to_include: [Artifact, None] = None, - secondary_output_dex_file: [Artifact, None] = None, + class_names_to_include: Artifact | None = None, + secondary_output_dex_file: Artifact | None = None, secondary_dex_metadata_config: [SecondaryDexMetadataConfig, None] = None): d8_cmd = cmd_args(android_toolchain.d8_command[RunInfo]) d8_cmd.add(["--output-dex-file", output_dex_file.as_output()]) diff --git a/prelude/android/preprocess_java_classes.bzl b/prelude/android/preprocess_java_classes.bzl index e35ed44a0..c23e5116c 100644 --- a/prelude/android/preprocess_java_classes.bzl +++ 
b/prelude/android/preprocess_java_classes.bzl @@ -11,7 +11,7 @@ load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") load("@prelude//utils:expect.bzl", "expect") -def get_preprocessed_java_classes(enhance_ctx: EnhancementContext, input_jars: dict[Artifact, TargetLabel]) -> (dict[Artifact, TargetLabel], [Artifact, None]): +def get_preprocessed_java_classes(enhance_ctx: EnhancementContext, input_jars: dict[Artifact, TargetLabel]) -> (dict[Artifact, TargetLabel], Artifact | None): if not input_jars: return {}, None diff --git a/prelude/android/proguard.bzl b/prelude/android/proguard.bzl index c796fb339..392638178 100644 --- a/prelude/android/proguard.bzl +++ b/prelude/android/proguard.bzl @@ -17,7 +17,7 @@ load("@prelude//utils:expect.bzl", "expect") ProguardOutput = record( jars_to_owners = dict[Artifact, TargetLabel], - proguard_configuration_output_file = [Artifact, None], + proguard_configuration_output_file = Artifact | None, proguard_mapping_output_file = Artifact, proguard_artifacts = list[Artifact], proguard_hidden_artifacts = list[Artifact], @@ -29,9 +29,9 @@ def _get_proguard_command_line_args( proguard_configs: list[Artifact], additional_library_jars: list[Artifact], mapping: Artifact, - configuration: [Artifact, None], - seeds: [Artifact, None], - usage: [Artifact, None], + configuration: Artifact | None, + seeds: Artifact | None, + usage: Artifact | None, android_toolchain: AndroidToolchainInfo) -> (cmd_args, list[Artifact]): cmd = cmd_args() hidden = [] @@ -131,7 +131,7 @@ def get_proguard_output( ctx: AnalysisContext, input_jars: dict[Artifact, TargetLabel], java_packaging_deps: list[JavaPackagingDep], - aapt_generated_proguard_config: [Artifact, None], + aapt_generated_proguard_config: Artifact | None, additional_library_jars: list[Artifact]) -> ProguardOutput: proguard_configs = [packaging_dep.proguard_config for packaging_dep in java_packaging_deps if 
packaging_dep.proguard_config] if ctx.attrs.proguard_config: diff --git a/prelude/android/r_dot_java.bzl b/prelude/android/r_dot_java.bzl index a21e69a4a..99e6cf448 100644 --- a/prelude/android/r_dot_java.bzl +++ b/prelude/android/r_dot_java.bzl @@ -14,10 +14,10 @@ load("@prelude//utils:set.bzl", "set") RDotJavaSourceCode = record( r_dot_java_source_code_dir = Artifact, r_dot_java_source_code_zipped = Artifact, - strings_source_code_dir = [Artifact, None], - strings_source_code_zipped = [Artifact, None], - ids_source_code_dir = [Artifact, None], - ids_source_code_zipped = [Artifact, None], + strings_source_code_dir = Artifact | None, + strings_source_code_zipped = Artifact | None, + ids_source_code_dir = Artifact | None, + ids_source_code_zipped = Artifact | None, ) def get_dummy_r_dot_java( @@ -39,7 +39,7 @@ def generate_r_dot_javas( banned_duplicate_resource_types: list[str], uber_r_dot_txt_files: list[Artifact], override_symbols_paths: list[Artifact], - duplicate_resources_allowlist: [Artifact, None], + duplicate_resources_allowlist: Artifact | None, union_package: [str, None], referenced_resources_lists: list[Artifact], generate_strings_and_ids_separately: [bool, None] = True, @@ -107,7 +107,7 @@ def _generate_r_dot_java_source_code( banned_duplicate_resource_types: list[str] = [], uber_r_dot_txt_files: list[Artifact] = [], override_symbols_paths: list[Artifact] = [], - duplicate_resources_allowlist: [Artifact, None] = None, + duplicate_resources_allowlist: Artifact | None = None, union_package: [str, None] = None, referenced_resources_lists: list[Artifact] = []) -> RDotJavaSourceCode: merge_resources_cmd = cmd_args(merge_android_resources_tool) diff --git a/prelude/android/voltron.bzl b/prelude/android/voltron.bzl index d6622c261..5c116a9cd 100644 --- a/prelude/android/voltron.bzl +++ b/prelude/android/voltron.bzl @@ -109,7 +109,7 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: return [DefaultInfo(default_output = output)] -def 
get_target_to_module_mapping(ctx: AnalysisContext, deps_by_platform: dict[str, list[Dependency]]) -> [Artifact, None]: +def get_target_to_module_mapping(ctx: AnalysisContext, deps_by_platform: dict[str, list[Dependency]]) -> Artifact | None: if not ctx.attrs.application_module_configs: return None diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index a6ee6cbfa..d6caa9041 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -31,7 +31,7 @@ AppleBundlePart = record( # Marks parts which should be code signed separately from the whole bundle. codesign_on_copy = field(bool, False), # Entitlements to use when this part is code signed separately. - codesign_entitlements = field([Artifact, None], None), + codesign_entitlements = field(Artifact | None, None), ) SwiftStdlibArguments = record( diff --git a/prelude/apple/apple_bundle_resources.bzl b/prelude/apple/apple_bundle_resources.bzl index 2a692d867..dbd3b8b23 100644 --- a/prelude/apple/apple_bundle_resources.bzl +++ b/prelude/apple/apple_bundle_resources.bzl @@ -354,7 +354,7 @@ def _process_apple_resource_file_if_needed( destination: AppleBundleDestination, destination_relative_path: [str, None], codesign_on_copy: bool = False, - codesign_entitlements: [Artifact, None] = None) -> AppleBundlePart: + codesign_entitlements: Artifact | None = None) -> AppleBundlePart: output_dir = "_ProcessedResources" basename = paths.basename(file.short_path) output_is_contents_dir = False diff --git a/prelude/apple/apple_code_signing_types.bzl b/prelude/apple/apple_code_signing_types.bzl index b63f98ffb..66ac6cad9 100644 --- a/prelude/apple/apple_code_signing_types.bzl +++ b/prelude/apple/apple_code_signing_types.bzl @@ -7,7 +7,7 @@ # Provider which exposes a field from `apple_binary` to `apple_bundle` as it might be used during code signing. 
AppleEntitlementsInfo = provider(fields = { - "entitlements_file": provider_field([Artifact, None], default = None), + "entitlements_file": provider_field(Artifact | None, default = None), }) CodeSignType = enum( diff --git a/prelude/apple/apple_core_data.bzl b/prelude/apple/apple_core_data.bzl index 9d3847442..bd5560ba6 100644 --- a/prelude/apple/apple_core_data.bzl +++ b/prelude/apple/apple_core_data.bzl @@ -26,7 +26,7 @@ def apple_core_data_impl(ctx: AnalysisContext) -> list[Provider]: ) return [DefaultInfo(), graph] -def compile_apple_core_data(ctx: AnalysisContext, specs: list[AppleCoreDataSpec], product_name: str) -> [Artifact, None]: +def compile_apple_core_data(ctx: AnalysisContext, specs: list[AppleCoreDataSpec], product_name: str) -> Artifact | None: if len(specs) == 0: return None diff --git a/prelude/apple/apple_entitlements.bzl b/prelude/apple/apple_entitlements.bzl index dd51e4a19..5877aea9a 100644 --- a/prelude/apple/apple_entitlements.bzl +++ b/prelude/apple/apple_entitlements.bzl @@ -29,7 +29,7 @@ def should_include_entitlements(ctx: AnalysisContext, codesign_type: CodeSignTyp return False -def _entitlements_file(ctx: AnalysisContext) -> [Artifact, None]: +def _entitlements_file(ctx: AnalysisContext) -> Artifact | None: if hasattr(ctx.attrs, "entitlements_file"): # Bundling `apple_test` which doesn't have a binary to provide the entitlements, so they are provided via `entitlements_file` attribute directly. 
return ctx.attrs.entitlements_file diff --git a/prelude/apple/apple_info_plist.bzl b/prelude/apple/apple_info_plist.bzl index 3b461f0b6..590bf7248 100644 --- a/prelude/apple/apple_info_plist.bzl +++ b/prelude/apple/apple_info_plist.bzl @@ -20,7 +20,7 @@ load( ) load(":apple_toolchain_types.bzl", "AppleToolchainInfo", "AppleToolsInfo") -def process_info_plist(ctx: AnalysisContext, override_input: [Artifact, None]) -> AppleBundlePart: +def process_info_plist(ctx: AnalysisContext, override_input: Artifact | None) -> AppleBundlePart: input = _preprocess_info_plist(ctx) output = ctx.actions.declare_output("Info.plist") additional_keys = _additional_keys_as_json_file(ctx) @@ -64,7 +64,7 @@ def _preprocess_info_plist(ctx: AnalysisContext) -> Artifact: ctx.actions.run(command, category = "apple_preprocess_info_plist", **_get_plist_run_options()) return output -def _plist_substitutions_as_json_file(ctx: AnalysisContext) -> [Artifact, None]: +def _plist_substitutions_as_json_file(ctx: AnalysisContext) -> Artifact | None: info_plist_substitutions = ctx.attrs.info_plist_substitutions if not info_plist_substitutions: return None @@ -72,7 +72,7 @@ def _plist_substitutions_as_json_file(ctx: AnalysisContext) -> [Artifact, None]: substitutions_json = ctx.actions.write_json("plist_substitutions.json", info_plist_substitutions) return substitutions_json -def process_plist(ctx: AnalysisContext, input: Artifact, output: OutputArtifact, override_input: [Artifact, None] = None, additional_keys: [Artifact, None] = None, override_keys: [Artifact, None] = None, action_id: [str, None] = None): +def process_plist(ctx: AnalysisContext, input: Artifact, output: OutputArtifact, override_input: Artifact | None = None, additional_keys: Artifact | None = None, override_keys: Artifact | None = None, action_id: [str, None] = None): apple_tools = ctx.attrs._apple_tools[AppleToolsInfo] processor = apple_tools.info_plist_processor override_input_arguments = ["--override-input", override_input] if 
override_input != None else [] diff --git a/prelude/apple/apple_resource_types.bzl b/prelude/apple/apple_resource_types.bzl index 45fa6b8f0..4d800f7ae 100644 --- a/prelude/apple/apple_resource_types.bzl +++ b/prelude/apple/apple_resource_types.bzl @@ -28,7 +28,7 @@ AppleResourceSpec = record( # `{ "ru.lproj" : ["Localizable.strings"] }` named_variant_files = field(dict[str, list[Artifact]], {}), codesign_files_on_copy = field(bool, False), - codesign_entitlements = field([Artifact, None], None), + codesign_entitlements = field(Artifact | None, None), ) # Used when invoking `ibtool`, `actool`, `mapc` and `momc` diff --git a/prelude/apple/apple_test.bzl b/prelude/apple/apple_test.bzl index 74119f747..5da34d8a1 100644 --- a/prelude/apple/apple_test.bzl +++ b/prelude/apple/apple_test.bzl @@ -183,7 +183,7 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: else: return get_apple_test_providers([]) -def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_bundle: [Artifact, None], dsym_artifact: [Artifact, None] = None) -> Provider: +def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_bundle: Artifact | None, dsym_artifact: Artifact | None = None) -> Provider: # When interacting with Tpx, we just pass our various inputs via env vars, # since Tpx basiclaly wants structured output for this. @@ -239,7 +239,7 @@ def _get_test_info(ctx: AnalysisContext, xctest_bundle: Artifact, test_host_app_ }, ) -def _get_test_host_app_bundle(ctx: AnalysisContext) -> [Artifact, None]: +def _get_test_host_app_bundle(ctx: AnalysisContext) -> Artifact | None: """ Get the bundle for the test host app, if one exists for this test. 
""" if ctx.attrs.test_host_app: # Copy the test host app bundle into test's output directory @@ -250,7 +250,7 @@ def _get_test_host_app_bundle(ctx: AnalysisContext) -> [Artifact, None]: return None -def _get_test_host_app_binary(ctx: AnalysisContext, test_host_app_bundle: [Artifact, None]) -> [cmd_args, None]: +def _get_test_host_app_binary(ctx: AnalysisContext, test_host_app_bundle: Artifact | None) -> [cmd_args, None]: """ Reference to the binary with the test host app bundle, if one exists for this test. Captures the bundle as an artifact in the cmd_args. """ if ctx.attrs.test_host_app == None: return None diff --git a/prelude/apple/modulemap.bzl b/prelude/apple/modulemap.bzl index 7cba3e9eb..c4aacca19 100644 --- a/prelude/apple/modulemap.bzl +++ b/prelude/apple/modulemap.bzl @@ -17,7 +17,7 @@ load( ) load(":apple_utility.bzl", "get_module_name") -def preprocessor_info_for_modulemap(ctx: AnalysisContext, name: str, headers: list[CHeader], swift_header: [Artifact, None]) -> CPreprocessor: +def preprocessor_info_for_modulemap(ctx: AnalysisContext, name: str, headers: list[CHeader], swift_header: Artifact | None) -> CPreprocessor: # We don't want to name this module.modulemap to avoid implicit importing if name == "module": fail("Don't use the name `module` for modulemaps, this will allow for implicit importing.") @@ -78,7 +78,7 @@ def preprocessor_info_for_modulemap(ctx: AnalysisContext, name: str, headers: li def _args_for_modulemap( modulemap: Artifact, symlink_tree: Artifact, - swift_header: [Artifact, None]) -> list[cmd_args]: + swift_header: Artifact | None) -> list[cmd_args]: cmd = cmd_args(modulemap, format = "-fmodule-map-file={}") cmd.hidden(symlink_tree) if swift_header: diff --git a/prelude/apple/scene_kit_assets.bzl b/prelude/apple/scene_kit_assets.bzl index 650919b33..01b2805a9 100644 --- a/prelude/apple/scene_kit_assets.bzl +++ b/prelude/apple/scene_kit_assets.bzl @@ -24,7 +24,7 @@ def scene_kit_assets_impl(ctx: AnalysisContext) -> list[Provider]: ) 
return [DefaultInfo(), graph] -def compile_scene_kit_assets(ctx: AnalysisContext, specs: list[SceneKitAssetsSpec]) -> [Artifact, None]: +def compile_scene_kit_assets(ctx: AnalysisContext, specs: list[SceneKitAssetsSpec]) -> Artifact | None: if len(specs) == 0: return None diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index 4066e9cee..d350bab84 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -88,7 +88,7 @@ SwiftCompilationDatabase = record( SwiftObjectOutput = record( object_files = field(list[Artifact]), argsfiles = field(CompileArgsfiles), - output_map_artifact = field([Artifact, None]), + output_map_artifact = field(Artifact | None), ) SwiftCompilationOutput = record( @@ -114,9 +114,9 @@ SwiftCompilationOutput = record( # Info required for `[swift-compilation-database]` subtarget. compilation_database = field(SwiftCompilationDatabase), # An artifact that represent the Swift module map for this target. - output_map_artifact = field([Artifact, None]), + output_map_artifact = field(Artifact | None), # An optional artifact of the partial tbd file emitted for this module. 
- tbd = field([Artifact, None]), + tbd = field(Artifact | None), ) SwiftDebugInfo = record( @@ -349,7 +349,7 @@ def _compile_swiftmodule( srcs: list[CxxSrcWithFlags], output_swiftmodule: Artifact, output_header: Artifact, - output_tbd: [Artifact, None]) -> CompileArgsfiles: + output_tbd: Artifact | None) -> CompileArgsfiles: argfile_cmd = cmd_args(shared_flags) argfile_cmd.add([ "-emit-module", @@ -735,7 +735,7 @@ def get_swift_pcm_uncompile_info( def get_swift_dependency_info( ctx: AnalysisContext, exported_pre: [CPreprocessor, None], - output_module: [Artifact, None], + output_module: Artifact | None, deps_providers: list) -> SwiftDependencyInfo: exported_deps = _exported_deps(ctx) diff --git a/prelude/cxx/compile.bzl b/prelude/cxx/compile.bzl index 785c93b4f..628df680e 100644 --- a/prelude/cxx/compile.bzl +++ b/prelude/cxx/compile.bzl @@ -153,9 +153,9 @@ CxxCompileOutput = record( # Externally referenced debug info, which doesn't get linked with the # object (e.g. the above `.o` when using `-gsplit-dwarf=single` or the # the `.dwo` when using `-gsplit-dwarf=split`). - external_debug_info = field([Artifact, None], None), - clang_remarks = field([Artifact, None], None), - clang_trace = field([Artifact, None], None), + external_debug_info = field(Artifact | None, None), + clang_remarks = field(Artifact | None, None), + clang_trace = field(Artifact | None, None), ) _ABSOLUTE_ARGSFILE_SUBSTITUTIONS = [ diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 2c829ef2e..5a19716e1 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -162,8 +162,8 @@ load( CxxExecutableOutput = record( binary = Artifact, unstripped_binary = Artifact, - bitcode_bundle = field([Artifact, None], None), - dwp = field([Artifact, None]), + bitcode_bundle = field(Artifact | None, None), + dwp = field(Artifact | None), # Files that must be present for the executable to run successfully. 
These # are always materialized, whether the executable is the output of a build # or executed as a host tool. They become .hidden() arguments when executing diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 5c9933ed7..41604c390 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -227,13 +227,13 @@ CxxLibraryOutput = record( # its corresponding DWARF debug info. # May be None when Split DWARF is disabled, for static/static-pic libraries, # for some types of synthetic link objects or for pre-built shared libraries. - dwp = field([Artifact, None], None), + dwp = field(Artifact | None, None), # A shared shared library may have an associated PDB file with # its corresponding Windows debug info. - pdb = field([Artifact, None], None), + pdb = field(Artifact | None, None), # The import library is the linkable output of a Windows shared library build. - implib = field([Artifact, None], None), + implib = field(Artifact | None, None), # Data about the linker map, only available on shared libraries # TODO(cjhopman): always available? when is it/is it not available? linker_map = field([CxxLinkerMapData, None], None), @@ -1358,7 +1358,7 @@ _CxxSharedLibraryResult = record( link_result = CxxLinkResult, # Shared library name (e.g. SONAME) soname = str, - objects_bitcode_bundle = [Artifact, None], + objects_bitcode_bundle = Artifact | None, # `LinkInfo` used to link against the shared library. info = LinkInfo, ) diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index 320339cd5..cdfb1badf 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -57,13 +57,13 @@ def linker_map_args(toolchain: CxxToolchainInfo, linker_map) -> LinkArgs: LinkArgsOutput = record( link_args = ArgLike, hidden = list[typing.Any], - pdb_artifact = [Artifact, None], + pdb_artifact = Artifact | None, # The filelist artifact which contains the list of all object files. 
# Only present for Darwin linkers. Note that object files referenced # _inside_ the filelist are _not_ part of the `hidden` field above. # That's by design - we do not want to materialise _all_ object files # to inspect the filelist. Intended to be used for debugging. - filelist = [Artifact, None], + filelist = Artifact | None, ) def make_link_args( diff --git a/prelude/cxx/dist_lto/dist_lto.bzl b/prelude/cxx/dist_lto/dist_lto.bzl index 8a50ad707..c3163e660 100644 --- a/prelude/cxx/dist_lto/dist_lto.bzl +++ b/prelude/cxx/dist_lto/dist_lto.bzl @@ -84,7 +84,7 @@ def cxx_dist_link( links: list[LinkArgs], # The destination for the link output. output: Artifact, - linker_map: [Artifact, None] = None, + linker_map: Artifact | None = None, # A category suffix that will be added to the category of the link action that is generated. category_suffix: [str, None] = None, # An identifier that will uniquely name this link action in the context of a category. Useful for diff --git a/prelude/cxx/headers.bzl b/prelude/cxx/headers.bzl index 406a8b56d..65134432c 100644 --- a/prelude/cxx/headers.bzl +++ b/prelude/cxx/headers.bzl @@ -67,7 +67,7 @@ HeaderStyle = enum( Headers = record( include_path = field(cmd_args), # NOTE(agallagher): Used for module hack replacement. - symlink_tree = field([Artifact, None], None), + symlink_tree = field(Artifact | None, None), # args that map symlinked private headers to source path file_prefix_args = field([cmd_args, None], None), ) @@ -182,7 +182,7 @@ def _header_mode(ctx: AnalysisContext) -> HeaderMode: return toolchain_header_mode -def prepare_headers(ctx: AnalysisContext, srcs: dict[str, Artifact], name: str, project_root_file: [Artifact, None]) -> [Headers, None]: +def prepare_headers(ctx: AnalysisContext, srcs: dict[str, Artifact], name: str, project_root_file: Artifact | None) -> [Headers, None]: """ Prepare all the headers we want to use, depending on the header_mode set on the target's toolchain. 
@@ -343,7 +343,7 @@ def _get_debug_prefix_args(ctx: AnalysisContext, header_dir: Artifact) -> [cmd_a ) return debug_prefix_args -def _mk_hmap(ctx: AnalysisContext, name: str, headers: dict[str, (Artifact, str)], project_root_file: [Artifact, None]) -> Artifact: +def _mk_hmap(ctx: AnalysisContext, name: str, headers: dict[str, (Artifact, str)], project_root_file: Artifact | None) -> Artifact: output = ctx.actions.declare_output(name + ".hmap") cmd = cmd_args(get_cxx_toolchain_info(ctx).mk_hmap) cmd.add(["--output", output.as_output()]) diff --git a/prelude/cxx/link.bzl b/prelude/cxx/link.bzl index 4a4ff3789..361faea2c 100644 --- a/prelude/cxx/link.bzl +++ b/prelude/cxx/link.bzl @@ -83,8 +83,8 @@ CxxLinkResult = record( def link_external_debug_info( ctx: AnalysisContext, links: list[LinkArgs], - split_debug_output: [Artifact, None] = None, - pdb: [Artifact, None] = None) -> ArtifactTSet: + split_debug_output: Artifact | None = None, + pdb: Artifact | None = None) -> ArtifactTSet: external_debug_artifacts = [] # When using LTO+split-dwarf, the link step will generate externally diff --git a/prelude/cxx/link_types.bzl b/prelude/cxx/link_types.bzl index ddcac7c52..8e065a8a6 100644 --- a/prelude/cxx/link_types.bzl +++ b/prelude/cxx/link_types.bzl @@ -32,7 +32,7 @@ LinkOptions = record( strip = bool, # A function/lambda which will generate the strip args using the ctx. strip_args_factory = [typing.Callable, None], - import_library = [Artifact, None], + import_library = Artifact | None, allow_cache_upload = bool, cxx_toolchain = [CxxToolchainInfo, None], # Force callers to use link_options() or merge_link_options() to create. 
@@ -49,7 +49,7 @@ def link_options( identifier: [str, None] = None, strip: bool = False, strip_args_factory = None, - import_library: [Artifact, None] = None, + import_library: Artifact | None = None, allow_cache_upload: bool = False, cxx_toolchain: [CxxToolchainInfo, None] = None) -> LinkOptions: """ diff --git a/prelude/cxx/linker.bzl b/prelude/cxx/linker.bzl index 4e6180646..59e8a3e27 100644 --- a/prelude/cxx/linker.bzl +++ b/prelude/cxx/linker.bzl @@ -248,7 +248,7 @@ def get_output_flags(linker_type: str, output: Artifact) -> list[ArgLike]: def get_import_library( ctx: AnalysisContext, linker_type: str, - output_short_path: str) -> ([Artifact, None], list[ArgLike]): + output_short_path: str) -> (Artifact | None, list[ArgLike]): if linker_type == "windows": import_library = ctx.actions.declare_output(output_short_path + ".imp.lib") return import_library, [cmd_args(import_library.as_output(), format = "/IMPLIB:{}")] diff --git a/prelude/cxx/preprocessor.bzl b/prelude/cxx/preprocessor.bzl index 00cd190a6..6fe20c87a 100644 --- a/prelude/cxx/preprocessor.bzl +++ b/prelude/cxx/preprocessor.bzl @@ -254,7 +254,7 @@ def cxx_exported_preprocessor_info(ctx: AnalysisContext, headers_layout: CxxHead modular_args = modular_args, ) -def _get_exported_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Artifact], style: HeaderStyle, compiler_type: str, raw_headers: list[Artifact], extra_preprocessors: list[CPreprocessor], project_root_file: [Artifact, None]) -> CPreprocessorArgs: +def _get_exported_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Artifact], style: HeaderStyle, compiler_type: str, raw_headers: list[Artifact], extra_preprocessors: list[CPreprocessor], project_root_file: Artifact | None) -> CPreprocessorArgs: header_root = prepare_headers(ctx, headers, "buck-headers", project_root_file) # Process args to handle the `$(cxx-header-tree)` macro. 
@@ -289,7 +289,7 @@ def _get_exported_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Art def cxx_private_preprocessor_info( ctx: AnalysisContext, headers_layout: CxxHeadersLayout, - project_root_file: [Artifact, None], + project_root_file: Artifact | None, raw_headers: list[Artifact] = [], extra_preprocessors: list[CPreprocessor] = [], non_exported_deps: list[Dependency] = [], @@ -310,7 +310,7 @@ def _cxx_private_preprocessor_info( headers_layout: CxxHeadersLayout, raw_headers: list[Artifact], extra_preprocessors: list[CPreprocessor], - project_root_file: [Artifact, None]) -> CPreprocessor: + project_root_file: Artifact | None) -> CPreprocessor: """ This rule's preprocessor info which is only applied to the compilation of its source, and not propagated to dependents. @@ -364,7 +364,7 @@ def _cxx_private_preprocessor_info( uses_modules = uses_modules, ) -def _get_private_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Artifact], compiler_type: str, all_raw_headers: list[Artifact], project_root_file: [Artifact, None]) -> CPreprocessorArgs: +def _get_private_preprocessor_args(ctx: AnalysisContext, headers: dict[str, Artifact], compiler_type: str, all_raw_headers: list[Artifact], project_root_file: Artifact | None) -> CPreprocessorArgs: # Create private header tree and propagate via args. args = [] file_prefix_args = [] diff --git a/prelude/debugging/types.bzl b/prelude/debugging/types.bzl index bfc9ea4e7..d6560eecb 100644 --- a/prelude/debugging/types.bzl +++ b/prelude/debugging/types.bzl @@ -60,7 +60,7 @@ Custom = record( # Java DAP server requires this file in order to correctly locate classes in the source files # The integration with a tool is available as a part of "JVM" rules. 
(java/kotlin_library/binary/test) JavaInfo = record( - classmap_file = field([Artifact, None]), + classmap_file = field(Artifact | None), ) # Customizations that are understood by debugging tool diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index c85e4c563..c21b20e40 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -952,7 +952,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: def derive_indexing_tset( actions: AnalysisActions, link_style: LinkStyle, - value: [list[Artifact], None], + value: list[Artifact] | None, children: list[Dependency]) -> HaskellIndexingTSet: index_children = [] for dep in children: diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index faea3eb2f..3cb3e6ab8 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -330,10 +330,10 @@ def _replace_macros_in_script_template( script_template: Artifact, haskell_toolchain: HaskellToolchainInfo, # Optional artifacts - ghci_bin: [Artifact, None] = None, - start_ghci: [Artifact, None] = None, - iserv_script: [Artifact, None] = None, - squashed_so: [Artifact, None] = None, + ghci_bin: Artifact | None = None, + start_ghci: Artifact | None = None, + iserv_script: Artifact | None = None, + squashed_so: Artifact | None = None, # Optional cmd_args exposed_package_args: [cmd_args, None] = None, packagedb_args: [cmd_args, None] = None, diff --git a/prelude/ide_integrations/xcode.bzl b/prelude/ide_integrations/xcode.bzl index f8f2cda0d..a71ab54bb 100644 --- a/prelude/ide_integrations/xcode.bzl +++ b/prelude/ide_integrations/xcode.bzl @@ -15,7 +15,7 @@ XcodeDataInfo = provider(fields = { def generate_xcode_data( ctx: AnalysisContext, rule_type: str, - output: [Artifact, None], + output: Artifact | None, populate_rule_specific_attributes_func: [typing.Callable, None] = None, **kwargs) -> (list[DefaultInfo], XcodeDataInfo): data = { diff --git a/prelude/java/class_to_srcs.bzl 
b/prelude/java/class_to_srcs.bzl index d253cab69..fa5d04e8c 100644 --- a/prelude/java/class_to_srcs.bzl +++ b/prelude/java/class_to_srcs.bzl @@ -11,7 +11,7 @@ load( "JavaToolchainInfo", # @unused Used as a type ) -def _class_to_src_map_args(mapping: [Artifact, None]): +def _class_to_src_map_args(mapping: Artifact | None): if mapping != None: return cmd_args(mapping) return cmd_args() @@ -34,8 +34,8 @@ JavaClassToSourceMapInfo = provider( def create_class_to_source_map_info( ctx: AnalysisContext, - mapping: [Artifact, None] = None, - mapping_debuginfo: [Artifact, None] = None, + mapping: Artifact | None = None, + mapping_debuginfo: Artifact | None = None, deps = [Dependency]) -> JavaClassToSourceMapInfo: # Only generate debuginfo if the debug info tool is available. java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo] @@ -88,7 +88,7 @@ def maybe_create_class_to_source_map_debuginfo( actions: AnalysisActions, name: str, java_toolchain: JavaToolchainInfo, - srcs: list[Artifact]) -> [Artifact, None]: + srcs: list[Artifact]) -> Artifact | None: # Only generate debuginfo if the debug info tool is available. if java_toolchain.gen_class_to_source_map_debuginfo == None: return None @@ -107,7 +107,7 @@ def merge_class_to_source_map_from_jar( actions: AnalysisActions, name: str, java_test_toolchain: JavaTestToolchainInfo, - mapping: [Artifact, None] = None, + mapping: Artifact | None = None, relative_to: [CellRoot, None] = None, # TODO(nga): I think this meant to be type, not default value. 
deps = [JavaClassToSourceMapInfo.type]) -> Artifact: diff --git a/prelude/java/java_library.bzl b/prelude/java/java_library.bzl index 564194889..a5e1ab59a 100644 --- a/prelude/java/java_library.bzl +++ b/prelude/java/java_library.bzl @@ -276,13 +276,13 @@ def compile_to_jar( srcs: list[Artifact], *, abi_generation_mode: [AbiGenerationMode, None] = None, - output: [Artifact, None] = None, + output: Artifact | None = None, actions_identifier: [str, None] = None, javac_tool: [typing.Any, None] = None, resources: [list[Artifact], None] = None, resources_root: [str, None] = None, remove_classes: [list[str], None] = None, - manifest_file: [Artifact, None] = None, + manifest_file: Artifact | None = None, annotation_processor_properties: [AnnotationProcessorProperties, None] = None, plugin_params: [PluginParams, None] = None, source_level: [int, None] = None, @@ -292,7 +292,7 @@ def compile_to_jar( source_only_abi_deps: [list[Dependency], None] = None, extra_arguments: [cmd_args, None] = None, additional_classpath_entries: [list[Artifact], None] = None, - additional_compiled_srcs: [Artifact, None] = None, + additional_compiled_srcs: Artifact | None = None, bootclasspath_entries: [list[Artifact], None] = None, is_creating_subtarget: bool = False) -> JavaCompileOutputs: if not additional_classpath_entries: @@ -356,13 +356,13 @@ def _create_jar_artifact( abi_generation_mode: [AbiGenerationMode, None], java_toolchain: JavaToolchainInfo, label: Label, - output: [Artifact, None], + output: Artifact | None, javac_tool: [typing.Any, None], srcs: list[Artifact], remove_classes: list[str], resources: list[Artifact], resources_root: [str, None], - manifest_file: [Artifact, None], + manifest_file: Artifact | None, annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], source_level: int, @@ -372,7 +372,7 @@ def _create_jar_artifact( _source_only_abi_deps: list[Dependency], extra_arguments: cmd_args, additional_classpath_entries: 
list[Artifact], - additional_compiled_srcs: [Artifact, None], + additional_compiled_srcs: Artifact | None, bootclasspath_entries: list[Artifact], _is_building_android_binary: bool, _is_creating_subtarget: bool = False) -> JavaCompileOutputs: @@ -517,7 +517,7 @@ def build_java_library( run_annotation_processors = True, additional_classpath_entries: list[Artifact] = [], bootclasspath_entries: list[Artifact] = [], - additional_compiled_srcs: [Artifact, None] = None, + additional_compiled_srcs: Artifact | None = None, generated_sources: list[Artifact] = [], override_abi_generation_mode: [AbiGenerationMode, None] = None, extra_sub_targets: dict = {}, diff --git a/prelude/java/java_providers.bzl b/prelude/java/java_providers.bzl index 2d2c6e6ac..43fa8661c 100644 --- a/prelude/java/java_providers.bzl +++ b/prelude/java/java_providers.bzl @@ -90,7 +90,7 @@ JavaClasspathEntry = record( abi = field(Artifact), # abi_as_dir is the abi .jar unzipped into a directory. If available, it is used to provide # .class level granularity for javacd and kotlincd dep-files. - abi_as_dir = field([Artifact, None]), + abi_as_dir = field(Artifact | None), required_for_source_only_abi = field(bool), ) @@ -125,11 +125,11 @@ JavaCompilingDepsTSet = transitive_set( JavaPackagingDep = record( label = Label, - jar = [Artifact, None], + jar = Artifact | None, dex = [DexLibraryInfo, None], - gwt_module = [Artifact, None], + gwt_module = Artifact | None, is_prebuilt_jar = bool, - proguard_config = [Artifact, None], + proguard_config = Artifact | None, # An output that is used solely by the system to have an artifact bound to the target (that the core can then use to find # the right target from the given artifact). 
@@ -204,11 +204,11 @@ KeystoreInfo = provider( JavaCompileOutputs = record( full_library = Artifact, - class_abi = [Artifact, None], - source_abi = [Artifact, None], - source_only_abi = [Artifact, None], + class_abi = Artifact | None, + source_abi = Artifact | None, + source_only_abi = Artifact | None, classpath_entry = JavaClasspathEntry, - annotation_processor_output = [Artifact, None], + annotation_processor_output = Artifact | None, ) JavaProviders = record( @@ -242,13 +242,13 @@ def to_list(java_providers: JavaProviders) -> list[Provider]: # specific artifact to be used as the abi for the JavaClasspathEntry. def make_compile_outputs( full_library: Artifact, - class_abi: [Artifact, None] = None, - source_abi: [Artifact, None] = None, - source_only_abi: [Artifact, None] = None, - classpath_abi: [Artifact, None] = None, - classpath_abi_dir: [Artifact, None] = None, + class_abi: Artifact | None = None, + source_abi: Artifact | None = None, + source_only_abi: Artifact | None = None, + classpath_abi: Artifact | None = None, + classpath_abi_dir: Artifact | None = None, required_for_source_only_abi: bool = False, - annotation_processor_output: [Artifact, None] = None) -> JavaCompileOutputs: + annotation_processor_output: Artifact | None = None) -> JavaCompileOutputs: expect(classpath_abi != None or classpath_abi_dir == None, "A classpath_abi_dir should only be provided if a classpath_abi is provided!") return JavaCompileOutputs( full_library = full_library, @@ -303,15 +303,15 @@ def derive_compiling_deps( def create_java_packaging_dep( ctx: AnalysisContext, - library_jar: [Artifact, None] = None, - output_for_classpath_macro: [Artifact, None] = None, + library_jar: Artifact | None = None, + output_for_classpath_macro: Artifact | None = None, needs_desugar: bool = False, desugar_deps: list[Artifact] = [], is_prebuilt_jar: bool = False, has_srcs: bool = True, dex_weight_factor: int = 1, - proguard_config: [Artifact, None] = None, - gwt_module: [Artifact, None] = None) 
-> JavaPackagingDep: + proguard_config: Artifact | None = None, + gwt_module: Artifact | None = None) -> JavaPackagingDep: dex_toolchain = getattr(ctx.attrs, "_dex_toolchain", None) if library_jar != None and has_srcs and dex_toolchain != None and ctx.attrs._dex_toolchain[DexToolchainInfo].d8_command != None: dex = get_dex_produced_from_java_library( @@ -396,8 +396,8 @@ def _create_non_template_providers( desugar_classpath: list[Artifact] = [], is_prebuilt_jar: bool = False, has_srcs: bool = True, - proguard_config: [Artifact, None] = None, - gwt_module: [Artifact, None] = None) -> (JavaLibraryInfo, JavaPackagingInfo, SharedLibraryInfo, ResourceInfo, LinkableGraph): + proguard_config: Artifact | None = None, + gwt_module: Artifact | None = None) -> (JavaLibraryInfo, JavaPackagingInfo, SharedLibraryInfo, ResourceInfo, LinkableGraph): """Creates java library providers of type `JavaLibraryInfo` and `JavaPackagingInfo`. Args: @@ -460,9 +460,9 @@ def create_java_library_providers( is_prebuilt_jar: bool = False, has_srcs: bool = True, generated_sources: list[Artifact] = [], - annotation_jars_dir: [Artifact, None] = None, - proguard_config: [Artifact, None] = None, - gwt_module: [Artifact, None] = None) -> (JavaLibraryInfo, JavaPackagingInfo, SharedLibraryInfo, ResourceInfo, LinkableGraph, TemplatePlaceholderInfo, JavaLibraryIntellijInfo): + annotation_jars_dir: Artifact | None = None, + proguard_config: Artifact | None = None, + gwt_module: Artifact | None = None) -> (JavaLibraryInfo, JavaPackagingInfo, SharedLibraryInfo, ResourceInfo, LinkableGraph, TemplatePlaceholderInfo, JavaLibraryIntellijInfo): first_order_classpath_deps = filter(None, [x.get(JavaLibraryInfo) for x in declared_deps + exported_deps + runtime_deps]) first_order_classpath_libs = [dep.output_for_classpath_macro for dep in first_order_classpath_deps] diff --git a/prelude/java/javacd_jar_creator.bzl b/prelude/java/javacd_jar_creator.bzl index 8b8e582ce..c13152edd 100644 --- 
a/prelude/java/javacd_jar_creator.bzl +++ b/prelude/java/javacd_jar_creator.bzl @@ -57,13 +57,13 @@ def create_jar_artifact_javacd( abi_generation_mode: [AbiGenerationMode, None], java_toolchain: JavaToolchainInfo, label, - output: [Artifact, None], + output: Artifact | None, javac_tool: [typing.Any, None], srcs: list[Artifact], remove_classes: list[str], resources: list[Artifact], resources_root: [str, None], - manifest_file: [Artifact, None], + manifest_file: Artifact | None, annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], source_level: int, @@ -73,7 +73,7 @@ def create_jar_artifact_javacd( source_only_abi_deps: list[Dependency], extra_arguments: cmd_args, additional_classpath_entries: list[Artifact], - additional_compiled_srcs: [Artifact, None], + additional_compiled_srcs: Artifact | None, bootclasspath_entries: list[Artifact], is_building_android_binary: bool, is_creating_subtarget: bool = False) -> JavaCompileOutputs: @@ -194,9 +194,9 @@ def create_jar_artifact_javacd( qualified_name: str, output_paths: OutputPaths, classpath_jars_tag: ArtifactTag, - abi_dir: [Artifact, None], + abi_dir: Artifact | None, target_type: TargetType, - path_to_class_hashes: [Artifact, None], + path_to_class_hashes: Artifact | None, is_creating_subtarget: bool = False, source_only_abi_compiling_deps: list[JavaClasspathEntry] = []): proto = declare_prefixed_output(actions, actions_identifier, "jar_command.proto.json") diff --git a/prelude/js/js_providers.bzl b/prelude/js/js_providers.bzl index bc24622a4..c5027eaf5 100644 --- a/prelude/js/js_providers.bzl +++ b/prelude/js/js_providers.bzl @@ -37,7 +37,7 @@ JsBundleInfo = provider( def get_transitive_outputs( actions: AnalysisActions, - value: [Artifact, None] = None, + value: Artifact | None = None, deps: list[JsLibraryInfo] = []) -> TransitiveOutputsTSet: kwargs = {} if value: diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl index 
f476bead5..a1ca8e531 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl +++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -131,7 +131,7 @@ def define_output_paths(actions: AnalysisActions, prefix: [str, None], label: La ) # buildifier: disable=uninitialized -def add_output_paths_to_cmd_args(cmd: cmd_args, output_paths: OutputPaths, path_to_class_hashes: [Artifact, None]) -> cmd_args: +def add_output_paths_to_cmd_args(cmd: cmd_args, output_paths: OutputPaths, path_to_class_hashes: Artifact | None) -> cmd_args: if path_to_class_hashes != None: cmd.hidden(path_to_class_hashes.as_output()) cmd.hidden(output_paths.jar_parent.as_output()) @@ -158,7 +158,7 @@ def encode_output_paths(label: Label, paths: OutputPaths, target_type: TargetTyp libraryTargetFullyQualifiedName = base_qualified_name(label), ) -def encode_jar_params(remove_classes: list[str], output_paths: OutputPaths, manifest_file: [Artifact, None]) -> struct: +def encode_jar_params(remove_classes: list[str], output_paths: OutputPaths, manifest_file: Artifact | None) -> struct: return struct( jarPath = output_paths.jar.as_output(), removeEntryPredicate = struct( @@ -289,7 +289,7 @@ def encode_base_jar_command( resources_map: dict[str, Artifact], annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], - manifest_file: [Artifact, None], + manifest_file: Artifact | None, extra_arguments: cmd_args, source_only_abi_compiling_deps: list[JavaClasspathEntry], track_class_usage: bool) -> struct: @@ -461,9 +461,9 @@ def prepare_cd_exe( def prepare_final_jar( actions: AnalysisActions, actions_identifier: [str, None], - output: [Artifact, None], + output: Artifact | None, output_paths: OutputPaths, - additional_compiled_srcs: [Artifact, None], + additional_compiled_srcs: Artifact | None, jar_builder: RunInfo, jar_postprocessor: [RunInfo, None]) -> Artifact: if not additional_compiled_srcs: @@ -504,14 +504,14 @@ def generate_abi_jars( actions_identifier: [str, None], label: Label, 
abi_generation_mode: [AbiGenerationMode, None], - additional_compiled_srcs: [Artifact, None], + additional_compiled_srcs: Artifact | None, is_building_android_binary: bool, class_abi_generator: Dependency, final_jar: Artifact, compiling_deps_tset: [JavaCompilingDepsTSet, None], source_only_abi_deps: list[Dependency], - class_abi_jar: [Artifact, None], - class_abi_output_dir: [Artifact, None], + class_abi_jar: Artifact | None, + class_abi_output_dir: Artifact | None, encode_abi_command: typing.Callable, define_action: typing.Callable) -> tuple: class_abi = None diff --git a/prelude/kotlin/kotlin_library.bzl b/prelude/kotlin/kotlin_library.bzl index c3eefe0f7..417d3001c 100644 --- a/prelude/kotlin/kotlin_library.bzl +++ b/prelude/kotlin/kotlin_library.bzl @@ -50,7 +50,7 @@ def _create_kotlin_sources( deps: list[Dependency], annotation_processor_properties: AnnotationProcessorProperties, ksp_annotation_processor_properties: AnnotationProcessorProperties, - additional_classpath_entries: list[Artifact]) -> (Artifact, [Artifact, None], [Artifact, None]): + additional_classpath_entries: list[Artifact]) -> (Artifact, Artifact | None, Artifact | None): """ Runs kotlinc on the provided kotlin sources. 
""" diff --git a/prelude/kotlin/kotlincd_jar_creator.bzl b/prelude/kotlin/kotlincd_jar_creator.bzl index 0eaf5681c..521b8c766 100644 --- a/prelude/kotlin/kotlincd_jar_creator.bzl +++ b/prelude/kotlin/kotlincd_jar_creator.bzl @@ -62,7 +62,7 @@ def create_jar_artifact_kotlincd( resources_root: [str, None], annotation_processor_properties: AnnotationProcessorProperties, plugin_params: [PluginParams, None], - manifest_file: [Artifact, None], + manifest_file: Artifact | None, source_level: int, target_level: int, deps: list[Dependency], @@ -242,9 +242,9 @@ def create_jar_artifact_kotlincd( qualified_name: str, output_paths: OutputPaths, classpath_jars_tag: ArtifactTag, - abi_dir: [Artifact, None], + abi_dir: Artifact | None, target_type: TargetType, - path_to_class_hashes: [Artifact, None], + path_to_class_hashes: Artifact | None, source_only_abi_compiling_deps: list[JavaClasspathEntry] = [], is_creating_subtarget: bool = False): _unused = source_only_abi_compiling_deps diff --git a/prelude/linking/link_info.bzl b/prelude/linking/link_info.bzl index 948878962..feb35d1ee 100644 --- a/prelude/linking/link_info.bzl +++ b/prelude/linking/link_info.bzl @@ -80,7 +80,7 @@ ArchiveLinkable = record( # Artifact in the .a format from ar archive = field(Archive), # If a bitcode bundle was created for this artifact it will be present here - bitcode_bundle = field([Artifact, None], None), + bitcode_bundle = field(Artifact | None, None), linker_type = field(str), link_whole = field(bool, False), # Indicates if this archive may contain LTO bit code. 
Can be set to `False` @@ -99,7 +99,7 @@ SharedLibLinkable = record( ObjectsLinkable = record( objects = field([list[Artifact], None], None), # Any of the objects that are in bitcode format - bitcode_bundle = field([Artifact, None], None), + bitcode_bundle = field(Artifact | None, None), linker_type = field(str), link_whole = field(bool, False), ) @@ -381,11 +381,11 @@ LinkArgs = record( LinkedObject = record( output = field([Artifact, Promise]), # The combined bitcode from this linked object and any static libraries - bitcode_bundle = field([Artifact, None], None), + bitcode_bundle = field(Artifact | None, None), # the generated linked output before running stripping(and bolt). unstripped_output = field(Artifact), # the generated linked output before running bolt, may be None if bolt is not used. - prebolt_output = field([Artifact, None], None), + prebolt_output = field(Artifact | None, None), # The LinkArgs used to produce this LinkedObject. This can be useful for debugging or # for downstream rules to reproduce the shared library with some modifications (for example # android relinker will link again with an added version script argument). @@ -393,32 +393,32 @@ LinkedObject = record( # A linked object (binary/shared library) may have an associated dwp file with # its corresponding DWARF debug info. # May be None when Split DWARF is disabled or for some types of synthetic link objects. - dwp = field([Artifact, None], None), + dwp = field(Artifact | None, None), # Additional dirs or paths that contain debug info referenced by the linked # object (e.g. split dwarf files or PDB file). external_debug_info = field(ArtifactTSet, ArtifactTSet()), # This argsfile is generated in the `cxx_link` step and contains a list of arguments # passed to the linker. It is being exposed as a sub-target for debugging purposes. 
- linker_argsfile = field([Artifact, None], None), + linker_argsfile = field(Artifact | None, None), # The filelist is generated in the `cxx_link` step and contains a list of # object files (static libs or plain object files) passed to the linker. # It is being exposed for debugging purposes. Only present when a Darwin # linker is used. - linker_filelist = field([Artifact, None], None), + linker_filelist = field(Artifact | None, None), # The linker command as generated by `cxx_link`. Exposed for debugging purposes only. # Not present for DistLTO scenarios. linker_command = field([cmd_args, None], None), # This sub-target is only available for distributed thinLTO builds. - index_argsfile = field([Artifact, None], None), + index_argsfile = field(Artifact | None, None), # Import library for linking with DLL on Windows. # If not on Windows it's always None. - import_library = field([Artifact, None], None), + import_library = field(Artifact | None, None), # A linked object (binary/shared library) may have an associated PDB file with # its corresponding Windows debug info. # If not on Windows it's always None. - pdb = field([Artifact, None], None), + pdb = field(Artifact | None, None), # Split-debug info generated by the link. - split_debug_output = field([Artifact, None], None), + split_debug_output = field(Artifact | None, None), ) # A map of native linkable infos from transitive dependencies for each LinkStrategy. @@ -898,7 +898,7 @@ LinkCommandDebugOutput = record( filename = str, command = ArgLike, argsfile = Artifact, - filelist = [Artifact, None], + filelist = Artifact | None, ) # NB: Debug output is _not_ transitive over deps, so tsets are not used here. diff --git a/prelude/linking/shared_libraries.bzl b/prelude/linking/shared_libraries.bzl index 7d95e5345..25c3eeca6 100644 --- a/prelude/linking/shared_libraries.bzl +++ b/prelude/linking/shared_libraries.bzl @@ -23,7 +23,7 @@ SharedLibrary = record( # The sonames of the shared libraries that this links against. 
# TODO(cjhopman): This is currently always available. shlib_deps = field(list[str] | None), - stripped_lib = field([Artifact, None]), + stripped_lib = field(Artifact | None), can_be_asset = field(bool), for_primary_apk = field(bool), soname = field(str), diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index dcd173c22..f238cb81f 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -425,7 +425,7 @@ def rust_compile( params: BuildParams, default_roots: list[str], extra_link_args: list[typing.Any] = [], - predeclared_output: [Artifact, None] = None, + predeclared_output: Artifact | None = None, extra_flags: list[[str, ResolvedStringWithMacros]] = [], designated_clippy: bool = False, allow_cache_upload: bool = False, @@ -1179,7 +1179,7 @@ def _rustc_emit( emit: Emit, subdir: str, params: BuildParams, - predeclared_output: [Artifact, None] = None) -> EmitOperation: + predeclared_output: Artifact | None = None) -> EmitOperation: toolchain_info = compile_ctx.toolchain_info simple_crate = attr_simple_crate_for_filenames(ctx) crate_type = params.crate_type @@ -1254,7 +1254,7 @@ def _rustc_emit( Invoke = record( diag_txt = field(Artifact), diag_json = field(Artifact), - build_status = field([Artifact, None]), + build_status = field(Artifact | None), identifier = field([str, None]), ) diff --git a/prelude/rust/context.bzl b/prelude/rust/context.bzl index f59400b09..8c3df11d7 100644 --- a/prelude/rust/context.bzl +++ b/prelude/rust/context.bzl @@ -12,7 +12,7 @@ load(":rust_toolchain.bzl", "PanicRuntime", "RustExplicitSysrootDeps", "RustTool CrateName = record( simple = field(str), - dynamic = field([Artifact, None]), + dynamic = field(Artifact | None), ) # Struct for sharing common args between rustc and rustdoc diff --git a/prelude/rust/failure_filter.bzl b/prelude/rust/failure_filter.bzl index 67d1482f0..67533c1da 100644 --- a/prelude/rust/failure_filter.bzl +++ b/prelude/rust/failure_filter.bzl @@ -14,7 +14,7 @@ load(":context.bzl", 
"CompileContext") def failure_filter( ctx: AnalysisContext, compile_ctx: CompileContext, - predeclared_output: [Artifact, None], + predeclared_output: Artifact | None, build_status: Artifact, required: Artifact, stderr: Artifact, diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 85ae1cc5c..0e6add967 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -115,7 +115,7 @@ RustLinkStrategyInfo = record( transitive_proc_macro_deps = field(dict[RustProcMacroMarker, ()]), # Path to PDB file with Windows debug data. - pdb = field([Artifact, None]), + pdb = field(Artifact | None), # Debug info which is referenced -- but not included -- by the linkable rlib. external_debug_info = field(ArtifactTSet), ) @@ -531,7 +531,7 @@ def inherited_rust_external_debug_info( def inherited_external_debug_info( ctx: AnalysisContext, dep_ctx: DepCollectionContext, - dwo_output_directory: [Artifact, None], + dwo_output_directory: Artifact | None, dep_link_strategy: LinkStrategy) -> ArtifactTSet: inherited_debug_infos = [] inherited_link_infos = [] diff --git a/prelude/rust/outputs.bzl b/prelude/rust/outputs.bzl index dd3cca3c3..2607558ab 100644 --- a/prelude/rust/outputs.bzl +++ b/prelude/rust/outputs.bzl @@ -18,11 +18,11 @@ RustcOutput = record( # Only available on metadata-like emits clippy_txt = field(Artifact | None), clippy_json = field(Artifact | None), - pdb = field([Artifact, None]), - dwp_output = field([Artifact, None]), + pdb = field(Artifact | None), + dwp_output = field(Artifact | None), # Zero or more Split DWARF debug info files are emitted into this directory # with unpredictable filenames. 
- dwo_output_directory = field([Artifact, None]), + dwo_output_directory = field(Artifact | None), extra_external_debug_info = field(list[ArtifactTSet]), ) From 0583c35c7c494266899a97286639a965fb527a86 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Sat, 30 Mar 2024 21:29:04 -0700 Subject: [PATCH 0647/1133] More symbols as types Summary: `AndroidResourceInfo` is removed because of import cycle. Reviewed By: JakobDegen Differential Revision: D55563452 fbshipit-source-id: 0b18c3744b71a8df2ff09c9de024a94e09067ce3 --- prelude/android/android_binary.bzl | 11 +++++++++-- prelude/android/android_providers.bzl | 2 +- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/prelude/android/android_binary.bzl b/prelude/android/android_binary.bzl index d22c5a5b0..d6a5b35ec 100644 --- a/prelude/android/android_binary.bzl +++ b/prelude/android/android_binary.bzl @@ -24,12 +24,19 @@ load("@prelude//android:preprocess_java_classes.bzl", "get_preprocessed_java_cla load("@prelude//android:proguard.bzl", "get_proguard_output") load("@prelude//android:util.bzl", "create_enhancement_context") load("@prelude//android:voltron.bzl", "get_target_to_module_mapping") -load("@prelude//java:java_providers.bzl", "JavaPackagingInfo", "create_java_packaging_dep", "get_all_java_packaging_deps", "get_all_java_packaging_deps_from_packaging_infos") +load( + "@prelude//java:java_providers.bzl", + "JavaPackagingDep", # @unused Used as type + "JavaPackagingInfo", + "create_java_packaging_dep", + "get_all_java_packaging_deps", + "get_all_java_packaging_deps_from_packaging_infos", +) load("@prelude//utils:expect.bzl", "expect") AndroidBinaryInfo = record( sub_targets = dict, - java_packaging_deps = list["JavaPackagingDep"], + java_packaging_deps = list[JavaPackagingDep], deps_by_platform = dict, primary_platform = str, dex_files_info = DexFilesInfo, diff --git a/prelude/android/android_providers.bzl b/prelude/android/android_providers.bzl index 7359eea5b..c437d17a1 100644 --- 
a/prelude/android/android_providers.bzl +++ b/prelude/android/android_providers.bzl @@ -82,7 +82,7 @@ AndroidBinaryResourcesInfo = record( # list of jars that could contain resources that should be packaged into the APK jar_files_that_may_contain_resources = list[Artifact], # The resource infos that are used in this APK - unfiltered_resource_infos = list["AndroidResourceInfo"], + unfiltered_resource_infos = list, # list[AndroidResourceInfo] ) # Information about an `android_build_config` From 7f0f4605462786c6fc23d88d5b8aa909a9975e18 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Sat, 30 Mar 2024 21:29:04 -0700 Subject: [PATCH 0648/1133] There is no "artifacts" type Summary: Getting rid of string literals in types, which are deprecated and need to be removed. Reviewed By: JakobDegen Differential Revision: D55563448 fbshipit-source-id: 4717999138a4f68af3c3745ec15539d7e7e82620 --- prelude/cxx/argsfiles.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/cxx/argsfiles.bzl b/prelude/cxx/argsfiles.bzl index 327fccaca..37c026969 100644 --- a/prelude/cxx/argsfiles.bzl +++ b/prelude/cxx/argsfiles.bzl @@ -15,7 +15,7 @@ CompileArgsfile = record( # This argsfile as a command form that would use the argsfile (includes dependent inputs). cmd_form = field(cmd_args), # Input args necessary for the argsfile to reference. - input_args = field(list[["artifacts", cmd_args]]), + input_args = field(list[cmd_args]), # Args as written to the argsfile (with shell quoting applied). args = field(cmd_args), # Args aggregated for the argsfile excluding file prefix args (excludes shell quoting). From 58ec8deebdf8d953437973dfe1ee35a3e2887649 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Sat, 30 Mar 2024 21:29:04 -0700 Subject: [PATCH 0649/1133] "label_relative_path" -> CellPath Summary: String literals in types are deprecated and will be removed. 
Reviewed By: JakobDegen Differential Revision: D55563449 fbshipit-source-id: 37cfbf1bc14385df6ae48a951c85be0c6340c3fd --- prelude/cxx/preprocessor.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/cxx/preprocessor.bzl b/prelude/cxx/preprocessor.bzl index 6fe20c87a..e5067e43c 100644 --- a/prelude/cxx/preprocessor.bzl +++ b/prelude/cxx/preprocessor.bzl @@ -32,7 +32,7 @@ SystemIncludeDirs = record( # Compiler type to infer correct include flags compiler_type = field(str), # Directories to be included via [-isystem | /external:I] [arglike things] - include_dirs = field(list["label_relative_path"]), + include_dirs = field(list[CellPath]), ) CPreprocessorArgs = record( @@ -53,7 +53,7 @@ CPreprocessor = record( # Those should be mutually exclusive with normal headers as per documentation raw_headers = field(list[Artifact], []), # Directories to be included via -I, [arglike things] - include_dirs = field(list["label_relative_path"], []), + include_dirs = field(list[CellPath], []), # Directories to be included via -isystem, [arglike things] system_include_dirs = field([SystemIncludeDirs, None], None), # Whether to compile with modules support From 12e10c717b8c93763806076c6fde8c974076f507 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Sun, 31 Mar 2024 19:48:04 -0700 Subject: [PATCH 0650/1133] Convert some types from strings to symbols Summary: String literals as types are deprecated. 
Reviewed By: JakobDegen Differential Revision: D55565566 fbshipit-source-id: f47df315e659ab7bca15c364ba475f16a9506c55 --- prelude/android/android_binary.bzl | 6 ++++-- prelude/android/android_providers.bzl | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/prelude/android/android_binary.bzl b/prelude/android/android_binary.bzl index d6a5b35ec..25abc825e 100644 --- a/prelude/android/android_binary.bzl +++ b/prelude/android/android_binary.bzl @@ -10,6 +10,8 @@ load("@prelude//android:android_binary_resources_rules.bzl", "get_android_binary load("@prelude//android:android_build_config.bzl", "generate_android_build_config", "get_build_config_fields") load( "@prelude//android:android_providers.bzl", + "AndroidBinaryNativeLibsInfo", # @unused Used as type + "AndroidBinaryResourcesInfo", # @unused Used as type "AndroidBuildConfigInfo", # @unused Used as type "BuildConfigField", "DexFilesInfo", @@ -40,8 +42,8 @@ AndroidBinaryInfo = record( deps_by_platform = dict, primary_platform = str, dex_files_info = DexFilesInfo, - native_library_info = "AndroidBinaryNativeLibsInfo", - resources_info = "AndroidBinaryResourcesInfo", + native_library_info = AndroidBinaryNativeLibsInfo, + resources_info = AndroidBinaryResourcesInfo, materialized_artifacts = list[Artifact], ) diff --git a/prelude/android/android_providers.bzl b/prelude/android/android_providers.bzl index c437d17a1..fa458f51e 100644 --- a/prelude/android/android_providers.bzl +++ b/prelude/android/android_providers.bzl @@ -51,7 +51,7 @@ AndroidBinaryNativeLibsInfo = record( # Indicates which shared lib producing targets are included in the binary. Used by instrumentation tests # to exclude those from the test apk. 
shared_libraries = list[TargetLabel], - exopackage_info = ["ExopackageNativeInfo", None], + exopackage_info = [ExopackageNativeInfo, None], root_module_native_lib_assets = list[Artifact], non_root_module_native_lib_assets = list[Artifact], native_libs_for_primary_apk = list[Artifact], From 53c9a2f1974634273cbc7ed6ae25b97956c358aa Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Sun, 31 Mar 2024 19:48:04 -0700 Subject: [PATCH 0651/1133] "ArtifactTSet" -> ArtifactTSet Summary: Because string literals as types will stop working soon. Reviewed By: JakobDegen Differential Revision: D55566093 fbshipit-source-id: 8917a5f32b0a5e7028d5cd4486c55d2855cdab92 --- prelude/apple/debug.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/apple/debug.bzl b/prelude/apple/debug.bzl index 7b6b7239b..d3385d68a 100644 --- a/prelude/apple/debug.bzl +++ b/prelude/apple/debug.bzl @@ -32,7 +32,7 @@ AppleDebuggableInfo = provider( ) _AppleDebugInfo = record( - debug_info_tset = "ArtifactTSet", + debug_info_tset = ArtifactTSet, filtered_map = field([dict[Label, list[Artifact]], None]), ) From de46ee9b58a0c8c5692c7d1aeb8321a83d4b9534 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Mon, 1 Apr 2024 00:22:07 -0700 Subject: [PATCH 0652/1133] Use symbol types instead of string literal types Summary: `.type` returns string, and string as type is deprecated. Use `isinstance` instead of `eval_type` while I'm there, because `eval_type` is not needed here. 
Reviewed By: JakobDegen Differential Revision: D55565742 fbshipit-source-id: ce4ef3511cc3bfd5b65d6bd126bac1e49907fb32 --- prelude/linking/linkable_graph.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index 74531b2d8..3f026e8b4 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -212,7 +212,7 @@ def create_linkable_graph( deps: list[[LinkableGraph, Dependency]] = []) -> LinkableGraph: graph_deps = [] for d in deps: - if eval_type(LinkableGraph.type).matches(d): + if isinstance(d, LinkableGraph): graph_deps.append(d) else: graph = d.get(LinkableGraph) @@ -256,7 +256,7 @@ def linkable_deps(deps: list[Dependency | LinkableGraph]) -> list[Label]: labels = [] for dep in deps: - if eval_type(LinkableGraph.type).matches(dep): + if isinstance(dep, LinkableGraph): labels.append(dep.label) else: dep_info = linkable_graph(dep) From 940706e85fb3159eaa0d2e89f0c1cb1fa4076d23 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Mon, 1 Apr 2024 09:50:18 -0700 Subject: [PATCH 0653/1133] remove dead platforms Summary: Remove the no longer buildable 32 bit platforms, the toolchain doesn't support these any more. 
Reviewed By: chatura-atapattu Differential Revision: D55485679 fbshipit-source-id: 236a855eec5f1e9b756ec12bceababc9bdcf7d09 --- prelude/platforms/apple/base.bzl | 4 ---- prelude/platforms/apple/constants.bzl | 12 ------------ 2 files changed, 16 deletions(-) diff --git a/prelude/platforms/apple/base.bzl b/prelude/platforms/apple/base.bzl index e1960832f..180ea3aae 100644 --- a/prelude/platforms/apple/base.bzl +++ b/prelude/platforms/apple/base.bzl @@ -43,14 +43,10 @@ BUILD_MODE_TO_CONSTRAINTS_MAP = { _MOBILE_PLATFORMS = [ ios_platforms.IPHONEOS_ARM64, - ios_platforms.IPHONEOS_ARMV7, ios_platforms.IPHONESIMULATOR_ARM64, - ios_platforms.IPHONESIMULATOR_I386, ios_platforms.IPHONESIMULATOR_X86_64, watch_platforms.WATCHOS_ARM64_32, - watch_platforms.WATCHOS_ARMV7K, watch_platforms.WATCHSIMULATOR_ARM64, - watch_platforms.WATCHSIMULATOR_I386, watch_platforms.WATCHSIMULATOR_X86_64, ] diff --git a/prelude/platforms/apple/constants.bzl b/prelude/platforms/apple/constants.bzl index 88bec5da6..59a2e12b4 100644 --- a/prelude/platforms/apple/constants.bzl +++ b/prelude/platforms/apple/constants.bzl @@ -34,12 +34,8 @@ APPLETVSIMULATOR_X86_64 = "appletvsimulator-x86_64" IPHONEOS_ARM64 = "iphoneos-arm64" -IPHONEOS_ARMV7 = "iphoneos-armv7" - IPHONESIMULATOR_ARM64 = "iphonesimulator-arm64" -IPHONESIMULATOR_I386 = "iphonesimulator-i386" - IPHONESIMULATOR_X86_64 = "iphonesimulator-x86_64" # Mac Catalyst Platforms/Flavors @@ -60,12 +56,8 @@ MACOS_UNIVERSAL = "macosx-universal" WATCHOS_ARM64_32 = "watchos-arm64_32" -WATCHOS_ARMV7K = "watchos-armv7k" - WATCHSIMULATOR_ARM64 = "watchsimulator-arm64" -WATCHSIMULATOR_I386 = "watchsimulator-i386" - WATCHSIMULATOR_X86_64 = "watchsimulator-x86_64" # Vision OS Platforms/Flavors @@ -89,9 +81,7 @@ appletv_platforms = struct( ios_platforms = struct( IPHONEOS_ARM64 = IPHONEOS_ARM64, - IPHONEOS_ARMV7 = IPHONEOS_ARMV7, IPHONESIMULATOR_ARM64 = IPHONESIMULATOR_ARM64, - IPHONESIMULATOR_I386 = IPHONESIMULATOR_I386, IPHONESIMULATOR_X86_64 = 
IPHONESIMULATOR_X86_64, ) @@ -108,9 +98,7 @@ mac_platforms = struct( watch_platforms = struct( WATCHOS_ARM64_32 = WATCHOS_ARM64_32, - WATCHOS_ARMV7K = WATCHOS_ARMV7K, WATCHSIMULATOR_ARM64 = WATCHSIMULATOR_ARM64, - WATCHSIMULATOR_I386 = WATCHSIMULATOR_I386, WATCHSIMULATOR_X86_64 = WATCHSIMULATOR_X86_64, ) From 9425348d14e3fc8505c3e693d88fea71f4a9c86d Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Mon, 1 Apr 2024 14:55:06 -0700 Subject: [PATCH 0654/1133] Use some pure code instead of cmd_args.add Reviewed By: iguridi Differential Revision: D55233576 fbshipit-source-id: 931a4fa1f7a7f31c93136e52038ec73cf5d9fb21 --- prelude/worker_tool.bzl | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/prelude/worker_tool.bzl b/prelude/worker_tool.bzl index 7b1bad6ce..1cac6e325 100644 --- a/prelude/worker_tool.bzl +++ b/prelude/worker_tool.bzl @@ -28,9 +28,9 @@ def worker_tool(ctx: AnalysisContext) -> list[Provider]: expect(worker_tool_run_info != None, "Worker tool executable must have a RunInfo!") worker_tool_runner = ctx.attrs._worker_tool_runner[RunInfo] - worker_tool_cmd = cmd_args(worker_tool_runner) - worker_tool_cmd.add("--worker-tool") - worker_tool_cmd.add(worker_tool_run_info) + worker_tool_cmd = [worker_tool_runner] + worker_tool_cmd.append("--worker-tool") + worker_tool_cmd.append(worker_tool_run_info) worker_args = ctx.attrs.args if worker_args: @@ -40,8 +40,8 @@ def worker_tool(ctx: AnalysisContext) -> list[Provider]: allow_args = True, ) - worker_tool_cmd.add("--worker-args-file") - worker_tool_cmd.add(worker_args_file) + worker_tool_cmd.append("--worker-args-file") + worker_tool_cmd.append(worker_args_file) worker_env = ctx.attrs.env if worker_env: @@ -56,9 +56,10 @@ def worker_tool(ctx: AnalysisContext) -> list[Provider]: allow_args = True, ) - worker_tool_cmd.add("--worker-env-file") - worker_tool_cmd.add(env_args_file) + worker_tool_cmd.append("--worker-env-file") + worker_tool_cmd.append(env_args_file) + worker_tool_cmd = 
cmd_args(worker_tool_cmd) return [ DefaultInfo(), RunInfo( From e25b4d65f848d25eb12b88ee94747c9e218510f6 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Mon, 1 Apr 2024 14:55:06 -0700 Subject: [PATCH 0655/1133] Use some pure code instead of cmd_args.add Reviewed By: iguridi Differential Revision: D55233696 fbshipit-source-id: 96f298779bc8c37bd8d03e4898afcec530209138 --- prelude/js/js_utils.bzl | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/prelude/js/js_utils.bzl b/prelude/js/js_utils.bzl index b52f22932..2bb1ce8a8 100644 --- a/prelude/js/js_utils.bzl +++ b/prelude/js/js_utils.bzl @@ -142,8 +142,11 @@ def run_worker_commands( identifier: str, category: str, hidden_artifacts = [cmd_args]): - worker_args = cmd_args("--command-args-file", command_args_files) - worker_args.add("--command-args-file-extra-data-fixup-hack=true") + worker_args = cmd_args( + "--command-args-file", + command_args_files, + "--command-args-file-extra-data-fixup-hack=true", + ) worker_argsfile = ctx.actions.declare_output(paths.join(identifier, "worker_{}.argsfile".format(category))) ctx.actions.write(worker_argsfile.as_output(), worker_args) From 72b7f0b1d512dc6336a3bd0ae897cba869d894fe Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Mon, 1 Apr 2024 14:55:06 -0700 Subject: [PATCH 0656/1133] Use some pure code instead of cmd_args.add Reviewed By: iguridi Differential Revision: D55233744 fbshipit-source-id: 23f7e7288b7042eb587fbeaad374d44214a2062d --- prelude/go/go_test.bzl | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index de3da9394..fc6d55106 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -35,19 +35,19 @@ def _gen_test_main( Generate a `main.go` which calls tests from the given sources. 
""" output = ctx.actions.declare_output("main.go") - cmd = cmd_args() - cmd.add(ctx.attrs._testmaingen[RunInfo]) + cmd = [] + cmd.append(ctx.attrs._testmaingen[RunInfo]) # if ctx.attrs.coverage_mode: - # cmd.add(cmd_args(ctx.attrs.coverage_mode, format = "--cover-mode={}")) - cmd.add(cmd_args(output.as_output(), format = "--output={}")) - cmd.add(cmd_args(pkg_name, format = "--import-path={}")) + # cmd.append(cmd_args(ctx.attrs.coverage_mode, format = "--cover-mode={}")) + cmd.append(cmd_args(output.as_output(), format = "--output={}")) + cmd.append(cmd_args(pkg_name, format = "--import-path={}")) if coverage_mode != None: - cmd.add("--cover-mode", coverage_mode.value) + cmd.extend(["--cover-mode", coverage_mode.value]) for _, vars in coverage_vars.items(): - cmd.add(vars) - cmd.add(srcs) - ctx.actions.run(cmd, category = "go_test_main_gen") + cmd.append(vars) + cmd.append(srcs) + ctx.actions.run(cmd_args(cmd), category = "go_test_main_gen") return output def is_subpackage_of(other_pkg_name: str, pkg_name: str) -> bool: From 697bbeef333bcdf0d05b221a26411c04c30302b8 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Mon, 1 Apr 2024 14:55:06 -0700 Subject: [PATCH 0657/1133] Use some pure code instead of cmd_args.add Reviewed By: igorsugak Differential Revision: D55233877 fbshipit-source-id: fc767a391850ec7756c348016e41ae3fa8372181 --- prelude/cxx/bitcode.bzl | 12 +++++++----- prelude/cxx/headers.bzl | 4 +--- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/prelude/cxx/bitcode.bzl b/prelude/cxx/bitcode.bzl index 782e8a30b..cdb9f84e6 100644 --- a/prelude/cxx/bitcode.bzl +++ b/prelude/cxx/bitcode.bzl @@ -38,11 +38,13 @@ def _bundle(ctx: AnalysisContext, name: str, args: cmd_args, prefer_local: bool) argsfile, _ = ctx.actions.write(name + ".argsfile", args, allow_args = True) command = cmd_args(argsfile, format = "@{}", delimiter = "").hidden(args) - llvm_cmd = cmd_args(llvm_link) - llvm_cmd.add(command) - llvm_cmd.add("-v") - llvm_cmd.add("-o") - 
llvm_cmd.add(bundle_output.as_output()) + llvm_cmd = cmd_args( + llvm_link, + command, + "-v", + "-o", + bundle_output.as_output(), + ) ctx.actions.run(llvm_cmd, category = "bitcode_bundle", identifier = name, prefer_local = prefer_local) return bundle_output diff --git a/prelude/cxx/headers.bzl b/prelude/cxx/headers.bzl index 65134432c..eaabc577c 100644 --- a/prelude/cxx/headers.bzl +++ b/prelude/cxx/headers.bzl @@ -336,12 +336,10 @@ def _get_debug_prefix_args(ctx: AnalysisContext, header_dir: Artifact) -> [cmd_a if get_cxx_toolchain_info(ctx).linker_info.type != "gnu": return None - debug_prefix_args = cmd_args() fmt = "-fdebug-prefix-map={}=" + value_or(header_dir.owner.cell, ".") - debug_prefix_args.add( + return cmd_args( cmd_args(header_dir, format = fmt), ) - return debug_prefix_args def _mk_hmap(ctx: AnalysisContext, name: str, headers: dict[str, (Artifact, str)], project_root_file: Artifact | None) -> Artifact: output = ctx.actions.declare_output(name + ".hmap") From 59d7f07e5a6240f9af97cd3a42ec0521657c6ae6 Mon Sep 17 00:00:00 2001 From: Nuri Amari Date: Mon, 1 Apr 2024 16:56:52 -0700 Subject: [PATCH 0658/1133] Create linker wrapper to diagnose unexported symbol problems Summary: A developer may make a change that introduces a cross dylib symbol reference that didn't previously exist. This means a symbol that was previously not exported, must now be exported. This linker wrapper intercepts linker error messages and compares them against the current set of unexported_symbols lists to diagnose such errors, and update the lists by invoking a tool. 
Reviewed By: narissiam, rmaz Differential Revision: D55246335 fbshipit-source-id: db0aaa5e2eeb1d6db28594be3f3abad29c0abacf --- prelude/apple/tools/BUCK.v2 | 12 +++ prelude/apple/tools/linker_wrapper.py | 114 ++++++++++++++++++++++++++ 2 files changed, 126 insertions(+) create mode 100644 prelude/apple/tools/linker_wrapper.py diff --git a/prelude/apple/tools/BUCK.v2 b/prelude/apple/tools/BUCK.v2 index 7c7893fa0..8d7803b8c 100644 --- a/prelude/apple/tools/BUCK.v2 +++ b/prelude/apple/tools/BUCK.v2 @@ -81,3 +81,15 @@ python_bootstrap_binary( main = "swift_objc_header_postprocess.py", visibility = ["PUBLIC"], ) + +python_bootstrap_binary( + name = "linker_wrapper", + main = "linker_wrapper.py", + visibility = ["PUBLIC"], +) + +python_library( + name = "linker_wrapper_library", + srcs = ["linker_wrapper.py"], + visibility = ["PUBLIC"], +) diff --git a/prelude/apple/tools/linker_wrapper.py b/prelude/apple/tools/linker_wrapper.py new file mode 100644 index 000000000..aa19a4057 --- /dev/null +++ b/prelude/apple/tools/linker_wrapper.py @@ -0,0 +1,114 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +import argparse +import re +import subprocess +import sys + +from typing import List, Optional, Tuple + + +def _eprintln(msg: str) -> None: + print(msg, flush=True, file=sys.stderr) + + +def _expand_arg_files(args: List[str]) -> List[str]: + expanded_args = [] + for arg in args: + if arg.startswith("@"): + with open(arg[1:]) as argfile: + expanded_args.extend( + [line.strip('"') for line in argfile.read().splitlines()] + ) + else: + expanded_args.append(arg) + return expanded_args + + +def _seperate_wrapper_args_from_linker_args( + args: List[str], +) -> Tuple[List[str], List[str]]: + wrapper_args = [] + linker_args = [] + expanded_args = _expand_arg_files(args) + + i = 0 + while i < len(expanded_args): + if expanded_args[i] == "-Xwrapper": + wrapper_args.append(expanded_args[i + 1]) + i += 1 + else: + linker_args.append(expanded_args[i]) + i += 1 + + return wrapper_args, linker_args + + +def _diagnose_potential_unexported_symbol_issue( + unexported_symbol_lists: List[str], stderr: str +) -> Optional[str]: + stderr_lines = stderr.splitlines() + undefined_symbol_re = re.compile(r"undefined symbol:.*\(mangled: (\S+)\)") + undefined_symbols = set() + for stderr_line in stderr_lines: + match = re.search(undefined_symbol_re, stderr_line) + if match: + undefined_symbols.add(match.group(1)) + + if not undefined_symbols: + return None + + unexported_symbols = set() + incorrectly_unexported_symbols = set() + incorrect_unexported_symbol_lists = [] + for unexported_symbol_list in unexported_symbol_lists: + target_name, file_path = unexported_symbol_list.split(",") + with open(file_path, "r") as unexported_symbol_list_file: + unexported_symbols = set(unexported_symbol_list_file.read().splitlines()) + intersection = undefined_symbols & unexported_symbols + if intersection: + incorrectly_unexported_symbols.update(intersection) + incorrect_unexported_symbol_lists.append(target_name) + + if not incorrect_unexported_symbol_lists: + return None + + return f""" +UNEXPORTED SYMBOLS 
ERROR: + +At least one symbol is included in an unexported symbol list, but referenced across dylib boundaries. Please +run the following command to fix the unexported symbol lists: + +arc fix-unexported-symbol-lists {"".join(["--target " + target for target in incorrect_unexported_symbol_lists])} {" ".join(["--symbol " + symbol for symbol in sorted(incorrectly_unexported_symbols)])} + +Here is the linker failure message: +""" + + +def main(argv: List[str]) -> int: + wrapper_args, linker_args = _seperate_wrapper_args_from_linker_args(argv[1:]) + + parser = argparse.ArgumentParser() + parser.add_argument("-linker") + parser.add_argument("-lld", action="store_true") + parser.add_argument("-unexported_symbol_list", action="append") + args = parser.parse_args(wrapper_args) + result = subprocess.run([args.linker] + linker_args, capture_output=True, text=True) + if result.returncode != 0: + if args.unexported_symbol_list and args.lld: + diagnosis = _diagnose_potential_unexported_symbol_issue( + args.unexported_symbol_list, result.stderr + ) + if diagnosis: + _eprintln(diagnosis) + _eprintln(result.stderr) + return result.returncode + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) From 3d76d481858ff37f6fa91fe08c388b679dc43983 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Mon, 1 Apr 2024 17:27:36 -0700 Subject: [PATCH 0659/1133] Fix merge_class_to_source_map_from_jar Summary: Mostly to fix `deps` parameter, because `.type` is not valid in the following diff D55577235. Cleanup other parameters while there: `mapping` is always `None`, and other parameters do not need default. 
Reviewed By: JakobDegen Differential Revision: D55577960 fbshipit-source-id: 569fa19dca15be706bd3c9b86d03506f3a231cd1 --- prelude/java/class_to_srcs.bzl | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/prelude/java/class_to_srcs.bzl b/prelude/java/class_to_srcs.bzl index fa5d04e8c..961ab90cf 100644 --- a/prelude/java/class_to_srcs.bzl +++ b/prelude/java/class_to_srcs.bzl @@ -107,10 +107,8 @@ def merge_class_to_source_map_from_jar( actions: AnalysisActions, name: str, java_test_toolchain: JavaTestToolchainInfo, - mapping: Artifact | None = None, - relative_to: [CellRoot, None] = None, - # TODO(nga): I think this meant to be type, not default value. - deps = [JavaClassToSourceMapInfo.type]) -> Artifact: + relative_to: [CellRoot, None], + deps: list[JavaClassToSourceMapInfo]) -> Artifact: output = actions.declare_output(name) cmd = cmd_args(java_test_toolchain.merge_class_to_source_maps[RunInfo]) cmd.add(cmd_args(output.as_output(), format = "--output={}")) @@ -118,7 +116,7 @@ def merge_class_to_source_map_from_jar( cmd.add(cmd_args(str(relative_to), format = "--relative-to={}")) tset = actions.tset( JavaClassToSourceMapTset, - value = mapping, + value = None, children = [d.tset for d in deps], ) class_to_source_files = tset.project_as_args("class_to_src_map") From 4e3ad7aa1168e0278682d3b2fadbd73cd5fb73c1 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Mon, 1 Apr 2024 21:57:18 -0700 Subject: [PATCH 0660/1133] dynamic_output(outputs=[.as_output()]) Summary: Following diff D55389251 requires output artifact as parameter to `.dynamic_output` call. 
Reviewed By: JakobDegen Differential Revision: D55390973 fbshipit-source-id: 4b2b0f46724687d23ecf7a455f8cf3df5583ff4d --- prelude/android/android_binary_native_library_rules.bzl | 6 +++--- prelude/android/android_binary_resources_rules.bzl | 4 ++-- prelude/android/dex_rules.bzl | 6 +++--- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 751f9cca0..8b7a47621 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -378,7 +378,7 @@ def get_android_binary_native_library_info( ctx.actions.symlink_file(outputs[non_root_module_metadata_assets], dynamic_info.non_root_module_metadata_assets) ctx.actions.symlink_file(outputs[non_root_module_lib_assets], dynamic_info.non_root_module_lib_assets) - ctx.actions.dynamic_output(dynamic = dynamic_inputs, inputs = [], outputs = dynamic_outputs, f = dynamic_native_libs_info) + ctx.actions.dynamic_output(dynamic = dynamic_inputs, inputs = [], outputs = [o.as_output() for o in dynamic_outputs], f = dynamic_native_libs_info) all_native_libs = ctx.actions.symlinked_dir("debug_all_native_libs", {"others": native_libs, "primary": native_libs_always_in_primary_apk}) lib_subtargets = _create_library_subtargets(lib_outputs_by_platform, native_libs) @@ -1743,7 +1743,7 @@ def create_relinker_version_script(actions: AnalysisActions, relinker_allowlist: version_script += "};\n" ctx.actions.write(outputs[output], version_script) - actions.dynamic_output(dynamic = needed_symbols + [provided_symbols], inputs = [], outputs = [output], f = create_version_script) + actions.dynamic_output(dynamic = needed_symbols + [provided_symbols], inputs = [], outputs = [output.as_output()], f = create_version_script) def extract_undefined_symbols(ctx: AnalysisContext, toolchain: CxxToolchainInfo, lib: Artifact) -> Artifact: return extract_undefined_syms(ctx, 
toolchain, lib, "relinker_extract_undefined_symbols") @@ -1757,7 +1757,7 @@ def union_needed_symbols(actions: AnalysisActions, output: Artifact, needed_symb symbols = sorted(unioned_symbols.keys()) ctx.actions.write(outputs[output], symbols) - actions.dynamic_output(dynamic = needed_symbols, inputs = [], outputs = [output], f = compute_union) + actions.dynamic_output(dynamic = needed_symbols, inputs = [], outputs = [output.as_output()], f = compute_union) def strip_lib(ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, shlib: Artifact, output_path: [str, None] = None): strip_flags = cmd_args(get_strip_non_global_flags(cxx_toolchain)) diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl index 4bded64c8..dbe23e0cc 100644 --- a/prelude/android/android_binary_resources_rules.bzl +++ b/prelude/android/android_binary_resources_rules.bzl @@ -533,7 +533,7 @@ def _get_module_manifests( ctx.actions.dynamic_output( dynamic = [apk_module_graph_file], inputs = [], - outputs = [module_manifests_dir], + outputs = [module_manifests_dir.as_output()], f = get_manifests_modular, ) @@ -615,7 +615,7 @@ def _merge_assets( ctx.actions.dynamic_output( dynamic = [apk_module_graph_file], inputs = [], - outputs = declared_outputs, + outputs = [o.as_output() for o in declared_outputs], f = merge_assets_modular, ) diff --git a/prelude/android/dex_rules.bzl b/prelude/android/dex_rules.bzl index d66acc92b..9e0b15c27 100644 --- a/prelude/android/dex_rules.bzl +++ b/prelude/android/dex_rules.bzl @@ -222,7 +222,7 @@ def get_multi_dex( ctx.actions.symlinked_dir(outputs[secondary_dex_dir], secondary_dex_dir_srcs) - ctx.actions.dynamic_output(dynamic = inputs, inputs = [], outputs = outputs, f = do_multi_dex) + ctx.actions.dynamic_output(dynamic = inputs, inputs = [], outputs = [o.as_output() for o in outputs], f = do_multi_dex) return DexFilesInfo( primary_dex = primary_dex_file, @@ -554,7 +554,7 @@ def merge_to_split_dex( 
metadata_lines.append(artifacts[metadata_line_artifact].read_string().strip()) ctx.actions.write(outputs[metadata_dot_txt], metadata_lines) - ctx.actions.dynamic_output(dynamic = flatten(metadata_line_artifacts_by_module.values()), inputs = [], outputs = metadata_dot_txt_files_by_module.values(), f = write_metadata_dot_txts) + ctx.actions.dynamic_output(dynamic = flatten(metadata_line_artifacts_by_module.values()), inputs = [], outputs = [o.as_output() for o in metadata_dot_txt_files_by_module.values()], f = write_metadata_dot_txts) ctx.actions.symlinked_dir( outputs[root_module_secondary_dexes_dir], @@ -565,7 +565,7 @@ def merge_to_split_dex( non_root_module_secondary_dexes_for_symlinking, ) - ctx.actions.dynamic_output(dynamic = input_artifacts, inputs = [], outputs = outputs, f = merge_pre_dexed_libs) + ctx.actions.dynamic_output(dynamic = input_artifacts, inputs = [], outputs = [o.as_output() for o in outputs], f = merge_pre_dexed_libs) if is_exopackage_enabled_for_secondary_dex: root_module_secondary_dex_dirs = [] From 334e146cc4aefd2a6b132633173e121b9a822a2e Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 2 Apr 2024 06:00:26 -0700 Subject: [PATCH 0661/1133] Back out "Create linker wrapper to diagnose unexported symbol problems" Summary: Original commit changeset: db0aaa5e2eeb Original Phabricator Diff: D55246335 Reviewed By: samkevich Differential Revision: D55634873 fbshipit-source-id: f059a4841eb06c709c034f0e404c4558c129ef54 --- prelude/apple/tools/BUCK.v2 | 12 --- prelude/apple/tools/linker_wrapper.py | 114 -------------------------- 2 files changed, 126 deletions(-) delete mode 100644 prelude/apple/tools/linker_wrapper.py diff --git a/prelude/apple/tools/BUCK.v2 b/prelude/apple/tools/BUCK.v2 index 8d7803b8c..7c7893fa0 100644 --- a/prelude/apple/tools/BUCK.v2 +++ b/prelude/apple/tools/BUCK.v2 @@ -81,15 +81,3 @@ python_bootstrap_binary( main = "swift_objc_header_postprocess.py", visibility = ["PUBLIC"], ) - -python_bootstrap_binary( - name = 
"linker_wrapper", - main = "linker_wrapper.py", - visibility = ["PUBLIC"], -) - -python_library( - name = "linker_wrapper_library", - srcs = ["linker_wrapper.py"], - visibility = ["PUBLIC"], -) diff --git a/prelude/apple/tools/linker_wrapper.py b/prelude/apple/tools/linker_wrapper.py deleted file mode 100644 index aa19a4057..000000000 --- a/prelude/apple/tools/linker_wrapper.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -import argparse -import re -import subprocess -import sys - -from typing import List, Optional, Tuple - - -def _eprintln(msg: str) -> None: - print(msg, flush=True, file=sys.stderr) - - -def _expand_arg_files(args: List[str]) -> List[str]: - expanded_args = [] - for arg in args: - if arg.startswith("@"): - with open(arg[1:]) as argfile: - expanded_args.extend( - [line.strip('"') for line in argfile.read().splitlines()] - ) - else: - expanded_args.append(arg) - return expanded_args - - -def _seperate_wrapper_args_from_linker_args( - args: List[str], -) -> Tuple[List[str], List[str]]: - wrapper_args = [] - linker_args = [] - expanded_args = _expand_arg_files(args) - - i = 0 - while i < len(expanded_args): - if expanded_args[i] == "-Xwrapper": - wrapper_args.append(expanded_args[i + 1]) - i += 1 - else: - linker_args.append(expanded_args[i]) - i += 1 - - return wrapper_args, linker_args - - -def _diagnose_potential_unexported_symbol_issue( - unexported_symbol_lists: List[str], stderr: str -) -> Optional[str]: - stderr_lines = stderr.splitlines() - undefined_symbol_re = re.compile(r"undefined symbol:.*\(mangled: (\S+)\)") - undefined_symbols = set() - for stderr_line in stderr_lines: - match = re.search(undefined_symbol_re, stderr_line) - if match: - 
undefined_symbols.add(match.group(1)) - - if not undefined_symbols: - return None - - unexported_symbols = set() - incorrectly_unexported_symbols = set() - incorrect_unexported_symbol_lists = [] - for unexported_symbol_list in unexported_symbol_lists: - target_name, file_path = unexported_symbol_list.split(",") - with open(file_path, "r") as unexported_symbol_list_file: - unexported_symbols = set(unexported_symbol_list_file.read().splitlines()) - intersection = undefined_symbols & unexported_symbols - if intersection: - incorrectly_unexported_symbols.update(intersection) - incorrect_unexported_symbol_lists.append(target_name) - - if not incorrect_unexported_symbol_lists: - return None - - return f""" -UNEXPORTED SYMBOLS ERROR: - -At least one symbol is included in an unexported symbol list, but referenced across dylib boundaries. Please -run the following command to fix the unexported symbol lists: - -arc fix-unexported-symbol-lists {"".join(["--target " + target for target in incorrect_unexported_symbol_lists])} {" ".join(["--symbol " + symbol for symbol in sorted(incorrectly_unexported_symbols)])} - -Here is the linker failure message: -""" - - -def main(argv: List[str]) -> int: - wrapper_args, linker_args = _seperate_wrapper_args_from_linker_args(argv[1:]) - - parser = argparse.ArgumentParser() - parser.add_argument("-linker") - parser.add_argument("-lld", action="store_true") - parser.add_argument("-unexported_symbol_list", action="append") - args = parser.parse_args(wrapper_args) - result = subprocess.run([args.linker] + linker_args, capture_output=True, text=True) - if result.returncode != 0: - if args.unexported_symbol_list and args.lld: - diagnosis = _diagnose_potential_unexported_symbol_issue( - args.unexported_symbol_list, result.stderr - ) - if diagnosis: - _eprintln(diagnosis) - _eprintln(result.stderr) - return result.returncode - - -if __name__ == "__main__": - sys.exit(main(sys.argv)) From 1fd5788a21ad68f9c9df8bc1c2aaf153ecc1f64f Mon Sep 17 00:00:00 
2001 From: Mark Vitale Date: Tue, 2 Apr 2024 06:55:20 -0700 Subject: [PATCH 0662/1133] Switch from untyped argparse to typed-argument-parser Summary: Migrate from argparser to typed-argument-parser for better typesafety. Note this would have caught the typesafety error that caused S405362. Differential Revision: D55506204 fbshipit-source-id: 3e9d76ab14ae8f7a1f9ed3afd46c5ccc4c86c8d0 --- prelude/apple/tools/code_signing/BUCK.v2 | 1 + prelude/apple/tools/code_signing/main.py | 154 +++++++++++++---------- 2 files changed, 86 insertions(+), 69 deletions(-) diff --git a/prelude/apple/tools/code_signing/BUCK.v2 b/prelude/apple/tools/code_signing/BUCK.v2 index dac8cad31..ae5e27dd3 100644 --- a/prelude/apple/tools/code_signing/BUCK.v2 +++ b/prelude/apple/tools/code_signing/BUCK.v2 @@ -35,6 +35,7 @@ python_binary( main = "main.py", visibility = ["PUBLIC"], deps = [ + "fbsource//third-party/pypi/typed-argument-parser:typed-argument-parser", ":lib", ], ) diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index a6c62da82..6847087bc 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -7,9 +7,11 @@ # pyre-strict -import argparse import pathlib import sys +from typing import List, Optional + +from tap import Tap from .apple_platform import ApplePlatform from .codesign_bundle import ( @@ -22,72 +24,85 @@ from .provisioning_profile_selection import CodeSignProvisioningError -def _args_parser() -> argparse.ArgumentParser: - parser = argparse.ArgumentParser( - description="Tool which code signs the Apple bundle. `Info.plist` file is amended as a part of it." 
- ) - parser.add_argument( - "--bundle-path", - metavar="", - type=pathlib.Path, - required=True, - help="Absolute path to Apple bundle result.", - ) - parser.add_argument( - "--info-plist", - metavar="", - type=pathlib.Path, - required=True, - help="Bundle relative destination path to Info.plist file if it is present in bundle.", - ) - parser.add_argument( - "--entitlements", - metavar="", - type=pathlib.Path, - required=False, - help="Path to file with entitlements to be used during code signing. If it's not provided the minimal entitlements are going to be generated.", - ) - parser.add_argument( - "--profiles-dir", - metavar="", - type=pathlib.Path, - required=False, - help="Path to directory with provisioning profile files. Required if code signing is not ad-hoc.", - ) - parser.add_argument( - "--ad-hoc", - action="store_true", - help="Perform ad-hoc signing if set.", - ) - parser.add_argument( - "--ad-hoc-codesign-identity", - metavar="", - type=str, - required=False, - help="Codesign identity to use when ad-hoc signing is performed.", - ) - parser.add_argument( - "--platform", - metavar="", - type=ApplePlatform, - required=True, - help="Apple platform for which the bundle was built.", - ) - parser.add_argument( - "--codesign-on-copy", - metavar="", - type=pathlib.Path, - action="append", - required=False, - help="Bundle relative path that should be codesigned prior to result bundle.", - ) - parser.add_argument( - "--fast-provisioning-profile-parsing", - action="store_true", - help="Uses experimental faster provisioning profile parsing.", - ) - - return parser +class Arguments(Tap): # pyre-ignore[13] ignore uninitialized attributes for typed argument parser + """ + Tool which code signs the Apple bundle. `Info.plist` file is amended as a part of it. 
+ """ + + bundle_path: pathlib.Path + info_plist: pathlib.Path + entitlements: Optional[pathlib.Path] = None + profiles_dir: Optional[pathlib.Path] = None + ad_hoc: bool = False + ad_hoc_codesign_identity: Optional[str] = None + platform: ApplePlatform + codesign_on_copy: Optional[List[pathlib.Path]] = None + fast_provisioning_profile_parsing: bool = False + + def configure(self) -> None: + """ + Configure the arguments. + """ + self.add_argument( + "--bundle-path", + metavar="", + type=pathlib.Path, + required=True, + help="Absolute path to Apple bundle result.", + ) + self.add_argument( + "--info-plist", + metavar="", + type=pathlib.Path, + required=True, + help="Bundle relative destination path to Info.plist file if it is present in bundle.", + ) + self.add_argument( + "--entitlements", + metavar="", + type=pathlib.Path, + required=False, + help="Path to file with entitlements to be used during code signing. If it's not provided the minimal entitlements are going to be generated.", + ) + self.add_argument( + "--profiles-dir", + metavar="", + type=pathlib.Path, + required=False, + help="Path to directory with provisioning profile files. 
Required if code signing is not ad-hoc.", + ) + self.add_argument( + "--ad-hoc", + action="store_true", + help="Perform ad-hoc signing if set.", + ) + self.add_argument( + "--ad-hoc-codesign-identity", + metavar="", + type=str, + required=False, + help="Codesign identity to use when ad-hoc signing is performed.", + ) + self.add_argument( + "--platform", + metavar="", + type=ApplePlatform, + required=True, + help="Apple platform for which the bundle was built.", + ) + self.add_argument( + "--codesign-on-copy", + metavar="", + type=pathlib.Path, + action="append", + required=False, + help="Bundle relative path that should be codesigned prior to result bundle.", + ) + self.add_argument( + "--fast-provisioning-profile-parsing", + action="store_true", + help="Uses experimental faster provisioning profile parsing.", + ) # Add emoji to beginning of actionable error message so it stands out more. @@ -96,7 +111,7 @@ def decorate_error_message(message: str) -> str: def _main() -> None: - args = _args_parser().parse_args() + args = Arguments().parse_args() try: if args.ad_hoc: signing_context = AdhocSigningContext( @@ -106,10 +121,11 @@ def _main() -> None: assert ( args.profiles_dir ), "Path to directory with provisioning profile files should be set when signing is not ad-hoc." 
+ non_optional_profiles_dir = args.profiles_dir signing_context = signing_context_with_profile_selection( info_plist_source=args.bundle_path / args.info_plist, info_plist_destination=args.info_plist, - provisioning_profiles_dir=args.profiles_dir, + provisioning_profiles_dir=non_optional_profiles_dir, entitlements_path=args.entitlements, list_codesign_identities=ListCodesignIdentities.default(), platform=args.platform, From a554af5018d95db073aba488d8da7354c2ffe70e Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 2 Apr 2024 07:50:01 -0700 Subject: [PATCH 0663/1133] better incremental bundling decision tests Summary: make sure when the culprit is changed build is again incremental Reviewed By: milend Differential Revision: D55634443 fbshipit-source-id: 9f35ba4d0a08e4a117ae3a91e9c1a42cfafab123 --- prelude/apple/tools/bundling/incremental_utils_test.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/prelude/apple/tools/bundling/incremental_utils_test.py b/prelude/apple/tools/bundling/incremental_utils_test.py index 2edb0483a..ec9ed15b5 100644 --- a/prelude/apple/tools/bundling/incremental_utils_test.py +++ b/prelude/apple/tools/bundling/incremental_utils_test.py @@ -251,6 +251,8 @@ def test_not_run_incrementally_when_codesign_on_copy_paths_mismatch(self): codesign_identity="same_identity", ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + spec[0].codesign_on_copy = True + self.assertTrue(should_assemble_incrementally(spec, incremental_context)) def test_not_run_incrementally_when_codesign_on_copy_entitlements_mismatch(self): spec = [ @@ -288,6 +290,8 @@ def test_not_run_incrementally_when_codesign_on_copy_entitlements_mismatch(self) codesign_identity="same_identity", ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + incremental_context.metadata[Path("baz/entitlements.plist")] = "old_digest" + self.assertTrue(should_assemble_incrementally(spec, incremental_context)) def 
test_not_run_incrementally_when_codesign_configurations_mismatch(self): spec = [ From e765dc9ae5de8b103f696fd8855b0ba55f18e8d3 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Tue, 2 Apr 2024 07:50:01 -0700 Subject: [PATCH 0664/1133] check codesign args when deciding to bundle incrementally Summary: if codesign arguments changed, do not run bundling incrementally Reviewed By: milend Differential Revision: D55634444 fbshipit-source-id: df36fecc5c35cca89b8a7d37db9215465e8e62d8 --- .../tools/bundling/assemble_bundle_types.py | 3 +- .../apple/tools/bundling/incremental_state.py | 3 +- .../tools/bundling/incremental_state_test.py | 4 ++ .../apple/tools/bundling/incremental_utils.py | 6 +++ .../tools/bundling/incremental_utils_test.py | 51 +++++++++++++++++++ prelude/apple/tools/bundling/main.py | 6 +++ .../newer_version_incremental_state.json | 2 +- .../valid_incremental_state.json | 6 ++- 8 files changed, 77 insertions(+), 4 deletions(-) diff --git a/prelude/apple/tools/bundling/assemble_bundle_types.py b/prelude/apple/tools/bundling/assemble_bundle_types.py index f95d16100..1d98db812 100644 --- a/prelude/apple/tools/bundling/assemble_bundle_types.py +++ b/prelude/apple/tools/bundling/assemble_bundle_types.py @@ -12,7 +12,7 @@ import functools from dataclasses import dataclass from pathlib import Path -from typing import Dict, Optional +from typing import Dict, List, Optional from apple.tools.code_signing.codesign_bundle import CodesignConfiguration @@ -74,3 +74,4 @@ class IncrementalContext: codesigned: bool codesign_configuration: Optional[CodesignConfiguration] codesign_identity: Optional[str] + codesign_arguments: List[str] diff --git a/prelude/apple/tools/bundling/incremental_state.py b/prelude/apple/tools/bundling/incremental_state.py index ab0303591..c17fe2b25 100644 --- a/prelude/apple/tools/bundling/incremental_state.py +++ b/prelude/apple/tools/bundling/incremental_state.py @@ -17,7 +17,7 @@ from apple.tools.code_signing.codesign_bundle import 
CodesignConfiguration -_VERSION = 4 +_VERSION = 5 @dataclass @@ -63,6 +63,7 @@ class IncrementalState: codesign_configuration: CodesignConfiguration codesigned_on_copy: List[CodesignedOnCopy] codesign_identity: Optional[str] + codesign_arguments: List[str] swift_stdlib_paths: List[Path] version: int = _VERSION diff --git a/prelude/apple/tools/bundling/incremental_state_test.py b/prelude/apple/tools/bundling/incremental_state_test.py index 5e1e3bb70..a61b4d4aa 100644 --- a/prelude/apple/tools/bundling/incremental_state_test.py +++ b/prelude/apple/tools/bundling/incremental_state_test.py @@ -59,6 +59,10 @@ def test_valid_state_is_parsed_successfully(self): ), ], codesign_identity="Johny Appleseed", + codesign_arguments=[ + "--force", + "--deep", + ], swift_stdlib_paths=[Path("Frameworks/libswiftCore.dylib")], ) self.assertEqual( diff --git a/prelude/apple/tools/bundling/incremental_utils.py b/prelude/apple/tools/bundling/incremental_utils.py index 0017dc123..9b4de2840 100644 --- a/prelude/apple/tools/bundling/incremental_utils.py +++ b/prelude/apple/tools/bundling/incremental_utils.py @@ -52,6 +52,12 @@ def should_assemble_incrementally( "Decided not to assemble incrementally — previous vs current builds have mismatching codesigning identities." ) return False + # If previous codesign arguments are different from the current ones also perform non-incremental run. + if previous_run_state.codesign_arguments != incremental_context.codesign_arguments: + logging.getLogger(__name__).info( + "Decided not to assemble incrementally — previous vs current builds have mismatching codesigning arguments." + ) + return False # If bundle from previous run was signed in a different configuration vs the current run (e.g. dry code signed while now regular code signing is required) perform non-incremental run. 
if ( previous_run_state.codesign_configuration diff --git a/prelude/apple/tools/bundling/incremental_utils_test.py b/prelude/apple/tools/bundling/incremental_utils_test.py index ec9ed15b5..7b5817e20 100644 --- a/prelude/apple/tools/bundling/incremental_utils_test.py +++ b/prelude/apple/tools/bundling/incremental_utils_test.py @@ -53,6 +53,7 @@ def test_not_run_incrementally_when_previous_build_not_incremental(self): codesigned=False, codesign_configuration=None, codesign_identity=None, + codesign_arguments=[], ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) @@ -79,11 +80,13 @@ def test_run_incrementally_when_previous_build_not_codesigned(self): codesign_configuration=None, codesigned_on_copy=[], codesign_identity=None, + codesign_arguments=[], swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=None, codesign_identity=None, + codesign_arguments=[], ) self.assertTrue(should_assemble_incrementally(spec, incremental_context)) @@ -112,11 +115,13 @@ def test_not_run_incrementally_when_previous_build_codesigned_and_current_is_not codesign_configuration=None, codesigned_on_copy=[], codesign_identity=None, + codesign_arguments=[], swift_stdlib_paths=[], ), codesigned=False, codesign_configuration=None, codesign_identity=None, + codesign_arguments=[], ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) # Check that behavior changes when both builds are codesigned @@ -148,11 +153,13 @@ def test_not_run_incrementally_when_previous_build_codesigned_with_different_ide codesign_configuration=None, codesigned_on_copy=[], codesign_identity="old_identity", + codesign_arguments=[], swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=None, codesign_identity="new_identity", + codesign_arguments=[], ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) # Check that behavior changes when identities are same @@ -209,11 +216,13 @@ def 
test_run_incrementally_when_codesign_on_copy_paths_match(self): ), ], codesign_identity="same_identity", + codesign_arguments=[], swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=None, codesign_identity="same_identity", + codesign_arguments=[], ) self.assertTrue(should_assemble_incrementally(spec, incremental_context)) @@ -244,11 +253,13 @@ def test_not_run_incrementally_when_codesign_on_copy_paths_mismatch(self): CodesignedOnCopy(path=Path("foo"), entitlements_digest=None) ], codesign_identity="same_identity", + codesign_arguments=[], swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=None, codesign_identity="same_identity", + codesign_arguments=[], ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) spec[0].codesign_on_copy = True @@ -283,16 +294,54 @@ def test_not_run_incrementally_when_codesign_on_copy_entitlements_mismatch(self) CodesignedOnCopy(path=Path("foo"), entitlements_digest="old_digest") ], codesign_identity="same_identity", + codesign_arguments=[], swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=None, codesign_identity="same_identity", + codesign_arguments=[], ) self.assertFalse(should_assemble_incrementally(spec, incremental_context)) incremental_context.metadata[Path("baz/entitlements.plist")] = "old_digest" self.assertTrue(should_assemble_incrementally(spec, incremental_context)) + def test_not_run_incrementally_when_codesign_arguments_mismatch(self): + spec = [ + BundleSpecItem( + src="src/foo", + dst="foo", + ) + ] + incremental_context = IncrementalContext( + metadata={ + Path("src/foo"): "digest", + }, + state=IncrementalState( + items=[ + IncrementalStateItem( + source=Path("src/foo"), + destination_relative_to_bundle=Path("foo"), + digest="digest", + resolved_symlink=None, + ) + ], + codesigned=True, + codesign_configuration=None, + codesigned_on_copy=[], + codesign_identity="same_identity", + codesign_arguments=["--force"], + swift_stdlib_paths=[], + ), + 
codesigned=True, + codesign_configuration=None, + codesign_identity="same_identity", + codesign_arguments=["--force", "--deep"], + ) + self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + incremental_context.codesign_arguments = ["--force"] + self.assertTrue(should_assemble_incrementally(spec, incremental_context)) + def test_not_run_incrementally_when_codesign_configurations_mismatch(self): spec = [ BundleSpecItem( @@ -319,11 +368,13 @@ def test_not_run_incrementally_when_codesign_configurations_mismatch(self): CodesignedOnCopy(path=Path("foo"), entitlements_digest=None) ], codesign_identity="same_identity", + codesign_arguments=[], swift_stdlib_paths=[], ), codesigned=True, codesign_configuration=CodesignConfiguration.dryRun, codesign_identity="same_identity", + codesign_arguments=[], ) # Canary self.assertTrue(should_assemble_incrementally(spec, incremental_context)) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index da57f7037..42ecd684d 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -353,6 +353,7 @@ def _main() -> None: codesigned=args.codesign, codesign_configuration=args.codesign_configuration, codesign_identity=selected_identity_argument, + codesign_arguments=args.codesign_args, ) incremental_state = assemble_bundle( @@ -422,6 +423,7 @@ def _main() -> None: codesigned=args.codesign, codesign_configuration=args.codesign_configuration, selected_codesign_identity=selected_identity_argument, + codesign_arguments=args.codesign_args, swift_stdlib_paths=swift_stdlib_paths, incremental_context=incremental_context, ) @@ -439,6 +441,7 @@ def _incremental_context( codesigned: bool, codesign_configuration: CodesignConfiguration, codesign_identity: Optional[str], + codesign_arguments: List[str], ) -> Optional[IncrementalContext]: action_metadata = action_metadata_if_present(_METADATA_PATH_KEY) if action_metadata is None: @@ -457,6 +460,7 @@ def 
_incremental_context( codesigned=codesigned, codesign_configuration=codesign_configuration, codesign_identity=codesign_identity, + codesign_arguments=codesign_arguments, ) @@ -528,6 +532,7 @@ def _write_incremental_state( codesigned: bool, codesign_configuration: CodesignConfiguration, selected_codesign_identity: Optional[str], + codesign_arguments: List[str], swift_stdlib_paths: List[Path], incremental_context: IncrementalContext, ) -> None: @@ -547,6 +552,7 @@ def _write_incremental_state( if i.codesign_on_copy ], codesign_identity=selected_codesign_identity, + codesign_arguments=codesign_arguments, swift_stdlib_paths=swift_stdlib_paths, ) path.touch() diff --git a/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json b/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json index a78e806cb..1caf23bcf 100644 --- a/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json +++ b/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json @@ -1,5 +1,5 @@ { - "version": 5, + "version": 6, "data": { "something": [] } diff --git a/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json b/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json index a5c287421..c3e879c07 100644 --- a/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json +++ b/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json @@ -28,8 +28,12 @@ } ], "codesign_identity": "Johny Appleseed", + "codesign_arguments": [ + "--force", + "--deep" + ], "swift_stdlib_paths": [ "Frameworks/libswiftCore.dylib" ], - "version": 4 + "version": 5 } From 3481ccdc014e244501c8098d3516112d21fd6c45 Mon Sep 17 00:00:00 2001 From: Ernald Nicolas Date: Tue, 2 Apr 2024 08:32:49 -0700 Subject: [PATCH 0665/1133] Set artifact upload device path to accessible app-specific directory v2 Summary: This is a re-implementation of D54883945 which broke instrumentation 
tests running on Android 13+. In Android 13 (API 33), App Scoped storage limitations were pushed even further such that subdirectories (e.g. test_result/artifacts) could not be created in app-specific directories on external storage. Coincidentally enough, the failing tests seem to still be able to access external storage directories outside of app-specific directories (e.g. /sdcard/test_result/artifacts). Since jetpack benchmark infra uses the Storage Access Framework to create output files, the app-specific directory must be used for the jetpack benchmark library starting from Android 11 (API 30). This solution also introduces the self-instrumentation flag to the instrumentation test runner because in the case of self-instrumentation, the package executing test logic is the test apk instead of the apk under test. Reviewed By: fbvxp Differential Revision: D55040422 fbshipit-source-id: 008b62f3664a2431833ce4468f189f138401b98e --- prelude/android/android.bzl | 1 + prelude/android/android_instrumentation_test.bzl | 2 ++ 2 files changed, 3 insertions(+) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 7231139ab..db37dcf99 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -159,6 +159,7 @@ extra_attributes = { "android_instrumentation_test": { "instrumentation_test_listener": attrs.option(attrs.source(), default = None), "instrumentation_test_listener_class": attrs.option(attrs.string(), default = None), + "is_self_instrumenting": attrs.bool(default = False), "_android_toolchain": toolchains_common.android(), "_exec_os_type": buck.exec_os_type_arg(), "_java_toolchain": toolchains_common.java_for_android(), diff --git a/prelude/android/android_instrumentation_test.bzl b/prelude/android/android_instrumentation_test.bzl index 3b64d4599..0b5b10cad 100644 --- a/prelude/android/android_instrumentation_test.bzl +++ b/prelude/android/android_instrumentation_test.bzl @@ -42,6 +42,8 @@ def 
android_instrumentation_test_impl(ctx: AnalysisContext): instrumentation_apk_info = ctx.attrs.apk.get(AndroidInstrumentationApkInfo) if instrumentation_apk_info != None: cmd.extend(["--apk-under-test-path", instrumentation_apk_info.apk_under_test]) + if ctx.attrs.is_self_instrumenting: + cmd.extend(["--is-self-instrumenting"]) target_package_file = ctx.actions.declare_output("target_package_file") package_file = ctx.actions.declare_output("package_file") From f70498e0c5f992b1061587a17228342cf60c32ca Mon Sep 17 00:00:00 2001 From: Mark Vitale Date: Tue, 2 Apr 2024 12:04:14 -0700 Subject: [PATCH 0666/1133] Fix optional arguments Summary: In switching from argparse to Typed Argument Parser, I missed that some of these arguments now needed to be explicit about not being required. Differential Revision: D55646743 fbshipit-source-id: d8ef5ac0d9df74fa15bd2d9196a9f48e26e001e4 --- prelude/apple/tools/code_signing/main.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index 6847087bc..b4598fd0c 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -74,6 +74,7 @@ def configure(self) -> None: self.add_argument( "--ad-hoc", action="store_true", + required=False, help="Perform ad-hoc signing if set.", ) self.add_argument( @@ -101,6 +102,7 @@ def configure(self) -> None: self.add_argument( "--fast-provisioning-profile-parsing", action="store_true", + required=False, help="Uses experimental faster provisioning profile parsing.", ) From bb0f2fff0dd3d8ce3db7ad14ed784d9a6dca230c Mon Sep 17 00:00:00 2001 From: Fernando Gasperi Date: Tue, 2 Apr 2024 12:26:32 -0700 Subject: [PATCH 0667/1133] Remove unused import Summary: title Reviewed By: VladimirMakaev Differential Revision: D55631571 fbshipit-source-id: 4b6b0c01c1e9093a11bb8ceb183b9f8baba7804c --- prelude/python/tools/__test_main__.py | 1 - 1 file changed, 1 deletion(-) diff --git 
a/prelude/python/tools/__test_main__.py b/prelude/python/tools/__test_main__.py index d699a7e9c..29932ac93 100644 --- a/prelude/python/tools/__test_main__.py +++ b/prelude/python/tools/__test_main__.py @@ -32,7 +32,6 @@ import time import traceback import unittest -import warnings from importlib.machinery import PathFinder From 65f9bb1a0d56c0855099971446e3a7301f0041cc Mon Sep 17 00:00:00 2001 From: Fernando Gasperi Date: Tue, 2 Apr 2024 12:26:32 -0700 Subject: [PATCH 0668/1133] Rename variable Summary: Gets rid of linter warning Reviewed By: VladimirMakaev Differential Revision: D55631572 fbshipit-source-id: 5b63c854d090a4e2fe471d52eb4762bcc75283f3 --- prelude/python/tools/__test_main__.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/prelude/python/tools/__test_main__.py b/prelude/python/tools/__test_main__.py index 29932ac93..80a96a486 100644 --- a/prelude/python/tools/__test_main__.py +++ b/prelude/python/tools/__test_main__.py @@ -756,12 +756,12 @@ def convert_to_diff_cov_str(self, analysis): analysis[3][-1] if len(analysis[3]) else 0, ) lines = ["N"] * numLines - for l in analysis[1]: - lines[l - 1] = "C" - for l in analysis[2]: - lines[l - 1] = "X" - for l in analysis[3]: - lines[l - 1] = "U" + for line in analysis[1]: + lines[line - 1] = "C" + for line in analysis[2]: + lines[line - 1] = "X" + for line in analysis[3]: + lines[line - 1] = "U" return "".join(lines) From c30d147e013a10f90af2c5c11667ee89f1414a7a Mon Sep 17 00:00:00 2001 From: Overhatted <15021741+Overhatted@users.noreply.github.com> Date: Tue, 2 Apr 2024 13:35:07 -0700 Subject: [PATCH 0669/1133] Added support for headers in the windows_resource rule Summary: Like other cxx rules it supports specifying "headers" or "include_directories" and "raw_headers". As a test, I added a header dependency to the test I provided in the previous PR: https://github.com/facebook/buck2/issues/581. 
The final result being the following files: BUCK: ``` # A rule that includes a single .rc file and compiles it into an object file. windows_resource( name = "resources", srcs = [ "resources.rc", ], header_namespace = "", headers = ["message.h"], ) # A rule that links against the above windows_resource rule. # A rule that includes a single .rc file and compiles it into an object file. windows_resource( name = "resources", srcs = [ "resources.rc", ], header_namespace = "", headers = ["message.h"], ) # A rule that links against the above windows_resource rule. cxx_binary( name = "app", srcs = [ "main.cpp", ], deps = [ ":resources" ], linker_flags = [ "User32.lib", ], ) ``` main.cpp: ``` #include #include int main(int argc, const char* argv[]) { std::string message; message.resize(50); int returnValue = LoadStringA(NULL, 4, message.data(), message.size()); if (returnValue == 0) { std::cout << "Failed to read resource"; return 1; } message.resize(returnValue); std::cout << message; return 0; } ``` message.h: ``` #define MESSAGE_TO_PRINT "Hello from header" ``` resources.rc: ``` #include #include "message.h" STRINGTABLE BEGIN 4 MESSAGE_TO_PRINT END ``` X-link: https://github.com/facebook/buck2/pull/607 Reviewed By: JakobDegen Differential Revision: D55633608 Pulled By: KapJI fbshipit-source-id: 6d47acaaa0eb77a653b33d46c8c26088fbf8f0f8 --- prelude/cxx/windows_resource.bzl | 23 +++++++++++++++++++++++ prelude/decls/cxx_rules.bzl | 5 +++++ 2 files changed, 28 insertions(+) diff --git a/prelude/cxx/windows_resource.bzl b/prelude/cxx/windows_resource.bzl index 1be963028..b9169624c 100644 --- a/prelude/cxx/windows_resource.bzl +++ b/prelude/cxx/windows_resource.bzl @@ -5,13 +5,34 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//apple:xcode.bzl", "get_project_root_file") load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") +load("@prelude//cxx:headers.bzl", "cxx_get_regular_cxx_headers_layout") +load("@prelude//cxx:preprocessor.bzl", "cxx_merge_cpreprocessors", "cxx_private_preprocessor_info") load("@prelude//linking:link_groups.bzl", "LinkGroupLibInfo") load("@prelude//linking:link_info.bzl", "LibOutputStyle", "LinkInfo", "LinkInfos", "ObjectsLinkable", "create_merged_link_info") load("@prelude//linking:linkable_graph.bzl", "create_linkable_graph") load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo") def windows_resource_impl(ctx: AnalysisContext) -> list[Provider]: + (own_non_exported_preprocessor_info, _) = cxx_private_preprocessor_info( + ctx = ctx, + headers_layout = cxx_get_regular_cxx_headers_layout(ctx), + project_root_file = get_project_root_file(ctx), + raw_headers = ctx.attrs.raw_headers, + extra_preprocessors = [], + non_exported_deps = [], + is_test = False, + ) + + preprocessor = cxx_merge_cpreprocessors( + ctx, + [own_non_exported_preprocessor_info], + [], + ) + + headers_tag = ctx.actions.artifact_tag() + objects = [] toolchain = get_cxx_toolchain_info(ctx) @@ -24,6 +45,8 @@ def windows_resource_impl(ctx: AnalysisContext) -> list[Provider]: toolchain.rc_compiler_info.compiler, toolchain.rc_compiler_info.compiler_flags, cmd_args(rc_output.as_output(), format = "/fo{}"), + headers_tag.tag_artifacts(preprocessor.set.project_as_args("args")), + headers_tag.tag_artifacts(preprocessor.set.project_as_args("include_dirs")), src, ) diff --git a/prelude/decls/cxx_rules.bzl b/prelude/decls/cxx_rules.bzl index 64239dafe..a0976c09f 100644 --- a/prelude/decls/cxx_rules.bzl +++ b/prelude/decls/cxx_rules.bzl @@ -771,6 +771,11 @@ windows_resource = prelude_rule( further = None, attrs = ( cxx_common.srcs_arg() | + cxx_common.headers_arg() | + cxx_common.platform_headers_arg() | + cxx_common.header_namespace_arg() | + cxx_common.raw_headers_arg() | + 
cxx_common.include_directories_arg() | { "labels": attrs.list(attrs.string(), default = []), } From 1ecfcd1c5e8a6351e79a3d2ae5b560077cf48503 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Tue, 2 Apr 2024 13:50:57 -0700 Subject: [PATCH 0670/1133] Use some pure code instead of cmd_args.add Reviewed By: igorsugak Differential Revision: D55233956 fbshipit-source-id: 4d7de572388022bb6261407c30ef19deac93cd42 --- prelude/cxx/cxx_library.bzl | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 41604c390..6794f6b97 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -769,26 +769,28 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # Some rules, e.g. fbcode//thrift/lib/cpp:thrift-core-module # define preprocessor flags as things like: -DTHRIFT_PLATFORM_CONFIG= # and unless they get quoted, they break shell syntax. - cxx_preprocessor_flags = cmd_args() cxx_compiler_info = get_cxx_toolchain_info(ctx).cxx_compiler_info - cxx_preprocessor_flags.add(cmd_args(cxx_compiler_info.preprocessor_flags or [], quote = "shell")) - cxx_preprocessor_flags.add(cmd_args(propagated_preprocessor.set.project_as_args("args"), quote = "shell")) - cxx_preprocessor_flags.add(propagated_preprocessor.set.project_as_args("include_dirs")) + cxx_preprocessor_flags = cmd_args( + cmd_args(cxx_compiler_info.preprocessor_flags or [], quote = "shell"), + cmd_args(propagated_preprocessor.set.project_as_args("args"), quote = "shell"), + propagated_preprocessor.set.project_as_args("include_dirs"), + ) templ_vars["cxxppflags"] = cxx_preprocessor_flags - c_preprocessor_flags = cmd_args() c_compiler_info = get_cxx_toolchain_info(ctx).c_compiler_info - c_preprocessor_flags.add(cmd_args(c_compiler_info.preprocessor_flags or [], quote = "shell")) - c_preprocessor_flags.add(cmd_args(propagated_preprocessor.set.project_as_args("args"), quote = "shell")) 
- c_preprocessor_flags.add(propagated_preprocessor.set.project_as_args("include_dirs")) + c_preprocessor_flags = cmd_args( + cmd_args(c_compiler_info.preprocessor_flags or [], quote = "shell"), + cmd_args(propagated_preprocessor.set.project_as_args("args"), quote = "shell"), + propagated_preprocessor.set.project_as_args("include_dirs"), + ) templ_vars["cppflags"] = c_preprocessor_flags # Add in ldflag macros. for link_strategy in (LinkStrategy("static"), LinkStrategy("static_pic")): name = "ldflags-" + link_strategy.value.replace("_", "-") - args = cmd_args() + args = [] linker_info = get_cxx_toolchain_info(ctx).linker_info - args.add(linker_info.linker_flags or []) + args.append(linker_info.linker_flags or []) # Normally, we call get_link_args_for_strategy for getting the args for our own link from our # deps. This case is a bit different as we are effectively trying to get the args for how this library @@ -798,8 +800,8 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc [merged_native_link_info], link_strategy, ) - args.add(unpack_link_args(link_args)) - templ_vars[name] = args + args.append(unpack_link_args(link_args)) + templ_vars[name] = cmd_args(args) # TODO(T110378127): To implement `$(ldflags-shared ...)` properly, we'd need # to setup a symink tree rule for all transitive shared libs. 
Since this From c36a275fa9796701e75bd67d7549d24834c33f79 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Tue, 2 Apr 2024 13:50:57 -0700 Subject: [PATCH 0671/1133] Use some pure code instead of cmd_args.add Reviewed By: iguridi Differential Revision: D55234119 fbshipit-source-id: 333e8b897defb2a3a5fb7db5bb43e7d027f3b4f5 --- prelude/erlang/erlang_utils.bzl | 59 +++++++++++++++++---------------- 1 file changed, 30 insertions(+), 29 deletions(-) diff --git a/prelude/erlang/erlang_utils.bzl b/prelude/erlang/erlang_utils.bzl index ac46e5b43..9bde46523 100644 --- a/prelude/erlang/erlang_utils.bzl +++ b/prelude/erlang/erlang_utils.bzl @@ -20,13 +20,14 @@ def normalise_metadata(data: [str, list[str]]) -> [cmd_args, list[cmd_args]]: def to_term_args(data: typing.Any) -> cmd_args: """ convert nested lists/tuple/map data structure to Erlang Term cmd_args """ - args = cmd_args([]) - args.add(cmd_args([ - convert(data), - ".", - ], delimiter = "")) - args.add("") - return args + + return cmd_args( + cmd_args([ + convert(data), + ".", + ], delimiter = ""), + "", + ) # paths def app_file(ctx: AnalysisContext) -> str: @@ -64,57 +65,57 @@ def convert(data: typing.Any, ignore_artifacts: bool = False) -> cmd_args: elif type(data) == "bool": return convert_bool(data) - args = cmd_args([], ignore_artifacts = ignore_artifacts) - args.add(cmd_args(["\"", data, "\""], delimiter = "")) - return args + return cmd_args( + cmd_args(["\"", data, "\""], delimiter = ""), + ignore_artifacts = ignore_artifacts, + ) # internal def convert_list(ls: list, ob: str = "[", cb: str = "]") -> cmd_args: - args = cmd_args([]) - args.add(ob) + args = [] + args.append(ob) if len(ls) >= 1: - args.add(cmd_args([ + args.append(cmd_args([ convert(ls[0]), ], delimiter = "")) for item in ls[1:]: - args.add(cmd_args([ + args.append(cmd_args([ ",", convert(item), ], delimiter = "")) - args.add(cb) - return args + args.append(cb) + return cmd_args(args) def convert_dict(dt: dict) -> cmd_args: - args = cmd_args([]) 
- args.add("#{") + args = [] + args.append("#{") items = list(dt.items()) if len(items) >= 1: k, v = items[0] - args.add(cmd_args([ + args.append(cmd_args([ convert(k), "=>", convert(v), ], delimiter = "")) for k, v in items[1:]: - args.add(cmd_args([ + args.append(cmd_args([ ",", convert(k), "=>", convert(v), ], delimiter = "")) - args.add("}") - return args + args.append("}") + return cmd_args(args) def convert_args(data: cmd_args) -> cmd_args: - args = cmd_args() - args.add("\"") - args.add(cmd_args(data, delimiter = " ")) - args.add("\"") - return args + return cmd_args( + "\"", + cmd_args(data, delimiter = " "), + "\"", + ) def convert_string(st: str) -> cmd_args: - args = cmd_args() - return args.add(cmd_args(["\"", st.replace("\"", "\\\""), "\""], delimiter = "")) + return cmd_args(cmd_args(["\"", st.replace("\"", "\\\""), "\""], delimiter = "")) def convert_bool(bl: bool) -> cmd_args: if bl: From 5baeab947b72a54faf4293453f2fe72f8f88ca51 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Tue, 2 Apr 2024 13:50:57 -0700 Subject: [PATCH 0672/1133] Use some pure code instead of cmd_args.add Reviewed By: iguridi Differential Revision: D55234142 fbshipit-source-id: a5867e9ab86e31f18076b7f749c2a77b9cbac167 --- prelude/utils/pick.bzl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/prelude/utils/pick.bzl b/prelude/utils/pick.bzl index e64a7513f..d6de059ae 100644 --- a/prelude/utils/pick.bzl +++ b/prelude/utils/pick.bzl @@ -18,7 +18,7 @@ def pick_raw(override, underlying): return override if override != None else underlying def pick_and_add(override, additional, underlying): - flags = cmd_args(pick(override, underlying)) + flags = [pick(override, underlying)] if additional: - flags.add(additional) - return flags + flags.append(additional) + return cmd_args(flags) From 0a2fab3e22d527ad7dc1fd96056b08f50749e085 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Tue, 2 Apr 2024 13:50:57 -0700 Subject: [PATCH 0673/1133] .absolute_prefix() -> 
absolute_prefix= Summary: Working on immutable `cmd_args` API. Reviewed By: podtserkovskiy Differential Revision: D55256727 fbshipit-source-id: edfa22acf71cf8720be0b1c28c7c24a4372119a2 --- prelude/go/go_stdlib.bzl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/prelude/go/go_stdlib.bzl b/prelude/go/go_stdlib.bzl index 49414dc7c..e1f41ded1 100644 --- a/prelude/go/go_stdlib.bzl +++ b/prelude/go/go_stdlib.bzl @@ -32,10 +32,10 @@ def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: ) go_wrapper_args += [ - cmd_args(c_compiler.compiler, format = "--cc={}").absolute_prefix("%cwd%/"), - cmd_args([c_compiler.compiler_flags, go_toolchain.c_compiler_flags], format = "--cgo_cflags={}").absolute_prefix("%cwd%/"), - cmd_args(c_compiler.preprocessor_flags, format = "--cgo_cppflags={}").absolute_prefix("%cwd%/"), - cmd_args(cgo_ldflags, format = "--cgo_ldflags={}").absolute_prefix("%cwd%/"), + cmd_args(c_compiler.compiler, format = "--cc={}", absolute_prefix = "%cwd%/"), + cmd_args([c_compiler.compiler_flags, go_toolchain.c_compiler_flags], format = "--cgo_cflags={}", absolute_prefix = "%cwd%/"), + cmd_args(c_compiler.preprocessor_flags, format = "--cgo_cppflags={}", absolute_prefix = "%cwd%/"), + cmd_args(cgo_ldflags, format = "--cgo_ldflags={}", absolute_prefix = "%cwd%/"), ] cmd = get_toolchain_cmd_args(go_toolchain, go_root = True) From f30b859f5125f6bb4f6951cf11b4a0ab83be3566 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Tue, 2 Apr 2024 13:50:57 -0700 Subject: [PATCH 0674/1133] Use some pure code instead of cmd_args.add Summary: Working on immutable `cmd_args` API. 
Reviewed By: ndmitchell Differential Revision: D55256948 fbshipit-source-id: 517fe0dda44d11b0c2a129d4a9a5e570d8676868 --- prelude/toolchains/conan/defs.bzl | 58 ++++++++++++++++++------------- 1 file changed, 33 insertions(+), 25 deletions(-) diff --git a/prelude/toolchains/conan/defs.bzl b/prelude/toolchains/conan/defs.bzl index 029ca6530..a45bd0533 100644 --- a/prelude/toolchains/conan/defs.bzl +++ b/prelude/toolchains/conan/defs.bzl @@ -651,9 +651,14 @@ def _make_wrapper_script(ctx, name, tool): cmd_args([ "#!/bin/sh", '_SCRIPTDIR=`dirname "$0"`', - cmd_args("exec", tool, '"$@"', delimiter = " ") - .relative_to(wrapper, parent = 1) - .absolute_prefix('"$_SCRIPTDIR"/'), + cmd_args( + "exec", + tool, + '"$@"', + delimiter = " ", + relative_to = (wrapper, 1), + absolute_prefix = '"$_SCRIPTDIR"/', + ), ]), allow_args = True, is_executable = True, @@ -675,47 +680,50 @@ def _profile_env_tool(ctx, name, tool): def _conan_profile_impl(ctx: AnalysisContext) -> list[Provider]: cxx = ctx.attrs._cxx_toolchain[CxxToolchainInfo] - content = cmd_args() + content = [] - content.add("[settings]") - content.add(cmd_args(ctx.attrs.arch, format = "arch={}")) - content.add(cmd_args(ctx.attrs.os, format = "os={}")) - content.add(cmd_args(ctx.attrs.build_type, format = "build_type={}")) + content.append("[settings]") + content.append(cmd_args(ctx.attrs.arch, format = "arch={}")) + content.append(cmd_args(ctx.attrs.os, format = "os={}")) + content.append(cmd_args(ctx.attrs.build_type, format = "build_type={}")) # TODO[AH] Auto-generate the compiler setting based on the toolchain. # Needs a translation of CxxToolProviderType to compiler setting. 
- content.add(cmd_args(ctx.attrs.compiler, format = "compiler={}")) - content.add(cmd_args(ctx.attrs.compiler_version, format = "compiler.version={}")) - content.add(cmd_args(ctx.attrs.compiler_libcxx, format = "compiler.libcxx={}")) + content.append(cmd_args(ctx.attrs.compiler, format = "compiler={}")) + content.append(cmd_args(ctx.attrs.compiler_version, format = "compiler.version={}")) + content.append(cmd_args(ctx.attrs.compiler_libcxx, format = "compiler.libcxx={}")) - content.add("") - content.add("[env]") - content.add(_profile_env_var("CMAKE_FIND_ROOT_PATH", "")) + content.append("") + content.append("[env]") + content.append(_profile_env_var("CMAKE_FIND_ROOT_PATH", "")) # TODO[AH] Define CMAKE_SYSROOT if needed. # TODO[AH] Define target CHOST for cross-compilation - content.add(_profile_env_tool(ctx, "AR", cxx.linker_info.archiver)) + content.append(_profile_env_tool(ctx, "AR", cxx.linker_info.archiver)) if cxx.as_compiler_info: - content.add(_profile_env_tool(ctx, "AS", cxx.as_compiler_info.compiler)) + content.append(_profile_env_tool(ctx, "AS", cxx.as_compiler_info.compiler)) # TODO[AH] Use asm_compiler_info for Windows if cxx.binary_utilities_info: if cxx.binary_utilities_info.nm: - content.add(_profile_env_tool(ctx, "NM", cxx.binary_utilities_info.nm)) + content.append(_profile_env_tool(ctx, "NM", cxx.binary_utilities_info.nm)) if cxx.binary_utilities_info.ranlib: - content.add(_profile_env_tool(ctx, "RANLIB", cxx.binary_utilities_info.ranlib)) + content.append(_profile_env_tool(ctx, "RANLIB", cxx.binary_utilities_info.ranlib)) if cxx.binary_utilities_info.strip: - content.add(_profile_env_tool(ctx, "STRIP", cxx.binary_utilities_info.strip)) + content.append(_profile_env_tool(ctx, "STRIP", cxx.binary_utilities_info.strip)) if cxx.c_compiler_info: - content.add(_profile_env_tool(ctx, "CC", cxx.c_compiler_info.compiler)) - content.add(_profile_env_var("CFLAGS", cxx.c_compiler_info.compiler_flags)) + content.append(_profile_env_tool(ctx, "CC", 
cxx.c_compiler_info.compiler)) + content.append(_profile_env_var("CFLAGS", cxx.c_compiler_info.compiler_flags)) if cxx.cxx_compiler_info: - content.add(_profile_env_tool(ctx, "CXX", cxx.cxx_compiler_info.compiler)) - content.add(_profile_env_var("CXXFLAGS", cxx.cxx_compiler_info.compiler_flags)) + content.append(_profile_env_tool(ctx, "CXX", cxx.cxx_compiler_info.compiler)) + content.append(_profile_env_var("CXXFLAGS", cxx.cxx_compiler_info.compiler_flags)) output = ctx.actions.declare_output(ctx.label.name) - content.relative_to(output, parent = 1) - content.absolute_prefix("$PROFILE_DIR/") + content = cmd_args( + content, + relative_to = (output, 1), + absolute_prefix = "$PROFILE_DIR/", + ) _, args_inputs = ctx.actions.write(output, content, allow_args = True) return [ From 5281b16663dd93d1c580946cc5739d77ece5ea8f Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Tue, 2 Apr 2024 13:50:57 -0700 Subject: [PATCH 0675/1133] Use some pure code instead of cmd_args.add Summary: Working on immutable `cmd_args`. 
Reviewed By: iguridi Differential Revision: D55259847 fbshipit-source-id: 0c3a84d168f1da6810111e1bbe86c98494e7b823 --- prelude/js/js_utils.bzl | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/prelude/js/js_utils.bzl b/prelude/js/js_utils.bzl index 2bb1ce8a8..8a69d7899 100644 --- a/prelude/js/js_utils.bzl +++ b/prelude/js/js_utils.bzl @@ -152,10 +152,14 @@ def run_worker_commands( ctx.actions.write(worker_argsfile.as_output(), worker_args) worker_tool_info = worker_tool[WorkerToolInfo] - worker_command = worker_tool_info.command.copy() - worker_command.hidden(hidden_artifacts) - worker_command.hidden(command_args_files) - worker_command.add(cmd_args(worker_argsfile, format = "@{}")) + worker_command = cmd_args( + worker_tool_info.command.copy(), + cmd_args(worker_argsfile, format = "@{}"), + hidden = [ + hidden_artifacts, + command_args_files, + ], + ) ctx.actions.run( worker_command, From 6b889db464c5d0fb9e16dd3d6f6c1351708a83f7 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Tue, 2 Apr 2024 13:50:57 -0700 Subject: [PATCH 0676/1133] Use some pure code instead of cmd_args.add Summary: Working on immutable `cmd_args`. 
Reviewed By: iguridi Differential Revision: D55260369 fbshipit-source-id: 1ef7ce68a8e115ea52909a86852ab1412bb11967 --- prelude/zip_file/zip_file.bzl | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/prelude/zip_file/zip_file.bzl b/prelude/zip_file/zip_file.bzl index 9c98b55db..9876039b8 100644 --- a/prelude/zip_file/zip_file.bzl +++ b/prelude/zip_file/zip_file.bzl @@ -29,13 +29,13 @@ def zip_file_impl(ctx: AnalysisContext) -> list[Provider]: zip_srcs = ctx.attrs.zip_srcs srcs = ctx.attrs.srcs - create_zip_cmd = cmd_args([ + create_zip_cmd = [ create_zip_tool, "--output_path", output.as_output(), "--on_duplicate_entry", on_duplicate_entry if on_duplicate_entry else "overwrite", - ]) + ] if srcs: # add artifact and is_source flag pair @@ -47,19 +47,19 @@ def zip_file_impl(ctx: AnalysisContext) -> list[Provider]: ) entries_file = ctx.actions.write("entries", srcs_file_cmd) - create_zip_cmd.add("--entries_file") - create_zip_cmd.add(entries_file) - create_zip_cmd.hidden(srcs) + create_zip_cmd.append("--entries_file") + create_zip_cmd.append(entries_file) + create_zip_cmd.append(cmd_args(hidden = srcs)) if zip_srcs: - create_zip_cmd.add("--zip_sources") - create_zip_cmd.add(zip_srcs) + create_zip_cmd.append("--zip_sources") + create_zip_cmd.append(zip_srcs) if entries_to_exclude: - create_zip_cmd.add("--entries_to_exclude") - create_zip_cmd.add(entries_to_exclude) + create_zip_cmd.append("--entries_to_exclude") + create_zip_cmd.append(entries_to_exclude) - ctx.actions.run(create_zip_cmd, category = "zip") + ctx.actions.run(cmd_args(create_zip_cmd), category = "zip") return [DefaultInfo(default_output = output)] From b770d1f12eb66853b5a18030eaa6f09f7fc32d35 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Tue, 2 Apr 2024 13:50:57 -0700 Subject: [PATCH 0677/1133] Use some pure code instead of cmd_args.xxx Summary: Working on immutable `cmd_args`. 
Reviewed By: iguridi Differential Revision: D55258948 fbshipit-source-id: 68098905294f372984bfe0e764d44d3fd85fdbde --- prelude/command_alias.bzl | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/prelude/command_alias.bzl b/prelude/command_alias.bzl index 1dcc7ea9b..b6b38e6fd 100644 --- a/prelude/command_alias.bzl +++ b/prelude/command_alias.bzl @@ -148,7 +148,11 @@ def _relativize_path_unix( trampoline_args: cmd_args) -> Artifact: # FIXME(ndmitchell): more straightforward relativization with better API non_materialized_reference = ctx.actions.write("dummy", "") - trampoline_args.relative_to(non_materialized_reference, parent = 1).absolute_prefix("__BUCK_COMMAND_ALIAS_ABSOLUTE__/") + trampoline_args = cmd_args( + trampoline_args, + relative_to = (non_materialized_reference, 1), + absolute_prefix = "__BUCK_COMMAND_ALIAS_ABSOLUTE__/", + ) trampoline_tmp, _ = ctx.actions.write("__command_alias_trampoline.{}.pre".format(extension), trampoline_args, allow_args = True) @@ -178,7 +182,11 @@ def _relativize_path_windows( trampoline_args: cmd_args) -> Artifact: # FIXME(ndmitchell): more straightforward relativization with better API non_materialized_reference = ctx.actions.write("dummy", "") - trampoline_args.relative_to(non_materialized_reference, parent = 1).absolute_prefix(var + "/") + trampoline_args = cmd_args( + trampoline_args, + relative_to = (non_materialized_reference, 1), + absolute_prefix = var + "/", + ) trampoline, _ = ctx.actions.write("__command_alias_trampoline.{}".format(extension), trampoline_args, allow_args = True) From d6ac5b9537017ceaa8c8f290e7e622c89d0c0dbd Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Tue, 2 Apr 2024 13:53:48 -0700 Subject: [PATCH 0678/1133] dynamic_output(outputs=[.as_output()]) Summary: Following diff D55389251 requires output artifact as parameter to `.dynamic_output` call. 
Reviewed By: JakobDegen Differential Revision: D55390969 fbshipit-source-id: 496c4b3de2a0b37ff9b10a9322b343d280afb9cc --- prelude/apple/user/apple_selective_debugging.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/apple/user/apple_selective_debugging.bzl b/prelude/apple/user/apple_selective_debugging.bzl index 6df13c4ee..69f49a5f4 100644 --- a/prelude/apple/user/apple_selective_debugging.bzl +++ b/prelude/apple/user/apple_selective_debugging.bzl @@ -125,7 +125,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: inner_ctx.actions.dynamic_output( dynamic = [targets_json_file], inputs = [], - outputs = [output], + outputs = [output.as_output()], f = scrub_selected_debug_paths_action, ) From 8786b6248e017114bd007a45920e52b8f7cf2e6b Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Tue, 2 Apr 2024 13:53:48 -0700 Subject: [PATCH 0679/1133] dynamic_output(outputs=[.as_output()]) Reviewed By: JakobDegen Differential Revision: D55390971 fbshipit-source-id: 7c599960b8f88fd43658ca65128c62b9cdf9e15f --- prelude/cxx/dist_lto/dist_lto.bzl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/prelude/cxx/dist_lto/dist_lto.bzl b/prelude/cxx/dist_lto/dist_lto.bzl index c3163e660..456b09960 100644 --- a/prelude/cxx/dist_lto/dist_lto.bzl +++ b/prelude/cxx/dist_lto/dist_lto.bzl @@ -227,7 +227,7 @@ def cxx_dist_link( ), ) index_link_data.append(data) - plan_outputs.extend([bc_output, plan_output]) + plan_outputs.extend([bc_output.as_output(), plan_output.as_output()]) elif isinstance(linkable, ArchiveLinkable) and linkable.supports_lto: # Our implementation of Distributed ThinLTO operates on individual objects, not archives. 
Since these # archives might still contain LTO-able bitcode, we first extract the objects within the archive into @@ -275,7 +275,7 @@ def cxx_dist_link( index_link_data.append(data) archive_opt_manifests.append(archive_opt_manifest) plan_inputs.extend([archive_manifest, archive_objects]) - plan_outputs.extend([archive_indexes, archive_plan]) + plan_outputs.extend([archive_indexes.as_output(), archive_plan.as_output()]) else: add_linkable(idx, linkable) index_link_data.append(None) @@ -398,7 +398,7 @@ def cxx_dist_link( # directly, since it uses `ctx.outputs` to bind its outputs. Instead of doing Starlark hacks to work around # the lack of `ctx.outputs`, we declare an empty file as a dynamic input. plan_inputs.append(ctx.actions.write(output.basename + ".plan_hack.txt", "")) - plan_outputs.extend([link_plan, index_argsfile_out, final_link_index]) + plan_outputs.extend([link_plan.as_output(), index_argsfile_out.as_output(), final_link_index.as_output()]) ctx.actions.dynamic_output(dynamic = plan_inputs, inputs = [], outputs = plan_outputs, f = plan) link_plan_out = ctx.actions.declare_output(output.basename + ".link-plan.json") @@ -467,7 +467,7 @@ def cxx_dist_link( opt_cmd.hidden(archives) ctx.actions.run(opt_cmd, category = make_cat("thin_lto_opt_object"), identifier = name) - ctx.actions.dynamic_output(dynamic = [plan], inputs = [], outputs = [opt_object], f = optimize_object) + ctx.actions.dynamic_output(dynamic = [plan], inputs = [], outputs = [opt_object.as_output()], f = optimize_object) def dynamic_optimize_archive(archive: _ArchiveLinkData): def optimize_archive(ctx: AnalysisContext, artifacts, outputs): @@ -532,7 +532,7 @@ def cxx_dist_link( ctx.actions.write(outputs[archive.opt_manifest], output_manifest, allow_args = True) archive_opt_inputs = [archive.plan] - archive_opt_outputs = [archive.opt_objects_dir, archive.opt_manifest] + archive_opt_outputs = [archive.opt_objects_dir.as_output(), archive.opt_manifest.as_output()] 
ctx.actions.dynamic_output(dynamic = archive_opt_inputs, inputs = [], outputs = archive_opt_outputs, f = optimize_archive) for artifact in index_link_data: @@ -611,7 +611,7 @@ def cxx_dist_link( ctx.actions.dynamic_output( dynamic = final_link_inputs, inputs = [], - outputs = [output] + ([linker_map] if linker_map else []) + [linker_argsfile_out], + outputs = [output.as_output()] + ([linker_map] if linker_map else []) + [linker_argsfile_out], f = thin_lto_final_link, ) From 8213cadd32eb40142e9ad6454c729a06920ca5be Mon Sep 17 00:00:00 2001 From: Nuri Amari Date: Tue, 2 Apr 2024 14:09:46 -0700 Subject: [PATCH 0680/1133] Re-land linker wrapper without needing to select on linker Summary: D55246335 was reverted as it passed flags intended only for lld to other linkers such as ld64. Apparently selecting on the linker is difficult, so the wrapper script is now passed the same arguments for all linkers, and instead discovers the selected linker using the -fuse-ld= flag, and modifies flags forwarded to the linker accordingly. 
Reviewed By: rmaz Differential Revision: D55640540 fbshipit-source-id: 54711aeb5e80b0b2a77c31b646032c1ea4a33d1a --- prelude/apple/tools/BUCK.v2 | 12 +++ prelude/apple/tools/linker_wrapper.py | 138 ++++++++++++++++++++++++++ 2 files changed, 150 insertions(+) create mode 100644 prelude/apple/tools/linker_wrapper.py diff --git a/prelude/apple/tools/BUCK.v2 b/prelude/apple/tools/BUCK.v2 index 7c7893fa0..8d7803b8c 100644 --- a/prelude/apple/tools/BUCK.v2 +++ b/prelude/apple/tools/BUCK.v2 @@ -81,3 +81,15 @@ python_bootstrap_binary( main = "swift_objc_header_postprocess.py", visibility = ["PUBLIC"], ) + +python_bootstrap_binary( + name = "linker_wrapper", + main = "linker_wrapper.py", + visibility = ["PUBLIC"], +) + +python_library( + name = "linker_wrapper_library", + srcs = ["linker_wrapper.py"], + visibility = ["PUBLIC"], +) diff --git a/prelude/apple/tools/linker_wrapper.py b/prelude/apple/tools/linker_wrapper.py new file mode 100644 index 000000000..77ede38b5 --- /dev/null +++ b/prelude/apple/tools/linker_wrapper.py @@ -0,0 +1,138 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +import argparse +import enum +import re +import subprocess +import sys + +from typing import List, Optional, Tuple + + +def _eprintln(msg: str) -> None: + print(msg, flush=True, file=sys.stderr) + + +def _expand_arg_files(args: List[str]) -> List[str]: + expanded_args = [] + for arg in args: + if arg.startswith("@"): + with open(arg[1:]) as argfile: + expanded_args.extend( + [line.strip('"') for line in argfile.read().splitlines()] + ) + else: + expanded_args.append(arg) + return expanded_args + + +def _seperate_wrapper_args_from_linker_args( + args: List[str], +) -> Tuple[List[str], List[str]]: + wrapper_args = [] + linker_args = [] + expanded_args = _expand_arg_files(args) + + i = 0 + while i < len(expanded_args): + if expanded_args[i] == "-Xwrapper": + wrapper_args.append(expanded_args[i + 1]) + i += 1 + else: + linker_args.append(expanded_args[i]) + i += 1 + + return wrapper_args, linker_args + + +def _diagnose_potential_unexported_symbol_issue( + unexported_symbol_lists: List[str], stderr: str +) -> Optional[str]: + stderr_lines = stderr.splitlines() + undefined_symbol_re = re.compile(r"undefined symbol:.*\(mangled: (\S+)\)") + undefined_symbols = set() + for stderr_line in stderr_lines: + match = re.search(undefined_symbol_re, stderr_line) + if match: + undefined_symbols.add(match.group(1)) + + if not undefined_symbols: + return None + + unexported_symbols = set() + incorrectly_unexported_symbols = set() + incorrect_unexported_symbol_lists = [] + for unexported_symbol_list in unexported_symbol_lists: + target_name, file_path = unexported_symbol_list.split(",") + with open(file_path, "r") as unexported_symbol_list_file: + unexported_symbols = set(unexported_symbol_list_file.read().splitlines()) + intersection = undefined_symbols & unexported_symbols + if intersection: + incorrectly_unexported_symbols.update(intersection) + incorrect_unexported_symbol_lists.append(target_name) + + if not incorrect_unexported_symbol_lists: + return None + + return f""" 
+UNEXPORTED SYMBOLS ERROR: + +At least one symbol is included in an unexported symbol list, but referenced across dylib boundaries. Please +run the following command to fix the unexported symbol lists: + +arc fix-unexported-symbol-lists {"".join(["--target " + target for target in incorrect_unexported_symbol_lists])} {" ".join(["--symbol " + symbol for symbol in sorted(incorrectly_unexported_symbols)])} + +Here is the linker failure message: +""" + + +class Linker(enum.Enum): + LLD = "lld" + LD64 = "ld64" + + +def _discover_linker(args: List[str]) -> Optional[Linker]: + for arg in args: + if arg.startswith("-fuse-ld="): + linker_name = arg.split("=")[-1] + if linker_name == Linker.LLD.value: + return Linker.LLD + elif linker_name == Linker.LD64.value: + return Linker.LD64 + else: + raise Exception(f"Unknown linker: {linker_name}") + + +def main(argv: List[str]) -> int: + wrapper_args, linker_args = _seperate_wrapper_args_from_linker_args(argv[1:]) + + parser = argparse.ArgumentParser() + parser.add_argument("-linker") + parser.add_argument("-unexported_symbol_list", action="append") + args = parser.parse_args(wrapper_args) + + linker = _discover_linker(linker_args) + if linker == Linker.LLD: + linker_args.extend( + ["-Xlinker", "-pika_include_mangled_names_in_undefined_symbol_errors"] + ) + + result = subprocess.run([args.linker] + linker_args, capture_output=True, text=True) + if result.returncode != 0: + if args.unexported_symbol_list and linker == Linker.LLD: + diagnosis = _diagnose_potential_unexported_symbol_issue( + args.unexported_symbol_list, result.stderr + ) + if diagnosis: + _eprintln(diagnosis) + _eprintln(result.stderr) + return result.returncode + + +if __name__ == "__main__": + sys.exit(main(sys.argv)) From 719cb57226d8b88a63742eca8bcf39c882026a76 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Tue, 2 Apr 2024 15:03:27 -0700 Subject: [PATCH 0681/1133] Add type-hints for GoToolchainInfo Summary: Just made GoToolchainInfo look more clear 
Reviewed By: leoleovich Differential Revision: D55453839 fbshipit-source-id: 2f2947c382e604096e7887b5ae6e065e5bd20627 --- prelude/go/cgo_library.bzl | 1 - prelude/go/toolchain.bzl | 19 +++++++++---------- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 7902c8b51..83febc04b 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -92,7 +92,6 @@ def _cgo( # Return a `cmd_args` to use as the generated sources. go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - expect(go_toolchain.cgo != None) expect(CxxToolchainInfo in ctx.attrs._cxx_toolchain) cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index c454b7af1..e1c0e9ca9 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -13,7 +13,7 @@ GoToolchainInfo = provider( "assembler": provider_field(RunInfo), "assembler_flags": provider_field(typing.Any, default = None), "c_compiler_flags": provider_field(typing.Any, default = None), - "cgo": provider_field(RunInfo | None, default = None), + "cgo": provider_field(RunInfo), "cgo_wrapper": provider_field(RunInfo), "gen_stdlib_importcfg": provider_field(RunInfo), "go_list_wrapper": provider_field(RunInfo), @@ -23,9 +23,9 @@ GoToolchainInfo = provider( "concat_files": provider_field(RunInfo), "cover": provider_field(RunInfo), "cxx_toolchain_for_linking": provider_field(CxxToolchainInfo | None, default = None), - "env_go_arch": provider_field(typing.Any, default = None), - "env_go_os": provider_field(typing.Any, default = None), - "env_go_arm": provider_field(typing.Any, default = None), + "env_go_arch": provider_field(str), + "env_go_os": provider_field(str), + "env_go_arm": provider_field(str | None, default = None), "env_go_root": provider_field(typing.Any, default = None), "env_go_debug": provider_field(dict[str, str], default = {}), "external_linker_flags": provider_field(typing.Any, 
default = None), @@ -33,7 +33,7 @@ GoToolchainInfo = provider( "linker": provider_field(RunInfo), "linker_flags": provider_field(typing.Any, default = None), "packer": provider_field(RunInfo), - "tags": provider_field(typing.Any, default = None), + "tags": provider_field(list[str], default = []), }, ) @@ -43,10 +43,8 @@ def get_toolchain_cmd_args(toolchain: GoToolchainInfo, go_root = True, force_dis # opt-out from Go1.20 coverage redisign cmd.add("GOEXPERIMENT=nocoverageredesign") - if toolchain.env_go_arch != None: - cmd.add("GOARCH={}".format(toolchain.env_go_arch)) - if toolchain.env_go_os != None: - cmd.add("GOOS={}".format(toolchain.env_go_os)) + cmd.add("GOARCH={}".format(toolchain.env_go_arch)) + cmd.add("GOOS={}".format(toolchain.env_go_os)) if toolchain.env_go_arm != None: cmd.add("GOARM={}".format(toolchain.env_go_arm)) if go_root and toolchain.env_go_root != None: @@ -60,7 +58,8 @@ def get_toolchain_cmd_args(toolchain: GoToolchainInfo, go_root = True, force_dis # CGO is enabled by default for native compilation, but we need to set it # explicitly for cross-builds: # https://go-review.googlesource.com/c/go/+/12603/2/src/cmd/cgo/doc.go - if toolchain.cgo != None: + cxx_toolchain_available = toolchain.cxx_toolchain_for_linking != None + if cxx_toolchain_available: cmd.add("CGO_ENABLED=1") return cmd From 7b3302d126ce08c1bc381e36f063a3c8ad280fff Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Tue, 2 Apr 2024 15:19:19 -0700 Subject: [PATCH 0682/1133] Materialize split debug objects for bolt Summary: It appears that Bolt requires accessing DWO files when running, so make sure we materialize them to avoid errors like: ``` ... BOLT-WARNING: Debug Fission: DWO debug information for buck-out/v2/gen/fbcode/2373444e343de73a/unicorn/py/__query/query__/__objects__/query.cpp.o was not retrieved and won't be updated. Please check relative path. 
BOLT-WARNING: Debug Fission: DWO debug information for buck-out/v2/gen/fbcode/2373444e343de73a/unicorn/py/__doc_id/doc_id__/__objects__/doc_id.cpp.o was not retrieved and won't be updated. Please check relative path. BOLT-WARNING: Debug Fission: DWO debug information for buck-out/v2/gen/fbcode/2373444e343de73a/unified_graph/client/py/___client/_client__/__objects__/_client.cpp.o was not retrieved and won't be updated. Please check relative path. BOLT-WARNING: Debug Fission: DWO debug information for buck-out/v2/gen/fbcode/2373444e343de73a/videos/mp4utils/media/py/__MP4UtilClientExt__/__objects__/MP4UtilClientExt.cpp.o was not retrieved and won't be updated. Please check relative path. BOLT-WARNING: Debug Fission: DWO debug information for buck-out/v2/gen/fbcode/2373444e343de73a/vision/cortex/client/__cortex_calculation_lite_lib_cython/cortex_calculation_lite__/__objects__/cortex_calculation_lite.cpp.o was not retrieved and won't be updated. Please check relative path. BOLT-WARNING: Debug Fission: DWO debug information for buck-out/v2/gen/fbcode/2373444e343de73a/vision/cortex/client/__cortex_storage_lib_cython/cortex_storage__/__objects__/cortex_storage.cpp.o was not retrieved and won't be updated. Please check relative path. BOLT-WARNING: Debug Fission: DWO debug information for buck-out/v2/gen/fbcode/2373444e343de73a/infrasec/authorization/certtoken/py/__cert_token_util__/__objects__/CertTokenPy.cpp.o was not retrieved and won't be updated. Please check relative path. BOLT-INFO: processing split DWARF llvm-bolt: /data/sandcastle/boxes/trunk-grepo-llvm-c2-grepo/llvm-project/bolt/lib/Core/BinaryContext.cpp:1760: void llvm::bolt::BinaryContext::preprocessDebugInfo(): Assertion `Iter != DWOCUs.end() && "DWO CU was not found."' failed. 
Stack dump without symbol names (ensure you have llvm-symbolizer in your PATH or set the environment var `LLVM_SYMBOLIZER_PATH` to point to it): 0 llvm-bolt 0x0000000000d6bfdc 1 llvm-bolt 0x0000000000d6d958 2 libc.so.6 0x00007f0a88c445c0 3 libc.so.6 0x00007f0a88c9c9f3 pthread_kill + 67 4 libc.so.6 0x00007f0a88c4450d raise + 29 5 libc.so.6 0x00007f0a88c2c433 abort + 202 6 libc.so.6 0x00007f0a88c3bc28 7 libc.so.6 0x00007f0a88c3bc93 8 llvm-bolt 0x000000000141d7ea 9 llvm-bolt 0x0000000000dd65fa 10 llvm-bolt 0x00000000004fe78a 11 libc.so.6 0x00007f0a88c2c657 12 libc.so.6 0x00007f0a88c2c718 __libc_start_main + 136 13 llvm-bolt 0x000000000044f431 PLEASE submit a bug report to https://github.com/llvm/llvm-project/issues/ and include the crash backtrace. Stack dump: 0. Program arguments: /data/users/agallagher/fbsource4/buck-out/v2/gen/fbcode/902526078e99e677/compilers/bolt/__bolt_linux_component__/unpacked/bin/real/llvm-bolt buck-out/v2/gen/fbcode/2373444e343de73a/instagram/server/native_python/__native_python_ig_server_bolt__/native_python_ig_server_bolt-wrapper -o buck-o ut/v2/gen/fbcode/2373444e343de73a/instagram/server/native_python/__native_python_ig_server_bolt__/native_python_ig_server_bolt -data=buck-out/v2/gen/fbcode/2373444e343de73a/fdo/bolt/instagram/server/native_python/native_python_ig_server/__bolt__/out/profile -use-gnu-stack=1 -reorder-blocks=ext-tsp -update-debug-secti ons -dyno-stats -plt=all -split-functions -skip-funcs=__strobe_loc_.* -skip-funcs=mkl_vml_kernel_sCdfNorm_.*,mkl_vml_kernel_sErfc_.*,mkl_vml_kernel_sErf_.* -skip-funcs=TcmallocSlab_Internal.* -infer-stale-profile=1 -enable-bat -reorder-functions=cdsort -hot-text -relocs -split-all-cold -split-eh -use-compact-aligner= true -split-functions -skip-funcs=_ZN5follyL13detached_taskEv/1 Aborted (core dumped) ``` Reviewed By: yulong-zhang-1 Differential Revision: D55641453 fbshipit-source-id: c6e84b2c6e3929ab37378fd34c712928470ff247 --- prelude/cxx/cxx_bolt.bzl | 10 +++++++++- 
prelude/cxx/dist_lto/dist_lto.bzl | 2 +- prelude/cxx/link.bzl | 2 +- 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/prelude/cxx/cxx_bolt.bzl b/prelude/cxx/cxx_bolt.bzl index b26ac90cb..26c12dd65 100644 --- a/prelude/cxx/cxx_bolt.bzl +++ b/prelude/cxx/cxx_bolt.bzl @@ -8,13 +8,18 @@ # BOLT (Binary Optimization Layout Tool) is a post link profile guided optimizer used for # performance-critical services in fbcode: https://www.internalfb.com/intern/wiki/HHVM-BOLT/ +load( + "@prelude//:artifact_tset.bzl", + "ArtifactTSet", + "project_artifacts", +) load(":cxx_context.bzl", "get_cxx_toolchain_info") def cxx_use_bolt(ctx: AnalysisContext) -> bool: cxx_toolchain_info = get_cxx_toolchain_info(ctx) return cxx_toolchain_info.bolt_enabled and ctx.attrs.bolt_profile != None -def bolt(ctx: AnalysisContext, prebolt_output: Artifact, identifier: [str, None]) -> Artifact: +def bolt(ctx: AnalysisContext, prebolt_output: Artifact, external_debug_info: ArtifactTSet, identifier: [str, None]) -> Artifact: output_name = prebolt_output.short_path.removesuffix("-wrapper") postbolt_output = ctx.actions.declare_output(output_name) bolt_msdk = get_cxx_toolchain_info(ctx).binary_utilities_info.bolt_msdk @@ -33,6 +38,9 @@ def bolt(ctx: AnalysisContext, prebolt_output: Artifact, identifier: [str, None] ctx.attrs.bolt_flags, ) + materialized_external_debug_info = project_artifacts(ctx.actions, [external_debug_info]) + args.hidden(materialized_external_debug_info) + ctx.actions.run( args, category = "bolt", diff --git a/prelude/cxx/dist_lto/dist_lto.bzl b/prelude/cxx/dist_lto/dist_lto.bzl index 456b09960..cbab2f4d5 100644 --- a/prelude/cxx/dist_lto/dist_lto.bzl +++ b/prelude/cxx/dist_lto/dist_lto.bzl @@ -623,7 +623,7 @@ def cxx_dist_link( ], ) - final_output = output if not (executable_link and cxx_use_bolt(ctx)) else bolt(ctx, output, identifier) + final_output = output if not (executable_link and cxx_use_bolt(ctx)) else bolt(ctx, output, external_debug_info, identifier) 
dwp_output = ctx.actions.declare_output(output.short_path.removesuffix("-wrapper") + ".dwp") if generate_dwp else None if generate_dwp: diff --git a/prelude/cxx/link.bzl b/prelude/cxx/link.bzl index 361faea2c..e62e1af54 100644 --- a/prelude/cxx/link.bzl +++ b/prelude/cxx/link.bzl @@ -279,7 +279,7 @@ def cxx_link_into( strip_args = opts.strip_args_factory(ctx) if opts.strip_args_factory else cmd_args() output = strip_object(ctx, cxx_toolchain_info, output, strip_args, opts.category_suffix) - final_output = output if not (is_result_executable and cxx_use_bolt(ctx)) else bolt(ctx, output, opts.identifier) + final_output = output if not (is_result_executable and cxx_use_bolt(ctx)) else bolt(ctx, output, external_debug_info, opts.identifier) dwp_artifact = None if should_generate_dwp: # TODO(T110378144): Once we track split dwarf from compiles, we should From e545433a6a1e42f8e0cfefaa0d1e89af8c155a48 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Wed, 3 Apr 2024 02:42:59 -0700 Subject: [PATCH 0683/1133] do not fail analysis if bundle binary doesn't have debuggable info Summary: we don't support (yet?) 
debuggable info when bundle binary is a library, handle the absence in a generic manner Differential Revision: D55664332 fbshipit-source-id: 238017a4cb6ec58cd1669530a8eb97c8759726c1 --- prelude/apple/apple_bundle.bzl | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/prelude/apple/apple_bundle.bzl b/prelude/apple/apple_bundle.bzl index db379855c..83cfbcedc 100644 --- a/prelude/apple/apple_bundle.bzl +++ b/prelude/apple/apple_bundle.bzl @@ -262,10 +262,7 @@ def _get_bundle_binary_dsym_artifacts(ctx: AnalysisContext, binary_output: Apple return binary_output.debuggable_info.dsyms def _get_all_agg_debug_info(ctx: AnalysisContext, binary_output: AppleBundleBinaryOutput, deps_debuggable_infos: list[AppleDebuggableInfo]) -> AggregatedAppleDebugInfo: - all_debug_infos = deps_debuggable_infos - if not binary_output.is_watchkit_stub_binary: - binary_debuggable_info = binary_output.debuggable_info - all_debug_infos = all_debug_infos + [binary_debuggable_info] + all_debug_infos = deps_debuggable_infos + ([binary_output.debuggable_info] if binary_output.debuggable_info else []) return get_aggregated_debug_info(ctx, all_debug_infos) def _maybe_scrub_selected_debug_paths_file(ctx: AnalysisContext, package_names: list[str]) -> Artifact: From 103559dc1aaec72f9c345557a44bb64cb83eb1a9 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 3 Apr 2024 04:06:17 -0700 Subject: [PATCH 0684/1133] Always set GOROOT Summary: There's no sense in setting it conditionally Reviewed By: leoleovich Differential Revision: D55640329 fbshipit-source-id: 7eeb3621535aa35a8ab65d9b794a20214158601f --- prelude/go/cgo_library.bzl | 2 +- prelude/go/go_stdlib.bzl | 2 +- prelude/go/package_builder.bzl | 12 ++++++------ prelude/go/toolchain.bzl | 4 ++-- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 83febc04b..253592179 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -95,7 
+95,7 @@ def _cgo( expect(CxxToolchainInfo in ctx.attrs._cxx_toolchain) cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] - cmd = get_toolchain_cmd_args(go_toolchain, go_root = False) + cmd = get_toolchain_cmd_args(go_toolchain) cmd.add(go_toolchain.cgo_wrapper) args = cmd_args() diff --git a/prelude/go/go_stdlib.bzl b/prelude/go/go_stdlib.bzl index e1f41ded1..fb8dfc2eb 100644 --- a/prelude/go/go_stdlib.bzl +++ b/prelude/go/go_stdlib.bzl @@ -38,7 +38,7 @@ def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: cmd_args(cgo_ldflags, format = "--cgo_ldflags={}", absolute_prefix = "%cwd%/"), ] - cmd = get_toolchain_cmd_args(go_toolchain, go_root = True) + cmd = get_toolchain_cmd_args(go_toolchain) cmd.add([ "GODEBUG={}".format("installgoroot=all"), "CGO_ENABLED={}".format("1" if cgo_enabled else "0"), diff --git a/prelude/go/package_builder.bzl b/prelude/go/package_builder.bzl index c633046de..cd694b558 100644 --- a/prelude/go/package_builder.bzl +++ b/prelude/go/package_builder.bzl @@ -99,7 +99,7 @@ def build_package( def _go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_root: str, force_disable_cgo: bool): go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env_args = get_toolchain_cmd_args(go_toolchain, go_root = True, force_disable_cgo = force_disable_cgo) + env_args = get_toolchain_cmd_args(go_toolchain, force_disable_cgo = force_disable_cgo) go_list_out = ctx.actions.declare_output(paths.basename(pkg_name) + "_go_list.json") # Create file sructure that `go list` can recognize @@ -186,7 +186,7 @@ def _compile( gen_asmhdr: bool = False) -> (Artifact, Artifact | None): go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env_args = get_toolchain_cmd_args(go_toolchain, go_root = True) + env_args = get_toolchain_cmd_args(go_toolchain) out = ctx.actions.declare_output("go_compile_out.a") if len(go_srcs) == 0: @@ -224,7 +224,7 @@ def _symabis(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], assem return 
None go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env_args = get_toolchain_cmd_args(go_toolchain, go_root = True) + env_args = get_toolchain_cmd_args(go_toolchain) # we have to supply "go_asm.h" with any content to make asm tool happy # its content doesn't matter if -gensymabis provided @@ -253,7 +253,7 @@ def _asssembly(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], asm return [] go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env_args = get_toolchain_cmd_args(go_toolchain, go_root = True) + env_args = get_toolchain_cmd_args(go_toolchain) o_files = [] identifier = paths.basename(pkg_name) @@ -282,7 +282,7 @@ def _pack(ctx: AnalysisContext, pkg_name: str, a_file: Artifact, o_files: list[A return a_file go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env_args = get_toolchain_cmd_args(go_toolchain, go_root = True) + env_args = get_toolchain_cmd_args(go_toolchain) pkg_file = ctx.actions.declare_output("pkg.a") @@ -315,7 +315,7 @@ def _cover(ctx: AnalysisContext, pkg_name: str, go_files: list[Artifact], covera return go_files, "" go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env_args = get_toolchain_cmd_args(go_toolchain, go_root = True) + env_args = get_toolchain_cmd_args(go_toolchain) covered_files = [] coverage_vars = {} for go_file in go_files: diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index e1c0e9ca9..8922a94c9 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -37,7 +37,7 @@ GoToolchainInfo = provider( }, ) -def get_toolchain_cmd_args(toolchain: GoToolchainInfo, go_root = True, force_disable_cgo = False) -> cmd_args: +def get_toolchain_cmd_args(toolchain: GoToolchainInfo, force_disable_cgo = False) -> cmd_args: cmd = cmd_args("env") # opt-out from Go1.20 coverage redisign @@ -47,7 +47,7 @@ def get_toolchain_cmd_args(toolchain: GoToolchainInfo, go_root = True, force_dis cmd.add("GOOS={}".format(toolchain.env_go_os)) if toolchain.env_go_arm != None: 
cmd.add("GOARM={}".format(toolchain.env_go_arm)) - if go_root and toolchain.env_go_root != None: + if toolchain.env_go_root != None: cmd.add(cmd_args(toolchain.env_go_root, format = "GOROOT={}")) if toolchain.env_go_debug: godebug = ",".join(["{}={}".format(k, v) for k, v in toolchain.env_go_debug.items()]) From b11eab121ee593efa9a040256d844a9787090dd7 Mon Sep 17 00:00:00 2001 From: Pepe Iborra Date: Mon, 15 Jan 2024 06:32:35 -0800 Subject: [PATCH 0685/1133] Extract attr helpers Summary: Just a refactoring for clarity Reviewed By: simonmar Differential Revision: D52778009 fbshipit-source-id: 1b4fb123666354c3e3e66d2629e0b0d43a76ba6b --- prelude/haskell/compile.bzl | 80 ++++---------------------------- prelude/haskell/haskell.bzl | 7 ++- prelude/haskell/haskell_ghci.bzl | 7 ++- prelude/haskell/library_info.bzl | 51 ++++++++++++++++++++ prelude/haskell/util.bzl | 32 +++++++++++++ prelude/rules_impl.bzl | 4 +- 6 files changed, 104 insertions(+), 77 deletions(-) create mode 100644 prelude/haskell/library_info.bzl diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index db999c3b7..e0fc173f2 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -10,9 +10,13 @@ load( "cxx_inherited_preprocessor_infos", "cxx_merge_cpreprocessors", ) +load( + "@prelude//haskell:library_info.bzl", + "HaskellLibraryInfo", + "HaskellLibraryProvider", +) load( "@prelude//haskell:link_info.bzl", - "HaskellLinkInfo", "merge_haskell_link_infos", ) load( @@ -23,6 +27,8 @@ load( load( "@prelude//haskell:util.bzl", "attr_deps", + "attr_deps_haskell_lib_infos", + "attr_deps_haskell_link_infos", "get_artifact_suffix", "is_haskell_src", "output_extensions", @@ -52,51 +58,6 @@ CompileArgsInfo = record( args_for_file = field(cmd_args), ) -# If the target is a haskell library, the HaskellLibraryProvider -# contains its HaskellLibraryInfo. (in contrast to a HaskellLinkInfo, -# which contains the HaskellLibraryInfo for all the transitive -# dependencies). 
Direct dependencies are treated differently from -# indirect dependencies for the purposes of module visibility. -HaskellLibraryProvider = provider( - fields = { - "metadata": provider_field(typing.Any, default = None), # Artifact - "lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] - "prof_lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] - }, -) - -# A record of a Haskell library. -HaskellLibraryInfo = record( - # The library target name: e.g. "rts" - name = str, - # package config database: e.g. platform009/build/ghc/lib/package.conf.d - db = Artifact, - # package config database, referring to the empty lib which is only used for compilation - empty_db = Artifact, - # e.g. "base-4.13.0.0" - id = str, - # Import dirs indexed by profiling enabled/disabled - import_dirs = dict[bool, list[Artifact]], - # Object files indexed by profiling enabled/disabled - objects = dict[bool, list[Artifact]], - stub_dirs = list[Artifact], - - # This field is only used as hidden inputs to compilation, to - # support Template Haskell which may need access to the libraries - # at compile time. The real library flags are propagated up the - # dependency graph via MergedLinkInfo. - libs = field(list[Artifact], []), - # GHC insists on loading a library, but does not actually need it when we - # pass module granular object files into compilation actions. - empty_libs = field(list[Artifact], []), - # Package version, used to specify the full package when exposing it, - # e.g. filepath-1.4.2.1, deepseq-1.4.4.0. - # Internal packages default to 1.0.0, e.g. `fbcode-dsi-logger-hs-types-1.0.0`. 
- version = str, - is_prebuilt = bool, - profiling_enabled = bool, -) - PackagesInfo = record( exposed_package_imports = field(list[Artifact]), exposed_package_objects = field(list[Artifact]), @@ -203,15 +164,6 @@ def target_metadata( return md_file -def _attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo]: - return filter( - None, - [ - d.get(HaskellLinkInfo) - for d in attr_deps(ctx) + ctx.attrs.template_deps - ], - ) - def _attr_deps_haskell_lib_metadata_files(ctx: AnalysisContext) -> list[Artifact]: result = [] @@ -228,20 +180,6 @@ def _attr_deps_haskell_lib_metadata_files(ctx: AnalysisContext) -> list[Artifact return result -def _attr_deps_haskell_lib_infos( - ctx: AnalysisContext, - link_style: LinkStyle, - enable_profiling: bool) -> list[HaskellLibraryInfo]: - if enable_profiling and link_style == LinkStyle("shared"): - fail("Profiling isn't supported when using dynamic linking") - return [ - x.prof_lib[link_style] if enable_profiling else x.lib[link_style] - for x in filter(None, [ - d.get(HaskellLibraryProvider) - for d in attr_deps(ctx) + ctx.attrs.template_deps - ]) - ] - def _package_flag(toolchain: HaskellToolchainInfo) -> str: if toolchain.support_expose_package: return "-expose-package" @@ -262,7 +200,7 @@ def get_packages_info( # particular order and we really want to remove duplicates (there # are a *lot* of duplicates). libs = {} - direct_deps_link_info = _attr_deps_haskell_link_infos(ctx) + direct_deps_link_info = attr_deps_haskell_link_infos(ctx) merged_hs_link_info = merge_haskell_link_infos(direct_deps_link_info) hs_link_info = merged_hs_link_info.prof_info if enable_profiling else merged_hs_link_info.info @@ -316,7 +254,7 @@ def get_packages_info( # direct and transitive (e.g. 
`fbcode-common-hs-util-hs-array`) packagedb_args.add("-package-db", lib.empty_db if use_empty_lib else lib.db) - haskell_direct_deps_lib_infos = _attr_deps_haskell_lib_infos( + haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( ctx, link_style, enable_profiling, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index c21b20e40..f6feb6d20 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -59,8 +59,6 @@ load( load( "@prelude//haskell:compile.bzl", "CompileResultInfo", - "HaskellLibraryInfo", - "HaskellLibraryProvider", "compile", "target_metadata", ) @@ -68,6 +66,11 @@ load( "@prelude//haskell:haskell_haddock.bzl", "haskell_haddock_lib", ) +load( + "@prelude//haskell:library_info.bzl", + "HaskellLibraryInfo", + "HaskellLibraryProvider", +) load( "@prelude//haskell:link_info.bzl", "HaskellLinkInfo", diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index 3cb3e6ab8..1ffdd3748 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -18,11 +18,14 @@ load( ) load( "@prelude//haskell:compile.bzl", - "HaskellLibraryInfo", - "HaskellLibraryProvider", "PackagesInfo", "get_packages_info", ) +load( + "@prelude//haskell:library_info.bzl", + "HaskellLibraryInfo", + "HaskellLibraryProvider", +) load( "@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo", diff --git a/prelude/haskell/library_info.bzl b/prelude/haskell/library_info.bzl new file mode 100644 index 000000000..3ac1d06c0 --- /dev/null +++ b/prelude/haskell/library_info.bzl @@ -0,0 +1,51 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# If the target is a haskell library, the HaskellLibraryProvider +# contains its HaskellLibraryInfo. 
(in contrast to a HaskellLinkInfo, +# which contains the HaskellLibraryInfo for all the transitive +# dependencies). Direct dependencies are treated differently from +# indirect dependencies for the purposes of module visibility. +HaskellLibraryProvider = provider( + fields = { + "metadata": provider_field(typing.Any, default = None), # Artifact + "lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] + "prof_lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] + }, +) + +# A record of a Haskell library. +HaskellLibraryInfo = record( + # The library target name: e.g. "rts" + name = str, + # package config database: e.g. platform009/build/ghc/lib/package.conf.d + db = Artifact, + # package config database, referring to the empty lib which is only used for compilation + empty_db = Artifact, + # e.g. "base-4.13.0.0" + id = str, + # Import dirs indexed by profiling enabled/disabled + import_dirs = dict[bool, list[Artifact]], + # Object files indexed by profiling enabled/disabled + objects = dict[bool, list[Artifact]], + stub_dirs = list[Artifact], + + # This field is only used as hidden inputs to compilation, to + # support Template Haskell which may need access to the libraries + # at compile time. The real library flags are propagated up the + # dependency graph via MergedLinkInfo. + libs = field(list[Artifact], []), + # GHC insists on loading a library, but does not actually need it when we + # pass module granular object files into compilation actions. + empty_libs = field(list[Artifact], []), + # Package version, used to specify the full package when exposing it, + # e.g. filepath-1.4.2.1, deepseq-1.4.4.0. + # Internal packages default to 1.0.0, e.g. `fbcode-dsi-logger-hs-types-1.0.0`. 
+ version = str, + is_prebuilt = bool, + profiling_enabled = bool, +) diff --git a/prelude/haskell/util.bzl b/prelude/haskell/util.bzl index 21cbd7b05..76eed8180 100644 --- a/prelude/haskell/util.bzl +++ b/prelude/haskell/util.bzl @@ -10,6 +10,15 @@ load( "@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", ) +load( + "@prelude//haskell:library_info.bzl", + "HaskellLibraryInfo", + "HaskellLibraryProvider", +) +load( + "@prelude//haskell:link_info.bzl", + "HaskellLinkInfo", +) load( "@prelude//linking:link_info.bzl", "LinkStyle", @@ -49,6 +58,29 @@ def _by_platform(ctx: AnalysisContext, xs: list[(str, list[typing.Any])]) -> lis def attr_deps(ctx: AnalysisContext) -> list[Dependency]: return ctx.attrs.deps + _by_platform(ctx, ctx.attrs.platform_deps) +def attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo]: + return filter( + None, + [ + d.get(HaskellLinkInfo) + for d in attr_deps(ctx) + ctx.attrs.template_deps + ], + ) + +def attr_deps_haskell_lib_infos( + ctx: AnalysisContext, + link_style: LinkStyle, + enable_profiling: bool) -> list[HaskellLibraryInfo]: + if enable_profiling and link_style == LinkStyle("shared"): + fail("Profiling isn't supported when using dynamic linking") + return [ + x.prof_lib[link_style] if enable_profiling else x.lib[link_style] + for x in filter(None, [ + d.get(HaskellLibraryProvider) + for d in attr_deps(ctx) + ctx.attrs.template_deps + ]) + ] + def _link_style_extensions(link_style: LinkStyle) -> (str, str): if link_style == LinkStyle("shared"): return ("dyn_o", "dyn_hi") diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index 99e39a7b3..db6213548 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -27,12 +27,12 @@ load("@prelude//go:go_exported_library.bzl", "go_exported_library_impl") load("@prelude//go:go_library.bzl", "go_library_impl") load("@prelude//go:go_stdlib.bzl", "go_stdlib_impl") load("@prelude//go:go_test.bzl", "go_test_impl") -load("@prelude//haskell:compile.bzl", 
"HaskellLibraryProvider") -load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl", "haskell_toolchain_library_impl") load("@prelude//go/transitions:defs.bzl", "cgo_enabled_attr", "compile_shared_attr", "coverage_mode_attr", "go_binary_transition", "go_exported_library_transition", "go_test_transition", "race_attr", "tags_attr") +load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl", "haskell_toolchain_library_impl") load("@prelude//haskell:haskell_ghci.bzl", "haskell_ghci_impl") load("@prelude//haskell:haskell_haddock.bzl", "haskell_haddock_impl") load("@prelude//haskell:haskell_ide.bzl", "haskell_ide_impl") +load("@prelude//haskell:library_info.bzl", "HaskellLibraryProvider") load("@prelude//http_archive:http_archive.bzl", "http_archive_impl") load("@prelude//java:java.bzl", _java_extra_attributes = "extra_attributes", _java_implemented_rules = "implemented_rules") load("@prelude//js:js.bzl", _js_extra_attributes = "extra_attributes", _js_implemented_rules = "implemented_rules") From a2cd4db8268072dd7238f8375f9713722166deb6 Mon Sep 17 00:00:00 2001 From: Pepe Iborra Date: Mon, 15 Jan 2024 06:32:35 -0800 Subject: [PATCH 0686/1133] Extract calls to attr_deps Summary: Just a pure refactoring to highlight that `haskell_library_impl` is incorrectly processing the link infos Reviewed By: simonmar Differential Revision: D52778010 fbshipit-source-id: d6cd10ad294684198108ffd9e478322dad3e4f61 (cherry picked from commit 130c2261571deb096ce595d14d96658e2f6f8ae9) --- prelude/haskell/haskell.bzl | 45 +++++++---------------------------- prelude/haskell/link_info.bzl | 9 +++++++ prelude/haskell/util.bzl | 43 +++++++++++++++++++++++++++++++++ 3 files changed, 61 insertions(+), 36 deletions(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index f6feb6d20..6fe4fbae0 100644 --- a/prelude/haskell/haskell.bzl +++ 
b/prelude/haskell/haskell.bzl @@ -74,6 +74,7 @@ load( load( "@prelude//haskell:link_info.bzl", "HaskellLinkInfo", + "HaskellProfLinkInfo", "attr_link_style", "cxx_toolchain_link_style", "merge_haskell_link_infos", @@ -86,6 +87,10 @@ load( load( "@prelude//haskell:util.bzl", "attr_deps", + "attr_deps_haskell_link_infos_sans_template_deps", + "attr_deps_merged_link_infos", + "attr_deps_profiling_link_infos", + "attr_deps_shared_library_infos", "get_artifact_suffix", "is_haskell_src", "output_extensions", @@ -154,21 +159,6 @@ HaskellIndexInfo = provider( }, ) -# HaskellProfLinkInfo exposes the MergedLinkInfo of a target and all of its -# dependencies built for profiling. This allows top-level targets (e.g. -# `haskell_binary`) to be defined with profiling enabled by default. -HaskellProfLinkInfo = provider( - fields = { - "prof_infos": provider_field(typing.Any, default = None), # MergedLinkInfo - }, -) - -# -- - -# Disable until we have a need to call this. -# def _attr_deps_merged_link_infos(ctx: AnalysisContext) -> [MergedLinkInfo]: -# return filter(None, [d[MergedLinkInfo] for d in attr_deps(ctx)]) - # This conversion is non-standard, see TODO about link style below def _to_lib_output_style(link_style: LinkStyle) -> LibOutputStyle: return default_output_style_for_link_strategy(to_link_strategy(link_style)) @@ -719,27 +709,10 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: preferred_linkage = Linkage("static") # Get haskell and native link infos from all deps - hlis = [] - nlis = [] - prof_nlis = [] - shared_library_infos = [] - for lib in attr_deps(ctx): - li = lib.get(HaskellLinkInfo) - if li != None: - hlis.append(li) - li = lib.get(MergedLinkInfo) - if li != None: - nlis.append(li) - if HaskellLinkInfo not in lib: - # MergedLinkInfo from non-haskell deps should be part of the - # profiling MergedLinkInfo - prof_nlis.append(li) - li = lib.get(HaskellProfLinkInfo) - if li != None: - prof_nlis.append(li.prof_infos) - li = 
lib.get(SharedLibraryInfo) - if li != None: - shared_library_infos.append(li) + hlis = attr_deps_haskell_link_infos_sans_template_deps(ctx) + nlis = attr_deps_merged_link_infos(ctx) + prof_nlis = attr_deps_profiling_link_infos(ctx) + shared_library_infos = attr_deps_shared_library_infos(ctx) solibs = {} link_infos = {} diff --git a/prelude/haskell/link_info.bzl b/prelude/haskell/link_info.bzl index 5bce615ba..8699a875e 100644 --- a/prelude/haskell/link_info.bzl +++ b/prelude/haskell/link_info.bzl @@ -23,6 +23,15 @@ HaskellLinkInfo = provider( }, ) +# HaskellProfLinkInfo exposes the MergedLinkInfo of a target and all of its +# dependencies built for profiling. This allows top-level targets (e.g. +# `haskell_binary`) to be defined with profiling enabled by default. +HaskellProfLinkInfo = provider( + fields = { + "prof_infos": provider_field(typing.Any, default = None), # MergedLinkInfo + }, +) + def merge_haskell_link_infos(deps: list[HaskellLinkInfo]) -> HaskellLinkInfo: merged = {} prof_merged = {} diff --git a/prelude/haskell/util.bzl b/prelude/haskell/util.bzl index 76eed8180..89545e7af 100644 --- a/prelude/haskell/util.bzl +++ b/prelude/haskell/util.bzl @@ -18,10 +18,16 @@ load( load( "@prelude//haskell:link_info.bzl", "HaskellLinkInfo", + "HaskellProfLinkInfo", ) load( "@prelude//linking:link_info.bzl", "LinkStyle", + "MergedLinkInfo", +) +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraryInfo", ) load("@prelude//utils:platform_flavors_util.bzl", "by_platform") load("@prelude//utils:utils.bzl", "flatten") @@ -67,6 +73,16 @@ def attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo]: ], ) +# DONT CALL THIS FUNCTION, you want attr_deps_haskell_link_infos instead +def attr_deps_haskell_link_infos_sans_template_deps(ctx: AnalysisContext) -> list[HaskellLinkInfo]: + return filter( + None, + [ + d.get(HaskellLinkInfo) + for d in attr_deps(ctx) + ], + ) + def attr_deps_haskell_lib_infos( ctx: AnalysisContext, link_style: LinkStyle, 
@@ -81,6 +97,33 @@ def attr_deps_haskell_lib_infos( ]) ] +def attr_deps_merged_link_infos(ctx: AnalysisContext) -> list[MergedLinkInfo]: + return filter( + None, + [ + d.get(MergedLinkInfo) + for d in attr_deps(ctx) + ], + ) + +def attr_deps_profiling_link_infos(ctx: AnalysisContext) -> list[MergedLinkInfo]: + return filter( + None, + [ + d.get(HaskellProfLinkInfo).prof_infos if d.get(HaskellProfLinkInfo) else d.get(MergedLinkInfo) + for d in attr_deps(ctx) + ], + ) + +def attr_deps_shared_library_infos(ctx: AnalysisContext) -> list[SharedLibraryInfo]: + return filter( + None, + [ + d.get(SharedLibraryInfo) + for d in attr_deps(ctx) + ], + ) + def _link_style_extensions(link_style: LinkStyle) -> (str, str): if link_style == LinkStyle("shared"): return ("dyn_o", "dyn_hi") From 16be2c0795b89b555d291ff1de6b4244c0b6df23 Mon Sep 17 00:00:00 2001 From: Pepe Iborra Date: Wed, 17 Jan 2024 07:53:13 -0800 Subject: [PATCH 0687/1133] Propagate module reorg to bxl script Summary: D52778009 moved the provider type to a new module Reviewed By: josefs Differential Revision: D52836385 fbshipit-source-id: 7353c30aa99bfb1f45e2335a7f29a39cd7054fd5 (cherry picked from commit 02d9e15c93dde10f47f7baa7db895b3bdfb087e5) --- prelude/haskell/ide/ide.bxl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index 843250d09..258061e1b 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//haskell:compile.bzl", "HaskellLibraryProvider") +load("@prelude//haskell:library_info.bzl", "HaskellLibraryProvider") load("@prelude//haskell:link_info.bzl", "HaskellLinkInfo") load("@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo", "HaskellToolchainLibrary") load("@prelude//haskell:util.bzl", "is_haskell_src", "srcs_to_pairs") From 436b6c2f68795ce80346f245e4eff810dd0f0d4d Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 5 Apr 2024 10:34:14 +0200 Subject: [PATCH 0688/1133] [buck2] Remove duplicate load statements --- prelude/haskell/haskell.bzl | 7 ------- 1 file changed, 7 deletions(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 6fe4fbae0..89250d8b1 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -22,13 +22,6 @@ load( "CxxToolchainInfo", "PicBehavior", ) -load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") -load( - "@prelude//cxx:linker.bzl", - "LINKERS", - "get_rpath_origin", - "get_shared_library_flags", -) load("@prelude//cxx:groups.bzl", "get_dedupped_roots_from_groups") load( "@prelude//cxx:link_groups.bzl", From 887364345414c37209b7a3b4df25fa97298e8399 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 5 Apr 2024 10:46:09 +0200 Subject: [PATCH 0689/1133] [buck2] Remove unused `lib` arg from `_make_package` --- prelude/haskell/haskell.bzl | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 89250d8b1..a857740e8 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -403,7 +403,6 @@ def _make_package( libname: str, hlis: list[HaskellLibraryInfo], hi: dict[bool, list[Artifact]], - lib: dict[bool, Artifact], enable_profiling: bool, use_empty_lib: bool) -> Artifact: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) @@ -631,10 +630,6 @@ def _build_haskell_lib( True: compiled.objects, False: non_profiling_hlib.compiled.objects, } - 
library_artifacts = { - True: lib, - False: non_profiling_hlib.libs[0], - } all_libs = libs + non_profiling_hlib.libs stub_dirs = [compiled.stubs] + [non_profiling_hlib.compiled.stubs] else: @@ -644,9 +639,6 @@ def _build_haskell_lib( object_artifacts = { False: compiled.objects, } - library_artifacts = { - False: lib, - } all_libs = libs stub_dirs = [compiled.stubs] @@ -657,7 +649,6 @@ def _build_haskell_lib( libstem, uniq_infos, import_artifacts, - library_artifacts, enable_profiling = enable_profiling, use_empty_lib = False, ) @@ -668,7 +659,6 @@ def _build_haskell_lib( libstem, uniq_infos, import_artifacts, - library_artifacts, enable_profiling = enable_profiling, use_empty_lib = True, ) From c5726c6f831b02ad592f0f1d356dec1c44fb59ec Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 5 Apr 2024 10:47:09 +0200 Subject: [PATCH 0690/1133] [buck2] Remove unused variables --- prelude/haskell/haskell.bzl | 4 ---- 1 file changed, 4 deletions(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index a857740e8..13333aaf1 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -516,8 +516,6 @@ def _build_haskell_lib( # Link the objects into a library haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - osuf, _hisuf = output_extensions(link_style, enable_profiling) - # Compile the sources compiled = compile( ctx, @@ -961,8 +959,6 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link.add(haskell_toolchain.linker_flags) link.add(ctx.attrs.linker_flags) - osuf, _hisuf = output_extensions(link_style, enable_profiling) - link.add(compiled.objects) indexing_tsets = {} From d7681bb12aaf395af84e96e496376a2df9cd1b86 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 5 Apr 2024 10:48:10 +0200 Subject: [PATCH 0691/1133] [buck2] Remove unused symbols from load --- prelude/haskell/haskell.bzl | 1 - prelude/haskell/haskell_ghci.bzl | 1 - 2 files changed, 2 deletions(-) diff --git 
a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 13333aaf1..5d813b619 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -86,7 +86,6 @@ load( "attr_deps_shared_library_infos", "get_artifact_suffix", "is_haskell_src", - "output_extensions", "src_to_module_name", "srcs_to_pairs", ) diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index 1ffdd3748..4238ccf8f 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -57,7 +57,6 @@ load("@prelude//linking:types.bzl", "Linkage") load( "@prelude//cxx:linker.bzl", "get_rpath_origin", - "get_shared_library_flags", ) load( "@prelude//utils:graph_utils.bzl", From 3d6f491b559a0681569b23b1ed7128259546b52b Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 5 Apr 2024 09:41:10 +0200 Subject: [PATCH 0692/1133] Use tset for HaskellLibraryInfo Fixes #93 --- prelude/haskell/compile.bzl | 20 ++++++++------------ prelude/haskell/haskell.bzl | 29 ++++++++++++----------------- prelude/haskell/haskell_ghci.bzl | 7 +++---- prelude/haskell/library_info.bzl | 13 +++++++++++++ prelude/haskell/link_info.bzl | 19 ++++++++++++------- 5 files changed, 48 insertions(+), 40 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index e0fc173f2..697ea2724 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -14,6 +14,7 @@ load( "@prelude//haskell:library_info.bzl", "HaskellLibraryInfo", "HaskellLibraryProvider", + "HaskellLibraryInfoTSet", ) load( "@prelude//haskell:link_info.bzl", @@ -64,7 +65,7 @@ PackagesInfo = record( exposed_package_libs = cmd_args, exposed_package_args = cmd_args, packagedb_args = cmd_args, - transitive_deps = field(list[HaskellLibraryInfo]), + transitive_deps = field(HaskellLibraryInfoTSet), ) _Module = record( @@ -199,14 +200,12 @@ def get_packages_info( # Collect library dependencies. 
Note that these don't need to be in a # particular order and we really want to remove duplicates (there # are a *lot* of duplicates). - libs = {} direct_deps_link_info = attr_deps_haskell_link_infos(ctx) - merged_hs_link_info = merge_haskell_link_infos(direct_deps_link_info) + merged_hs_link_info = merge_haskell_link_infos(direct_deps_link_info, ctx) hs_link_info = merged_hs_link_info.prof_info if enable_profiling else merged_hs_link_info.info - for lib in hs_link_info[link_style]: - libs[lib.db] = lib # lib.db is a good enough unique key + libs = hs_link_info[link_style] # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) @@ -220,7 +219,7 @@ def get_packages_info( if transitive_deps != None: lib_objects = {} lib_interfaces = {} - for lib in libs.values(): + for lib in libs.traverse(): lib_objects[lib.name] = {} lib_interfaces[lib.name] = {} @@ -242,17 +241,14 @@ def get_packages_info( exposed_package_objects.append(lib_objects[pkg][mod]) exposed_package_imports.append(lib_interfaces[pkg][mod]) else: - for lib in libs.values(): + for lib in libs.traverse(): exposed_package_imports.extend(lib.import_dirs[enable_profiling]) exposed_package_objects.extend(lib.objects[enable_profiling]) # libs of dependencies might be needed at compile time if # we're using Template Haskell: exposed_package_libs.hidden(lib.libs) - for lib in libs.values(): - # These we need to add for all the packages/dependencies, i.e. - # direct and transitive (e.g. 
`fbcode-common-hs-util-hs-array`) - packagedb_args.add("-package-db", lib.empty_db if use_empty_lib else lib.db) + packagedb_args.add(libs.project_as_args("empty_package_db" if use_empty_lib else "package_db")) haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( ctx, @@ -274,7 +270,7 @@ def get_packages_info( exposed_package_libs = exposed_package_libs, exposed_package_args = exposed_package_args, packagedb_args = packagedb_args, - transitive_deps = libs.values(), + transitive_deps = libs, ) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 5d813b619..e9a6f2025 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -63,6 +63,7 @@ load( "@prelude//haskell:library_info.bzl", "HaskellLibraryInfo", "HaskellLibraryProvider", + "HaskellLibraryInfoTSet", ) load( "@prelude//haskell:link_info.bzl", @@ -274,9 +275,9 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: ] hlibinfos[link_style] = hlibinfo - hlinkinfos[link_style] = [hlibinfo] + hlinkinfos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlibinfo) prof_hlibinfos[link_style] = prof_hlibinfo - prof_hlinkinfos[link_style] = [prof_hlibinfo] + prof_hlinkinfos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = prof_hlibinfo) link_infos[link_style] = LinkInfos( default = LinkInfo( pre_flags = ctx.attrs.exported_linker_flags, @@ -357,7 +358,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: shared_library_infos, ), merge_link_group_lib_info(deps = ctx.attrs.deps), - merge_haskell_link_infos(haskell_infos + [haskell_link_infos]), + merge_haskell_link_infos(haskell_infos + [haskell_link_infos], ctx), merged_link_info, HaskellProfLinkInfo( prof_infos = prof_merged_link_info, @@ -400,7 +401,7 @@ def _make_package( link_style: LinkStyle, pkgname: str, libname: str, - hlis: list[HaskellLibraryInfo], + hlis: HaskellLibraryInfoTSet, hi: dict[bool, list[Artifact]], enable_profiling: bool, 
use_empty_lib: bool) -> Artifact: @@ -409,10 +410,6 @@ def _make_package( # Don't expose boot sources, as they're only meant to be used for compiling. modules = [src_to_module_name(x) for x, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(x)] - uniq_hlis = {} - for x in hlis: - uniq_hlis[x.id] = x - if enable_profiling: # Add the `-p` suffix otherwise ghc will look for objects # following this logic (https://fburl.com/code/3gmobm5x) and will fail. @@ -441,7 +438,7 @@ def _make_package( "import-dirs:" + ", ".join(import_dirs), "library-dirs:" + ", ".join(library_dirs), "extra-libraries: " + libname, - "depends: " + ", ".join(uniq_hlis), + "depends: " + ", ".join([l.id for l in hlis.traverse()]), ] if use_empty_lib: pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + "_empty.conf", conf) @@ -450,14 +447,12 @@ def _make_package( pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + ".conf", conf) db = ctx.actions.declare_output("db-" + artifact_suffix, dir = True) - db_deps = {} - for x in uniq_hlis.values(): - db_deps[repr(x.db)] = x.db + db_deps = [x.db for x in hlis.traverse()] # So that ghc-pkg can find the DBs for the dependencies. We might # be able to use flags for this instead, but this works. 
ghc_package_path = cmd_args( - db_deps.values(), + db_deps, delimiter = ":", ) @@ -537,7 +532,7 @@ def _build_haskell_lib( lib_short_path = paths.join("lib-{}".format(artifact_suffix), libfile) linfos = [x.prof_info if enable_profiling else x.info for x in hlis] - uniq_infos = dedupe(flatten([x[link_style] for x in linfos])) + uniq_infos = ctx.actions.tset(HaskellLibraryInfoTSet, children = [x[link_style] for x in linfos]) toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] @@ -744,11 +739,11 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: if enable_profiling: prof_hlib_infos[link_style] = hlib - prof_hlink_infos[link_style] = [hlib] + prof_hlink_infos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlib) prof_link_infos[link_style] = hlib_build_out.link_infos else: hlib_infos[link_style] = hlib - hlink_infos[link_style] = [hlib] + hlink_infos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlib) link_infos[link_style] = hlib_build_out.link_infos # Build the indices and create subtargets only once, with profiling @@ -854,7 +849,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: merge_haskell_link_infos(hlis + [HaskellLinkInfo( info = hlink_infos, prof_info = prof_hlink_infos, - )]), + )], ctx), merged_link_info, HaskellProfLinkInfo( prof_infos = prof_merged_link_info, diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index 4238ccf8f..053b3adcc 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -640,11 +640,11 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: package_symlinks_root = ctx.label.name + ".packages" packagedb_args = cmd_args(delimiter = " ") - prebuilt_packagedb_args_set = {} + prebuilt_packagedb_args = cmd_args(delimiter = " ") - for lib in packages_info.transitive_deps: + for lib in packages_info.transitive_deps.traverse(): if 
lib.is_prebuilt: - prebuilt_packagedb_args_set[lib.db] = lib.db + prebuilt_packagedb_args.add(lib.db) else: lib_symlinks_root = paths.join( package_symlinks_root, @@ -675,7 +675,6 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: "packagedb", ), ) - prebuilt_packagedb_args = cmd_args(prebuilt_packagedb_args_set.values(), delimiter = " ") script_templates = [] for script_template in ctx.attrs.extra_script_templates: diff --git a/prelude/haskell/library_info.bzl b/prelude/haskell/library_info.bzl index 3ac1d06c0..d41c8aead 100644 --- a/prelude/haskell/library_info.bzl +++ b/prelude/haskell/library_info.bzl @@ -49,3 +49,16 @@ HaskellLibraryInfo = record( is_prebuilt = bool, profiling_enabled = bool, ) + +def _project_as_package_db(lib: HaskellLibraryInfo): + return cmd_args("-package-db", lib.db) + +def _project_as_empty_package_db(lib: HaskellLibraryInfo): + return cmd_args("-package-db", lib.empty_db) + +HaskellLibraryInfoTSet = transitive_set( + args_projections = { + "package_db": _project_as_package_db, + "empty_package_db": _project_as_empty_package_db, + } +) diff --git a/prelude/haskell/link_info.bzl b/prelude/haskell/link_info.bzl index 8699a875e..f2b8fb133 100644 --- a/prelude/haskell/link_info.bzl +++ b/prelude/haskell/link_info.bzl @@ -13,13 +13,18 @@ load( "@prelude//linking:link_info.bzl", "LinkStyle", ) +load( + "@prelude//haskell:library_info.bzl", + "HaskellLibraryInfo", + "HaskellLibraryInfoTSet", +) # A list of `HaskellLibraryInfo`s. HaskellLinkInfo = provider( # Contains a list of HaskellLibraryInfo records. 
fields = { - "info": provider_field(typing.Any, default = None), # dict[LinkStyle, list[HaskellLibraryInfo]] # TODO use a tset - "prof_info": provider_field(typing.Any, default = None), # dict[LinkStyle, list[HaskellLibraryInfo]] # TODO use a tset + "info": provider_field(dict[LinkStyle, HaskellLibraryInfoTSet]), + "prof_info": provider_field(dict[LinkStyle, HaskellLibraryInfoTSet]), }, ) @@ -32,7 +37,7 @@ HaskellProfLinkInfo = provider( }, ) -def merge_haskell_link_infos(deps: list[HaskellLinkInfo]) -> HaskellLinkInfo: +def merge_haskell_link_infos(deps: list[HaskellLinkInfo], ctx: AnalysisContext, ) -> HaskellLinkInfo: merged = {} prof_merged = {} for link_style in LinkStyle: @@ -40,13 +45,13 @@ def merge_haskell_link_infos(deps: list[HaskellLinkInfo]) -> HaskellLinkInfo: prof_children = [] for dep in deps: if link_style in dep.info: - children.extend(dep.info[link_style]) + children.append(dep.info[link_style]) if link_style in dep.prof_info: - prof_children.extend(dep.prof_info[link_style]) + prof_children.append(dep.prof_info[link_style]) - merged[link_style] = dedupe(children) - prof_merged[link_style] = dedupe(prof_children) + merged[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, children = children) + prof_merged[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, children = prof_children) return HaskellLinkInfo(info = merged, prof_info = prof_merged) From cff3823de1fdb0af6173b44023d99ce7f0ec2a5f Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 5 Apr 2024 16:08:40 +0200 Subject: [PATCH 0693/1133] Fix ide integration --- prelude/haskell/ide/ide.bxl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index 258061e1b..a583d292a 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -287,7 +287,7 @@ def _assembleSolution(ctx, linkStyle, result): for provider in result["haskell_deps"].values(): info = provider.info.get(linkStyle) if info != None: - for 
item in info: + for item in info.traverse(): if result["exclude_packages"].get(item.name) == None: hlis[item.name] = item for hli in hlis.values(): From 47f35ce73bb333d4e5619aa3ee302b88772bc217 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 5 Apr 2024 16:15:27 +0200 Subject: [PATCH 0694/1133] Fix using HaskellLibraryInfo tsets --- prelude/haskell/compile.bzl | 1 - prelude/haskell/haskell.bzl | 27 +++++++++++++++------------ prelude/haskell/library_info.bzl | 6 ++++++ prelude/haskell/link_info.bzl | 1 - 4 files changed, 21 insertions(+), 14 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 697ea2724..1df7378fa 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -12,7 +12,6 @@ load( ) load( "@prelude//haskell:library_info.bzl", - "HaskellLibraryInfo", "HaskellLibraryProvider", "HaskellLibraryInfoTSet", ) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index e9a6f2025..72afda456 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -71,7 +71,6 @@ load( "HaskellProfLinkInfo", "attr_link_style", "cxx_toolchain_link_style", - "merge_haskell_link_infos", ) load( "@prelude//haskell:toolchain.bzl", @@ -275,9 +274,13 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: ] hlibinfos[link_style] = hlibinfo - hlinkinfos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlibinfo) + hlinkinfos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlibinfo, children = [ + lib.info[link_style] for lib in haskell_infos + ]) prof_hlibinfos[link_style] = prof_hlibinfo - prof_hlinkinfos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = prof_hlibinfo) + prof_hlinkinfos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = prof_hlibinfo, children = [ + lib.prof_info[link_style] for lib in haskell_infos + ]) link_infos[link_style] = LinkInfos( default = LinkInfo( pre_flags = 
ctx.attrs.exported_linker_flags, @@ -358,7 +361,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: shared_library_infos, ), merge_link_group_lib_info(deps = ctx.attrs.deps), - merge_haskell_link_infos(haskell_infos + [haskell_link_infos], ctx), + haskell_link_infos, merged_link_info, HaskellProfLinkInfo( prof_infos = prof_merged_link_info, @@ -401,7 +404,7 @@ def _make_package( link_style: LinkStyle, pkgname: str, libname: str, - hlis: HaskellLibraryInfoTSet, + hlis: list[HaskellLibraryInfo], hi: dict[bool, list[Artifact]], enable_profiling: bool, use_empty_lib: bool) -> Artifact: @@ -438,7 +441,7 @@ def _make_package( "import-dirs:" + ", ".join(import_dirs), "library-dirs:" + ", ".join(library_dirs), "extra-libraries: " + libname, - "depends: " + ", ".join([l.id for l in hlis.traverse()]), + "depends: " + ", ".join([lib.id for lib in hlis]), ] if use_empty_lib: pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + "_empty.conf", conf) @@ -447,7 +450,7 @@ def _make_package( pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + ".conf", conf) db = ctx.actions.declare_output("db-" + artifact_suffix, dir = True) - db_deps = [x.db for x in hlis.traverse()] + db_deps = [x.db for x in hlis] # So that ghc-pkg can find the DBs for the dependencies. We might # be able to use flags for this instead, but this works. 
@@ -532,8 +535,8 @@ def _build_haskell_lib( lib_short_path = paths.join("lib-{}".format(artifact_suffix), libfile) linfos = [x.prof_info if enable_profiling else x.info for x in hlis] - uniq_infos = ctx.actions.tset(HaskellLibraryInfoTSet, children = [x[link_style] for x in linfos]) + uniq_infos = [x[link_style].reduce("root") for x in linfos] toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] if link_style == LinkStyle("shared"): @@ -739,11 +742,11 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: if enable_profiling: prof_hlib_infos[link_style] = hlib - prof_hlink_infos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlib) + prof_hlink_infos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlib, children = [li.prof_info[link_style] for li in hlis]) prof_link_infos[link_style] = hlib_build_out.link_infos else: hlib_infos[link_style] = hlib - hlink_infos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlib) + hlink_infos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlib, children = [li.info[link_style] for li in hlis]) link_infos[link_style] = hlib_build_out.link_infos # Build the indices and create subtargets only once, with profiling @@ -846,10 +849,10 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: lib = hlib_infos, prof_lib = prof_hlib_infos, ), - merge_haskell_link_infos(hlis + [HaskellLinkInfo( + HaskellLinkInfo( info = hlink_infos, prof_info = prof_hlink_infos, - )], ctx), + ), merged_link_info, HaskellProfLinkInfo( prof_infos = prof_merged_link_info, diff --git a/prelude/haskell/library_info.bzl b/prelude/haskell/library_info.bzl index d41c8aead..ea305ea33 100644 --- a/prelude/haskell/library_info.bzl +++ b/prelude/haskell/library_info.bzl @@ -56,9 +56,15 @@ def _project_as_package_db(lib: HaskellLibraryInfo): def _project_as_empty_package_db(lib: HaskellLibraryInfo): return 
cmd_args("-package-db", lib.empty_db) +def _direct_deps(_children: list[HaskellLibraryInfo | None], lib: HaskellLibraryInfo | None) -> HaskellLibraryInfo | None: + return lib + HaskellLibraryInfoTSet = transitive_set( args_projections = { "package_db": _project_as_package_db, "empty_package_db": _project_as_empty_package_db, + }, + reductions = { + "root": _direct_deps, } ) diff --git a/prelude/haskell/link_info.bzl b/prelude/haskell/link_info.bzl index f2b8fb133..48d9cc8e6 100644 --- a/prelude/haskell/link_info.bzl +++ b/prelude/haskell/link_info.bzl @@ -15,7 +15,6 @@ load( ) load( "@prelude//haskell:library_info.bzl", - "HaskellLibraryInfo", "HaskellLibraryInfoTSet", ) From 49682e52e8ab0e635d8b920290a46f8bb72cbb34 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 9 Apr 2024 10:36:27 +0200 Subject: [PATCH 0695/1133] [buck2] Construct tset for packages_info specific to link_style and profiling ... instead of merging all possible link_style and profiling libs. Remove `merge_haskell_link_infos` since it is no longer used. --- prelude/haskell/compile.bzl | 16 +++++----------- prelude/haskell/link_info.bzl | 18 ------------------ 2 files changed, 5 insertions(+), 29 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 1df7378fa..7733961a8 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -15,10 +15,6 @@ load( "HaskellLibraryProvider", "HaskellLibraryInfoTSet", ) -load( - "@prelude//haskell:link_info.bzl", - "merge_haskell_link_infos", -) load( "@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo", @@ -197,14 +193,12 @@ def get_packages_info( haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] # Collect library dependencies. Note that these don't need to be in a - # particular order and we really want to remove duplicates (there - # are a *lot* of duplicates). + # particular order. 
direct_deps_link_info = attr_deps_haskell_link_infos(ctx) - merged_hs_link_info = merge_haskell_link_infos(direct_deps_link_info, ctx) - - hs_link_info = merged_hs_link_info.prof_info if enable_profiling else merged_hs_link_info.info - - libs = hs_link_info[link_style] + libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = [ + lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + for lib in direct_deps_link_info + ]) # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) diff --git a/prelude/haskell/link_info.bzl b/prelude/haskell/link_info.bzl index 48d9cc8e6..37d86699a 100644 --- a/prelude/haskell/link_info.bzl +++ b/prelude/haskell/link_info.bzl @@ -36,24 +36,6 @@ HaskellProfLinkInfo = provider( }, ) -def merge_haskell_link_infos(deps: list[HaskellLinkInfo], ctx: AnalysisContext, ) -> HaskellLinkInfo: - merged = {} - prof_merged = {} - for link_style in LinkStyle: - children = [] - prof_children = [] - for dep in deps: - if link_style in dep.info: - children.append(dep.info[link_style]) - - if link_style in dep.prof_info: - prof_children.append(dep.prof_info[link_style]) - - merged[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, children = children) - prof_merged[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, children = prof_children) - - return HaskellLinkInfo(info = merged, prof_info = prof_merged) - def cxx_toolchain_link_style(ctx: AnalysisContext) -> LinkStyle: return ctx.attrs._cxx_toolchain[CxxToolchainInfo].linker_info.link_style From 359d0c3d1ed8193d18d54b8aff2a6376ac3affaa Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 10 Apr 2024 14:34:44 +0200 Subject: [PATCH 0696/1133] [buck2] Do not create empty libs Since #90 we generate separate package-db's for use with template haskell during compilation, and consuming packages in dependencies from ghci. When generating a `extra-libraries` stanza, ghc will try to load the given library in any case even when not needed. 
Skipping the entry from the package conf file will avoid that. --- prelude/haskell/compile.bzl | 4 --- prelude/haskell/haskell.bzl | 52 +++++++++++--------------------- prelude/haskell/library_info.bzl | 3 -- 3 files changed, 18 insertions(+), 41 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 7733961a8..d988f7184 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -222,10 +222,6 @@ def get_packages_info( for hi in lib.import_dirs[enable_profiling]: lib_interfaces[lib.name][src_to_module_name(hi.short_path)] = hi - # libs of dependencies might be needed at compile time if - # we're using Template Haskell: - exposed_package_libs.hidden(lib.empty_libs) - for pkg, mods in transitive_deps.items(): if pkg == pkgname: # Skip dependencies from the same package. diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 72afda456..e3c89bee1 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -403,7 +403,7 @@ def _make_package( ctx: AnalysisContext, link_style: LinkStyle, pkgname: str, - libname: str, + libname: str | None, hlis: list[HaskellLibraryInfo], hi: dict[bool, list[Artifact]], enable_profiling: bool, @@ -413,11 +413,6 @@ def _make_package( # Don't expose boot sources, as they're only meant to be used for compiling. modules = [src_to_module_name(x) for x, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(x)] - if enable_profiling: - # Add the `-p` suffix otherwise ghc will look for objects - # following this logic (https://fburl.com/code/3gmobm5x) and will fail. 
- libname += "_p" - def mk_artifact_dir(dir_prefix: str, profiled: bool) -> str: art_suff = get_artifact_suffix(link_style, profiled) return "\"${pkgroot}/" + dir_prefix + "-" + art_suff + "\"" @@ -426,10 +421,6 @@ def _make_package( mk_artifact_dir("mod", profiled) for profiled in hi.keys() ] - library_dirs = ["${pkgroot}/empty/lib-shared"] if use_empty_lib else [ - mk_artifact_dir("lib", profiled) - for profiled in hi.keys() - ] conf = [ "name: " + pkgname, @@ -439,14 +430,28 @@ def _make_package( "exposed: False", "exposed-modules: " + ", ".join(modules), "import-dirs:" + ", ".join(import_dirs), - "library-dirs:" + ", ".join(library_dirs), - "extra-libraries: " + libname, "depends: " + ", ".join([lib.id for lib in hlis]), ] + if use_empty_lib: pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + "_empty.conf", conf) db = ctx.actions.declare_output("db-" + artifact_suffix + "_empty", dir = True) else: + if not libname: + fail("argument `libname` cannot be empty, when use_empty_lib == False") + + if enable_profiling: + # Add the `-p` suffix otherwise ghc will look for objects + # following this logic (https://fburl.com/code/3gmobm5x) and will fail. 
+ libname += "_p" + + library_dirs = [ + mk_artifact_dir("lib", profiled) + for profiled in hi.keys() + ] + conf.append("library-dirs:" + ", ".join(library_dirs)) + conf.append("extra-libraries: " + libname) + pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + ".conf", conf) db = ctx.actions.declare_output("db-" + artifact_suffix, dir = True) @@ -575,24 +580,6 @@ def _build_haskell_lib( default = LinkInfo(linkables = [SharedLibLinkable(lib = lib)]), ) - empty_lib = ctx.actions.declare_output("empty", lib_short_path) - empty_link = cmd_args(haskell_toolchain.linker) - empty_link.add("-o", empty_lib.as_output()) - empty_link.add( - get_shared_library_flags(linker_info.type), - "-dynamic", - cmd_args( - _get_haskell_shared_library_name_linker_flags(linker_info.type, libfile), - prepend = "-optl", - ), - ) - empty_link.add(ctx.actions.write("empty.c", "")) - ctx.actions.run( - empty_link, - category = "haskell_link_empty" + artifact_suffix.replace("-", "_"), - ) - empty_libs = [empty_lib] - else: # static flavours # TODO: avoid making an archive for a single object, like cxx does # (but would that work with Template Haskell?) 
@@ -611,8 +598,6 @@ def _build_haskell_lib( ), ) - empty_libs = [] - if enable_profiling and link_style != LinkStyle("shared"): if not non_profiling_hlib: fail("Non-profiling HaskellLibBuildOutput wasn't provided when building profiling lib") @@ -651,7 +636,7 @@ def _build_haskell_lib( ctx, link_style, pkgname, - libstem, + None, uniq_infos, import_artifacts, enable_profiling = enable_profiling, @@ -667,7 +652,6 @@ def _build_haskell_lib( objects = object_artifacts, stub_dirs = stub_dirs, libs = all_libs, - empty_libs = empty_libs, version = "1.0.0", is_prebuilt = False, profiling_enabled = enable_profiling, diff --git a/prelude/haskell/library_info.bzl b/prelude/haskell/library_info.bzl index ea305ea33..0a87443bc 100644 --- a/prelude/haskell/library_info.bzl +++ b/prelude/haskell/library_info.bzl @@ -39,9 +39,6 @@ HaskellLibraryInfo = record( # at compile time. The real library flags are propagated up the # dependency graph via MergedLinkInfo. libs = field(list[Artifact], []), - # GHC insists on loading a library, but does not actually need it when we - # pass module granular object files into compilation actions. - empty_libs = field(list[Artifact], []), # Package version, used to specify the full package when exposing it, # e.g. filepath-1.4.2.1, deepseq-1.4.4.0. # Internal packages default to 1.0.0, e.g. `fbcode-dsi-logger-hs-types-1.0.0`. From f4857360b6f74be74cbd24229c073d2bc491d17b Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 8 Mar 2024 13:52:17 +0100 Subject: [PATCH 0697/1133] [buck2] Build dynamic outputs too when linking haskell statically When the link style is `static` or `static_pic` we also produce the dynamic object and interface files and also keep track of these in the _Module record. Also pass `-outputdir` to avoid any files are created in the project directory. 
--- prelude/haskell/compile.bzl | 44 ++++++++++++++++++++++++++----------- 1 file changed, 31 insertions(+), 13 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index d988f7184..f9d223708 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -65,8 +65,8 @@ PackagesInfo = record( _Module = record( source = field(Artifact), - interface = field(Artifact), - object = field(Artifact), + interfaces = field(list[Artifact]), + objects = field(list[Artifact]), stub_dir = field(Artifact), prefix_dir = field(str), ) @@ -90,10 +90,22 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl module_name = src_to_module_name(src.short_path) interface_path = paths.replace_extension(src.short_path, "." + hisuf) interface = ctx.actions.declare_output("mod-" + suffix, interface_path) + interfaces = [interface] object_path = paths.replace_extension(src.short_path, "." + osuf) object = ctx.actions.declare_output("mod-" + suffix, object_path) + objects = [object] + + if link_style in [LinkStyle("static"), LinkStyle("static_pic")]: + dyn_osuf, dyn_hisuf = output_extensions(LinkStyle("shared"), enable_profiling) + interface_path = paths.replace_extension(src.short_path, "." + dyn_hisuf) + interface = ctx.actions.declare_output("mod-" + suffix, interface_path) + interfaces.append(interface) + object_path = paths.replace_extension(src.short_path, "." 
+ dyn_osuf) + object = ctx.actions.declare_output("mod-" + suffix, object_path) + objects.append(object) + stub_dir = ctx.actions.declare_output("stub-" + suffix + "-" + module_name, dir=True) - modules[module_name] = _Module(source = src, interface = interface, object = object, stub_dir = stub_dir, prefix_dir = "mod-" + suffix) + modules[module_name] = _Module(source = src, interfaces = interfaces, objects = objects, stub_dir = stub_dir, prefix_dir = "mod-" + suffix) return modules @@ -415,14 +427,20 @@ def _compile_module_args( compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), transitive_deps = transitive_deps) - object = outputs[module.object] - hi = outputs[module.interface] + objects = [outputs[obj] for obj in module.objects] + his = [outputs[hi] for hi in module.interfaces] stubs = outputs[module.stub_dir] - compile_args.add("-ohi", cmd_args(hi.as_output())) - compile_args.add("-o", cmd_args(object.as_output())) + compile_args.add("-outputdir", cmd_args([cmd_args(stubs.as_output()).parent(), module.prefix_dir], delimiter="/")) + compile_args.add("-o", objects[0].as_output()) + compile_args.add("-ohi", his[0].as_output()) compile_args.add("-stubdir", stubs.as_output()) + if link_style in [LinkStyle("static_pic"), LinkStyle("static")]: + compile_args.add("-dynamic-too") + compile_args.add("-dyno", objects[1].as_output()) + compile_args.add("-dynohi", his[1].as_output()) + srcs = cmd_args(module.source) for (path, src) in srcs_to_pairs(ctx.attrs.srcs): # hs-boot files aren't expected to be an argument to compiler but does need @@ -434,8 +452,8 @@ def _compile_module_args( return CompileArgsInfo( result = CompileResultInfo( - objects = [object], - hi = [hi], + objects = objects, + hi = his, stubs = stubs, producing_indices = producing_indices, ), @@ -493,9 +511,9 @@ def _compile_module( for dep_name in breadth_first_traversal(graph, [module_name])[1:]: dep = 
modules[dep_name] - compile_cmd.hidden(dep.interface) + compile_cmd.hidden(dep.interfaces) if enable_th: - compile_cmd.hidden(dep.object) + compile_cmd.hidden(dep.objects) ctx.actions.run(compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name) @@ -537,8 +555,8 @@ def compile( pkgname = pkgname, ) - interfaces = [module.interface for module in modules.values()] - objects = [module.object for module in modules.values()] + interfaces = [interface for module in modules.values() for interface in module.interfaces] + objects = [object for module in modules.values() for object in module.objects] stub_dirs = [module.stub_dir for module in modules.values()] ctx.actions.dynamic_output( From 59b59c651c19a93bf1290298bdfed1f59dbc8599 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 12 Mar 2024 09:40:27 +0100 Subject: [PATCH 0698/1133] [buck2] Add `-package-db` and `-package` flags for each haskell library dependency --- prelude/haskell/haskell.bzl | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index e3c89bee1..570261714 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -53,6 +53,7 @@ load( "@prelude//haskell:compile.bzl", "CompileResultInfo", "compile", + "get_packages_info", "target_metadata", ) load( @@ -932,10 +933,22 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] + # Add -package-db and -package/-expose-package flags for each Haskell + # library dependency. 
+ packages_info = get_packages_info( + ctx, + link_style, + specify_pkg_version = False, + enable_profiling = enable_profiling, + use_empty_lib = False, + ) + output = ctx.actions.declare_output(ctx.attrs.name) link = cmd_args(haskell_toolchain.compiler) link.add("-hide-all-packages") link.add(cmd_args(toolchain_libs, prepend="-package")) + link.add(cmd_args(packages_info.exposed_package_args)) + link.add(packages_info.packagedb_args) link.add("-o", output.as_output()) link.add(haskell_toolchain.linker_flags) link.add(ctx.attrs.linker_flags) From a7beed1b24835c0610747c98af317e1d1c28933a Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 12 Mar 2024 09:42:22 +0100 Subject: [PATCH 0699/1133] [buck2] Only use the first object file per module when linking The second is the dyn_o file. --- prelude/haskell/haskell.bzl | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 570261714..b3c4e5828 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -953,7 +953,15 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link.add(haskell_toolchain.linker_flags) link.add(ctx.attrs.linker_flags) - link.add(compiled.objects) + objects = {} + # only add the first object per module + # TODO[CB] restructure this to use a record / dict for compiled.objects + for obj in compiled.objects: + key = paths.replace_extension(obj.short_path, "") + if not key in objects: + objects[key] = obj + + link.add(objects.values()) indexing_tsets = {} if compiled.producing_indices: From 339c6427027b84d38cfbe5d28ea1ad833a115c65 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 12 Mar 2024 12:17:05 +0100 Subject: [PATCH 0700/1133] [buck2] Add libs from packages to link command --- prelude/haskell/haskell.bzl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index b3c4e5828..4aeb671c1 100644 --- 
a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -953,6 +953,8 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link.add(haskell_toolchain.linker_flags) link.add(ctx.attrs.linker_flags) + link.hidden(packages_info.exposed_package_libs) + objects = {} # only add the first object per module # TODO[CB] restructure this to use a record / dict for compiled.objects From e3b9048539d83164f35010194f0422d5282d6083 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 11 Apr 2024 14:33:07 +0200 Subject: [PATCH 0701/1133] [buck2] Add both interface (.hi and .dyn_hi) to compile cmd --- prelude/haskell/compile.bzl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index f9d223708..39d3d1396 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -229,10 +229,12 @@ def get_packages_info( lib_interfaces[lib.name] = {} for o in lib.objects[enable_profiling]: + # this should prefer the dyn_o -- since it is used for TH lib_objects[lib.name][src_to_module_name(o.short_path)] = o for hi in lib.import_dirs[enable_profiling]: - lib_interfaces[lib.name][src_to_module_name(hi.short_path)] = hi + mod_name = src_to_module_name(hi.short_path) + lib_interfaces[lib.name].setdefault(mod_name, []).append(hi) for pkg, mods in transitive_deps.items(): if pkg == pkgname: @@ -240,7 +242,7 @@ def get_packages_info( continue for mod in mods: exposed_package_objects.append(lib_objects[pkg][mod]) - exposed_package_imports.append(lib_interfaces[pkg][mod]) + exposed_package_imports.extend(lib_interfaces[pkg][mod]) else: for lib in libs.traverse(): exposed_package_imports.extend(lib.import_dirs[enable_profiling]) From fc4df9a0a1019a7e943fff0dbca2a6935c905f35 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 10 Apr 2024 15:22:01 +0200 Subject: [PATCH 0702/1133] [buck2] Use a package when linking haskell binaries statically As explained in a comment in haskell.bzl, 
the way packages are used by the haskell rules of the prelude is a bit strange. These are intentionally not used at link time, all the information is in the HaskellLibraryInfo and linker commands are constructed manually. But this breaks with our usage of ghcWithPackages and static linking. For ghcWithPackages, we introduced the concept of HaskellToolchainLibraries that represent packages available from the nix toolchain. We have to add these packages to the ghc command line when linking, in order for their libraries to be passed to the linker. However, when linking statically dependencies must be passed in topological order to the linker. When passing static libraries using `-optl` as is done in haskell_binary, the libraries from dependent packages will always be passed before the static archives and this will lead to many undefined symbol errors. The way of solving this within the original prelude would be to introduce haskell_prebuilt_library rules for every toolchain library. This would need to include proper information about the location of the libraries, compiler and linker flags and so on. Basically, all the information contained in the package conf file would need to be introduced into a rule. This change uses a different approach by constructing a package that passes the static archives for dependencies in the `ld-flags` (similar to adding them with `-optl`) but also adds the haskell dependencies which is to ensure these are passed to the linker in the correct order by ghc. --- prelude/haskell/haskell.bzl | 76 ++++++++++++++++++++++++++++++++++++- 1 file changed, 75 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 4aeb671c1..5b27c9a3c 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -7,6 +7,7 @@ # Implementation of the Haskell build rules. 
+load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//:paths.bzl", "paths") load("@prelude//cxx:archive.bzl", "make_archive") load( @@ -400,6 +401,9 @@ PKGCONF=$3 # directly declared as dependencies may be used # # - by GHCi when loading packages into the repl +# +# - when linking binaries statically, in order to pass libraries +# to the linker in the correct order def _make_package( ctx: AnalysisContext, link_style: LinkStyle, @@ -906,6 +910,53 @@ def derive_indexing_tset( children = index_children, ) +def _make_link_package( + ctx: AnalysisContext, + link_style: LinkStyle, + pkgname: str, + hlis: list[HaskellLibraryInfo], + static_libs: ArgLike) -> Artifact: + artifact_suffix = get_artifact_suffix(link_style, False) + + conf = cmd_args( + "name: " + pkgname, + "version: 1.0.0", + "id: " + pkgname, + "key: " + pkgname, + "exposed: False", + cmd_args(cmd_args(static_libs, delimiter = ", "), format = "ld-options: {}"), + "depends: " + ", ".join([lib.id for lib in hlis]), + ) + + pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + "_link.conf", conf) + db = ctx.actions.declare_output("db-" + artifact_suffix + "_link", dir = True) + + db_deps = [x.db for x in hlis] + + # So that ghc-pkg can find the DBs for the dependencies. We might + # be able to use flags for this instead, but this works. 
+ ghc_package_path = cmd_args( + db_deps, + delimiter = ":", + ) + + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + ctx.actions.run( + cmd_args([ + "sh", + "-c", + _REGISTER_PACKAGE, + "", + haskell_toolchain.packager, + db.as_output(), + pkg_conf, + ]), + category = "haskell_package_link" + artifact_suffix.replace("-", "_"), + env = {"GHC_PACKAGE_PATH": ghc_package_path}, + ) + + return db + def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: enable_profiling = ctx.attrs.enable_profiling @@ -1109,7 +1160,30 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: sos[name] = shared_lib.lib infos = get_link_args_for_strategy(ctx, nlis, to_link_strategy(link_style)) - link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) + if link_style in [LinkStyle("static"), LinkStyle("static_pic")]: + hlis = attr_deps_haskell_link_infos_sans_template_deps(ctx) + linfos = [x.prof_info if enable_profiling else x.info for x in hlis] + uniq_infos = [x[link_style].reduce("root") for x in linfos] + + pkgname = ctx.label.name + "-link" + linkable_artifacts = [ + f.archive.artifact + for link in infos.tset.infos.traverse(ordering = "topological") + for f in link.default.linkables + ] + db = _make_link_package( + ctx, + link_style, + pkgname, + uniq_infos, + linkable_artifacts, + ) + + link.add(cmd_args(db, prepend="-package-db")) + link.add("-package", pkgname) + link.hidden(linkable_artifacts) + else: + link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) ctx.actions.run(link, category = "haskell_link") From 5ff2c12c9a6d59e91ae20310891a843a351984d6 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 15 Apr 2024 10:09:36 +0200 Subject: [PATCH 0703/1133] [buck2] Don't pass import artifacts to _make_package Only the list of profiling settings (true / false) is needed. 
--- prelude/haskell/haskell.bzl | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 5b27c9a3c..ab8eaa794 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -410,7 +410,7 @@ def _make_package( pkgname: str, libname: str | None, hlis: list[HaskellLibraryInfo], - hi: dict[bool, list[Artifact]], + profiling: list[bool], enable_profiling: bool, use_empty_lib: bool) -> Artifact: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) @@ -422,10 +422,7 @@ def _make_package( art_suff = get_artifact_suffix(link_style, profiled) return "\"${pkgroot}/" + dir_prefix + "-" + art_suff + "\"" - import_dirs = [ - mk_artifact_dir("mod", profiled) - for profiled in hi.keys() - ] + import_dirs = [ mk_artifact_dir("mod", profiled) for profiled in profiling ] conf = [ "name: " + pkgname, @@ -450,10 +447,7 @@ def _make_package( # following this logic (https://fburl.com/code/3gmobm5x) and will fail. 
libname += "_p" - library_dirs = [ - mk_artifact_dir("lib", profiled) - for profiled in hi.keys() - ] + library_dirs = [ mk_artifact_dir("lib", profiled) for profiled in profiling ] conf.append("library-dirs:" + ", ".join(library_dirs)) conf.append("extra-libraries: " + libname) @@ -633,7 +627,7 @@ def _build_haskell_lib( pkgname, libstem, uniq_infos, - import_artifacts, + import_artifacts.keys(), enable_profiling = enable_profiling, use_empty_lib = False, ) @@ -643,7 +637,7 @@ def _build_haskell_lib( pkgname, None, uniq_infos, - import_artifacts, + import_artifacts.keys(), enable_profiling = enable_profiling, use_empty_lib = True, ) From e003bb1b4ef1e3927fcef08ceee4acfdd1006985 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 12 Mar 2024 15:22:45 +0100 Subject: [PATCH 0704/1133] [buck2] Properly pass ghc arguments to haddock --- prelude/haskell/haskell_haddock.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/haskell/haskell_haddock.bzl b/prelude/haskell/haskell_haddock.bzl index 4e8489790..ace59507a 100644 --- a/prelude/haskell/haskell_haddock.bzl +++ b/prelude/haskell/haskell_haddock.bzl @@ -80,7 +80,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: cmd.add(cmd_args(argsfile, format = "@{}")) cmd.hidden(fileargs) else: - cmd.add(args.args_for_file) + cmd.add(cmd_args(args.args_for_file, format = "--optghc={}")) # Buck2 requires that the output artifacts are always produced, but Haddock only # creates them if it needs to, so we need a wrapper script to mkdir the outputs. 
From 21af3baab636474008e4219df541690743a4f2ac Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 12 Mar 2024 15:23:48 +0100 Subject: [PATCH 0705/1133] [buck2] Add haskell sources to haddock command line --- prelude/haskell/haskell_haddock.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/haskell/haskell_haddock.bzl b/prelude/haskell/haskell_haddock.bzl index ace59507a..487c9b0e8 100644 --- a/prelude/haskell/haskell_haddock.bzl +++ b/prelude/haskell/haskell_haddock.bzl @@ -82,6 +82,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: else: cmd.add(cmd_args(args.args_for_file, format = "--optghc={}")) + cmd.add(args.srcs) # Buck2 requires that the output artifacts are always produced, but Haddock only # creates them if it needs to, so we need a wrapper script to mkdir the outputs. script = ctx.actions.declare_output("haddock-script") From cf14eb46d6f104391ab78317043c757df8b160d1 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 15 Mar 2024 12:56:01 +0100 Subject: [PATCH 0706/1133] [buck2] Don't use bashism when calling `/bin/sh` On many systems, `/bin/sh` is not Bash, but e.g. dash or some other posix shell. 
--- prelude/haskell/haskell_haddock.bzl | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/prelude/haskell/haskell_haddock.bzl b/prelude/haskell/haskell_haddock.bzl index 487c9b0e8..c0db8ecc9 100644 --- a/prelude/haskell/haskell_haddock.bzl +++ b/prelude/haskell/haskell_haddock.bzl @@ -136,16 +136,18 @@ def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: script = ctx.actions.declare_output("haddock-script") script_args = cmd_args([ "#!/bin/sh", - "set -ueo pipefail", - cmd_args(cmd, delimiter = " ", quote = "shell"), - ]) - for dir in dep_htmls: - script_args.add( - cmd_args( - ["cp", "-Rf", "--reflink=auto", cmd_args(dir, format = "{}/*"), out.as_output()], - delimiter = " ", - ), + cmd_args( + cmd_args(cmd, delimiter = " ", quote = "shell"), + [ + cmd_args( + ["cp", "-Rf", "--reflink=auto", cmd_args(dir, format = "{}/*"), out.as_output()], + delimiter = " ", + ) for dir in dep_htmls + ], + delimiter = " && " ) + ]) + ctx.actions.write( script, script_args, From 14488155c94504c8f6a0a1da5fee3016d8f2b856 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 3 Apr 2024 05:45:31 -0700 Subject: [PATCH 0707/1133] Use env-vars API instead of passing them into command line Summary: Historically we set it with `env` command, it looks like this was required in the past, but now we can simply use native buck's way to set env vars Reviewed By: leoleovich Differential Revision: D55641193 fbshipit-source-id: 7901923c7af9cdbb2fa3f79e8b8081f0343bf219 --- prelude/go/cgo_library.bzl | 9 ++++++--- prelude/go/go_stdlib.bzl | 13 +++++++------ prelude/go/link.bzl | 8 +++++--- prelude/go/package_builder.bzl | 35 +++++++++++++++------------------- prelude/go/toolchain.bzl | 27 +++++++++++++------------- 5 files changed, 47 insertions(+), 45 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 253592179..b6302b306 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ 
-53,7 +53,7 @@ load(":coverage.bzl", "GoCoverageMode") load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs") load(":package_builder.bzl", "build_package") load(":packages.bzl", "GoPkg", "go_attr_pkg_name", "merge_pkgs") -load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") +load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") # A map of expected linkages for provided link style _LINKAGE_FOR_LINK_STYLE = { @@ -95,7 +95,7 @@ def _cgo( expect(CxxToolchainInfo in ctx.attrs._cxx_toolchain) cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] - cmd = get_toolchain_cmd_args(go_toolchain) + cmd = cmd_args() cmd.add(go_toolchain.cgo_wrapper) args = cmd_args() @@ -153,7 +153,10 @@ def _cgo( for src in go_srcs + c_headers + c_srcs: cmd.hidden(src.as_output()) - ctx.actions.run(cmd, category = "cgo") + + env = get_toolchain_env_vars(go_toolchain) + + ctx.actions.run(cmd, env = env, category = "cgo") return go_srcs, c_headers, c_srcs diff --git a/prelude/go/go_stdlib.bzl b/prelude/go/go_stdlib.bzl index fb8dfc2eb..8618575db 100644 --- a/prelude/go/go_stdlib.bzl +++ b/prelude/go/go_stdlib.bzl @@ -6,7 +6,7 @@ # of this source tree. 
load(":packages.bzl", "GoStdlib") -load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled", "get_toolchain_cmd_args") +load(":toolchain.bzl", "GoToolchainInfo", "evaluate_cgo_enabled", "get_toolchain_env_vars") def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] @@ -38,10 +38,7 @@ def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: cmd_args(cgo_ldflags, format = "--cgo_ldflags={}", absolute_prefix = "%cwd%/"), ] - cmd = get_toolchain_cmd_args(go_toolchain) - cmd.add([ - "GODEBUG={}".format("installgoroot=all"), - "CGO_ENABLED={}".format("1" if cgo_enabled else "0"), + cmd = cmd_args([ go_toolchain.go_wrapper, go_toolchain.go, go_wrapper_args, @@ -56,7 +53,11 @@ def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: "std", ]) - ctx.actions.run(cmd, category = "go_build_stdlib", identifier = "go_build_stdlib") + env = get_toolchain_env_vars(go_toolchain) + env["GODEBUG"] = "installgoroot=all" + env["CGO_ENABLED"] = "1" if cgo_enabled else "0" + + ctx.actions.run(cmd, env = env, category = "go_build_stdlib", identifier = "go_build_stdlib") importcfg = ctx.actions.declare_output("stdlib.importcfg") ctx.actions.run( diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 600c5d51f..0be4c44a8 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -36,7 +36,7 @@ load( "merge_pkgs", "pkg_artifacts", ) -load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") +load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") # Provider wrapping packages used for linking. 
GoPkgLinkInfo = provider(fields = { @@ -112,7 +112,7 @@ def link( file_extension = shared_extension if build_mode == GoBuildMode("c_shared") else executable_extension output = ctx.actions.declare_output(ctx.label.name + file_extension) - cmd = get_toolchain_cmd_args(go_toolchain) + cmd = cmd_args() cmd.add(go_toolchain.linker) cmd.add(go_toolchain.linker_flags) @@ -193,6 +193,8 @@ def link( cmd.add(main) - ctx.actions.run(cmd, category = "go_link") + env = get_toolchain_env_vars(go_toolchain) + + ctx.actions.run(cmd, env = env, category = "go_link") return (output, executable_args.runtime_files, executable_args.external_debug_info) diff --git a/prelude/go/package_builder.bzl b/prelude/go/package_builder.bzl index cd694b558..44c2393ab 100644 --- a/prelude/go/package_builder.bzl +++ b/prelude/go/package_builder.bzl @@ -13,7 +13,7 @@ load( "GoCoverageMode", # @Unused used as type ) load(":packages.bzl", "GoPkg", "make_importcfg", "merge_pkgs", "pkg_artifacts") -load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_cmd_args") +load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") def build_package( ctx: AnalysisContext, @@ -99,7 +99,9 @@ def build_package( def _go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_root: str, force_disable_cgo: bool): go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env_args = get_toolchain_cmd_args(go_toolchain, force_disable_cgo = force_disable_cgo) + env = get_toolchain_env_vars(go_toolchain, force_disable_cgo = force_disable_cgo) + env["GO111MODULE"] = "off" + go_list_out = ctx.actions.declare_output(paths.basename(pkg_name) + "_go_list.json") # Create file sructure that `go list` can recognize @@ -110,8 +112,6 @@ def _go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_ ) tags = go_toolchain.tags + ctx.attrs._tags go_list_args = [ - env_args, - "GO111MODULE=off", go_toolchain.go_list_wrapper, "-e", ["--go", go_toolchain.go], @@ -123,7 +123,7 @@ def _go_list(ctx: 
AnalysisContext, pkg_name: str, srcs: list[Artifact], package_ ] identifier = paths.basename(pkg_name) - ctx.actions.run(go_list_args, category = "go_list", identifier = identifier) + ctx.actions.run(go_list_args, env = env, category = "go_list", identifier = identifier) return go_list_out @@ -186,7 +186,7 @@ def _compile( gen_asmhdr: bool = False) -> (Artifact, Artifact | None): go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env_args = get_toolchain_cmd_args(go_toolchain) + env = get_toolchain_env_vars(go_toolchain) out = ctx.actions.declare_output("go_compile_out.a") if len(go_srcs) == 0: @@ -196,7 +196,6 @@ def _compile( asmhdr = ctx.actions.declare_output("__asmhdr__/go_asm.h") if gen_asmhdr else None compile_cmd = cmd_args([ - env_args, go_toolchain.compiler, go_toolchain.compiler_flags, compiler_flags, @@ -215,7 +214,7 @@ def _compile( compile_cmd.hidden(embed_files) # files and directories should be available for embedding identifier = paths.basename(pkg_name) - ctx.actions.run(compile_cmd, category = "go_compile", identifier = identifier) + ctx.actions.run(compile_cmd, env = env, category = "go_compile", identifier = identifier) return (out, asmhdr) @@ -224,7 +223,7 @@ def _symabis(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], assem return None go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env_args = get_toolchain_cmd_args(go_toolchain) + env = get_toolchain_env_vars(go_toolchain) # we have to supply "go_asm.h" with any content to make asm tool happy # its content doesn't matter if -gensymabis provided @@ -232,7 +231,6 @@ def _symabis(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], assem fake_asmhdr = ctx.actions.write("__fake_asmhdr__/go_asm.h", "") symabis = ctx.actions.declare_output("symabis") asm_cmd = [ - env_args, go_toolchain.assembler, go_toolchain.assembler_flags, assembler_flags, @@ -244,7 +242,7 @@ def _symabis(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], assem ] identifier = 
paths.basename(pkg_name) - ctx.actions.run(asm_cmd, category = "go_symabis", identifier = identifier) + ctx.actions.run(asm_cmd, env = env, category = "go_symabis", identifier = identifier) return symabis @@ -253,7 +251,7 @@ def _asssembly(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], asm return [] go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env_args = get_toolchain_cmd_args(go_toolchain) + env = get_toolchain_env_vars(go_toolchain) o_files = [] identifier = paths.basename(pkg_name) @@ -262,7 +260,6 @@ def _asssembly(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], asm o_files.append(o_file) asm_cmd = [ - env_args, go_toolchain.assembler, go_toolchain.assembler_flags, assembler_flags, @@ -272,7 +269,7 @@ def _asssembly(ctx: AnalysisContext, pkg_name: str, s_files: list[Artifact], asm s_file, ] - ctx.actions.run(asm_cmd, category = "go_assembly", identifier = identifier + "/" + s_file.short_path) + ctx.actions.run(asm_cmd, env = env, category = "go_assembly", identifier = identifier + "/" + s_file.short_path) return o_files @@ -282,12 +279,11 @@ def _pack(ctx: AnalysisContext, pkg_name: str, a_file: Artifact, o_files: list[A return a_file go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env_args = get_toolchain_cmd_args(go_toolchain) + env = get_toolchain_env_vars(go_toolchain) pkg_file = ctx.actions.declare_output("pkg.a") pack_cmd = [ - env_args, go_toolchain.packer, "c", pkg_file.as_output(), @@ -296,7 +292,7 @@ def _pack(ctx: AnalysisContext, pkg_name: str, a_file: Artifact, o_files: list[A ] identifier = paths.basename(pkg_name) - ctx.actions.run(pack_cmd, category = "go_pack", identifier = identifier) + ctx.actions.run(pack_cmd, env = env, category = "go_pack", identifier = identifier) return pkg_file @@ -315,7 +311,7 @@ def _cover(ctx: AnalysisContext, pkg_name: str, go_files: list[Artifact], covera return go_files, "" go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env_args = 
get_toolchain_cmd_args(go_toolchain) + env = get_toolchain_env_vars(go_toolchain) covered_files = [] coverage_vars = {} for go_file in go_files: @@ -326,7 +322,6 @@ def _cover(ctx: AnalysisContext, pkg_name: str, go_files: list[Artifact], covera coverage_vars[var] = go_file.short_path cover_cmd = [ - env_args, go_toolchain.cover, ["-mode", coverage_mode.value], ["-var", var], @@ -334,7 +329,7 @@ def _cover(ctx: AnalysisContext, pkg_name: str, go_files: list[Artifact], covera go_file, ] - ctx.actions.run(cover_cmd, category = "go_cover", identifier = paths.basename(pkg_name) + "/" + go_file.short_path) + ctx.actions.run(cover_cmd, env = env, category = "go_cover", identifier = paths.basename(pkg_name) + "/" + go_file.short_path) coverage_vars_out = "" if len(coverage_vars) > 0: diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index 8922a94c9..adf2f7c53 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -37,32 +37,33 @@ GoToolchainInfo = provider( }, ) -def get_toolchain_cmd_args(toolchain: GoToolchainInfo, force_disable_cgo = False) -> cmd_args: - cmd = cmd_args("env") +def get_toolchain_env_vars(toolchain: GoToolchainInfo, force_disable_cgo = False) -> dict[str, str | cmd_args]: + env = { + "GOARCH": toolchain.env_go_arch, + # opt-out from Go1.20 coverage redisign + "GOEXPERIMENT": "nocoverageredesign", + "GOOS": toolchain.env_go_os, + } - # opt-out from Go1.20 coverage redisign - cmd.add("GOEXPERIMENT=nocoverageredesign") - - cmd.add("GOARCH={}".format(toolchain.env_go_arch)) - cmd.add("GOOS={}".format(toolchain.env_go_os)) if toolchain.env_go_arm != None: - cmd.add("GOARM={}".format(toolchain.env_go_arm)) + env["GOARM"] = toolchain.env_go_arm if toolchain.env_go_root != None: - cmd.add(cmd_args(toolchain.env_go_root, format = "GOROOT={}")) + env["GOROOT"] = toolchain.env_go_root if toolchain.env_go_debug: godebug = ",".join(["{}={}".format(k, v) for k, v in toolchain.env_go_debug.items()]) - 
cmd.add("GODEBUG={}".format(godebug)) + env["GODEBUG"] = godebug + if force_disable_cgo: - cmd.add("CGO_ENABLED=0") + env["CGO_ENABLED"] = "0" else: # CGO is enabled by default for native compilation, but we need to set it # explicitly for cross-builds: # https://go-review.googlesource.com/c/go/+/12603/2/src/cmd/cgo/doc.go cxx_toolchain_available = toolchain.cxx_toolchain_for_linking != None if cxx_toolchain_available: - cmd.add("CGO_ENABLED=1") + env["CGO_ENABLED"] = "1" - return cmd + return env # Sets default value of cgo_enabled attribute based on the presence of C++ toolchain. def evaluate_cgo_enabled(toolchain: GoToolchainInfo, cgo_enabled: [bool, None]) -> bool: From 4c84ffc1f2d224faed0e52b00b25e78bb89ae40d Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 3 Apr 2024 06:34:16 -0700 Subject: [PATCH 0708/1133] Pass CGO_* vars with env-var API and Quote CGO_LDFLAGS Summary: Get rid of old hacky way passing CGO_* env vars and quote ldflags to avoid further errors on macos D55074907 Reviewed By: leoleovich Differential Revision: D55641878 fbshipit-source-id: fb618bbeb467dc02d13b6c6d5130e9ab92363ca4 --- prelude/go/cgo_library.bzl | 11 +---------- prelude/go/go_stdlib.bzl | 21 +++++++++------------ prelude/go/link.bzl | 1 + prelude/go/tools/cgo_wrapper.py | 4 ---- prelude/go/tools/go_wrapper.py | 24 +++--------------------- 5 files changed, 14 insertions(+), 47 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index b6302b306..deb5ff6e5 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -102,14 +102,6 @@ def _cgo( args.add(cmd_args(go_toolchain.cgo, format = "--cgo={}")) c_compiler = cxx_toolchain.c_compiler_info - # linker = cxx_toolchain.linker_info - - # Passing fbcode-platform ldflags may create S365277, so I would - # comment this change until we really need to do it. 
- # ldflags = cmd_args( - # linker.linker_flags, - # go_toolchain.external_linker_flags, - # ) # Construct the full C/C++ command needed to preprocess/compile sources. cxx_cmd = cmd_args() @@ -135,8 +127,6 @@ def _cgo( allow_args = True, is_executable = True, ) - args.add(cmd_args(cxx_wrapper, format = "--env-cc={}")) - args.hidden(cxx_cmd) # TODO(agallagher): cgo outputs a dir with generated sources, but I'm not # sure how to pass in an output dir *and* enumerate the sources we know will @@ -155,6 +145,7 @@ def _cgo( cmd.hidden(src.as_output()) env = get_toolchain_env_vars(go_toolchain) + env["CC"] = cmd_args(cxx_wrapper, hidden = cxx_cmd) ctx.actions.run(cmd, env = env, category = "cgo") diff --git a/prelude/go/go_stdlib.bzl b/prelude/go/go_stdlib.bzl index 8618575db..2bc411f27 100644 --- a/prelude/go/go_stdlib.bzl +++ b/prelude/go/go_stdlib.bzl @@ -21,7 +21,10 @@ def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: assembler_flags += ["-shared"] compiler_flags += ["-shared"] - go_wrapper_args = [] + env = get_toolchain_env_vars(go_toolchain) + env["GODEBUG"] = "installgoroot=all" + env["CGO_ENABLED"] = "1" if cgo_enabled else "0" + cxx_toolchain = go_toolchain.cxx_toolchain_for_linking if cxx_toolchain != None: c_compiler = cxx_toolchain.c_compiler_info @@ -29,19 +32,17 @@ def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: cgo_ldflags = cmd_args( cxx_toolchain.linker_info.linker_flags, go_toolchain.external_linker_flags, + quote = "shell", ) - go_wrapper_args += [ - cmd_args(c_compiler.compiler, format = "--cc={}", absolute_prefix = "%cwd%/"), - cmd_args([c_compiler.compiler_flags, go_toolchain.c_compiler_flags], format = "--cgo_cflags={}", absolute_prefix = "%cwd%/"), - cmd_args(c_compiler.preprocessor_flags, format = "--cgo_cppflags={}", absolute_prefix = "%cwd%/"), - cmd_args(cgo_ldflags, format = "--cgo_ldflags={}", absolute_prefix = "%cwd%/"), - ] + env["CC"] = cmd_args(c_compiler.compiler, delimiter = " ", absolute_prefix = "%cwd%/") + 
env["CGO_CFLAGS"] = cmd_args(c_compiler.compiler_flags, delimiter = " ", absolute_prefix = "%cwd%/") + env["CGO_CPPFLAGS"] = cmd_args(c_compiler.preprocessor_flags, delimiter = " ", absolute_prefix = "%cwd%/") + env["CGO_LDFLAGS"] = cmd_args(cgo_ldflags, delimiter = " ", absolute_prefix = "%cwd%/") cmd = cmd_args([ go_toolchain.go_wrapper, go_toolchain.go, - go_wrapper_args, "install", "-pkgdir", stdlib_pkgdir.as_output(), @@ -53,10 +54,6 @@ def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: "std", ]) - env = get_toolchain_env_vars(go_toolchain) - env["GODEBUG"] = "installgoroot=all" - env["CGO_ENABLED"] = "1" if cgo_enabled else "0" - ctx.actions.run(cmd, env = env, category = "go_build_stdlib", identifier = "go_build_stdlib") importcfg = ctx.actions.declare_output("stdlib.importcfg") diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 0be4c44a8..361449efc 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -187,6 +187,7 @@ def link( cxx_toolchain.linker_info.linker_flags, go_toolchain.external_linker_flags, delimiter = " ", + quote = "shell", )) cmd.add(linker_flags) diff --git a/prelude/go/tools/cgo_wrapper.py b/prelude/go/tools/cgo_wrapper.py index 38e4b845f..5fe6f7802 100644 --- a/prelude/go/tools/cgo_wrapper.py +++ b/prelude/go/tools/cgo_wrapper.py @@ -21,8 +21,6 @@ def main(argv): parser.add_argument("--cgo", action="append", default=[]) parser.add_argument("--output", required=True, type=Path) parser.add_argument("--cpp", action="append", default=[]) - parser.add_argument("--env-cc", action="append", default=[]) - parser.add_argument("--env-ldflags", action="append", default=[]) parser.add_argument("srcs", type=Path, nargs="*") args = parser.parse_args(argv[1:]) @@ -30,8 +28,6 @@ def main(argv): os.makedirs(output, exist_ok=True) env = os.environ.copy() - env["CC"] = " ".join(args.env_cc) - env["CGO_LDFLAGS"] = " ".join(args.env_ldflags) cmd = [] cmd.extend(args.cgo) diff --git a/prelude/go/tools/go_wrapper.py 
b/prelude/go/tools/go_wrapper.py index 83b3c9ef5..9a7b2d0fc 100644 --- a/prelude/go/tools/go_wrapper.py +++ b/prelude/go/tools/go_wrapper.py @@ -24,10 +24,6 @@ def main(argv): wrapped_binary = Path(argv[1]) parser = argparse.ArgumentParser() - parser.add_argument("--cc", action="append", default=[]) - parser.add_argument("--cgo_cflags", action="append", default=[]) - parser.add_argument("--cgo_cppflags", action="append", default=[]) - parser.add_argument("--cgo_ldflags", action="append", default=[]) parsed, unknown = parser.parse_known_args(argv[2:]) env = os.environ.copy() @@ -38,23 +34,9 @@ def main(argv): env["GOCACHE"] = os.path.realpath(env["BUCK_SCRATCH_PATH"]) cwd = os.getcwd() - if len(parsed.cc) > 0: - env["CC"] = " ".join([arg.replace("%cwd%", cwd) for arg in parsed.cc]) - - if len(parsed.cgo_cflags) > 0: - env["CGO_CFLAGS"] = " ".join( - [arg.replace("%cwd%", cwd) for arg in parsed.cgo_cflags] - ) - - if len(parsed.cgo_cppflags) > 0: - env["CGO_CPPFLAGS"] = " ".join( - [arg.replace("%cwd%", cwd) for arg in parsed.cgo_cppflags] - ) - - if len(parsed.cgo_ldflags) > 0: - env["CGO_LDFLAGS"] = " ".join( - [arg.replace("%cwd%", cwd) for arg in parsed.cgo_ldflags] - ) + for env_var in ["CC", "CGO_CFLAGS", "CGO_CPPFLAGS", "CGO_LDFLAGS"]: + if env_var in env: + env[env_var] = env[env_var].replace("%cwd%", cwd) return subprocess.call([wrapped_binary] + unknown, env=env) From 77c5a943cbe8bd433bb5a66a8fba2bcac58fb619 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 3 Apr 2024 06:34:16 -0700 Subject: [PATCH 0709/1133] Get rid of unused code in cgo_wrapper Summary: Just removed some unused code Reviewed By: leoleovich Differential Revision: D55643226 fbshipit-source-id: 15fffb86c3c5e2aae41a13a9f6a32bf8f97813ed --- prelude/go/tools/cgo_wrapper.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/prelude/go/tools/cgo_wrapper.py b/prelude/go/tools/cgo_wrapper.py index 5fe6f7802..498197362 100644 --- a/prelude/go/tools/cgo_wrapper.py +++ 
b/prelude/go/tools/cgo_wrapper.py @@ -12,7 +12,6 @@ import os import subprocess import sys -import tempfile from pathlib import Path @@ -20,7 +19,6 @@ def main(argv): parser = argparse.ArgumentParser(fromfile_prefix_chars="@") parser.add_argument("--cgo", action="append", default=[]) parser.add_argument("--output", required=True, type=Path) - parser.add_argument("--cpp", action="append", default=[]) parser.add_argument("srcs", type=Path, nargs="*") args = parser.parse_args(argv[1:]) @@ -31,19 +29,8 @@ def main(argv): cmd = [] cmd.extend(args.cgo) - # cmd.append("-importpath={}") - # cmd.append("-srcdir={}") cmd.append(f"-objdir={output}") - # cmd.append(cgoCompilerFlags) cmd.append("--") - # cmd.append(cxxCompilerFlags) - - if args.cpp: - with tempfile.NamedTemporaryFile("w", delete=False) as argsfile: - for arg in args.cpp: - print(arg, file=argsfile) - argsfile.flush() - cmd.append("@" + argsfile.name) cmd.extend(args.srcs) return subprocess.call(cmd, env=env) From 6df995468bf9d09fbfa1b5c58339edd2d6b3111a Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 3 Apr 2024 06:34:16 -0700 Subject: [PATCH 0710/1133] cgo_library extract cxx_wpapper into a function Summary: Simplify _cgo function to unblock further refactoring Reviewed By: leoleovich Differential Revision: D55644411 fbshipit-source-id: c3e6ceecde96c4a5f7b20421a44a2810f891a90b --- prelude/go/cgo_library.bzl | 74 ++++++++++++++++++++------------------ 1 file changed, 40 insertions(+), 34 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index deb5ff6e5..4c8a1d4cd 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -71,10 +71,6 @@ def _cgo( Run `cgo` on `.go` sources to generate Go, C, and C-Header sources. 
""" - pre = cxx_merge_cpreprocessors(ctx, own_pre, inherited_pre) - pre_args = pre.set.project_as_args("args") - pre_include_dirs = pre.set.project_as_args("include_dirs") - # If you change this dir or naming convention, please # update the corresponding logic in `fbgolist`. # Otherwise editing and linting for Go will break. @@ -92,8 +88,6 @@ def _cgo( # Return a `cmd_args` to use as the generated sources. go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - expect(CxxToolchainInfo in ctx.attrs._cxx_toolchain) - cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] cmd = cmd_args() cmd.add(go_toolchain.cgo_wrapper) @@ -101,33 +95,6 @@ def _cgo( args = cmd_args() args.add(cmd_args(go_toolchain.cgo, format = "--cgo={}")) - c_compiler = cxx_toolchain.c_compiler_info - - # Construct the full C/C++ command needed to preprocess/compile sources. - cxx_cmd = cmd_args() - cxx_cmd.add(c_compiler.compiler) - cxx_cmd.add(c_compiler.preprocessor_flags) - cxx_cmd.add(c_compiler.compiler_flags) - cxx_cmd.add(pre_args) - cxx_cmd.add(pre_include_dirs) - cxx_cmd.add(go_toolchain.c_compiler_flags) - - # Wrap the C/C++ command in a wrapper script to avoid arg length limits. 
- is_win = ctx.attrs._exec_os_type[OsLookup].platform == "windows" - cxx_sh = cmd_args( - [ - cmd_args(cxx_cmd, quote = "shell"), - "%*" if is_win else "\"$@\"", - ], - delimiter = " ", - ) - cxx_wrapper, _ = ctx.actions.write( - "__{}_cxx__.{}".format(ctx.label.name, "bat" if is_win else "sh"), - ([] if is_win else ["#!/bin/sh"]) + [cxx_sh], - allow_args = True, - is_executable = True, - ) - # TODO(agallagher): cgo outputs a dir with generated sources, but I'm not # sure how to pass in an output dir *and* enumerate the sources we know will # generated w/o v2 complaining that the output dir conflicts with the nested @@ -145,12 +112,51 @@ def _cgo( cmd.hidden(src.as_output()) env = get_toolchain_env_vars(go_toolchain) - env["CC"] = cmd_args(cxx_wrapper, hidden = cxx_cmd) + env["CC"] = _cxx_wrapper(ctx, own_pre, inherited_pre) ctx.actions.run(cmd, env = env, category = "cgo") return go_srcs, c_headers, c_srcs +def _cxx_wrapper(ctx: AnalysisContext, own_pre: list[CPreprocessor], inherited_pre: list[CPreprocessorInfo]) -> cmd_args: + pre = cxx_merge_cpreprocessors(ctx, own_pre, inherited_pre) + pre_args = pre.set.project_as_args("args") + pre_include_dirs = pre.set.project_as_args("include_dirs") + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + expect(CxxToolchainInfo in ctx.attrs._cxx_toolchain) + cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] + + c_compiler = cxx_toolchain.c_compiler_info + + # Construct the full C/C++ command needed to preprocess/compile sources. + cxx_cmd = cmd_args( + c_compiler.compiler, + c_compiler.preprocessor_flags, + c_compiler.compiler_flags, + pre_args, + pre_include_dirs, + go_toolchain.c_compiler_flags, + ) + + # Wrap the C/C++ command in a wrapper script to avoid arg length limits. 
+ is_win = ctx.attrs._exec_os_type[OsLookup].platform == "windows" + cxx_sh = cmd_args( + [ + cmd_args(cxx_cmd, quote = "shell"), + "%*" if is_win else "\"$@\"", + ], + delimiter = " ", + ) + cxx_wrapper, _ = ctx.actions.write( + "__{}_cxx__.{}".format(ctx.label.name, "bat" if is_win else "sh"), + ([] if is_win else ["#!/bin/sh"]) + [cxx_sh], + allow_args = True, + is_executable = True, + ) + + return cmd_args(cxx_wrapper, hidden = cxx_cmd) + def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: pkg_name = go_attr_pkg_name(ctx) From 19baf6c1fccf6fe70bc954e85c2e476b437a4b98 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 3 Apr 2024 06:34:16 -0700 Subject: [PATCH 0711/1133] Use utils:cmd_script.bzl for cxx_wrapper in cgo_library Summary: Use common wrapper instead of DIY one Reviewed By: leoleovich Differential Revision: D55644410 fbshipit-source-id: d188a0771159505b6c1913170de0e4db14859700 --- prelude/go/cgo_library.bzl | 21 ++++++--------------- 1 file changed, 6 insertions(+), 15 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 4c8a1d4cd..e28b5db37 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -43,6 +43,7 @@ load( ) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//os_lookup:defs.bzl", "OsLookup") +load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script") load("@prelude//utils:expect.bzl", "expect") load( "@prelude//utils:utils.bzl", @@ -140,22 +141,12 @@ def _cxx_wrapper(ctx: AnalysisContext, own_pre: list[CPreprocessor], inherited_p ) # Wrap the C/C++ command in a wrapper script to avoid arg length limits. 
- is_win = ctx.attrs._exec_os_type[OsLookup].platform == "windows" - cxx_sh = cmd_args( - [ - cmd_args(cxx_cmd, quote = "shell"), - "%*" if is_win else "\"$@\"", - ], - delimiter = " ", + return cmd_script( + ctx = ctx, + name = "cxx_wrapper", + cmd = cxx_cmd, + os = ScriptOs("windows" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "unix"), ) - cxx_wrapper, _ = ctx.actions.write( - "__{}_cxx__.{}".format(ctx.label.name, "bat" if is_win else "sh"), - ([] if is_win else ["#!/bin/sh"]) + [cxx_sh], - allow_args = True, - is_executable = True, - ) - - return cmd_args(cxx_wrapper, hidden = cxx_cmd) def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: pkg_name = go_attr_pkg_name(ctx) From 5103a7658f578ee8019c2cee7f9b5bb979c5e5dd Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 3 Apr 2024 06:34:16 -0700 Subject: [PATCH 0712/1133] Simplify cmd_args for cgo command Summary: Get rid of argsfile, we don't need it anymore as we don't pass CXX args directly into Cgo tool Reviewed By: leoleovich Differential Revision: D55644782 fbshipit-source-id: 7a0a93f936e81319e0710bea87e8044ba67af164 --- prelude/go/cgo_library.bzl | 32 +++++++++++--------------------- 1 file changed, 11 insertions(+), 21 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index e28b5db37..0502cf0bf 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -90,27 +90,17 @@ def _cgo( # Return a `cmd_args` to use as the generated sources. go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - cmd = cmd_args() - cmd.add(go_toolchain.cgo_wrapper) - - args = cmd_args() - args.add(cmd_args(go_toolchain.cgo, format = "--cgo={}")) - - # TODO(agallagher): cgo outputs a dir with generated sources, but I'm not - # sure how to pass in an output dir *and* enumerate the sources we know will - # generated w/o v2 complaining that the output dir conflicts with the nested - # artifacts. 
- args.add(cmd_args(go_srcs[0].as_output(), format = "--output={}/..")) - - args.add(srcs) - - argsfile = ctx.actions.declare_output(paths.join(gen_dir, ".cgo.argsfile")) - ctx.actions.write(argsfile.as_output(), args, allow_args = True) - - cmd.add(cmd_args(argsfile, format = "@{}").hidden([args])) - - for src in go_srcs + c_headers + c_srcs: - cmd.hidden(src.as_output()) + cmd = cmd_args( + go_toolchain.cgo_wrapper, + cmd_args(go_toolchain.cgo, format = "--cgo={}"), + # TODO(agallagher): cgo outputs a dir with generated sources, but I'm not + # sure how to pass in an output dir *and* enumerate the sources we know will + # generated w/o v2 complaining that the output dir conflicts with the nested + # artifacts. + cmd_args(go_srcs[0].as_output(), format = "--output={}/.."), + srcs, + hidden = [src.as_output() for src in go_srcs + c_headers + c_srcs], + ) env = get_toolchain_env_vars(go_toolchain) env["CC"] = _cxx_wrapper(ctx, own_pre, inherited_pre) From 0d99fca257b158c4393be75a87c9f40531094024 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 3 Apr 2024 06:34:16 -0700 Subject: [PATCH 0713/1133] Get rid of cgo_wrapper Summary: No needed it anymore Reviewed By: leoleovich Differential Revision: D55651596 fbshipit-source-id: fe58412657aeaf31ce0ff938ea09b0b646d0e01c --- prelude/go/cgo_library.bzl | 6 ++--- prelude/go/toolchain.bzl | 1 - prelude/go/tools/BUCK.v2 | 6 ----- prelude/go/tools/cgo_wrapper.py | 39 --------------------------------- prelude/toolchains/go.bzl | 2 -- 5 files changed, 3 insertions(+), 51 deletions(-) delete mode 100644 prelude/go/tools/cgo_wrapper.py diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 0502cf0bf..49526821a 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -91,13 +91,13 @@ def _cgo( go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] cmd = cmd_args( - go_toolchain.cgo_wrapper, - cmd_args(go_toolchain.cgo, format = "--cgo={}"), + go_toolchain.cgo, # 
TODO(agallagher): cgo outputs a dir with generated sources, but I'm not # sure how to pass in an output dir *and* enumerate the sources we know will # generated w/o v2 complaining that the output dir conflicts with the nested # artifacts. - cmd_args(go_srcs[0].as_output(), format = "--output={}/.."), + cmd_args(go_srcs[0].as_output(), format = "-objdir={}", parent = 1), + "--", srcs, hidden = [src.as_output() for src in go_srcs + c_headers + c_srcs], ) diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index adf2f7c53..f698fbaed 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -14,7 +14,6 @@ GoToolchainInfo = provider( "assembler_flags": provider_field(typing.Any, default = None), "c_compiler_flags": provider_field(typing.Any, default = None), "cgo": provider_field(RunInfo), - "cgo_wrapper": provider_field(RunInfo), "gen_stdlib_importcfg": provider_field(RunInfo), "go_list_wrapper": provider_field(RunInfo), "go_wrapper": provider_field(RunInfo), diff --git a/prelude/go/tools/BUCK.v2 b/prelude/go/tools/BUCK.v2 index 6d6a5bfdc..fd0ba410a 100644 --- a/prelude/go/tools/BUCK.v2 +++ b/prelude/go/tools/BUCK.v2 @@ -6,12 +6,6 @@ prelude.python_bootstrap_binary( visibility = ["PUBLIC"], ) -prelude.python_bootstrap_binary( - name = "cgo_wrapper", - main = "cgo_wrapper.py", - visibility = ["PUBLIC"], -) - prelude.python_bootstrap_binary( name = "gen_stdlib_importcfg", main = "gen_stdlib_importcfg.py", diff --git a/prelude/go/tools/cgo_wrapper.py b/prelude/go/tools/cgo_wrapper.py deleted file mode 100644 index 498197362..000000000 --- a/prelude/go/tools/cgo_wrapper.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. 
- -# pyre-unsafe - -import argparse -import os -import subprocess -import sys -from pathlib import Path - - -def main(argv): - parser = argparse.ArgumentParser(fromfile_prefix_chars="@") - parser.add_argument("--cgo", action="append", default=[]) - parser.add_argument("--output", required=True, type=Path) - parser.add_argument("srcs", type=Path, nargs="*") - args = parser.parse_args(argv[1:]) - - output = args.output.resolve(strict=False) - os.makedirs(output, exist_ok=True) - - env = os.environ.copy() - - cmd = [] - cmd.extend(args.cgo) - cmd.append(f"-objdir={output}") - cmd.append("--") - - cmd.extend(args.srcs) - return subprocess.call(cmd, env=env) - - -sys.exit(main(sys.argv)) diff --git a/prelude/toolchains/go.bzl b/prelude/toolchains/go.bzl index cf741b17e..e969ef7a9 100644 --- a/prelude/toolchains/go.bzl +++ b/prelude/toolchains/go.bzl @@ -35,7 +35,6 @@ def _system_go_toolchain_impl(ctx): GoToolchainInfo( assembler = RunInfo(cmd_script(ctx, "asm", cmd_args(go, "tool", "asm"), script_os)), cgo = RunInfo(cmd_script(ctx, "cgo", cmd_args(go, "tool", "cgo"), script_os)), - cgo_wrapper = ctx.attrs.cgo_wrapper[RunInfo], concat_files = ctx.attrs.concat_files[RunInfo], compiler = RunInfo(cmd_script(ctx, "compile", cmd_args(go, "tool", "compile"), script_os)), cover = RunInfo(cmd_script(ctx, "cover", cmd_args(go, "tool", "cover"), script_os)), @@ -64,7 +63,6 @@ system_go_toolchain = rule( visibility = ["PUBLIC"], )""", attrs = { - "cgo_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:cgo_wrapper")), "concat_files": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:concat_files")), "gen_stdlib_importcfg": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:gen_stdlib_importcfg")), "go_list_wrapper": attrs.exec_dep(providers = [RunInfo], default = "prelude//go/tools:go_list_wrapper"), From 5d1db7277aa91c86b75414f85804e8d48e548366 Mon Sep 17 00:00:00 2001 From: "Chen 
Xie (Reality Labs)" Date: Wed, 3 Apr 2024 07:23:27 -0700 Subject: [PATCH 0714/1133] create a new subtarget to generate exe only without external debug info Summary: create a new subtarget that doesn't materilize external build info, this is equivalent to the default build behavior before D51189971 was committed. Reviewed By: scottcao Differential Revision: D55652709 fbshipit-source-id: 8c383bae10597df0b1a144c6506fc3bec9fa8bca --- prelude/cxx/cxx_executable.bzl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 5a19716e1..3e902568e 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -664,6 +664,11 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, default_output = materialize_external_debug_info, )] + sub_targets["exe"] = [DefaultInfo( + default_output = binary.output, + other_outputs = runtime_files, + )] + for additional_subtarget, subtarget_providers in impl_params.additional.subtargets.items(): sub_targets[additional_subtarget] = subtarget_providers From 4357cdc4bc18cc51543ca2b2a7d7cd9dde2a20ed Mon Sep 17 00:00:00 2001 From: Thomas David Cuvillier Date: Wed, 3 Apr 2024 09:13:37 -0700 Subject: [PATCH 0715/1133] Remove tpx labels from OSS Summary: We remove the internal tpx labels from the open source buck2/erlang. 
Reviewed By: JakobDegen Differential Revision: D55480876 fbshipit-source-id: 9003246fab33e6abad53a1159bba849b882b4535 --- prelude/erlang/erlang_tests.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/erlang/erlang_tests.bzl b/prelude/erlang/erlang_tests.bzl index 478da71ee..4463060c3 100644 --- a/prelude/erlang/erlang_tests.bzl +++ b/prelude/erlang/erlang_tests.bzl @@ -77,7 +77,7 @@ def erlang_tests_macro( if prop_target: property_tests = [prop_target] - common_attributes["labels"] = common_attributes.get("labels", []) + ["tpx-enable-artifact-reporting", "test-framework=39:erlang_common_test"] + common_attributes["labels"] = common_attributes.get("labels", []) common_attributes["labels"] = list_dedupe(common_attributes["labels"]) From 61b651d6bed554f71f10fe673100b85a3cd548c5 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Wed, 3 Apr 2024 11:01:41 -0700 Subject: [PATCH 0716/1133] add watchos-arm64 constants and platforms Summary: Add constants and target platforms for watchos-arm64, which is the default target for watchos apps now. 
Reviewed By: drodriguez Differential Revision: D55646572 fbshipit-source-id: 46572459a579e7e89c7676a9c91b7f23a25d9d76 --- prelude/platforms/apple/arch.bzl | 2 -- prelude/platforms/apple/base.bzl | 1 + prelude/platforms/apple/constants.bzl | 3 +++ 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/prelude/platforms/apple/arch.bzl b/prelude/platforms/apple/arch.bzl index e351bed4e..72163c98b 100644 --- a/prelude/platforms/apple/arch.bzl +++ b/prelude/platforms/apple/arch.bzl @@ -8,8 +8,6 @@ _APPLE_ARCHES = [ "arm64", "arm64_32", - "armv7k", - "i386", "x86_64", ] diff --git a/prelude/platforms/apple/base.bzl b/prelude/platforms/apple/base.bzl index 180ea3aae..7631e531b 100644 --- a/prelude/platforms/apple/base.bzl +++ b/prelude/platforms/apple/base.bzl @@ -45,6 +45,7 @@ _MOBILE_PLATFORMS = [ ios_platforms.IPHONEOS_ARM64, ios_platforms.IPHONESIMULATOR_ARM64, ios_platforms.IPHONESIMULATOR_X86_64, + watch_platforms.WATCHOS_ARM64, watch_platforms.WATCHOS_ARM64_32, watch_platforms.WATCHSIMULATOR_ARM64, watch_platforms.WATCHSIMULATOR_X86_64, diff --git a/prelude/platforms/apple/constants.bzl b/prelude/platforms/apple/constants.bzl index 59a2e12b4..dfd7208e0 100644 --- a/prelude/platforms/apple/constants.bzl +++ b/prelude/platforms/apple/constants.bzl @@ -54,6 +54,8 @@ MACOS_UNIVERSAL = "macosx-universal" # Watch OS Platforms/Flavors +WATCHOS_ARM64 = "watchos-arm64" + WATCHOS_ARM64_32 = "watchos-arm64_32" WATCHSIMULATOR_ARM64 = "watchsimulator-arm64" @@ -97,6 +99,7 @@ mac_platforms = struct( ) watch_platforms = struct( + WATCHOS_ARM64 = WATCHOS_ARM64, WATCHOS_ARM64_32 = WATCHOS_ARM64_32, WATCHSIMULATOR_ARM64 = WATCHSIMULATOR_ARM64, WATCHSIMULATOR_X86_64 = WATCHSIMULATOR_X86_64, From 9a3cdaa069193519a87b7cad13e1b54778985638 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Wed, 3 Apr 2024 14:58:09 -0700 Subject: [PATCH 0717/1133] dynamic_output(outputs=[.as_output()]) Summary: Following diff D55389250 requires output artifact, not just declared artifact, as 
output parameter of `dynamic_output`. Reviewed By: JakobDegen Differential Revision: D55461023 fbshipit-source-id: 065bd9bfbc5c7408e7938f0ea0942796de7003af --- prelude/ocaml/ocaml.bzl | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/prelude/ocaml/ocaml.bzl b/prelude/ocaml/ocaml.bzl index ebd89eaa9..d7137a87d 100644 --- a/prelude/ocaml/ocaml.bzl +++ b/prelude/ocaml/ocaml.bzl @@ -582,7 +582,12 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> if outputs == []: ctx.actions.write(cmxs_order, "") else: - ctx.actions.dynamic_output(dynamic = [depends_output], inputs = todo_inputs, outputs = outputs + [cmxs_order], f = f) + ctx.actions.dynamic_output( + dynamic = [depends_output], + inputs = todo_inputs, + outputs = [o.as_output() for o in outputs + [cmxs_order]], + f = f, + ) return CompileResultInfo(cmxs_order = cmxs_order, stbs = stbs, objs = objs, cmis = cmis, cmos = cmos, cmxs = cmxs, cmts = cmts, cmtis = cmtis, ppmlis = ppmlis, ppmls = ppmls) From 62c43bce8074d4d4180120fd19b633dcf9d46276 Mon Sep 17 00:00:00 2001 From: Mark Bridges Date: Thu, 4 Apr 2024 06:29:53 -0700 Subject: [PATCH 0718/1133] Buck2 Mockingbird Integration Tweaks Summary: Couple of adjustments here needed after trying the to use this in real life. - This had omitted trying to insert the Mockingbird generated source files into anything that wasn't a test target but I realised we've got TestUtilities, which are libraries for tests, that use these mocks but aren't themselves tests targets. - Generation hung for me on one target and I think it was because the project json included targets where the source code list was empty. Omitting the library info for any target with no swift sources fixed it. 
Reviewed By: blackm00n Differential Revision: D55486786 fbshipit-source-id: 00802830dded4ff95e97079dbf0e001afa58e034 --- prelude/apple/apple_library.bzl | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index 6cd725cfa..740cafe7a 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -144,16 +144,19 @@ def apple_library_impl(ctx: AnalysisContext) -> [Promise, list[Provider]]: ) output = cxx_library_parameterized(ctx, constructor_params) - return output.providers + [_make_mockingbird_library_info_provider(ctx)] + return output.providers + _make_mockingbird_library_info_provider(ctx) if uses_explicit_modules(ctx): return get_swift_anonymous_targets(ctx, get_apple_library_providers) else: return get_apple_library_providers([]) -def _make_mockingbird_library_info_provider(ctx: AnalysisContext) -> MockingbirdLibraryInfo: +def _make_mockingbird_library_info_provider(ctx: AnalysisContext) -> list[MockingbirdLibraryInfo]: _, swift_sources = _filter_swift_srcs(ctx) + if len(swift_sources) == 0: + return [] + all_deps = cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx) deps_mockingbird_infos = filter(None, [dep.get(MockingbirdLibraryInfo) for dep in all_deps]) @@ -163,7 +166,7 @@ def _make_mockingbird_library_info_provider(ctx: AnalysisContext) -> Mockingbird dep_names.append(info.name) children.append(info.tset) - mockingbird_srcs_folder = ctx.actions.declare_output("mockingbird_srcs" + "_" + ctx.attrs.name, dir = True) + mockingbird_srcs_folder = ctx.actions.declare_output("mockingbird_srcs_" + ctx.attrs.name, dir = True) ctx.actions.symlinked_dir( mockingbird_srcs_folder, @@ -180,14 +183,14 @@ def _make_mockingbird_library_info_provider(ctx: AnalysisContext) -> Mockingbird mockingbird_tset = ctx.actions.tset(MockingbirdLibraryInfoTSet, value = mockingbird_record, children = children) - return MockingbirdLibraryInfo( + return 
[MockingbirdLibraryInfo( name = ctx.attrs.name, tset = mockingbird_tset, - ) + )] def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisContext, params: AppleLibraryAdditionalParams, deps_providers: list = [], is_test_target: bool = False) -> CxxRuleConstructorParams: mockingbird_gen_sources = [] - if is_test_target: + if not "dummy_library" in ctx.attrs.labels: for dep in cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx): if MockingbirdSourcesInfo in dep: for src in dep[MockingbirdSourcesInfo].srcs: From fe563ac9aafc825212bd885ba5e3139c72c70bf2 Mon Sep 17 00:00:00 2001 From: Vladimir Makaev Date: Thu, 4 Apr 2024 08:08:17 -0700 Subject: [PATCH 0719/1133] Fix jacoco coverage wrapper failing due to duplicate classes Summary: The underlying problem here is that there is a duplicated androidx.databinding.* class in jars fed to Jacoco and it fails on report generation. Those are generated at compile time and seem to be embedded in the jars throughout we have no way of knowing this due to how we discover classes for the purpose of Jacoco report Excludes don't work at agent level: https://github.com/jacoco/jacoco/issues/1093 So a coverage wrapper needs to know in advance classes and exlude them specifically. 
Passing a classname even without associated source file will allow us to do so This should land only after a previous diff is rolled out Reviewed By: asm89 Differential Revision: D55454598 fbshipit-source-id: 7624335110356c25415d462bde53e158e152dd4a --- prelude/java/class_to_srcs.bzl | 3 +++ prelude/java/java_toolchain.bzl | 1 + prelude/java/tools/gen_class_to_source_map.py | 22 +++++++++++++++++++ 3 files changed, 26 insertions(+) diff --git a/prelude/java/class_to_srcs.bzl b/prelude/java/class_to_srcs.bzl index 961ab90cf..5da9ef1d3 100644 --- a/prelude/java/class_to_srcs.bzl +++ b/prelude/java/class_to_srcs.bzl @@ -76,6 +76,9 @@ def create_class_to_source_map_from_jar( srcs: list[Artifact]) -> Artifact: output = actions.declare_output(name) cmd = cmd_args(java_toolchain.gen_class_to_source_map[RunInfo]) + if java_toolchain.gen_class_to_source_map_include_sourceless_compiled_packages != None: + for item in java_toolchain.gen_class_to_source_map_include_sourceless_compiled_packages: + cmd.add("-i", item) cmd.add("-o", output.as_output()) cmd.add(jar) inputs_file = actions.write("class_to_srcs_map_argsfile.txt", srcs) diff --git a/prelude/java/java_toolchain.bzl b/prelude/java/java_toolchain.bzl index 82ab7517f..fbd474fa3 100644 --- a/prelude/java/java_toolchain.bzl +++ b/prelude/java/java_toolchain.bzl @@ -33,6 +33,7 @@ JavaToolchainInfo = provider( "fat_jar_main_class_lib": provider_field(typing.Any, default = None), "gen_class_to_source_map": provider_field(typing.Any, default = None), "gen_class_to_source_map_debuginfo": provider_field(typing.Any, default = None), # optional + "gen_class_to_source_map_include_sourceless_compiled_packages": provider_field(typing.Any, default = None), "graalvm_java": provider_field(typing.Any, default = None), "is_bootstrap_toolchain": provider_field(typing.Any, default = None), "jar": provider_field(typing.Any, default = None), diff --git a/prelude/java/tools/gen_class_to_source_map.py 
b/prelude/java/tools/gen_class_to_source_map.py index 8e58557d4..85af85916 100644 --- a/prelude/java/tools/gen_class_to_source_map.py +++ b/prelude/java/tools/gen_class_to_source_map.py @@ -14,6 +14,13 @@ def main(argv): parser = argparse.ArgumentParser(fromfile_prefix_chars="@") + parser.add_argument( + "--include_classes_prefixes", + "-i", + default=[], + nargs="*", + help="Prefixes of classes to include in the output, even if their source isn't present", + ) parser.add_argument( "--output", "-o", type=argparse.FileType("w"), default=sys.stdin ) @@ -44,6 +51,7 @@ def main(argv): if "$" in base: continue + found = False for src_base, src_path in sources.items(): if base == src_base or src_base.endswith("/" + base): classes.append( @@ -52,8 +60,22 @@ def main(argv): "srcPath": src_path, } ) + found = True break + if not found: + # If the class is not present in the sources, we stil want to + # include it if it has a prefix that we are interested in. + # certain classes in "androidx.databinding.*" are generated and its useful to know their presense in jars + for prefix in args.include_classes_prefixes: + if classname.startswith(prefix): + classes.append( + { + "className": classname, + } + ) + break + json.dump( { "jarPath": args.jar, From 489d0e465ed51e7063616b7c78bd99f7410b0026 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Thu, 4 Apr 2024 09:41:59 -0700 Subject: [PATCH 0720/1133] Back out "Get rid of cgo_wrapper" Summary: Original commit changeset: fe58412657ae Original Phabricator Diff: D55651596 Reviewed By: leoleovich Differential Revision: D55704771 fbshipit-source-id: 216881717dd678a63961228e6e5709f856a9ef83 --- prelude/go/cgo_library.bzl | 6 ++--- prelude/go/toolchain.bzl | 1 + prelude/go/tools/BUCK.v2 | 6 +++++ prelude/go/tools/cgo_wrapper.py | 39 +++++++++++++++++++++++++++++++++ prelude/toolchains/go.bzl | 2 ++ 5 files changed, 51 insertions(+), 3 deletions(-) create mode 100644 prelude/go/tools/cgo_wrapper.py diff --git 
a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 49526821a..0502cf0bf 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -91,13 +91,13 @@ def _cgo( go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] cmd = cmd_args( - go_toolchain.cgo, + go_toolchain.cgo_wrapper, + cmd_args(go_toolchain.cgo, format = "--cgo={}"), # TODO(agallagher): cgo outputs a dir with generated sources, but I'm not # sure how to pass in an output dir *and* enumerate the sources we know will # generated w/o v2 complaining that the output dir conflicts with the nested # artifacts. - cmd_args(go_srcs[0].as_output(), format = "-objdir={}", parent = 1), - "--", + cmd_args(go_srcs[0].as_output(), format = "--output={}/.."), srcs, hidden = [src.as_output() for src in go_srcs + c_headers + c_srcs], ) diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index f698fbaed..adf2f7c53 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -14,6 +14,7 @@ GoToolchainInfo = provider( "assembler_flags": provider_field(typing.Any, default = None), "c_compiler_flags": provider_field(typing.Any, default = None), "cgo": provider_field(RunInfo), + "cgo_wrapper": provider_field(RunInfo), "gen_stdlib_importcfg": provider_field(RunInfo), "go_list_wrapper": provider_field(RunInfo), "go_wrapper": provider_field(RunInfo), diff --git a/prelude/go/tools/BUCK.v2 b/prelude/go/tools/BUCK.v2 index fd0ba410a..6d6a5bfdc 100644 --- a/prelude/go/tools/BUCK.v2 +++ b/prelude/go/tools/BUCK.v2 @@ -6,6 +6,12 @@ prelude.python_bootstrap_binary( visibility = ["PUBLIC"], ) +prelude.python_bootstrap_binary( + name = "cgo_wrapper", + main = "cgo_wrapper.py", + visibility = ["PUBLIC"], +) + prelude.python_bootstrap_binary( name = "gen_stdlib_importcfg", main = "gen_stdlib_importcfg.py", diff --git a/prelude/go/tools/cgo_wrapper.py b/prelude/go/tools/cgo_wrapper.py new file mode 100644 index 000000000..498197362 --- /dev/null +++ b/prelude/go/tools/cgo_wrapper.py @@ -0,0 
+1,39 @@ +#!/usr/bin/env python3 +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. + +# pyre-unsafe + +import argparse +import os +import subprocess +import sys +from pathlib import Path + + +def main(argv): + parser = argparse.ArgumentParser(fromfile_prefix_chars="@") + parser.add_argument("--cgo", action="append", default=[]) + parser.add_argument("--output", required=True, type=Path) + parser.add_argument("srcs", type=Path, nargs="*") + args = parser.parse_args(argv[1:]) + + output = args.output.resolve(strict=False) + os.makedirs(output, exist_ok=True) + + env = os.environ.copy() + + cmd = [] + cmd.extend(args.cgo) + cmd.append(f"-objdir={output}") + cmd.append("--") + + cmd.extend(args.srcs) + return subprocess.call(cmd, env=env) + + +sys.exit(main(sys.argv)) diff --git a/prelude/toolchains/go.bzl b/prelude/toolchains/go.bzl index e969ef7a9..cf741b17e 100644 --- a/prelude/toolchains/go.bzl +++ b/prelude/toolchains/go.bzl @@ -35,6 +35,7 @@ def _system_go_toolchain_impl(ctx): GoToolchainInfo( assembler = RunInfo(cmd_script(ctx, "asm", cmd_args(go, "tool", "asm"), script_os)), cgo = RunInfo(cmd_script(ctx, "cgo", cmd_args(go, "tool", "cgo"), script_os)), + cgo_wrapper = ctx.attrs.cgo_wrapper[RunInfo], concat_files = ctx.attrs.concat_files[RunInfo], compiler = RunInfo(cmd_script(ctx, "compile", cmd_args(go, "tool", "compile"), script_os)), cover = RunInfo(cmd_script(ctx, "cover", cmd_args(go, "tool", "cover"), script_os)), @@ -63,6 +64,7 @@ system_go_toolchain = rule( visibility = ["PUBLIC"], )""", attrs = { + "cgo_wrapper": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:cgo_wrapper")), "concat_files": attrs.default_only(attrs.dep(providers = [RunInfo], default = 
"prelude//go/tools:concat_files")), "gen_stdlib_importcfg": attrs.default_only(attrs.dep(providers = [RunInfo], default = "prelude//go/tools:gen_stdlib_importcfg")), "go_list_wrapper": attrs.exec_dep(providers = [RunInfo], default = "prelude//go/tools:go_list_wrapper"), From 447bbcd1bea4eb9c979f059a6c9f281b25dbc0a4 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Thu, 4 Apr 2024 09:41:59 -0700 Subject: [PATCH 0721/1133] Return directory with generated sources from cgo rule Summary: Instead of returning all files, we return the directory, this will unblock merging cgo code with package_builder Reviewed By: leoleovich Differential Revision: D55700582 fbshipit-source-id: a89e432134f0bd10f36c1f4ad88a198adaccc60d --- prelude/go/cgo_library.bzl | 27 +++++++++++---------------- prelude/go/tools/cgo_wrapper.py | 2 ++ 2 files changed, 13 insertions(+), 16 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 0502cf0bf..aaf64152d 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -67,7 +67,7 @@ def _cgo( ctx: AnalysisContext, srcs: list[Artifact], own_pre: list[CPreprocessor], - inherited_pre: list[CPreprocessorInfo]) -> (list[Artifact], list[Artifact], list[Artifact]): + inherited_pre: list[CPreprocessorInfo]) -> (list[Artifact], list[Artifact], list[Artifact], Artifact): """ Run `cgo` on `.go` sources to generate Go, C, and C-Header sources. """ @@ -75,17 +75,17 @@ def _cgo( # If you change this dir or naming convention, please # update the corresponding logic in `fbgolist`. # Otherwise editing and linting for Go will break. 
- gen_dir = "cgo_gen" + gen_dir = ctx.actions.declare_output("cgo_gen", dir = True) go_srcs = [] c_headers = [] c_srcs = [] - go_srcs.append(ctx.actions.declare_output(paths.join(gen_dir, "_cgo_gotypes.go"))) - c_srcs.append(ctx.actions.declare_output(paths.join(gen_dir, "_cgo_export.c"))) - c_headers.append(ctx.actions.declare_output(paths.join(gen_dir, "_cgo_export.h"))) + go_srcs.append(gen_dir.project("_cgo_gotypes.go")) + c_srcs.append(gen_dir.project("_cgo_export.c")) + c_headers.append(gen_dir.project("_cgo_export.h")) for src in srcs: - go_srcs.append(ctx.actions.declare_output(paths.join(gen_dir, paths.replace_extension(src.basename, ".cgo1.go")))) - c_srcs.append(ctx.actions.declare_output(paths.join(gen_dir, paths.replace_extension(src.basename, ".cgo2.c")))) + go_srcs.append(gen_dir.project(paths.replace_extension(src.basename, ".cgo1.go"))) + c_srcs.append(gen_dir.project(paths.replace_extension(src.basename, ".cgo2.c"))) # Return a `cmd_args` to use as the generated sources. go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] @@ -93,13 +93,8 @@ def _cgo( cmd = cmd_args( go_toolchain.cgo_wrapper, cmd_args(go_toolchain.cgo, format = "--cgo={}"), - # TODO(agallagher): cgo outputs a dir with generated sources, but I'm not - # sure how to pass in an output dir *and* enumerate the sources we know will - # generated w/o v2 complaining that the output dir conflicts with the nested - # artifacts. 
- cmd_args(go_srcs[0].as_output(), format = "--output={}/.."), + cmd_args(gen_dir.as_output(), format = "--output={}"), srcs, - hidden = [src.as_output() for src in go_srcs + c_headers + c_srcs], ) env = get_toolchain_env_vars(go_toolchain) @@ -107,7 +102,7 @@ def _cgo( ctx.actions.run(cmd, env = env, category = "cgo") - return go_srcs, c_headers, c_srcs + return go_srcs, c_headers, c_srcs, gen_dir def _cxx_wrapper(ctx: AnalysisContext, own_pre: list[CPreprocessor], inherited_pre: list[CPreprocessorInfo]) -> cmd_args: pre = cxx_merge_cpreprocessors(ctx, own_pre, inherited_pre) @@ -163,7 +158,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: fail("unexpected extension: {}".format(src)) # Generate CGO and C sources. - go_srcs, c_headers, c_srcs = _cgo(ctx, cgo_srcs, [own_pre], inherited_pre) + go_srcs, c_headers, c_srcs, gen_dir = _cgo(ctx, cgo_srcs, [own_pre], inherited_pre) cxx_srcs.extend(c_srcs) # Wrap the generated CGO C headers in a CPreprocessor object for compiling. @@ -233,7 +228,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: # to work with cgo. And when nearly every FB service client is cgo, # we need to support it well. return [ - DefaultInfo(default_output = compiled_pkg.pkg, other_outputs = go_srcs), + DefaultInfo(default_output = compiled_pkg.pkg, other_outputs = [gen_dir]), GoPkgCompileInfo(pkgs = merge_pkgs([ pkgs, get_inherited_compile_pkgs(ctx.attrs.exported_deps), diff --git a/prelude/go/tools/cgo_wrapper.py b/prelude/go/tools/cgo_wrapper.py index 498197362..44f98ab63 100644 --- a/prelude/go/tools/cgo_wrapper.py +++ b/prelude/go/tools/cgo_wrapper.py @@ -23,6 +23,8 @@ def main(argv): args = parser.parse_args(argv[1:]) output = args.output.resolve(strict=False) + # the only reason we need this whapper is to create `-objdir`, + # because neither `go tool cgo` nor buck can create it. 
os.makedirs(output, exist_ok=True) env = os.environ.copy() From 500eef2a259b703be28a4c900dbfeeb124ef216c Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Thu, 4 Apr 2024 11:50:18 -0700 Subject: [PATCH 0722/1133] Parameterize typing.Callable in utils Summary: As example. Reviewed By: JakobDegen Differential Revision: D55727178 fbshipit-source-id: d713e9d0c9452014de29022c040e6f3e4e9c8d82 --- prelude/utils/expect.bzl | 2 +- prelude/utils/graph_utils.bzl | 8 ++++---- prelude/utils/utils.bzl | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/prelude/utils/expect.bzl b/prelude/utils/expect.bzl index 7635ac6f5..612a13808 100644 --- a/prelude/utils/expect.bzl +++ b/prelude/utils/expect.bzl @@ -43,7 +43,7 @@ def expect_non_none(val, msg: str = "unexpected none", *fmt_args, **fmt_kwargs): fail(msg.format(*fmt_args, **fmt_kwargs)) return val -def expect_type(name: str, check: typing.Callable, desc: str, val: typing.Any): +def expect_type(name: str, check: typing.Callable[[typing.Any], bool], desc: str, val: typing.Any): """Fails if check(val) if not truthy. name, desc are used for the error message. 
Usually you shouldn't need to directly use this, and prefer the expect_* family of functions diff --git a/prelude/utils/graph_utils.bzl b/prelude/utils/graph_utils.bzl index 3fadf06f6..4176160d0 100644 --- a/prelude/utils/graph_utils.bzl +++ b/prelude/utils/graph_utils.bzl @@ -9,7 +9,7 @@ load("@prelude//utils:expect.bzl", "expect") def pre_order_traversal( graph: dict[typing.Any, list[typing.Any]], - node_formatter: typing.Callable = str) -> list[typing.Any]: + node_formatter: typing.Callable[[typing.Any], str] = str) -> list[typing.Any]: """ Perform a pre-order (topologically sorted) traversal of `graph` and return the ordered nodes """ @@ -46,7 +46,7 @@ def pre_order_traversal( def post_order_traversal( graph: dict[typing.Any, list[typing.Any]], - node_formatter: typing.Callable = str) -> list[typing.Any]: + node_formatter: typing.Callable[[typing.Any], str] = str) -> list[typing.Any]: """ Performs a post-order traversal of `graph`. """ @@ -82,7 +82,7 @@ def post_order_traversal( def fail_cycle( graph: dict[typing.Any, list[typing.Any]], - node_formatter: typing.Callable) -> typing.Never: + node_formatter: typing.Callable[[typing.Any], str]) -> typing.Never: cycle = find_cycle(graph) if cycle: fail( @@ -182,7 +182,7 @@ def breadth_first_traversal_by( graph_nodes: [dict[typing.Any, typing.Any], None], roots: list[typing.Any], get_nodes_to_traverse_func: typing.Callable, - node_formatter: typing.Callable = str) -> list[typing.Any]: + node_formatter: typing.Callable[[typing.Any], str] = str) -> list[typing.Any]: """ Performs a breadth first traversal of `graph_nodes`, beginning with the `roots` and queuing the nodes returned by`get_nodes_to_traverse_func`. 
diff --git a/prelude/utils/utils.bzl b/prelude/utils/utils.bzl index cecc99d36..bbaf1b887 100644 --- a/prelude/utils/utils.bzl +++ b/prelude/utils/utils.bzl @@ -63,7 +63,7 @@ def idx(x: [typing.Any, None], key: typing.Any) -> [typing.Any, None]: def dedupe_by_value(vals: list[typing.Any]) -> list[typing.Any]: return {val: None for val in vals}.keys() -def map_val(func: typing.Callable, val: [typing.Any, None]) -> [typing.Any, None]: +def map_val(func: typing.Callable[[typing.Any], typing.Any], val: [typing.Any, None]) -> [typing.Any, None]: """ If `val` if `None`, return `None`, else apply `func` to `val` and return the result. From c0c748df66b05119240ebc755b9182ce673a7e1f Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Thu, 4 Apr 2024 14:37:58 -0700 Subject: [PATCH 0723/1133] dynamic_output(.as_output()) Summary: Following diff D55389251 makes error to pass artifact (not output artifact) to outputs parameter. Reviewed By: JakobDegen Differential Revision: D55747668 fbshipit-source-id: 89b32820022b17ebf61df8e1a7c00a2307969809 --- prelude/cxx/dist_lto/dist_lto.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/cxx/dist_lto/dist_lto.bzl b/prelude/cxx/dist_lto/dist_lto.bzl index cbab2f4d5..710d94747 100644 --- a/prelude/cxx/dist_lto/dist_lto.bzl +++ b/prelude/cxx/dist_lto/dist_lto.bzl @@ -611,7 +611,7 @@ def cxx_dist_link( ctx.actions.dynamic_output( dynamic = final_link_inputs, inputs = [], - outputs = [output.as_output()] + ([linker_map] if linker_map else []) + [linker_argsfile_out], + outputs = [output.as_output()] + ([linker_map.as_output()] if linker_map else []) + [linker_argsfile_out.as_output()], f = thin_lto_final_link, ) From c06230f7dff5136efa215107f1231ef08c609d16 Mon Sep 17 00:00:00 2001 From: Ivan Balaksha Date: Fri, 5 Apr 2024 01:44:16 -0700 Subject: [PATCH 0724/1133] Fix support for java_test.test_case_timeout_ms Summary: I was looking into ways to prevent problems similar to S398273(T181233028) and set timeout on test 
rule level, but found that all available options didn't work(`rule_timeout` in buckconfig, `test_rule_timeout_ms` on target level). After trying `test_case_timeout_ms` I found that it causing test listing failure when we start test run. Here is part of stacktrace: ``` STDOUT: STDERR:java.lang.ClassNotFoundException: --default_test_timeout at java.base/jdk.internal.loader.BuiltinClassLoader.loadClass(BuiltinClassLoader.java:641) at java.base/jdk.internal.loader.ClassLoaders$AppClassLoader.loadClass(ClassLoaders.java:188) at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:520) ``` This is caused by incorrect parameter name used in buck2 `--default_test_timeout` JunitRunner actually expects to see `--default-test-timeout` https://fburl.com/code/m9eq8ll6 Reviewed By: jselbo Differential Revision: D55754397 fbshipit-source-id: f93c88396c611156dac8ba63573697e1204ceeef --- prelude/java/java_test.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index 2a18e2294..e2d086d47 100644 --- a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -115,7 +115,7 @@ def build_junit_test( cmd.extend(java_test_toolchain.junit_test_runner_main_class_args) if ctx.attrs.test_case_timeout_ms: - cmd.extend(["--default_test_timeout", str(ctx.attrs.test_case_timeout_ms)]) + cmd.extend(["--default-test-timeout", str(ctx.attrs.test_case_timeout_ms)]) if ctx.attrs.test_class_names_file: class_names = ctx.attrs.test_class_names_file From a4a7e98c33a414afbd04896eadd895618ddb14bd Mon Sep 17 00:00:00 2001 From: Alexander Kampmann Date: Fri, 5 Apr 2024 02:57:54 -0700 Subject: [PATCH 0725/1133] allow preprocessing of instrumentation apks Summary: This allows us to preprocess jars before putting them into the instrumentation apk. This is required to be able to get jacoco coverage for classes in the instrumentation apk (e.g. the tests themselves). 
Reviewed By: IanChilds Differential Revision: D55691796 fbshipit-source-id: c458927a0a2dbd749684a32009b690f1bcf6c595 --- prelude/android/android.bzl | 1 + prelude/android/android_instrumentation_apk.bzl | 9 ++++++++- prelude/decls/android_rules.bzl | 3 +++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index db37dcf99..380b653da 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -151,6 +151,7 @@ extra_attributes = { "native_library_merge_sequence": attrs.option(attrs.list(attrs.any()), default = None), "_android_toolchain": toolchains_common.android(), "_dex_toolchain": toolchains_common.dex(), + "_exec_os_type": buck.exec_os_type_arg(), "_is_building_android_binary": attrs.default_only(attrs.bool(default = True)), "_is_force_single_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_CPU)), "_is_force_single_default_cpu": attrs.default_only(attrs.bool(default = FORCE_SINGLE_DEFAULT_CPU)), diff --git a/prelude/android/android_instrumentation_apk.bzl b/prelude/android/android_instrumentation_apk.bzl index d534f5e33..21e8128d1 100644 --- a/prelude/android/android_instrumentation_apk.bzl +++ b/prelude/android/android_instrumentation_apk.bzl @@ -12,6 +12,7 @@ load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidApkUnd load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") load("@prelude//android:configuration.bzl", "get_deps_by_platform") load("@prelude//android:dex_rules.bzl", "get_multi_dex", "get_single_primary_dex", "get_split_dex_merge_config", "merge_to_single_dex", "merge_to_split_dex") +load("@prelude//android:preprocess_java_classes.bzl", "get_preprocessed_java_classes") load("@prelude//android:util.bzl", "create_enhancement_context") load("@prelude//java:java_providers.bzl", "create_java_packaging_dep", "get_all_java_packaging_deps") load("@prelude//java/utils:java_utils.bzl", "get_class_to_source_map_info") @@ -65,6 
+66,8 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): for r_dot_java in resources_info.r_dot_java_infos ] + enhance_ctx = create_enhancement_context(ctx) + materialized_artifacts = [] if not ctx.attrs.disable_pre_dex: pre_dexed_libs = [java_packaging_dep.dex for java_packaging_dep in java_packaging_deps] if ctx.attrs.use_split_dex: @@ -79,6 +82,10 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): dex_files_info = merge_to_single_dex(ctx, android_toolchain, pre_dexed_libs) else: jars_to_owners = {packaging_dep.jar: packaging_dep.jar.owner.raw_target() for packaging_dep in java_packaging_deps} + if ctx.attrs.preprocess_java_classes_bash: + jars_to_owners, materialized_artifacts_dir = get_preprocessed_java_classes(enhance_ctx, jars_to_owners) + if materialized_artifacts_dir: + materialized_artifacts.append(materialized_artifacts_dir) if ctx.attrs.use_split_dex: dex_files_info = get_multi_dex( ctx, @@ -119,7 +126,7 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): ) return [ - AndroidApkInfo(apk = output_apk, manifest = resources_info.manifest), + AndroidApkInfo(apk = output_apk, materialized_artifacts = materialized_artifacts, manifest = resources_info.manifest), AndroidInstrumentationApkInfo(apk_under_test = ctx.attrs.apk[AndroidApkInfo].apk), DefaultInfo(default_output = output_apk, sub_targets = enhance_ctx.get_sub_targets()), class_to_srcs, diff --git a/prelude/decls/android_rules.bzl b/prelude/decls/android_rules.bzl index eeed2d27d..979ea125b 100644 --- a/prelude/decls/android_rules.bzl +++ b/prelude/decls/android_rules.bzl @@ -579,6 +579,9 @@ android_instrumentation_apk = prelude_rule( "licenses": attrs.list(attrs.source(), default = []), "use_split_dex": attrs.option(attrs.bool(), default = None), "primary_dex_patterns": attrs.list(attrs.string(), default = []), + "preprocess_java_classes_bash": attrs.option(attrs.arg(), default = None), + "preprocess_java_classes_cmd": attrs.option(attrs.arg(), default = None), + 
"preprocess_java_classes_deps": attrs.list(attrs.dep(), default = []), } ), ) From 94af878a105764a0548256db5bb4af0de213731c Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Fri, 5 Apr 2024 05:41:21 -0700 Subject: [PATCH 0726/1133] Fix stdlib CGO_* flags quoting Summary: Context https://fb.workplace.com/groups/codegophers/permalink/26034342366187699/ Reviewed By: leoleovich Differential Revision: D55761272 fbshipit-source-id: a47b39d25583ee86c429a5c02ae6e026b2225fdc --- prelude/go/go_stdlib.bzl | 13 +++-------- prelude/go/tools/go_wrapper.py | 42 +++++++++++++++++++++++++++++++++- 2 files changed, 44 insertions(+), 11 deletions(-) diff --git a/prelude/go/go_stdlib.bzl b/prelude/go/go_stdlib.bzl index 2bc411f27..d4d9f6a9a 100644 --- a/prelude/go/go_stdlib.bzl +++ b/prelude/go/go_stdlib.bzl @@ -29,16 +29,9 @@ def go_stdlib_impl(ctx: AnalysisContext) -> list[Provider]: if cxx_toolchain != None: c_compiler = cxx_toolchain.c_compiler_info - cgo_ldflags = cmd_args( - cxx_toolchain.linker_info.linker_flags, - go_toolchain.external_linker_flags, - quote = "shell", - ) - - env["CC"] = cmd_args(c_compiler.compiler, delimiter = " ", absolute_prefix = "%cwd%/") - env["CGO_CFLAGS"] = cmd_args(c_compiler.compiler_flags, delimiter = " ", absolute_prefix = "%cwd%/") - env["CGO_CPPFLAGS"] = cmd_args(c_compiler.preprocessor_flags, delimiter = " ", absolute_prefix = "%cwd%/") - env["CGO_LDFLAGS"] = cmd_args(cgo_ldflags, delimiter = " ", absolute_prefix = "%cwd%/") + env["CC"] = cmd_args(c_compiler.compiler, delimiter = " ", quote = "shell", absolute_prefix = "%cwd%/") + env["CGO_CFLAGS"] = cmd_args(c_compiler.compiler_flags, delimiter = " ", quote = "shell", absolute_prefix = "%cwd%/") + env["CGO_CPPFLAGS"] = cmd_args(c_compiler.preprocessor_flags, delimiter = " ", quote = "shell", absolute_prefix = "%cwd%/") cmd = cmd_args([ go_toolchain.go_wrapper, diff --git a/prelude/go/tools/go_wrapper.py b/prelude/go/tools/go_wrapper.py index 9a7b2d0fc..7320853d3 100644 --- 
a/prelude/go/tools/go_wrapper.py +++ b/prelude/go/tools/go_wrapper.py @@ -7,11 +7,46 @@ import argparse import os +import shlex import subprocess import sys from pathlib import Path +# A copy of "cmd/internal/quoted" translated into Python with GPT-4 +# Source: https://github.com/golang/go/blob/7e9894449e8a12157a28a4a14fc9341353a6469c/src/cmd/internal/quoted/quoted.go#L65 +def go_join(args): + buf = [] + for i, arg in enumerate(args): + if i > 0: + buf.append(" ") + saw_space, saw_single_quote, saw_double_quote = False, False, False + for c in arg: + if ord(c) > 127: + continue + elif c.isspace(): + saw_space = True + elif c == "'": + saw_single_quote = True + elif c == '"': + saw_double_quote = True + if not saw_space and not saw_single_quote and not saw_double_quote: + buf.append(arg) + elif not saw_single_quote: + buf.append("'") + buf.append(arg) + buf.append("'") + elif not saw_double_quote: + buf.append('"') + buf.append(arg) + buf.append('"') + else: + raise ValueError( + f"Argument {arg} contains both single and double quotes and cannot be quoted" + ) + return "".join(buf) + + def main(argv): """ This is a wrapper script around the `go` binary. @@ -36,7 +71,12 @@ def main(argv): cwd = os.getcwd() for env_var in ["CC", "CGO_CFLAGS", "CGO_CPPFLAGS", "CGO_LDFLAGS"]: if env_var in env: - env[env_var] = env[env_var].replace("%cwd%", cwd) + # HACK: Split the value into a list of arguments then join them back. + # This is because buck encodes quoted args in a way `go` doesn't like, + # but `go_join` does it in a way that `go` expects. + var_value = go_join(shlex.split(env[env_var])) + # HACK: Replace %cwd% with the current working directory to make it work when `go` does `cd` to a tmp-dir. 
+ env[env_var] = var_value.replace("%cwd%", cwd) return subprocess.call([wrapped_binary] + unknown, env=env) From 02f4cb77badc401f2d9ef76345218c2a1ac0b9a3 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Fri, 5 Apr 2024 09:07:42 -0700 Subject: [PATCH 0727/1133] Extract cgo code out of the rule code Summary: Extract all cgo related code outside of rule code to enable reusing it Reviewed By: leoleovich Differential Revision: D55741987 fbshipit-source-id: 330332bb223e7f5e6560977b5d93460cd636c1c6 --- prelude/go/cgo_builder.bzl | 170 +++++++++++++++++++++++++++++++++++++ prelude/go/cgo_library.bzl | 161 ++--------------------------------- 2 files changed, 175 insertions(+), 156 deletions(-) create mode 100644 prelude/go/cgo_builder.bzl diff --git a/prelude/go/cgo_builder.bzl b/prelude/go/cgo_builder.bzl new file mode 100644 index 000000000..29cf66ac3 --- /dev/null +++ b/prelude/go/cgo_builder.bzl @@ -0,0 +1,170 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//:paths.bzl", "paths") +load( + "@prelude//apple:xcode.bzl", + "get_project_root_file", +) +load( + "@prelude//cxx:compile.bzl", + "CxxSrcWithFlags", # @unused Used as a type +) +load("@prelude//cxx:cxx_library.bzl", "cxx_compile_srcs") +load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_types.bzl", + "CxxRuleConstructorParams", # @unused Used as a type +) +load("@prelude//cxx:headers.bzl", "cxx_get_regular_cxx_headers_layout", "prepare_headers") +load( + "@prelude//cxx:preprocessor.bzl", + "CPreprocessor", + "CPreprocessorArgs", + "CPreprocessorInfo", + "cxx_inherited_preprocessor_infos", + "cxx_merge_cpreprocessors", + "cxx_private_preprocessor_info", +) +load( + "@prelude//linking:link_info.bzl", + "LinkStyle", +) +load("@prelude//linking:types.bzl", "Linkage") +load("@prelude//os_lookup:defs.bzl", "OsLookup") +load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script") +load("@prelude//utils:expect.bzl", "expect") +load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") + +# A map of expected linkages for provided link style +_LINKAGE_FOR_LINK_STYLE = { + LinkStyle("static"): Linkage("static"), + LinkStyle("static_pic"): Linkage("static"), + LinkStyle("shared"): Linkage("shared"), +} + +def _cgo( + ctx: AnalysisContext, + srcs: list[Artifact], + own_pre: list[CPreprocessor], + inherited_pre: list[CPreprocessorInfo]) -> (list[Artifact], list[Artifact], list[Artifact], Artifact): + """ + Run `cgo` on `.go` sources to generate Go, C, and C-Header sources. + """ + + # If you change this dir or naming convention, please + # update the corresponding logic in `fbgolist`. + # Otherwise editing and linting for Go will break. 
+ gen_dir = ctx.actions.declare_output("cgo_gen", dir = True) + + go_srcs = [] + c_headers = [] + c_srcs = [] + go_srcs.append(gen_dir.project("_cgo_gotypes.go")) + c_srcs.append(gen_dir.project("_cgo_export.c")) + c_headers.append(gen_dir.project("_cgo_export.h")) + for src in srcs: + go_srcs.append(gen_dir.project(paths.replace_extension(src.basename, ".cgo1.go"))) + c_srcs.append(gen_dir.project(paths.replace_extension(src.basename, ".cgo2.c"))) + + # Return a `cmd_args` to use as the generated sources. + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + + cmd = cmd_args( + go_toolchain.cgo_wrapper, + cmd_args(go_toolchain.cgo, format = "--cgo={}"), + cmd_args(gen_dir.as_output(), format = "--output={}"), + srcs, + ) + + env = get_toolchain_env_vars(go_toolchain) + env["CC"] = _cxx_wrapper(ctx, own_pre, inherited_pre) + + ctx.actions.run(cmd, env = env, category = "cgo") + + return go_srcs, c_headers, c_srcs, gen_dir + +def _cxx_wrapper(ctx: AnalysisContext, own_pre: list[CPreprocessor], inherited_pre: list[CPreprocessorInfo]) -> cmd_args: + pre = cxx_merge_cpreprocessors(ctx, own_pre, inherited_pre) + pre_args = pre.set.project_as_args("args") + pre_include_dirs = pre.set.project_as_args("include_dirs") + + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + expect(CxxToolchainInfo in ctx.attrs._cxx_toolchain) + cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] + + c_compiler = cxx_toolchain.c_compiler_info + + # Construct the full C/C++ command needed to preprocess/compile sources. + cxx_cmd = cmd_args( + c_compiler.compiler, + c_compiler.preprocessor_flags, + c_compiler.compiler_flags, + pre_args, + pre_include_dirs, + go_toolchain.c_compiler_flags, + ) + + # Wrap the C/C++ command in a wrapper script to avoid arg length limits. 
+ return cmd_script( + ctx = ctx, + name = "cxx_wrapper", + cmd = cxx_cmd, + os = ScriptOs("windows" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "unix"), + ) + +def build_cgo(ctx: AnalysisContext, cgo_files: list[Artifact], c_files: list[Artifact]) -> (list[Artifact], list[Artifact], Artifact | None): + if len(cgo_files) == 0: + return [], [], None + + project_root_file = get_project_root_file(ctx) + + # Gather preprocessor inputs. + (own_pre, _) = cxx_private_preprocessor_info( + ctx, + cxx_get_regular_cxx_headers_layout(ctx), + project_root_file = project_root_file, + ) + inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) + + # Separate sources into C++ and GO sources. + go_gen_srcs, c_gen_headers, c_gen_srcs, gen_dir = _cgo(ctx, cgo_files, [own_pre], inherited_pre) + + # Wrap the generated CGO C headers in a CPreprocessor object for compiling. + cgo_headers_pre = CPreprocessor(relative_args = CPreprocessorArgs(args = [ + "-I", + prepare_headers( + ctx, + {h.basename: h for h in c_gen_headers}, + "cgo-private-headers", + None, + ).include_path, + ])) + + link_style = ctx.attrs.link_style + if link_style == None: + link_style = "static" + linkage = _LINKAGE_FOR_LINK_STYLE[LinkStyle(link_style)] + + # Copmile C++ sources into object files. + c_compile_cmds = cxx_compile_srcs( + ctx, + CxxRuleConstructorParams( + rule_type = "cgo_library", + headers_layout = cxx_get_regular_cxx_headers_layout(ctx), + srcs = [CxxSrcWithFlags(file = src) for src in c_files + c_gen_srcs], + ), + # Create private header tree and propagate via args. 
+ [own_pre, cgo_headers_pre], + inherited_pre, + [], + linkage, + ) + + compiled_objects = c_compile_cmds.pic.objects + + return go_gen_srcs, compiled_objects, gen_dir diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index aaf64152d..450c89a81 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -5,34 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//:paths.bzl", "paths") -load( - "@prelude//apple:xcode.bzl", - "get_project_root_file", -) -load( - "@prelude//cxx:compile.bzl", - "CxxSrcWithFlags", # @unused Used as a type -) -load("@prelude//cxx:cxx_library.bzl", "cxx_compile_srcs") -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") -load( - "@prelude//cxx:cxx_types.bzl", - "CxxRuleConstructorParams", # @unused Used as a type -) -load("@prelude//cxx:headers.bzl", "cxx_get_regular_cxx_headers_layout", "prepare_headers") -load( - "@prelude//cxx:preprocessor.bzl", - "CPreprocessor", - "CPreprocessorArgs", - "CPreprocessorInfo", - "cxx_inherited_preprocessor_infos", - "cxx_merge_cpreprocessors", - "cxx_private_preprocessor_info", -) load( "@prelude//linking:link_info.bzl", - "LinkStyle", "MergedLinkInfo", "create_merged_link_info_for_propagation", ) @@ -41,111 +15,20 @@ load( "SharedLibraryInfo", "merge_shared_libraries", ) -load("@prelude//linking:types.bzl", "Linkage") -load("@prelude//os_lookup:defs.bzl", "OsLookup") -load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script") -load("@prelude//utils:expect.bzl", "expect") load( "@prelude//utils:utils.bzl", "map_idx", ) +load(":cgo_builder.bzl", "build_cgo") load(":compile.bzl", "GoPkgCompileInfo", "get_inherited_compile_pkgs") load(":coverage.bzl", "GoCoverageMode") load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs") load(":package_builder.bzl", "build_package") load(":packages.bzl", "GoPkg", "go_attr_pkg_name", "merge_pkgs") -load(":toolchain.bzl", "GoToolchainInfo", 
"get_toolchain_env_vars") - -# A map of expected linkages for provided link style -_LINKAGE_FOR_LINK_STYLE = { - LinkStyle("static"): Linkage("static"), - LinkStyle("static_pic"): Linkage("static"), - LinkStyle("shared"): Linkage("shared"), -} - -def _cgo( - ctx: AnalysisContext, - srcs: list[Artifact], - own_pre: list[CPreprocessor], - inherited_pre: list[CPreprocessorInfo]) -> (list[Artifact], list[Artifact], list[Artifact], Artifact): - """ - Run `cgo` on `.go` sources to generate Go, C, and C-Header sources. - """ - - # If you change this dir or naming convention, please - # update the corresponding logic in `fbgolist`. - # Otherwise editing and linting for Go will break. - gen_dir = ctx.actions.declare_output("cgo_gen", dir = True) - - go_srcs = [] - c_headers = [] - c_srcs = [] - go_srcs.append(gen_dir.project("_cgo_gotypes.go")) - c_srcs.append(gen_dir.project("_cgo_export.c")) - c_headers.append(gen_dir.project("_cgo_export.h")) - for src in srcs: - go_srcs.append(gen_dir.project(paths.replace_extension(src.basename, ".cgo1.go"))) - c_srcs.append(gen_dir.project(paths.replace_extension(src.basename, ".cgo2.c"))) - - # Return a `cmd_args` to use as the generated sources. 
- go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - - cmd = cmd_args( - go_toolchain.cgo_wrapper, - cmd_args(go_toolchain.cgo, format = "--cgo={}"), - cmd_args(gen_dir.as_output(), format = "--output={}"), - srcs, - ) - - env = get_toolchain_env_vars(go_toolchain) - env["CC"] = _cxx_wrapper(ctx, own_pre, inherited_pre) - - ctx.actions.run(cmd, env = env, category = "cgo") - - return go_srcs, c_headers, c_srcs, gen_dir - -def _cxx_wrapper(ctx: AnalysisContext, own_pre: list[CPreprocessor], inherited_pre: list[CPreprocessorInfo]) -> cmd_args: - pre = cxx_merge_cpreprocessors(ctx, own_pre, inherited_pre) - pre_args = pre.set.project_as_args("args") - pre_include_dirs = pre.set.project_as_args("include_dirs") - - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - expect(CxxToolchainInfo in ctx.attrs._cxx_toolchain) - cxx_toolchain = ctx.attrs._cxx_toolchain[CxxToolchainInfo] - - c_compiler = cxx_toolchain.c_compiler_info - - # Construct the full C/C++ command needed to preprocess/compile sources. - cxx_cmd = cmd_args( - c_compiler.compiler, - c_compiler.preprocessor_flags, - c_compiler.compiler_flags, - pre_args, - pre_include_dirs, - go_toolchain.c_compiler_flags, - ) - - # Wrap the C/C++ command in a wrapper script to avoid arg length limits. - return cmd_script( - ctx = ctx, - name = "cxx_wrapper", - cmd = cxx_cmd, - os = ScriptOs("windows" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "unix"), - ) def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: pkg_name = go_attr_pkg_name(ctx) - project_root_file = get_project_root_file(ctx) - - # Gather preprocessor inputs. - (own_pre, _) = cxx_private_preprocessor_info( - ctx, - cxx_get_regular_cxx_headers_layout(ctx), - project_root_file = project_root_file, - ) - inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) - # Separate sources into C++ and CGO sources. 
cgo_srcs = [] cxx_srcs = [] @@ -158,41 +41,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: fail("unexpected extension: {}".format(src)) # Generate CGO and C sources. - go_srcs, c_headers, c_srcs, gen_dir = _cgo(ctx, cgo_srcs, [own_pre], inherited_pre) - cxx_srcs.extend(c_srcs) - - # Wrap the generated CGO C headers in a CPreprocessor object for compiling. - cgo_headers_pre = CPreprocessor(relative_args = CPreprocessorArgs(args = [ - "-I", - prepare_headers( - ctx, - {h.basename: h for h in c_headers}, - "cgo-private-headers", - None, - ).include_path, - ])) - - link_style = ctx.attrs.link_style - if link_style == None: - link_style = "static" - linkage = _LINKAGE_FOR_LINK_STYLE[LinkStyle(link_style)] - - # Copmile C++ sources into object files. - c_compile_cmds = cxx_compile_srcs( - ctx, - CxxRuleConstructorParams( - rule_type = "cgo_library", - headers_layout = cxx_get_regular_cxx_headers_layout(ctx), - srcs = [CxxSrcWithFlags(file = src) for src in cxx_srcs], - ), - # Create private header tree and propagate via args. - [own_pre, cgo_headers_pre], - inherited_pre, - [], - linkage, - ) - - compiled_objects = c_compile_cmds.pic.objects + go_gen_files, o_files, gen_dir = build_cgo(ctx, cgo_srcs, cxx_srcs) shared = ctx.attrs._compile_shared race = ctx.attrs._race @@ -205,8 +54,8 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx.attrs.go_srcs, package_root = ctx.attrs.package_root, deps = ctx.attrs.deps + ctx.attrs.exported_deps, - compiled_objects = compiled_objects, - extra_go_files = go_srcs, + compiled_objects = o_files, + extra_go_files = go_gen_files, shared = shared, race = race, coverage_mode = coverage_mode, @@ -228,7 +77,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: # to work with cgo. And when nearly every FB service client is cgo, # we need to support it well. 
return [ - DefaultInfo(default_output = compiled_pkg.pkg, other_outputs = [gen_dir]), + DefaultInfo(default_output = compiled_pkg.pkg, other_outputs = [gen_dir] if gen_dir else []), GoPkgCompileInfo(pkgs = merge_pkgs([ pkgs, get_inherited_compile_pkgs(ctx.attrs.exported_deps), From 63843d2deddafba992488a488904dcb3a7e0f312 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Fri, 5 Apr 2024 10:31:06 -0700 Subject: [PATCH 0728/1133] propagate tbd interfaces via SharedInterfaceLinkable Summary: Add a SharedInterfaceLinkable provider to propagate tbd interface files from deps within a link unit. Merge them when creating shared library interfaces. Reviewed By: blackm00n Differential Revision: D55699498 fbshipit-source-id: e72e6aa5236e21e57dc3e65c97cb04d2d5a86174 --- prelude/cxx/cxx_library.bzl | 83 ++++++++++++++++-------- prelude/cxx/shared_library_interface.bzl | 24 ++++++- prelude/linking/link_info.bzl | 41 +++++++++++- 3 files changed, 117 insertions(+), 31 deletions(-) diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 6794f6b97..677cc334d 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -67,6 +67,7 @@ load( "LinkStrategy", "LinkedObject", # @unused Used as a type "ObjectsLinkable", + "SharedInterfaceLinkable", "SharedLibLinkable", "SwiftRuntimeLinkable", # @unused Used as a type "SwiftmoduleLinkable", # @unused Used as a type @@ -198,6 +199,7 @@ load( load( ":shared_library_interface.bzl", "create_tbd", + "merge_tbds", "shared_library_interface", ) @@ -439,18 +441,25 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc providers.append(comp_db_info) # TBD generation is done per-target for stub_from_headers mode and collected at link time. 
- if cxx_use_shlib_intfs_mode(ctx, ShlibInterfacesMode("stub_from_headers")): - if impl_params.shared_library_interface_target == None: - fail("tbd generation requires setting the cxx constructor param 'shared_library_interface_target'") - - tbd = create_tbd( + shared_interface_linkable = None + tbd_output = None + if impl_params.shared_library_interface_target and \ + cxx_use_shlib_intfs_mode(ctx, ShlibInterfacesMode("stub_from_headers")): + tbd_output = create_tbd( ctx, cxx_attr_exported_headers(ctx, impl_params.headers_layout), own_exported_preprocessor_info, inherited_exported_preprocessor_infos, impl_params.shared_library_interface_target, ) - sub_targets["tbd"] = [tbd] + sub_targets["tbd"] = [DefaultInfo(default_output = tbd_output)] + shared_interface_linkable = SharedInterfaceLinkable( + interfaces = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = [tbd_output], + ), + ) # Link Groups link_group = get_link_group(ctx) @@ -516,6 +525,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc extra_static_linkables = extra_static_linkables, gnu_use_link_groups = cxx_is_gnu(ctx) and bool(link_group_mappings), link_execution_preference = link_execution_preference, + tbd_output = tbd_output, ) solib_as_dict = {library_outputs.solib[0]: library_outputs.solib[1]} if library_outputs.solib else {} @@ -611,6 +621,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # Export link info from out (exported) deps. 
exported_deps = inherited_exported_link, frameworks_linkable = frameworks_linkable, + shared_interfaces_linkable = shared_interface_linkable, swiftmodule_linkable = swiftmodule_linkable, swift_runtime_linkable = swift_runtime_linkable, ) @@ -648,6 +659,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc pic_behavior = pic_behavior, preferred_linkage = Linkage("static"), frameworks_linkable = frameworks_linkable, + shared_interfaces_linkable = shared_interface_linkable, swiftmodule_linkable = swiftmodule_linkable, ), LinkGroupLibInfo(libs = {}), SharedLibraryInfo(set = None)] + additional_providers @@ -951,7 +963,8 @@ def _form_library_outputs( dep_infos: LinkArgs, extra_static_linkables: list[[FrameworksLinkable, SwiftmoduleLinkable, SwiftRuntimeLinkable]], gnu_use_link_groups: bool, - link_execution_preference: LinkExecutionPreference) -> _CxxAllLibraryOutputs: + link_execution_preference: LinkExecutionPreference, + tbd_output: [Artifact, None]) -> _CxxAllLibraryOutputs: # Build static/shared libs and the link info we use to export them to dependents. 
outputs = {} solib = None @@ -1036,16 +1049,18 @@ def _form_library_outputs( ) extra_linker_flags, extra_linker_outputs = impl_params.extra_linker_outputs_factory(ctx) + result = _shared_library( - ctx, - impl_params, - compiled_srcs.pic.objects, - external_debug_info, - dep_infos, - gnu_use_link_groups, + ctx = ctx, + impl_params = impl_params, + objects = compiled_srcs.pic.objects, + external_debug_info = external_debug_info, + dep_infos = dep_infos, + gnu_use_link_groups = gnu_use_link_groups, extra_linker_flags = extra_linker_flags, link_ordering = map_val(LinkOrdering, ctx.attrs.link_ordering), link_execution_preference = link_execution_preference, + tbd_output = tbd_output, ) shlib = result.link_result.linked_object info = result.info @@ -1374,7 +1389,8 @@ def _shared_library( gnu_use_link_groups: bool, extra_linker_flags: list[ArgLike], link_execution_preference: LinkExecutionPreference, - link_ordering: [LinkOrdering, None] = None) -> _CxxSharedLibraryResult: + link_ordering: [LinkOrdering, None], + tbd_output: [Artifact, None]) -> _CxxSharedLibraryResult: """ Generate a shared library and the associated native link info used by dependents to link against it. @@ -1443,10 +1459,31 @@ def _shared_library( if cxx_use_shlib_intfs(ctx): mode = get_cxx_toolchain_info(ctx).linker_info.shlib_interfaces if mode == ShlibInterfacesMode("stub_from_library"): - shlib_for_interface = exported_shlib + # Generate a library interface from the linked library output. + # This will prevent relinking rdeps when changes do not affect + # the library symbols. + exported_shlib = shared_library_interface( + ctx = ctx, + shared_lib = exported_shlib, + ) elif mode == ShlibInterfacesMode("stub_from_headers"): - # TODO: collect tbd output from providers and merge - shlib_for_interface = None + # Generate a library interface from its deps exported_headers. + # This will allow for linker parallelisation as we do not have + # to wait for dependent libraries to link. 
+ # If the tbd output is missing this is a non apple_library target, + # so skip producing the interface. + if tbd_output != None: + # collect tbd output from providers and merge + all_deps = dedupe(cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx)) + deps_merged_link_infos = cxx_inherited_link_info(all_deps) + children = [li.shared_interfaces[LinkStrategy("shared")].interfaces for li in deps_merged_link_infos] + tbd_set = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = [tbd_output], + children = children, + ) + exported_shlib = merge_tbds(ctx, tbd_set) elif not gnu_use_link_groups: # TODO(agallagher): There's a bug in shlib intfs interacting with link # groups, where we don't include the symbols we're meant to export from @@ -1478,19 +1515,11 @@ def _shared_library( ), name = soname, ) - shlib_for_interface = intf_link_result.linked_object.output - else: - shlib_for_interface = None - - if shlib_for_interface: - # Convert the shared library into an interface. - shlib_interface = shared_library_interface( + exported_shlib = shared_library_interface( ctx = ctx, - shared_lib = shlib_for_interface, + shared_lib = intf_link_result.linked_object.output, ) - exported_shlib = shlib_interface - # Link against import library on Windows. if link_result.linked_object.import_library: exported_shlib = link_result.linked_object.import_library diff --git a/prelude/cxx/shared_library_interface.bzl b/prelude/cxx/shared_library_interface.bzl index 782d37038..0d7e179ad 100644 --- a/prelude/cxx/shared_library_interface.bzl +++ b/prelude/cxx/shared_library_interface.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//:artifact_tset.bzl", "ArtifactTSet", "project_artifacts") load("@prelude//:paths.bzl", "paths") load("@prelude//cxx:preprocessor.bzl", "CPreprocessor", "CPreprocessorInfo") load(":cxx_context.bzl", "get_cxx_toolchain_info") @@ -83,7 +84,7 @@ def shared_library_interface( identifier = shared_lib.short_path, ) -def create_tbd(ctx: AnalysisContext, exported_headers: list[CHeader], exported_preprocessor: CPreprocessor, transitive_preprocessor: list[CPreprocessorInfo], target: str) -> DefaultInfo: +def create_tbd(ctx: AnalysisContext, exported_headers: list[CHeader], exported_preprocessor: CPreprocessor, transitive_preprocessor: list[CPreprocessorInfo], target: str) -> Artifact: # Use the c++ compiler to correctly generate c++ symbols. compiler_info = get_cxx_toolchain_info(ctx).cxx_compiler_info @@ -133,4 +134,23 @@ def create_tbd(ctx: AnalysisContext, exported_headers: list[CHeader], exported_p identifier = ctx.attrs.name, ) - return DefaultInfo(default_output = tbd_file) + return tbd_file + +def merge_tbds(ctx: AnalysisContext, tbd_set: ArtifactTSet) -> Artifact: + # Run the shlib interface tool with the merge command + tbd_file = ctx.actions.declare_output( + paths.join("__tbd__", ctx.attrs.name + ".merged.tbd"), + ) + args = cmd_args(get_cxx_toolchain_info(ctx).linker_info.mk_shlib_intf[RunInfo]) + args.add([ + "merge", + project_artifacts(ctx.actions, [tbd_set]), + "-o", + tbd_file.as_output(), + ]) + ctx.actions.run( + args, + category = "merge_tbd", + identifier = ctx.attrs.name, + ) + return tbd_file diff --git a/prelude/linking/link_info.bzl b/prelude/linking/link_info.bzl index feb35d1ee..6062a4aab 100644 --- a/prelude/linking/link_info.bzl +++ b/prelude/linking/link_info.bzl @@ -123,6 +123,12 @@ FrameworksLinkable = record( library_names = field(list[str], []), ) +# The transitive artifacts of partial shared interface for a linkable. +# These need to be collected and merged to produce the final shared interface. 
+SharedInterfaceLinkable = record( + interfaces = field(ArtifactTSet, ArtifactTSet()), +) + SwiftmoduleLinkable = record( swiftmodules = field(ArtifactTSet, ArtifactTSet()), ) @@ -245,7 +251,10 @@ def append_linkable_args(args: cmd_args, linkable: LinkableTypes): args.add(get_objects_as_library_args(linkable.linker_type, linkable.objects)) else: args.add(linkable.objects) - elif isinstance(linkable, FrameworksLinkable) or isinstance(linkable, SwiftRuntimeLinkable) or isinstance(linkable, SwiftmoduleLinkable): + elif isinstance(linkable, FrameworksLinkable) or \ + isinstance(linkable, SharedInterfaceLinkable) or \ + isinstance(linkable, SwiftRuntimeLinkable) or \ + isinstance(linkable, SwiftmoduleLinkable): # These flags are handled separately so they can be deduped. # # We've seen in apps with larger dependency graphs that failing @@ -285,7 +294,10 @@ def link_info_filelist(value: LinkInfo) -> list[Artifact]: elif isinstance(linkable, ObjectsLinkable): if linkable.linker_type == "darwin": filelists += linkable.objects - elif isinstance(linkable, FrameworksLinkable) or isinstance(linkable, SwiftRuntimeLinkable) or isinstance(linkable, SwiftmoduleLinkable): + elif isinstance(linkable, FrameworksLinkable) or \ + isinstance(linkable, SharedInterfaceLinkable) or \ + isinstance(linkable, SwiftRuntimeLinkable) or \ + isinstance(linkable, SwiftmoduleLinkable): pass else: fail("Encountered unhandled linkable {}".format(str(linkable))) @@ -434,6 +446,7 @@ MergedLinkInfo = provider(fields = [ # To save on repeated computation of transitive LinkInfos, we store a dedupped # structure, based on the link-style. "frameworks", # dict[LinkStrategy, FrameworksLinkable | None] + "shared_interfaces", # dict[LinkStrategy, SharedInterfaceLinkable | None] "swiftmodules", # dict[LinkStrategy, SwiftmoduleLinkable | None] "swift_runtime", # dict[LinkStrategy, SwiftRuntimeLinkable | None] ]) @@ -478,6 +491,7 @@ def create_merged_link_info( # Link info to always propagate from exported deps. 
exported_deps: list[MergedLinkInfo] = [], frameworks_linkable: [FrameworksLinkable, None] = None, + shared_interfaces_linkable: [SharedInterfaceLinkable, None] = None, swiftmodule_linkable: [SwiftmoduleLinkable, None] = None, swift_runtime_linkable: [SwiftRuntimeLinkable, None] = None) -> MergedLinkInfo: """ @@ -487,6 +501,7 @@ def create_merged_link_info( infos = {} external_debug_info = {} frameworks = {} + shared_interfaces = {} swift_runtime = {} swiftmodules = {} @@ -499,6 +514,7 @@ def create_merged_link_info( children = [] external_debug_info_children = [] framework_linkables = [] + shared_interface_linkables = [] swift_runtime_linkables = [] swiftmodule_linkables = [] @@ -512,6 +528,9 @@ def create_merged_link_info( framework_linkables.append(frameworks_linkable) framework_linkables += [dep_info.frameworks[link_strategy] for dep_info in exported_deps] + shared_interface_linkables.append(shared_interfaces_linkable) + shared_interface_linkables += [dep_info.shared_interfaces[link_strategy] for dep_info in exported_deps] + swiftmodule_linkables.append(swiftmodule_linkable) swiftmodule_linkables += [dep_info.swiftmodules[link_strategy] for dep_info in exported_deps] @@ -527,7 +546,9 @@ def create_merged_link_info( value = dep_info._external_debug_info.get(link_strategy) if value: external_debug_info_children.append(value) + framework_linkables.append(dep_info.frameworks[link_strategy]) + shared_interface_linkables.append(dep_info.shared_interfaces[link_strategy]) swiftmodule_linkables.append(dep_info.swiftmodules[link_strategy]) swift_runtime_linkables.append(dep_info.swift_runtime[link_strategy]) @@ -541,6 +562,7 @@ def create_merged_link_info( external_debug_info_children.append(value) frameworks[link_strategy] = merge_framework_linkables(framework_linkables) + shared_interfaces[link_strategy] = merge_shared_interface_linkables(ctx, shared_interface_linkables) swift_runtime[link_strategy] = merge_swift_runtime_linkables(swift_runtime_linkables) 
swiftmodules[link_strategy] = merge_swiftmodule_linkables(ctx, swiftmodule_linkables) @@ -567,6 +589,7 @@ def create_merged_link_info( _infos = infos, _external_debug_info = external_debug_info, frameworks = frameworks, + shared_interfaces = shared_interfaces, swift_runtime = swift_runtime, swiftmodules = swiftmodules, ) @@ -582,6 +605,7 @@ def create_merged_link_info_for_propagation( merged = {} merged_external_debug_info = {} frameworks = {} + shared_interfaces = {} swift_runtime = {} swiftmodules = {} for link_strategy in LinkStrategy: @@ -595,12 +619,14 @@ def create_merged_link_info_for_propagation( children = filter(None, [x._external_debug_info.get(link_strategy) for x in xs]), ) frameworks[link_strategy] = merge_framework_linkables([x.frameworks[link_strategy] for x in xs]) + shared_interfaces[link_strategy] = merge_shared_interface_linkables(ctx, [x.shared_interfaces[link_strategy] for x in xs]) swift_runtime[link_strategy] = merge_swift_runtime_linkables([x.swift_runtime[link_strategy] for x in xs]) swiftmodules[link_strategy] = merge_swiftmodule_linkables(ctx, [x.swiftmodules[link_strategy] for x in xs]) return MergedLinkInfo( _infos = merged, _external_debug_info = merged_external_debug_info, frameworks = frameworks, + shared_interfaces = shared_interfaces, swift_runtime = swift_runtime, swiftmodules = swiftmodules, ) @@ -859,6 +885,17 @@ def merge_framework_linkables(linkables: list[[FrameworksLinkable, None]]) -> Fr library_names = unique_library_names.keys(), ) +def merge_shared_interface_linkables(ctx: AnalysisContext, linkables: list[[SharedInterfaceLinkable, None]]) -> SharedInterfaceLinkable: + return SharedInterfaceLinkable(interfaces = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + children = [ + linkable.interfaces + for linkable in linkables + if linkable != None + ], + )) + def merge_swiftmodule_linkables(ctx: AnalysisContext, linkables: list[[SwiftmoduleLinkable, None]]) -> SwiftmoduleLinkable: return 
SwiftmoduleLinkable(swiftmodules = make_artifact_tset( actions = ctx.actions, From 187a378ed490bf81410a5be7ebb7a30843ecc7d2 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Fri, 5 Apr 2024 10:31:06 -0700 Subject: [PATCH 0729/1133] implement tbd merging Summary: Implement tbd merging for shared library interface linking. This will allow for linking based on header generated tbd files, improving linker parallelism. Merging of Swift tbd files is yet to be implemented. Reviewed By: blackm00n Differential Revision: D55712048 fbshipit-source-id: b7e2c35ddecf93c2a5dadeb97d8555564208fb81 --- prelude/cxx/cxx_library.bzl | 2 +- prelude/cxx/shared_library_interface.bzl | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 677cc334d..9c41ab85f 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -1483,7 +1483,7 @@ def _shared_library( artifacts = [tbd_output], children = children, ) - exported_shlib = merge_tbds(ctx, tbd_set) + exported_shlib = merge_tbds(ctx, soname, tbd_set) elif not gnu_use_link_groups: # TODO(agallagher): There's a bug in shlib intfs interacting with link # groups, where we don't include the symbols we're meant to export from diff --git a/prelude/cxx/shared_library_interface.bzl b/prelude/cxx/shared_library_interface.bzl index 0d7e179ad..74a0d3aba 100644 --- a/prelude/cxx/shared_library_interface.bzl +++ b/prelude/cxx/shared_library_interface.bzl @@ -136,7 +136,7 @@ def create_tbd(ctx: AnalysisContext, exported_headers: list[CHeader], exported_p return tbd_file -def merge_tbds(ctx: AnalysisContext, tbd_set: ArtifactTSet) -> Artifact: +def merge_tbds(ctx: AnalysisContext, soname: str, tbd_set: ArtifactTSet) -> Artifact: # Run the shlib interface tool with the merge command tbd_file = ctx.actions.declare_output( paths.join("__tbd__", ctx.attrs.name + ".merged.tbd"), @@ -144,6 +144,8 @@ def merge_tbds(ctx: AnalysisContext, tbd_set: ArtifactTSet) -> 
Artifact: args = cmd_args(get_cxx_toolchain_info(ctx).linker_info.mk_shlib_intf[RunInfo]) args.add([ "merge", + "-install_name", + "@rpath/" + soname, project_artifacts(ctx.actions, [tbd_set]), "-o", tbd_file.as_output(), From 94c2c5683d0e0b2b827eb71fadd6a42c8aba2400 Mon Sep 17 00:00:00 2001 From: Edson Moraes Menegatti Date: Fri, 5 Apr 2024 11:55:10 -0700 Subject: [PATCH 0730/1133] Move 'parse_src_roots' to @prelude//java:java_resources.bzl Summary: Move `parse_src_roots` to a new location which is synced onto the WhatsApp repo as well Reviewed By: IanChilds Differential Revision: D55768022 fbshipit-source-id: 3e2c6bdce985b24825c6aa767a1ff13429a62852 --- prelude/java/java_resources.bzl | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/prelude/java/java_resources.bzl b/prelude/java/java_resources.bzl index 4916d5891..fbab61e4c 100644 --- a/prelude/java/java_resources.bzl +++ b/prelude/java/java_resources.bzl @@ -57,3 +57,18 @@ def get_resources_map( resource_name = get_src_package(java_toolchain.src_root_prefixes, java_toolchain.src_root_elements, full_resource) resources_to_copy[resource_name] = resource return resources_to_copy + +def parse_src_roots(src_roots: list[str]) -> (list[str], list[str]): + prefixes = [] + elements = [] + for src_root in src_roots: + if src_root.startswith("/"): + if not src_root.endswith("/"): + fail("Elements in java.src_roots config that begin with a / must end in one too, but {} does not".format(src_root)) + prefixes.append(src_root[1:]) + elif "/" in src_root: + fail("No / is permitted in java.src_roots config elements, but {} has one".format(src_root)) + else: + elements.append(src_root) + + return elements, prefixes From 0d4abf70d9dca8a187c734e015ac01dff0b70b94 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Sun, 7 Apr 2024 11:35:27 -0700 Subject: [PATCH 0731/1133] Remove strings from types in RuleRegistrationSpec Summary: Following diff D55825491 does not allow it. 
Reviewed By: JakobDegen

Differential Revision: D55842663

fbshipit-source-id: 17b4111aa6b50b8750810fa6a62da12d0938f298
---
 prelude/user/rule_spec.bzl | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/prelude/user/rule_spec.bzl b/prelude/user/rule_spec.bzl
index 426b57d3f..ad8b45113 100644
--- a/prelude/user/rule_spec.bzl
+++ b/prelude/user/rule_spec.bzl
@@ -9,7 +9,8 @@ RuleRegistrationSpec = record(
     name = field(str),
     impl = field(typing.Callable),
     attrs = field(dict[str, Attr]),
-    cfg = field([None, "transition"], None),
+    # TODO(nga): should be `transition | None`, but `transition` does not work as type.
+    cfg = field(typing.Any | None, None),
     is_toolchain_rule = field(bool, False),
     doc = field(str, ""),
 )

From 5d228898405c7e3e209ff9ac906ea3158ddd36f3 Mon Sep 17 00:00:00 2001
From: Leo Chashnikov
Date: Mon, 8 Apr 2024 01:54:19 -0700
Subject: [PATCH 0732/1133] pass debug data from library to apple_test explicitly

Summary: `apple_library.link_style_sub_targets_and_providers` already generates `AppleDebuggableInfo` provider, so setting `link_style_outputs = True,` ensures that this provider is present in `cxx_library_output`. Then `apple_test.bzl` uses this provider to set debug_info.

Reviewed By: blackm00n

Differential Revision: D55633373

fbshipit-source-id: c303c0eaaee79deda345a9bcdc3ac64c4ff50566
---
 prelude/apple/apple_test.bzl | 17 ++++++++++++-----
 1 file changed, 12 insertions(+), 5 deletions(-)

diff --git a/prelude/apple/apple_test.bzl b/prelude/apple/apple_test.bzl
index 5da34d8a1..59c8cb09b 100644
--- a/prelude/apple/apple_test.bzl
+++ b/prelude/apple/apple_test.bzl
@@ -5,6 +5,10 @@
 # License, Version 2.0 found in the LICENSE-APACHE file in the root directory
 # of this source tree.
+load( + "@prelude//:artifact_tset.bzl", + "project_artifacts", +) load("@prelude//:paths.bzl", "paths") load("@prelude//apple:apple_library.bzl", "AppleLibraryAdditionalParams", "apple_library_rule_constructor_params_and_swift_providers") load("@prelude//apple:apple_toolchain_types.bzl", "AppleToolchainInfo") @@ -42,7 +46,7 @@ load( ":apple_sdk_metadata.bzl", "MacOSXSdkMetadata", ) -load(":debug.bzl", "DEBUGINFO_SUBTARGET") +load(":debug.bzl", "AppleDebuggableInfo") load(":xcode.bzl", "apple_populate_xcode_attributes") load(":xctest_swift_support.bzl", "XCTestSwiftSupportInfo") @@ -92,7 +96,7 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: compilation_database = True, default = False, linkable_graph = False, - link_style_outputs = False, + link_style_outputs = True, merged_native_link_info = False, omnibus_root = False, preprocessors = False, @@ -120,11 +124,14 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: part_list_output = get_apple_bundle_part_list(ctx, AppleBundlePartListConstructorParams(binaries = [binary_part])) xctest_swift_support_needed = None + debug_info = None for p in cxx_library_output.providers: if isinstance(p, XCTestSwiftSupportInfo): xctest_swift_support_needed = p.support_needed - break + if isinstance(p, AppleDebuggableInfo): + debug_info = project_artifacts(ctx.actions, [p.debug_info_tset]) expect(xctest_swift_support_needed != None, "Expected `XCTestSwiftSupportInfo` provider to be present") + expect(debug_info != None, "Expected `AppleDebuggableInfo` provider to be present") bundle_parts = part_list_output.parts + _get_xctest_framework(ctx, xctest_swift_support_needed) @@ -153,11 +160,11 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: ) sub_targets.update(cxx_library_output.sub_targets) - (debuginfo,) = sub_targets[DEBUGINFO_SUBTARGET] + dsym_artifact = get_apple_dsym( ctx = ctx, executable = test_binary, - debug_info = debuginfo.default_outputs, + debug_info 
= debug_info, action_identifier = "generate_apple_test_dsym", output_path_override = get_bundle_dir_name(ctx) + ".dSYM", ) From b6e4bc5b13e2c482bc05bbed6ddb50635f3ed590 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Mon, 8 Apr 2024 05:41:00 -0700 Subject: [PATCH 0733/1133] Reduced memory consumption of link groups analysis Summary: Running modes with link groups on scale uses enormously huge amount of RAM on analysis. Profiling blames graph traversal. `breadth_first_traversal_by` allocates new array on each queue population call, that creates large amount of allocations. We can prevent that by adding nodes directly to queue without intermediate array allocation. With that change **we allocating 3x less RAM** for analysis. Reviewed By: artempyanykh Differential Revision: D55372285 fbshipit-source-id: 6aea513696a3416d51daf3db4dd04a7cdc1311d8 --- prelude/cxx/link_groups.bzl | 35 +++++++++++++++++-------------- prelude/utils/graph_utils.bzl | 39 +++++++++++++++++++++++++++++++++++ 2 files changed, 58 insertions(+), 16 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index baee7ae53..1ed21e8fe 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -51,6 +51,7 @@ load("@prelude//utils:expect.bzl", "expect") load( "@prelude//utils:graph_utils.bzl", "breadth_first_traversal_by", + "breadth_first_traversal_with_callback", ) load( "@prelude//utils:set.bzl", @@ -283,13 +284,13 @@ def _transitively_update_shared_linkage( shared_lib_roots.append(target) # buildifier: disable=uninitialized - def process_dependency(node: Label) -> list[Label]: + def process_dependency(node: Label, populate_queue: typing.Callable): linkable_node = linkable_graph_node_map[node] if linkable_node.preferred_linkage == Linkage("any"): link_group_preferred_linkage[node] = Linkage("shared") - return get_deps_for_link(linkable_node, link_strategy, pic_behavior) + populate_queue(get_deps_for_link(linkable_node, link_strategy, pic_behavior)) - 
breadth_first_traversal_by( + breadth_first_traversal_with_callback( linkable_graph_node_map, shared_lib_roots, process_dependency, @@ -316,11 +317,11 @@ def get_filtered_labels_to_links_map( If no link group is provided, all unmatched link infos are returned. """ - def get_potential_linkables(node: Label) -> list[Label]: + def get_potential_linkables(node: Label, populate_queue: typing.Callable): linkable_node = linkable_graph_node_map[node] # buildifier: disable=uninitialized # Always link against exported deps - node_linkables = list(linkable_node.exported_deps) + populate_queue(linkable_node.exported_deps) # If the preferred linkage is `static` or `any` we need to link against the deps too. # TODO(cjhopman): This code originally was as commented out below and the comment indicated that the @@ -334,12 +335,10 @@ def get_filtered_labels_to_links_map( # should_traverse = link_style != Linkage("shared") if should_traverse_private_deps: - node_linkables += linkable_node.deps - - return node_linkables + populate_queue(linkable_node.deps) # Get all potential linkable targets - linkables = breadth_first_traversal_by( + linkables = breadth_first_traversal_with_callback( linkable_graph_node_map, roots, get_potential_linkables, @@ -560,22 +559,26 @@ def find_relevant_roots( roots: list[Label] = []): # Walk through roots looking for the first node which maps to the current # link group. 
- def collect_and_traverse_roots(roots, node_target): + + def collect_and_traverse_roots(roots, node_target, populate_queue): node = linkable_graph_node_map.get(node_target) if node.preferred_linkage == Linkage("static") and not node.ignore_force_static_follows_dependents: - return node.deps + node.exported_deps + populate_queue(node.deps) + populate_queue(node.exported_deps) + return node_link_group = link_group_mappings.get(node_target) + if node_link_group == MATCH_ALL_LABEL: roots.append(node_target) - return [] - if node_link_group == link_group: + elif node_link_group == link_group: roots.append(node_target) - return [] - return node.deps + node.exported_deps + else: + populate_queue(node.deps) + populate_queue(node.exported_deps) relevant_roots = [] - breadth_first_traversal_by( + breadth_first_traversal_with_callback( linkable_graph_node_map, roots, partial(collect_and_traverse_roots, relevant_roots), diff --git a/prelude/utils/graph_utils.bzl b/prelude/utils/graph_utils.bzl index 4176160d0..7de5ee2f6 100644 --- a/prelude/utils/graph_utils.bzl +++ b/prelude/utils/graph_utils.bzl @@ -215,3 +215,42 @@ def breadth_first_traversal_by( expect(not queue, "Expected to be done with graph traversal queue.") return visited.keys() + +# That is version of `breadth_first_traversal_by` that allocates much less memory +# via avoiding intermediate list allocations. +def breadth_first_traversal_with_callback( + graph_nodes: [dict[typing.Any, typing.Any], None], + roots: list[typing.Any], + iter_children_with_callback: typing.Callable[[typing.Any, typing.Callable[[list[typing.Any]], None]], None], + node_formatter: typing.Callable[[typing.Any], str] = str) -> list[typing.Any]: + """ + Performs a breadth first traversal of `graph_nodes`, beginning + with the `roots` and queuing the nodes returned by`iter_children_with_callback`. + Returns a list of all visisted nodes. + + Starlark does not offer while loops, so this implementation + must make use of a for loop. 
We pop from the end of the queue + as a matter of performance. + """ + + # Dictify for O(1) lookup + visited = {k: None for k in roots} + + queue = visited.keys() + + def populate_queue(nodes): + for node in nodes: + if node not in visited: + visited[node] = None + queue.append(node) + + for _ in range(len(graph_nodes) if graph_nodes else 2000000000): + if not queue: + break + node = queue.pop() + if graph_nodes and node not in graph_nodes: + fail("Expected node {} in graph nodes".format(node_formatter(node))) + iter_children_with_callback(node, populate_queue) + expect(not queue, "Expected to be done with graph traversal queue.") + + return visited.keys() From 0617a4cec699d0ca8a3148d85799fc39790bda6c Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Mon, 8 Apr 2024 05:41:00 -0700 Subject: [PATCH 0734/1133] Simplified logic a bit Summary: Just removing small repetition Reviewed By: jbardini Differential Revision: D55635709 fbshipit-source-id: 69259fd80d35d84f07824ea0e282b616e64bc0b1 --- prelude/cxx/link_groups.bzl | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 1ed21e8fe..4d87dc30d 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -324,15 +324,7 @@ def get_filtered_labels_to_links_map( populate_queue(linkable_node.exported_deps) # If the preferred linkage is `static` or `any` we need to link against the deps too. - # TODO(cjhopman): This code originally was as commented out below and the comment indicated that the - # intent was to not traverse in the second case if link style was shared, but at this point idk which - # behavior we actually want. 
- should_traverse_private_deps = False - if linkable_node.preferred_linkage == Linkage("static"): - should_traverse_private_deps = True - elif linkable_node.preferred_linkage == Linkage("any"): - should_traverse_private_deps = True - # should_traverse = link_style != Linkage("shared") + should_traverse_private_deps = linkable_node.preferred_linkage == Linkage("static") or linkable_node.preferred_linkage == Linkage("any") if should_traverse_private_deps: populate_queue(linkable_node.deps) From 071580b8acc96d7c9aa3dc8b7a413782309f7904 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Mon, 8 Apr 2024 05:41:00 -0700 Subject: [PATCH 0735/1133] Separated roots determination out Summary: That change is a no-op Reviewed By: artempyanykh Differential Revision: D55635706 fbshipit-source-id: 8a60bd28657537cf17e2a3ebabaa9a849859c7a7 --- prelude/cxx/link_groups.bzl | 82 +++++++++++++++++++++++-------------- 1 file changed, 52 insertions(+), 30 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 4d87dc30d..c317deaf0 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -578,6 +578,50 @@ def find_relevant_roots( return relevant_roots +def _get_roots_from_mappings( + spec: LinkGroupLibSpec, + linkable_graph_node_map: dict[Label, LinkableNode]) -> (list[Label], bool): + roots = [] + has_empty_root = False + for mapping in spec.group.mappings: + # If there's no explicit root, this means we need to search the entire + # graph to find candidate nodes. + if not mapping.roots: + has_empty_root = True + elif spec.group.attrs.requires_root_node_exists: + # If spec requires root to always exist (default True), always include to traversal to fail hard if it is not in deps. + # Otherwise add to traversal only if we sure it is in deps graph. 
+ roots.extend(mapping.roots) + else: + roots.extend([root for root in mapping.roots if root in linkable_graph_node_map]) + return (roots, has_empty_root) + +def _get_link_group_roots( + spec: LinkGroupLibSpec, + linkable_graph_node_map: dict[Label, LinkableNode], + link_group_mappings: dict[Label, str], + executable_deps: list[Label], + other_roots: list[Label]) -> list[Label]: + # Get roots to begin the linkable search. + # TODO(agallagher): We should use the groups "public" nodes as the roots. + if spec.root != None: + return spec.root.deps + roots, has_empty_root = _get_roots_from_mappings(spec, linkable_graph_node_map) + + # If this link group has an empty mapping, we need to search everything + # -- even the additional roots -- to find potential nodes in the link + # group. + if has_empty_root: + roots.extend( + find_relevant_roots( + link_group = spec.group.name, + linkable_graph_node_map = linkable_graph_node_map, + link_group_mappings = link_group_mappings, + roots = executable_deps + other_roots, + ), + ) + return roots + def _create_link_group( ctx: AnalysisContext, spec: LinkGroupLibSpec, @@ -616,10 +660,6 @@ def _create_link_group( get_ignore_undefined_symbols_flags(linker_type), )) - # Get roots to begin the linkable search. - # TODO(agallagher): We should use the groups "public" nodes as the roots. - roots = [] - has_empty_root = False if spec.root != None: # If there's a linkable root attached to the spec, use that to guide # linking, as that will contain things like private linker flags that @@ -628,32 +668,14 @@ def _create_link_group( spec.root.link_infos, prefer_stripped = prefer_stripped_objects, )) - roots.extend(spec.root.deps) - else: - for mapping in spec.group.mappings: - # If there's no explicit root, this means we need to search the entire - # graph to find candidate nodes. 
- if not mapping.roots: - has_empty_root = True - elif spec.group.attrs.requires_root_node_exists: - # If spec requires root to always exist (default True), always include to traversal to fail hard if it is not in deps. - # Otherwise add to traversal only if we sure it is in deps graph. - roots.extend(mapping.roots) - else: - roots.extend([root for root in mapping.roots if root in linkable_graph_node_map]) - - # If this link group has an empty mapping, we need to search everything - # -- even the additional roots -- to find potential nodes in the link - # group. - if has_empty_root: - roots.extend( - find_relevant_roots( - link_group = spec.group.name, - linkable_graph_node_map = linkable_graph_node_map, - link_group_mappings = link_group_mappings, - roots = executable_deps + other_roots, - ), - ) + + roots = _get_link_group_roots( + spec = spec, + linkable_graph_node_map = linkable_graph_node_map, + link_group_mappings = link_group_mappings, + executable_deps = executable_deps, + other_roots = other_roots, + ) # Add roots... filtered_labels_to_links_map = get_filtered_labels_to_links_map( From 3dfe7d4ce5c305b8abad348bf565f087c2616f41 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Mon, 8 Apr 2024 05:41:00 -0700 Subject: [PATCH 0736/1133] Extracted defined method to avoid unnecessary function alloactions inside the loop Summary: Nested methods allocated on each call of wrapping method. That means we either can not use them in loops or extract nested methods out to allocate them only once. 
Reviewed By: ndmitchell Differential Revision: D55635708 fbshipit-source-id: 0bb979e4b530742a71dd05de7de1e1a79497f47c --- prelude/cxx/groups.bzl | 68 +++++++++++++++++++++++++----------------- 1 file changed, 41 insertions(+), 27 deletions(-) diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index f81b60ee4..f891baf39 100644 --- a/prelude/cxx/groups.bzl +++ b/prelude/cxx/groups.bzl @@ -281,6 +281,44 @@ def _find_targets_in_mapping( return matching_targets.keys() +# Extracted from `_update_target_to_group_mapping` to avoid function allocations inside the loop +def _assign_target_to_group( + target_to_group_map, #: {"label": str} + node_traversed_targets, #: {"label": None} + group, # Group, + groups_map, # {str: Group} + mapping, # GroupMapping + target, # Label + node_traversal): # bool + # If the target hasn't already been assigned to a group, assign it to the + # first group claiming the target. Return whether the target was already assigned. + if target not in target_to_group_map: + if mapping.traversal == Traversal("subfolders"): + generated_group_name = _generate_group_subfolder_name(group.name, target.package) + _add_to_implicit_link_group(generated_group_name, group, groups_map, target_to_group_map, target) + else: + target_to_group_map[target] = group.name + + if node_traversal: + node_traversed_targets[target] = None + return False + else: + return True + +# Extracted from `_update_target_to_group_mapping` to avoid function allocations inside the loop +def _transitively_add_targets_to_group_mapping( + assign_target_to_group, # (Label, bool) -> bool + node_traversed_targets, #: {"label": None} + graph_map, # {"label": "_b"} + node): # Label + previously_processed = assign_target_to_group(node, False) + + # If the node has been previously processed, and it was via tree (not node), all child nodes have been assigned + if previously_processed and node not in node_traversed_targets: + return [] + graph_node = graph_map[node] + return graph_node.deps 
+ graph_node.exported_deps + # Types removed to avoid unnecessary type checking which degrades performance. def _update_target_to_group_mapping( graph_map, # {"label": "_b"} @@ -290,35 +328,11 @@ def _update_target_to_group_mapping( groups_map, # {str: Group} mapping, # GroupMapping target): # Label - def assign_target_to_group( - target: Label, - node_traversal: bool) -> bool: - # If the target hasn't already been assigned to a group, assign it to the - # first group claiming the target. Return whether the target was already assigned. - if target not in target_to_group_map: - if mapping.traversal == Traversal("subfolders"): - generated_group_name = _generate_group_subfolder_name(group.name, target.package) - _add_to_implicit_link_group(generated_group_name, group, groups_map, target_to_group_map, target) - else: - target_to_group_map[target] = group.name - - if node_traversal: - node_traversed_targets[target] = None - return False - else: - return True - - def transitively_add_targets_to_group_mapping(node: Label) -> list[Label]: - previously_processed = assign_target_to_group(target = node, node_traversal = False) - - # If the node has been previously processed, and it was via tree (not node), all child nodes have been assigned - if previously_processed and node not in node_traversed_targets: - return [] - graph_node = graph_map[node] - return graph_node.deps + graph_node.exported_deps + assign_target_to_group = partial(_assign_target_to_group, target_to_group_map, node_traversed_targets, group, groups_map, mapping) # (Label, bool) -> bool + transitively_add_targets_to_group_mapping = partial(_transitively_add_targets_to_group_mapping, assign_target_to_group, node_traversed_targets, graph_map) # (Label) -> list[Label] if mapping.traversal in _TRAVERSALS_TO_ASSIGN_NODE: - assign_target_to_group(target = target, node_traversal = True) + assign_target_to_group(target, True) else: # tree breadth_first_traversal_by(graph_map, [target], 
transitively_add_targets_to_group_mapping) From 45a13c420d0f0d3fbb69d282a6937a27a6573f35 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Mon, 8 Apr 2024 05:41:00 -0700 Subject: [PATCH 0737/1133] Used more memory efficient graph traversal in targets mapping Summary: Removing intermediate list allocations from graph traversals. Reviewed By: jbardini Differential Revision: D55635705 fbshipit-source-id: f05242e8f95e4d20b7fd231753d899ec6965e881 --- prelude/cxx/groups.bzl | 32 ++++++++++++++++++++------------ 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/prelude/cxx/groups.bzl b/prelude/cxx/groups.bzl index f891baf39..cbc3f554c 100644 --- a/prelude/cxx/groups.bzl +++ b/prelude/cxx/groups.bzl @@ -14,7 +14,7 @@ load( ) load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal_by", + "breadth_first_traversal_with_callback", ) load( "@prelude//utils:strings.bzl", @@ -246,7 +246,7 @@ def _find_targets_in_mapping( return False return True - def find_matching_targets(node): # Label -> [Label]: + def populate_matching_targets(node): # Label -> bool: graph_node = graph_map[node] if matches_target(node, graph_node.labels): matching_targets[node] = None @@ -254,20 +254,26 @@ def _find_targets_in_mapping( # We can stop traversing the tree at this point because we've added the # build target to the list of all targets that will be traversed by the # algorithm that applies the groups. 
- return [] - return graph_node.deps + graph_node.exported_deps + return False + return True + + def populate_matching_targets_bfs_wrapper(node, populate_queue): # (Label, typing.Callable) -> None + if populate_matching_targets(node): + graph_node = graph_map[node] + populate_queue(graph_node.deps) + populate_queue(graph_node.exported_deps) if not mapping.roots: for node in graph_map: - find_matching_targets(node) + populate_matching_targets(node) elif mapping.traversal == Traversal("intersect"): intersected_targets = None for root in mapping.roots: - # This is a captured variable inside `find_matching_targets`. + # This is a captured variable inside `populate_matching_targets`. # We reset it for each root we visit so that we don't have results # from other roots. matching_targets = {} - breadth_first_traversal_by(graph_map, [root], find_matching_targets) + breadth_first_traversal_with_callback(graph_map, [root], populate_matching_targets_bfs_wrapper) if intersected_targets == None: intersected_targets = {target: True for target in matching_targets} else: @@ -277,7 +283,7 @@ def _find_targets_in_mapping( return intersected_targets.keys() else: - breadth_first_traversal_by(graph_map, mapping.roots, find_matching_targets) + breadth_first_traversal_with_callback(graph_map, mapping.roots, populate_matching_targets_bfs_wrapper) return matching_targets.keys() @@ -310,14 +316,16 @@ def _transitively_add_targets_to_group_mapping( assign_target_to_group, # (Label, bool) -> bool node_traversed_targets, #: {"label": None} graph_map, # {"label": "_b"} - node): # Label + node, + populate_queue): # ([Label]) -> None previously_processed = assign_target_to_group(node, False) # If the node has been previously processed, and it was via tree (not node), all child nodes have been assigned if previously_processed and node not in node_traversed_targets: - return [] + return graph_node = graph_map[node] - return graph_node.deps + graph_node.exported_deps + 
populate_queue(graph_node.deps) + populate_queue(graph_node.exported_deps) # Types removed to avoid unnecessary type checking which degrades performance. def _update_target_to_group_mapping( @@ -334,7 +342,7 @@ def _update_target_to_group_mapping( if mapping.traversal in _TRAVERSALS_TO_ASSIGN_NODE: assign_target_to_group(target, True) else: # tree - breadth_first_traversal_by(graph_map, [target], transitively_add_targets_to_group_mapping) + breadth_first_traversal_with_callback(graph_map, [target], transitively_add_targets_to_group_mapping) def _add_to_implicit_link_group( generated_group_name, # str From ce5e0e9de2ddd69bb2270170b168bb1f611093fd Mon Sep 17 00:00:00 2001 From: Alexander Kampmann Date: Mon, 8 Apr 2024 07:53:15 -0700 Subject: [PATCH 0738/1133] add materialized artifacts to other_outputs Summary: I missed this in D55691796 Reviewed By: IanChilds Differential Revision: D55795181 fbshipit-source-id: 869c5cb6a6824c90ec8529fae77c56d568d88713 --- prelude/android/android_instrumentation_apk.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/android/android_instrumentation_apk.bzl b/prelude/android/android_instrumentation_apk.bzl index 21e8128d1..a9cacf4a7 100644 --- a/prelude/android/android_instrumentation_apk.bzl +++ b/prelude/android/android_instrumentation_apk.bzl @@ -128,7 +128,7 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): return [ AndroidApkInfo(apk = output_apk, materialized_artifacts = materialized_artifacts, manifest = resources_info.manifest), AndroidInstrumentationApkInfo(apk_under_test = ctx.attrs.apk[AndroidApkInfo].apk), - DefaultInfo(default_output = output_apk, sub_targets = enhance_ctx.get_sub_targets()), + DefaultInfo(default_output = output_apk, other_outputs = materialized_artifacts, sub_targets = enhance_ctx.get_sub_targets()), class_to_srcs, ] From 0949e8f496c1533abf584df182361b52b4e64c18 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Mon, 8 Apr 2024 08:04:30 -0700 Subject: [PATCH 
0739/1133] merge Swift tbd files Summary: Include Swift tbd files from mixed libraries when producing merged tbd file output. Reviewed By: drodriguez Differential Revision: D55805706 fbshipit-source-id: 6db34696686bede482a877948da847de4b44d4ed --- prelude/apple/apple_library.bzl | 1 + prelude/cxx/cxx_library.bzl | 21 +++++++++++---------- prelude/cxx/cxx_types.bzl | 2 ++ 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index 740cafe7a..659fb7752 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -350,6 +350,7 @@ def apple_library_rule_constructor_params_and_swift_providers(ctx: AnalysisConte link_groups_force_static_follows_dependents = False, extra_linker_outputs_factory = _get_extra_linker_flags_and_outputs, swiftmodule_linkable = get_swiftmodule_linkable(swift_compile), + extra_shared_library_interfaces = [swift_compile.tbd] if (swift_compile and swift_compile.tbd) else None, ) def _get_extra_linker_flags_and_outputs( diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index 9c41ab85f..eb18a2c87 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -442,22 +442,23 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # TBD generation is done per-target for stub_from_headers mode and collected at link time. 
shared_interface_linkable = None - tbd_output = None + tbd_outputs = impl_params.extra_shared_library_interfaces if impl_params.extra_shared_library_interfaces else [] if impl_params.shared_library_interface_target and \ cxx_use_shlib_intfs_mode(ctx, ShlibInterfacesMode("stub_from_headers")): - tbd_output = create_tbd( + cxx_tbd_output = create_tbd( ctx, cxx_attr_exported_headers(ctx, impl_params.headers_layout), own_exported_preprocessor_info, inherited_exported_preprocessor_infos, impl_params.shared_library_interface_target, ) - sub_targets["tbd"] = [DefaultInfo(default_output = tbd_output)] + tbd_outputs.append(cxx_tbd_output) + sub_targets["tbd"] = [DefaultInfo(default_output = cxx_tbd_output)] shared_interface_linkable = SharedInterfaceLinkable( interfaces = make_artifact_tset( actions = ctx.actions, label = ctx.label, - artifacts = [tbd_output], + artifacts = tbd_outputs, ), ) @@ -525,7 +526,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc extra_static_linkables = extra_static_linkables, gnu_use_link_groups = cxx_is_gnu(ctx) and bool(link_group_mappings), link_execution_preference = link_execution_preference, - tbd_output = tbd_output, + tbd_outputs = tbd_outputs, ) solib_as_dict = {library_outputs.solib[0]: library_outputs.solib[1]} if library_outputs.solib else {} @@ -964,7 +965,7 @@ def _form_library_outputs( extra_static_linkables: list[[FrameworksLinkable, SwiftmoduleLinkable, SwiftRuntimeLinkable]], gnu_use_link_groups: bool, link_execution_preference: LinkExecutionPreference, - tbd_output: [Artifact, None]) -> _CxxAllLibraryOutputs: + tbd_outputs: list[Artifact]) -> _CxxAllLibraryOutputs: # Build static/shared libs and the link info we use to export them to dependents. 
outputs = {} solib = None @@ -1060,7 +1061,7 @@ def _form_library_outputs( extra_linker_flags = extra_linker_flags, link_ordering = map_val(LinkOrdering, ctx.attrs.link_ordering), link_execution_preference = link_execution_preference, - tbd_output = tbd_output, + tbd_outputs = tbd_outputs, ) shlib = result.link_result.linked_object info = result.info @@ -1390,7 +1391,7 @@ def _shared_library( extra_linker_flags: list[ArgLike], link_execution_preference: LinkExecutionPreference, link_ordering: [LinkOrdering, None], - tbd_output: [Artifact, None]) -> _CxxSharedLibraryResult: + tbd_outputs: list[Artifact]) -> _CxxSharedLibraryResult: """ Generate a shared library and the associated native link info used by dependents to link against it. @@ -1472,7 +1473,7 @@ def _shared_library( # to wait for dependent libraries to link. # If the tbd output is missing this is a non apple_library target, # so skip producing the interface. - if tbd_output != None: + if len(tbd_outputs) > 0: # collect tbd output from providers and merge all_deps = dedupe(cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx)) deps_merged_link_infos = cxx_inherited_link_info(all_deps) @@ -1480,7 +1481,7 @@ def _shared_library( tbd_set = make_artifact_tset( actions = ctx.actions, label = ctx.label, - artifacts = [tbd_output], + artifacts = tbd_outputs, children = children, ) exported_shlib = merge_tbds(ctx, soname, tbd_set) diff --git a/prelude/cxx/cxx_types.bzl b/prelude/cxx/cxx_types.bzl index a87eab24a..1fec9301a 100644 --- a/prelude/cxx/cxx_types.bzl +++ b/prelude/cxx/cxx_types.bzl @@ -194,4 +194,6 @@ CxxRuleConstructorParams = record( exe_allow_cache_upload = field(bool, False), # The target triple to use when generating shared library interfaces shared_library_interface_target = field([str, None], None), + # Extra shared library interfaces to propagate, eg from mixed Swift libraries. 
+ extra_shared_library_interfaces = field([list[Artifact], None], None), ) From 8e57c2a03eff1cb4eb54e4cf1dc49b0ab7c26cff Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Mon, 8 Apr 2024 09:45:23 -0700 Subject: [PATCH 0740/1133] fix entitlements comparison Summary: nit, and not realistic case, but handle cases when entitlement string can be just an empty one Reviewed By: milend Differential Revision: D55867564 fbshipit-source-id: 24a2bb55127277187c353c170cdbe0fe89a758b2 --- prelude/apple/tools/bundling/assemble_bundle_types.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/prelude/apple/tools/bundling/assemble_bundle_types.py b/prelude/apple/tools/bundling/assemble_bundle_types.py index 1d98db812..18e27ea10 100644 --- a/prelude/apple/tools/bundling/assemble_bundle_types.py +++ b/prelude/apple/tools/bundling/assemble_bundle_types.py @@ -52,7 +52,10 @@ def __lt__(self: BundleSpecItem, other: BundleSpecItem) -> bool: or self.codesign_on_copy < other.codesign_on_copy or ( self.codesign_entitlements < other.codesign_entitlements - if (self.codesign_entitlements and other.codesign_entitlements) + if ( + self.codesign_entitlements is not None + and other.codesign_entitlements is not None + ) else ( self.codesign_entitlements is None and other.codesign_entitlements is not None From c0fe4c2c2936438250ce6660f71c1f8bd8428761 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Mon, 8 Apr 2024 09:45:23 -0700 Subject: [PATCH 0741/1133] add `codesign_flags_override` attribute to resource Summary: those are useful to provide `--deep` flag with precision to prebuilt frameworks wrapped in resource rules which in turn include extensions Reviewed By: milend Differential Revision: D55867561 fbshipit-source-id: d77fc2d14e6f67d5701891c86b88af83d0a1485f --- prelude/apple/apple_bundle_part.bzl | 2 ++ prelude/apple/apple_bundle_resources.bzl | 6 ++++-- prelude/apple/apple_resource.bzl | 1 + prelude/apple/apple_resource_types.bzl | 1 + 
prelude/apple/apple_rules_impl.bzl | 1 + 5 files changed, 9 insertions(+), 2 deletions(-) diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index d6caa9041..a6ef993b7 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -32,6 +32,8 @@ AppleBundlePart = record( codesign_on_copy = field(bool, False), # Entitlements to use when this part is code signed separately. codesign_entitlements = field(Artifact | None, None), + # If present, override the codesign flags with these flags, when this part is code signed separately. + codesign_flags_override = field([list[str], None], None), ) SwiftStdlibArguments = record( diff --git a/prelude/apple/apple_bundle_resources.bzl b/prelude/apple/apple_bundle_resources.bzl index dbd3b8b23..223d73fa7 100644 --- a/prelude/apple/apple_bundle_resources.bzl +++ b/prelude/apple/apple_bundle_resources.bzl @@ -187,6 +187,7 @@ def _copy_resources(ctx: AnalysisContext, specs: list[AppleResourceSpec]) -> lis destination_relative_path = None, codesign_on_copy = spec.codesign_files_on_copy, codesign_entitlements = spec.codesign_entitlements, + codesign_flags_override = spec.codesign_flags_override, ) for x in spec.files] result += _bundle_parts_for_dirs(spec.dirs, bundle_destination, False) result += _bundle_parts_for_dirs(spec.content_dirs, bundle_destination, True) @@ -354,7 +355,8 @@ def _process_apple_resource_file_if_needed( destination: AppleBundleDestination, destination_relative_path: [str, None], codesign_on_copy: bool = False, - codesign_entitlements: Artifact | None = None) -> AppleBundlePart: + codesign_entitlements: Artifact | None = None, + codesign_flags_override: list[str] | None = None) -> AppleBundlePart: output_dir = "_ProcessedResources" basename = paths.basename(file.short_path) output_is_contents_dir = False @@ -385,7 +387,7 @@ def _process_apple_resource_file_if_needed( # When name is empty string only content of the directory will be copied, as 
opposed to the directory itself. # When name is `None`, directory or file will be copied as it is, without renaming. new_name = destination_relative_path if destination_relative_path else ("" if output_is_contents_dir else None) - return AppleBundlePart(source = processed, destination = destination, new_name = new_name, codesign_on_copy = codesign_on_copy, codesign_entitlements = codesign_entitlements) + return AppleBundlePart(source = processed, destination = destination, new_name = new_name, codesign_on_copy = codesign_on_copy, codesign_entitlements = codesign_entitlements, codesign_flags_override = codesign_flags_override) # Returns a path relative to the _parent_ of the lproj dir. # For example, given a variant file with a short path of`XX/YY.lproj/ZZ` diff --git a/prelude/apple/apple_resource.bzl b/prelude/apple/apple_resource.bzl index e24538eec..a67ebcb41 100644 --- a/prelude/apple/apple_resource.bzl +++ b/prelude/apple/apple_resource.bzl @@ -19,6 +19,7 @@ def apple_resource_impl(ctx: AnalysisContext) -> list[Provider]: named_variant_files = ctx.attrs.named_variants or {}, codesign_files_on_copy = ctx.attrs.codesign_on_copy, codesign_entitlements = ctx.attrs.codesign_entitlements, + codesign_flags_override = ctx.attrs.codesign_flags_override, ) # `files` can contain `apple_library()` which in turn can have `apple_resource()` deps diff --git a/prelude/apple/apple_resource_types.bzl b/prelude/apple/apple_resource_types.bzl index 4d800f7ae..5d8a2773c 100644 --- a/prelude/apple/apple_resource_types.bzl +++ b/prelude/apple/apple_resource_types.bzl @@ -29,6 +29,7 @@ AppleResourceSpec = record( named_variant_files = field(dict[str, list[Artifact]], {}), codesign_files_on_copy = field(bool, False), codesign_entitlements = field(Artifact | None, None), + codesign_flags_override = field(list[str] | None, None), ) # Used when invoking `ibtool`, `actool`, `mapc` and `momc` diff --git a/prelude/apple/apple_rules_impl.bzl b/prelude/apple/apple_rules_impl.bzl index 
d51cb972c..3308c4b6d 100644 --- a/prelude/apple/apple_rules_impl.bzl +++ b/prelude/apple/apple_rules_impl.bzl @@ -171,6 +171,7 @@ extra_attributes = { }, "apple_resource": { "codesign_entitlements": attrs.option(attrs.source(), default = None), + "codesign_flags_override": attrs.option(attrs.list(attrs.string()), default = None), "codesign_on_copy": attrs.bool(default = False), "content_dirs": attrs.list(attrs.source(allow_directory = True), default = []), "dirs": attrs.list(attrs.source(allow_directory = True), default = []), From 1cef74a8bf879fc93749a0406f8c12bfe5a60fe7 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Mon, 8 Apr 2024 09:45:23 -0700 Subject: [PATCH 0742/1133] pass `codesign_flags_override` from rules to bundling script Summary: do not use those flags in bundling script yet, so no behavior change in this diff Reviewed By: milend Differential Revision: D55867560 fbshipit-source-id: d5dd928975430f59616f41825c548b0854daa880 --- prelude/apple/apple_bundle_part.bzl | 2 ++ .../tools/bundling/assemble_bundle_types.py | 21 ++++++++++++++++++- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/prelude/apple/apple_bundle_part.bzl b/prelude/apple/apple_bundle_part.bzl index a6ef993b7..8cf18454b 100644 --- a/prelude/apple/apple_bundle_part.bzl +++ b/prelude/apple/apple_bundle_part.bzl @@ -240,6 +240,8 @@ def _bundle_spec_json(ctx: AnalysisContext, parts: list[AppleBundlePart], codesi part_spec["codesign_on_copy"] = True if include_entitlements and part.codesign_entitlements: part_spec["codesign_entitlements"] = part.codesign_entitlements + if part.codesign_flags_override: + part_spec["codesign_flags_override"] = part.codesign_flags_override specs.append(part_spec) return ctx.actions.write_json("bundle_spec.json", specs) diff --git a/prelude/apple/tools/bundling/assemble_bundle_types.py b/prelude/apple/tools/bundling/assemble_bundle_types.py index 18e27ea10..716fe26d1 100644 --- a/prelude/apple/tools/bundling/assemble_bundle_types.py +++ 
b/prelude/apple/tools/bundling/assemble_bundle_types.py @@ -27,6 +27,7 @@ class BundleSpecItem: dst: str codesign_on_copy: bool = False codesign_entitlements: Optional[str] = None + codesign_flags_override: Optional[List[str]] = None def __eq__(self: BundleSpecItem, other: Optional[BundleSpecItem]) -> bool: return ( @@ -35,6 +36,7 @@ def __eq__(self: BundleSpecItem, other: Optional[BundleSpecItem]) -> bool: and self.dst == other.dst and self.codesign_on_copy == other.codesign_on_copy and self.codesign_entitlements == other.codesign_entitlements + and self.codesign_flags_override == other.codesign_flags_override ) def __ne__(self: BundleSpecItem, other: BundleSpecItem) -> bool: @@ -42,7 +44,13 @@ def __ne__(self: BundleSpecItem, other: BundleSpecItem) -> bool: def __hash__(self: BundleSpecItem) -> int: return hash( - (self.src, self.dst, self.codesign_on_copy, self.codesign_entitlements) + ( + self.src, + self.dst, + self.codesign_on_copy, + self.codesign_entitlements, + self.codesign_flags_override, + ) ) def __lt__(self: BundleSpecItem, other: BundleSpecItem) -> bool: @@ -61,6 +69,17 @@ def __lt__(self: BundleSpecItem, other: BundleSpecItem) -> bool: and other.codesign_entitlements is not None ) ) + or ( + self.codesign_flags_override < other.codesign_flags_override + if ( + self.codesign_flags_override is not None + and other.codesign_flags_override is not None + ) + else ( + self.codesign_flags_override is None + and other.codesign_flags_override is not None + ) + ) ) From 90e8dd579431bf21b4e9908461fe985ca8e2f359 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Mon, 8 Apr 2024 09:45:23 -0700 Subject: [PATCH 0743/1133] move codesign flags into CodesignedPath Summary: Make codesign flags a part of context needed to codesign specific path, rather than a global parameter. Pure refactoring, no behavior change. 
Reviewed By: milend Differential Revision: D55867563 fbshipit-source-id: 3bb8fc68efeffe0acddc9fcf79116d034b78f450 --- prelude/apple/tools/bundling/main.py | 12 +++++++--- .../tools/code_signing/codesign_bundle.py | 22 +++++++------------ prelude/apple/tools/code_signing/main.py | 7 +++--- 3 files changed, 21 insertions(+), 20 deletions(-) diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index 42ecd684d..758f97e31 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -386,18 +386,25 @@ def _main() -> None: "Expected signing context to be created before bundling is done if codesign is requested." ) - bundle_path = CodesignedPath(path=args.output, entitlements=args.entitlements) + bundle_path = CodesignedPath( + path=args.output, entitlements=args.entitlements, flags=args.codesign_args + ) codesign_on_copy_paths = [ CodesignedPath( path=bundle_path.path / i.dst, entitlements=( Path(i.codesign_entitlements) if i.codesign_entitlements else None ), + flags=args.codesign_args, ) for i in spec if i.codesign_on_copy ] + [ - CodesignedPath(path=bundle_path.path / path, entitlements=None) + CodesignedPath( + path=bundle_path.path / path, + entitlements=None, + flags=args.codesign_args, + ) for path in swift_stdlib_paths ] @@ -406,7 +413,6 @@ def _main() -> None: signing_context=signing_context, platform=args.platform, codesign_on_copy_paths=codesign_on_copy_paths, - codesign_args=args.codesign_args, codesign_tool=args.codesign_tool, codesign_configuration=args.codesign_configuration, ) diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/prelude/apple/tools/code_signing/codesign_bundle.py index 27e4b526a..d08744825 100644 --- a/prelude/apple/tools/code_signing/codesign_bundle.py +++ b/prelude/apple/tools/code_signing/codesign_bundle.py @@ -66,6 +66,10 @@ class CodesignedPath: """ Path to entitlements to be used when codesigning, relative to buck project """ + flags: List[str] + """ 
+ Flags to be passed to codesign command when codesigning this particular path + """ def _select_provisioning_profile( @@ -187,7 +191,6 @@ def codesign_bundle( signing_context: Union[AdhocSigningContext, SigningContextWithProfileSelection], platform: ApplePlatform, codesign_on_copy_paths: List[CodesignedPath], - codesign_args: List[str], codesign_tool: Optional[Path] = None, codesign_configuration: Optional[CodesignConfiguration] = None, ) -> None: @@ -237,7 +240,6 @@ def codesign_bundle( tmp_dir=tmp_dir, codesign_tool=codesign_tool, platform=platform, - codesign_args=codesign_args, ) else: fast_adhoc_signing_enabled = ( @@ -253,7 +255,6 @@ def codesign_bundle( codesign_command_factory=DefaultCodesignCommandFactory(codesign_tool), platform=platform, fast_adhoc_signing=fast_adhoc_signing_enabled, - codesign_args=codesign_args, ) @@ -286,7 +287,9 @@ def _prepare_entitlements_and_info_plist( bundle_path.path / platform.embedded_provisioning_profile_path(), ) return CodesignedPath( - path=bundle_path.path, entitlements=prepared_entitlements_path + path=bundle_path.path, + entitlements=prepared_entitlements_path, + flags=bundle_path.flags, ) @@ -397,7 +400,6 @@ def _dry_codesign_everything( tmp_dir: str, codesign_tool: Path, platform: ApplePlatform, - codesign_args: List[str], ) -> None: codesign_command_factory = DryRunCodesignCommandFactory(codesign_tool) @@ -412,7 +414,6 @@ def _dry_codesign_everything( tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, platform=platform, - codesign_args=codesign_args, ) # Dry codesigning creates a .plist inside every directory it signs. 
@@ -433,7 +434,6 @@ def _dry_codesign_everything( tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, platform=platform, - codesign_args=codesign_args, ) @@ -445,7 +445,6 @@ def _codesign_everything( codesign_command_factory: ICodesignCommandFactory, platform: ApplePlatform, fast_adhoc_signing: bool, - codesign_args: List[str], ) -> None: # First sign codesign-on-copy paths codesign_on_copy_filtered_paths = _filter_out_fast_adhoc_paths( @@ -460,7 +459,6 @@ def _codesign_everything( tmp_dir, codesign_command_factory, platform, - codesign_args, ) # Lastly sign whole bundle root_filtered_paths = _filter_out_fast_adhoc_paths( @@ -475,7 +473,6 @@ def _codesign_everything( tmp_dir, codesign_command_factory, platform, - codesign_args, ) @@ -529,10 +526,9 @@ def _spawn_codesign_process( tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, stack: ExitStack, - codesign_args: List[str], ) -> ParallelProcess: command = codesign_command_factory.codesign_command( - path.path, identity_fingerprint, path.entitlements, codesign_args + path.path, identity_fingerprint, path.entitlements, path.flags ) return _spawn_process(command=command, tmp_dir=tmp_dir, stack=stack) @@ -543,7 +539,6 @@ def _codesign_paths( tmp_dir: str, codesign_command_factory: ICodesignCommandFactory, platform: ApplePlatform, - codesign_args: List[str], ) -> None: """Codesigns several paths in parallel.""" processes: List[ParallelProcess] = [] @@ -555,7 +550,6 @@ def _codesign_paths( tmp_dir=tmp_dir, codesign_command_factory=codesign_command_factory, stack=stack, - codesign_args=codesign_args, ) processes.append(process) for p in processes: diff --git a/prelude/apple/tools/code_signing/main.py b/prelude/apple/tools/code_signing/main.py index b4598fd0c..f4d526b5c 100644 --- a/prelude/apple/tools/code_signing/main.py +++ b/prelude/apple/tools/code_signing/main.py @@ -135,11 +135,13 @@ def _main() -> None: ) bundle_path = CodesignedPath( - path=args.bundle_path, 
entitlements=args.entitlements + path=args.bundle_path, entitlements=args.entitlements, flags=[] ) codesign_on_copy_paths = ( [ - CodesignedPath(path=bundle_path.path / path, entitlements=None) + CodesignedPath( + path=bundle_path.path / path, entitlements=None, flags=[] + ) for path in args.codesign_on_copy ] if args.codesign_on_copy @@ -151,7 +153,6 @@ def _main() -> None: signing_context=signing_context, platform=args.platform, codesign_on_copy_paths=codesign_on_copy_paths, - codesign_args=[], ) except CodeSignProvisioningError as e: print(decorate_error_message(str(e)), file=sys.stderr) From d8c44ffd2ef6d7bdb2b40c6680f6ae777cb6738c Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Mon, 8 Apr 2024 10:00:54 -0700 Subject: [PATCH 0744/1133] Extract go-list code to go_list.bzl Summary: Just extracted code, nothing changed Reviewed By: yarikk Differential Revision: D55810150 fbshipit-source-id: 3e1181f92f63148ac3b880ced2f86629e12c050f --- prelude/go/go_list.bzl | 84 ++++++++++++++++++++++++++++++++++ prelude/go/package_builder.bzl | 80 ++------------------------------ 2 files changed, 87 insertions(+), 77 deletions(-) create mode 100644 prelude/go/go_list.bzl diff --git a/prelude/go/go_list.bzl b/prelude/go/go_list.bzl new file mode 100644 index 000000000..471a5568e --- /dev/null +++ b/prelude/go/go_list.bzl @@ -0,0 +1,84 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//:paths.bzl", "paths") +load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") + +GoListOut = record( + go_files = field(list[Artifact], default = []), + cgo_files = field(list[Artifact], default = []), + s_files = field(list[Artifact], default = []), + test_go_files = field(list[Artifact], default = []), + x_test_go_files = field(list[Artifact], default = []), + embed_files = field(list[Artifact], default = []), +) + +def go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_root: str, force_disable_cgo: bool) -> Artifact: + go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] + env = get_toolchain_env_vars(go_toolchain, force_disable_cgo = force_disable_cgo) + env["GO111MODULE"] = "off" + + go_list_out = ctx.actions.declare_output(paths.basename(pkg_name) + "_go_list.json") + + # Create file sructure that `go list` can recognize + # Use copied_dir, because embed doesn't work with symlinks + srcs_dir = ctx.actions.copied_dir( + "__{}_srcs_dir__".format(paths.basename(pkg_name)), + {src.short_path.removeprefix(package_root).lstrip("/"): src for src in srcs}, + ) + tags = go_toolchain.tags + ctx.attrs._tags + go_list_args = [ + go_toolchain.go_list_wrapper, + "-e", + ["--go", go_toolchain.go], + ["--workdir", srcs_dir], + ["--output", go_list_out.as_output()], + "-json=GoFiles,CgoFiles,SFiles,TestGoFiles,XTestGoFiles,EmbedFiles", + ["-tags", ",".join(tags) if tags else []], + ".", + ] + + identifier = paths.basename(pkg_name) + ctx.actions.run(go_list_args, env = env, category = "go_list", identifier = identifier) + + return go_list_out + +def parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out: ArtifactValue) -> GoListOut: + go_list = go_list_out.read_json() + go_files, cgo_files, s_files, test_go_files, x_test_go_files, embed_files = [], [], [], [], [], [] + + for src in srcs: + # remove package_root prefix from src artifact path to match `go list` outout format + src_path = 
src.short_path.removeprefix(package_root).lstrip("/") + if src_path in go_list.get("GoFiles", []): + go_files.append(src) + if src_path in go_list.get("CgoFiles", []): + cgo_files.append(src) + if src_path in go_list.get("SFiles", []): + s_files.append(src) + if src_path in go_list.get("TestGoFiles", []): + test_go_files.append(src) + if src_path in go_list.get("XTestGoFiles", []): + x_test_go_files.append(src) + if _any_starts_with(go_list.get("EmbedFiles", []), src_path): + embed_files.append(src) + + return GoListOut( + go_files = go_files, + cgo_files = cgo_files, + s_files = s_files, + test_go_files = test_go_files, + x_test_go_files = x_test_go_files, + embed_files = embed_files, + ) + +def _any_starts_with(files: list[str], path: str): + for file in files: + if paths.starts_with(file, path): + return True + + return False diff --git a/prelude/go/package_builder.bzl b/prelude/go/package_builder.bzl index 44c2393ab..ec90171d4 100644 --- a/prelude/go/package_builder.bzl +++ b/prelude/go/package_builder.bzl @@ -12,6 +12,7 @@ load( ":coverage.bzl", "GoCoverageMode", # @Unused used as type ) +load(":go_list.bzl", "go_list", "parse_go_list_out") load(":packages.bzl", "GoPkg", "make_importcfg", "merge_pkgs", "pkg_artifacts") load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") @@ -55,7 +56,7 @@ def build_package( package_root = package_root if package_root != None else infer_package_root(srcs) - go_list_out = _go_list(ctx, pkg_name, srcs, package_root, force_disable_cgo) + go_list_out = go_list(ctx, pkg_name, srcs, package_root, force_disable_cgo) srcs_list_argsfile = ctx.actions.declare_output(paths.basename(pkg_name) + "_srcs_list.argsfile") coverage_vars_argsfile = ctx.actions.declare_output(paths.basename(pkg_name) + "_coverage_vars.argsfile") @@ -68,7 +69,7 @@ def build_package( importcfg = make_importcfg(ctx, pkg_name, all_pkgs, with_importmap = True) def f(ctx: AnalysisContext, artifacts, outputs, go_list_out = go_list_out): - go_list = 
_parse_go_list_out(srcs, package_root, artifacts[go_list_out]) + go_list = parse_go_list_out(srcs, package_root, artifacts[go_list_out]) symabis = _symabis(ctx, pkg_name, go_list.s_files, assembler_flags, shared) @@ -97,81 +98,6 @@ def build_package( srcs_list = cmd_args(srcs_list_argsfile, format = "@{}").hidden(srcs), ) -def _go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_root: str, force_disable_cgo: bool): - go_toolchain = ctx.attrs._go_toolchain[GoToolchainInfo] - env = get_toolchain_env_vars(go_toolchain, force_disable_cgo = force_disable_cgo) - env["GO111MODULE"] = "off" - - go_list_out = ctx.actions.declare_output(paths.basename(pkg_name) + "_go_list.json") - - # Create file sructure that `go list` can recognize - # Use copied_dir, because embed doesn't work with symlinks - srcs_dir = ctx.actions.copied_dir( - "__{}_srcs_dir__".format(paths.basename(pkg_name)), - {src.short_path.removeprefix(package_root).lstrip("/"): src for src in srcs}, - ) - tags = go_toolchain.tags + ctx.attrs._tags - go_list_args = [ - go_toolchain.go_list_wrapper, - "-e", - ["--go", go_toolchain.go], - ["--workdir", srcs_dir], - ["--output", go_list_out.as_output()], - "-json=GoFiles,CgoFiles,SFiles,TestGoFiles,XTestGoFiles,EmbedFiles", - ["-tags", ",".join(tags) if tags else []], - ".", - ] - - identifier = paths.basename(pkg_name) - ctx.actions.run(go_list_args, env = env, category = "go_list", identifier = identifier) - - return go_list_out - -GoListOut = record( - go_files = field(list[Artifact], default = []), - cgo_files = field(list[Artifact], default = []), - s_files = field(list[Artifact], default = []), - test_go_files = field(list[Artifact], default = []), - x_test_go_files = field(list[Artifact], default = []), - embed_files = field(list[Artifact], default = []), -) - -def _parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out) -> GoListOut: - go_list = go_list_out.read_json() - go_files, cgo_files, s_files, test_go_files, 
x_test_go_files, embed_files = [], [], [], [], [], [] - - for src in srcs: - # remove package_root prefix from src artifact path to match `go list` outout format - src_path = src.short_path.removeprefix(package_root).lstrip("/") - if src_path in go_list.get("GoFiles", []): - go_files.append(src) - if src_path in go_list.get("CgoFiles", []): - cgo_files.append(src) - if src_path in go_list.get("SFiles", []): - s_files.append(src) - if src_path in go_list.get("TestGoFiles", []): - test_go_files.append(src) - if src_path in go_list.get("XTestGoFiles", []): - x_test_go_files.append(src) - if _any_starts_with(go_list.get("EmbedFiles", []), src_path): - embed_files.append(src) - - return GoListOut( - go_files = go_files, - cgo_files = cgo_files, - s_files = s_files, - test_go_files = test_go_files, - x_test_go_files = x_test_go_files, - embed_files = embed_files, - ) - -def _any_starts_with(files: list[str], path: str): - for file in files: - if paths.starts_with(file, path): - return True - - return False - def _compile( ctx: AnalysisContext, pkg_name: str, From 3df035943a3bce9c3bdec6e1c04d3974e3e8a82a Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Mon, 8 Apr 2024 10:00:54 -0700 Subject: [PATCH 0745/1133] Build cgo packages with package_builder Summary: This should enable build tags for cgo libraries Reviewed By: yarikk Differential Revision: D55810151 fbshipit-source-id: 8d2a6069d49fee27b316f1f63effab251723090a --- prelude/go/cgo_builder.bzl | 10 +++------- prelude/go/cgo_library.bzl | 18 +++--------------- prelude/go/go_list.bzl | 7 +++++-- prelude/go/go_test.bzl | 2 +- prelude/go/package_builder.bzl | 26 ++++++++++++++++++-------- prelude/go/packages.bzl | 1 + 6 files changed, 31 insertions(+), 33 deletions(-) diff --git a/prelude/go/cgo_builder.bzl b/prelude/go/cgo_builder.bzl index 29cf66ac3..855695664 100644 --- a/prelude/go/cgo_builder.bzl +++ b/prelude/go/cgo_builder.bzl @@ -55,11 +55,7 @@ def _cgo( """ Run `cgo` on `.go` sources to generate Go, C, 
and C-Header sources. """ - - # If you change this dir or naming convention, please - # update the corresponding logic in `fbgolist`. - # Otherwise editing and linting for Go will break. - gen_dir = ctx.actions.declare_output("cgo_gen", dir = True) + gen_dir = ctx.actions.declare_output("cgo_gen_tmp", dir = True) go_srcs = [] c_headers = [] @@ -117,9 +113,9 @@ def _cxx_wrapper(ctx: AnalysisContext, own_pre: list[CPreprocessor], inherited_p os = ScriptOs("windows" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "unix"), ) -def build_cgo(ctx: AnalysisContext, cgo_files: list[Artifact], c_files: list[Artifact]) -> (list[Artifact], list[Artifact], Artifact | None): +def build_cgo(ctx: AnalysisContext, cgo_files: list[Artifact], c_files: list[Artifact]) -> (list[Artifact], list[Artifact], Artifact): if len(cgo_files) == 0: - return [], [], None + return [], [], ctx.actions.copied_dir("cgo_gen_tmp", {}) project_root_file = get_project_root_file(ctx) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 450c89a81..f4982f4ab 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -19,12 +19,11 @@ load( "@prelude//utils:utils.bzl", "map_idx", ) -load(":cgo_builder.bzl", "build_cgo") load(":compile.bzl", "GoPkgCompileInfo", "get_inherited_compile_pkgs") load(":coverage.bzl", "GoCoverageMode") load(":link.bzl", "GoPkgLinkInfo", "get_inherited_link_pkgs") load(":package_builder.bzl", "build_package") -load(":packages.bzl", "GoPkg", "go_attr_pkg_name", "merge_pkgs") +load(":packages.bzl", "go_attr_pkg_name", "merge_pkgs") def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: pkg_name = go_attr_pkg_name(ctx) @@ -40,9 +39,6 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: else: fail("unexpected extension: {}".format(src)) - # Generate CGO and C sources. 
- go_gen_files, o_files, gen_dir = build_cgo(ctx, cgo_srcs, cxx_srcs) - shared = ctx.attrs._compile_shared race = ctx.attrs._race coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None @@ -51,23 +47,15 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: compiled_pkg = build_package( ctx, pkg_name, - ctx.attrs.go_srcs, + ctx.attrs.go_srcs + cgo_srcs + cxx_srcs, package_root = ctx.attrs.package_root, deps = ctx.attrs.deps + ctx.attrs.exported_deps, - compiled_objects = o_files, - extra_go_files = go_gen_files, shared = shared, race = race, coverage_mode = coverage_mode, embedcfg = ctx.attrs.embedcfg, ) - # Temporarily hack, it seems like we can update record, so create new one - compiled_pkg = GoPkg( - pkg = compiled_pkg.pkg, - coverage_vars = compiled_pkg.coverage_vars, - ) - pkgs = { pkg_name: compiled_pkg, } @@ -77,7 +65,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: # to work with cgo. And when nearly every FB service client is cgo, # we need to support it well. 
return [ - DefaultInfo(default_output = compiled_pkg.pkg, other_outputs = [gen_dir] if gen_dir else []), + DefaultInfo(default_output = compiled_pkg.pkg, other_outputs = [compiled_pkg.cgo_gen_dir]), GoPkgCompileInfo(pkgs = merge_pkgs([ pkgs, get_inherited_compile_pkgs(ctx.attrs.exported_deps), diff --git a/prelude/go/go_list.bzl b/prelude/go/go_list.bzl index 471a5568e..db1c46396 100644 --- a/prelude/go/go_list.bzl +++ b/prelude/go/go_list.bzl @@ -10,6 +10,7 @@ load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") GoListOut = record( go_files = field(list[Artifact], default = []), + c_files = field(list[Artifact], default = []), cgo_files = field(list[Artifact], default = []), s_files = field(list[Artifact], default = []), test_go_files = field(list[Artifact], default = []), @@ -37,7 +38,7 @@ def go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_r ["--go", go_toolchain.go], ["--workdir", srcs_dir], ["--output", go_list_out.as_output()], - "-json=GoFiles,CgoFiles,SFiles,TestGoFiles,XTestGoFiles,EmbedFiles", + "-json=GoFiles,CgoFiles,CFiles,SFiles,TestGoFiles,XTestGoFiles,EmbedFiles", ["-tags", ",".join(tags) if tags else []], ".", ] @@ -49,7 +50,7 @@ def go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_r def parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out: ArtifactValue) -> GoListOut: go_list = go_list_out.read_json() - go_files, cgo_files, s_files, test_go_files, x_test_go_files, embed_files = [], [], [], [], [], [] + go_files, cgo_files, c_files, s_files, test_go_files, x_test_go_files, embed_files = [], [], [], [], [], [], [] for src in srcs: # remove package_root prefix from src artifact path to match `go list` outout format @@ -58,6 +59,8 @@ def parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out: Arti go_files.append(src) if src_path in go_list.get("CgoFiles", []): cgo_files.append(src) + if src_path in go_list.get("CFiles", []): + c_files.append(src) if 
src_path in go_list.get("SFiles", []): s_files.append(src) if src_path in go_list.get("TestGoFiles", []): diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index fc6d55106..efcd0dd0a 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -103,7 +103,7 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: # Generate a main function which runs the tests and build that into another # package. gen_main = _gen_test_main(ctx, pkg_name, coverage_mode, coverage_vars, tests.srcs_list) - main = build_package(ctx, "main", [gen_main], package_root = "", pkgs = pkgs, coverage_mode = coverage_mode, race = ctx.attrs._race) + main = build_package(ctx, "main", [gen_main], package_root = "", pkgs = pkgs, coverage_mode = coverage_mode, race = ctx.attrs._race, cgo_gen_dir_name = "cgo_gen_test_main") # Link the above into a Go binary. (bin, runtime_files, external_debug_info) = link( diff --git a/prelude/go/package_builder.bzl b/prelude/go/package_builder.bzl index ec90171d4..08493aeca 100644 --- a/prelude/go/package_builder.bzl +++ b/prelude/go/package_builder.bzl @@ -7,6 +7,7 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//utils:utils.bzl", "dedupe_by_value") +load(":cgo_builder.bzl", "build_cgo") load(":compile.bzl", "get_inherited_compile_pkgs", "infer_package_root") load( ":coverage.bzl", @@ -23,9 +24,6 @@ def build_package( package_root: str | None, pkgs: dict[str, Artifact] = {}, deps: list[Dependency] = [], - compiled_objects: list[Artifact] = [], - # hack: extra go files will bypass filtration to enable cgo_library migration - extra_go_files: list[Artifact] = [], compiler_flags: list[str] = [], assembler_flags: list[str] = [], shared: bool = False, @@ -33,16 +31,22 @@ def build_package( coverage_mode: GoCoverageMode | None = None, embedcfg: Artifact | None = None, tests: bool = False, - force_disable_cgo: bool = False) -> GoPkg: + force_disable_cgo: bool = False, + # If you change this dir or naming convention, please + # update the 
corresponding logic in `fbgolist`. + # Otherwise editing and linting for Go will break. + cgo_gen_dir_name: str = "cgo_gen") -> GoPkg: if race and coverage_mode not in [None, GoCoverageMode("atomic")]: fail("`coverage_mode` must be `atomic` when `race=True`") out = ctx.actions.declare_output(paths.basename(pkg_name) + ".a") + cgo_gen_dir = ctx.actions.declare_output(cgo_gen_dir_name, dir = True) + srcs = dedupe_by_value(srcs) has_go_files = False - for src in (srcs + extra_go_files): + for src in srcs: if src.extension == ".go": has_go_files = True break @@ -52,6 +56,7 @@ def build_package( pkg = ctx.actions.write(out.as_output(), ""), coverage_vars = cmd_args(), srcs_list = cmd_args(), + cgo_gen_dir = ctx.actions.copied_dir(cgo_gen_dir.as_output(), {}), ) package_root = package_root if package_root != None else infer_package_root(srcs) @@ -60,7 +65,7 @@ def build_package( srcs_list_argsfile = ctx.actions.declare_output(paths.basename(pkg_name) + "_srcs_list.argsfile") coverage_vars_argsfile = ctx.actions.declare_output(paths.basename(pkg_name) + "_coverage_vars.argsfile") - dynamic_outputs = [out, srcs_list_argsfile, coverage_vars_argsfile] + dynamic_outputs = [out, srcs_list_argsfile, coverage_vars_argsfile, cgo_gen_dir] all_pkgs = merge_pkgs([ pkgs, @@ -73,7 +78,11 @@ def build_package( symabis = _symabis(ctx, pkg_name, go_list.s_files, assembler_flags, shared) - go_files = go_list.go_files + extra_go_files + # Generate CGO and C sources. 
+ cgo_go_files, cgo_o_files, cgo_gen_tmp_dir = build_cgo(ctx, go_list.cgo_files, go_list.c_files) + ctx.actions.copy_dir(outputs[cgo_gen_dir], cgo_gen_tmp_dir) + + go_files = go_list.go_files + cgo_go_files src_list_for_argsfile = go_files + (go_list.test_go_files + go_list.x_test_go_files if tests else []) ctx.actions.write(outputs[srcs_list_argsfile], cmd_args(src_list_for_argsfile, "")) @@ -86,7 +95,7 @@ def build_package( asm_o_files = _asssembly(ctx, pkg_name, go_list.s_files, asmhdr, assembler_flags, shared) - pkg_file = _pack(ctx, pkg_name, go_a_file, compiled_objects + asm_o_files) + pkg_file = _pack(ctx, pkg_name, go_a_file, cgo_o_files + asm_o_files) ctx.actions.copy_file(outputs[out], pkg_file) @@ -96,6 +105,7 @@ def build_package( pkg = out, coverage_vars = cmd_args(coverage_vars_argsfile, format = "@{}"), srcs_list = cmd_args(srcs_list_argsfile, format = "@{}").hidden(srcs), + cgo_gen_dir = cgo_gen_dir, ) def _compile( diff --git a/prelude/go/packages.bzl b/prelude/go/packages.bzl index 5732dfcb4..67bfa892d 100644 --- a/prelude/go/packages.bzl +++ b/prelude/go/packages.bzl @@ -12,6 +12,7 @@ GoPkg = record( pkg = field(Artifact), coverage_vars = field(cmd_args | None, default = None), srcs_list = field(cmd_args | None, default = None), + cgo_gen_dir = field(Artifact), ) GoStdlib = provider( From 4d0004560589fe9290b57e1c6fe540d77146c3fc Mon Sep 17 00:00:00 2001 From: David Barsky Date: Mon, 8 Apr 2024 14:43:19 -0700 Subject: [PATCH 0746/1133] add `resolve_owning_buildfile` to rust-analyzer/resolve_deps.bxl Summary: Added a BXL command that fetches all Rust targets within a buildfile. 
Reviewed By: shayne-fletcher Differential Revision: D55874175 fbshipit-source-id: 7b68801e564326b2d124b075d822888788b953bf --- prelude/rust/rust-analyzer/resolve_deps.bxl | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/prelude/rust/rust-analyzer/resolve_deps.bxl b/prelude/rust/rust-analyzer/resolve_deps.bxl index 5bddc146a..622d91293 100644 --- a/prelude/rust/rust-analyzer/resolve_deps.bxl +++ b/prelude/rust/rust-analyzer/resolve_deps.bxl @@ -234,9 +234,30 @@ def expand_and_resolve_impl(ctx): "resolved_deps": resolved_deps, }) +def resolve_owning_buildfile_impl(ctx): + owners = ctx.uquery().owner(ctx.cli_args.files) + out = {} + for owner in owners: + buildfile = "{}".format(owner.buildfile_path) + targets_in_buildfile = ctx.uquery().targets_in_buildfile(buildfile) + rust_targets = ctx.uquery().kind("^(rust_binary|rust_library|rust_test)$", targets_in_buildfile) + targets = [] + for target in rust_targets: + targets.append(target.label) + out[buildfile] = targets + + ctx.output.print_json(out) + expand_and_resolve = bxl_main( impl = expand_and_resolve_impl, cli_args = { "targets": cli_args.list(cli_args.target_expr()), }, ) + +resolve_owning_buildfile = bxl_main( + impl = resolve_owning_buildfile_impl, + cli_args = { + "files": cli_args.list(cli_args.string()), + }, +) From 9176137293cf0cc1c245d3abbd7496d529010258 Mon Sep 17 00:00:00 2001 From: Neil Mitchell Date: Mon, 8 Apr 2024 15:52:05 -0700 Subject: [PATCH 0747/1133] Fix up some documentation Summary: The file moved, so update the documentation to match. 
Reviewed By: JakobDegen Differential Revision: D55888895 fbshipit-source-id: ef649addee5458ff205c4a04b25867af68f722c0 --- prelude/toolchains/cxx/zig/defs.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/toolchains/cxx/zig/defs.bzl b/prelude/toolchains/cxx/zig/defs.bzl index bc6bfcccb..4d3898047 100644 --- a/prelude/toolchains/cxx/zig/defs.bzl +++ b/prelude/toolchains/cxx/zig/defs.bzl @@ -32,7 +32,7 @@ the toolchain like so: `toolchains//BUILD` ```bzl -load("@prelude//toolchains/cxx:zig.bzl", "download_zig_distribution", "cxx_zig_toolchain") +load("@prelude//toolchains/cxx/zig:defs.bzl", "download_zig_distribution", "cxx_zig_toolchain") download_zig_distribution( name = "zig", @@ -50,7 +50,7 @@ To define toolchains for multiple platforms and configure cross-compilation you can configure the toolchain like so: ```bzl -load("@prelude//toolchains/cxx:zig.bzl", "download_zig_distribution", "cxx_zig_toolchain") +load("@prelude//toolchains/cxx/zig:defs.bzl", "download_zig_distribution", "cxx_zig_toolchain") download_zig_distribution( name = "zig-x86_64-linux", From dfd37b253417b13dc2703f1ea4f0cebbea9be787 Mon Sep 17 00:00:00 2001 From: Ruslan Sayfutdinov Date: Mon, 8 Apr 2024 16:53:36 -0700 Subject: [PATCH 0748/1133] buck2/prelude: support remote_execution_dependencies in genrule Summary: Allow to pass `remote_execution_dependencies` from genrule. 
Reviewed By: stepancheg Differential Revision: D55644204 fbshipit-source-id: 025b4ef12dec2ce28563ab86c819862419d26966 --- prelude/genrule.bzl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/prelude/genrule.bzl b/prelude/genrule.bzl index 5018f50a7..dc92072d9 100644 --- a/prelude/genrule.bzl +++ b/prelude/genrule.bzl @@ -81,6 +81,7 @@ def genrule_attributes() -> dict[str, Attr]: "metadata_env_var": attrs.option(attrs.string(), default = None), "metadata_path": attrs.option(attrs.string(), default = None), "no_outputs_cleanup": attrs.bool(default = False), + "remote_execution_dependencies": attrs.list(attrs.dict(key = attrs.string(), value = attrs.string()), default = []), "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), "_genrule_toolchain": attrs.default_only(attrs.toolchain_dep(default = "toolchains//:genrule", providers = [GenruleToolchainInfo])), } @@ -327,6 +328,8 @@ def process_genrule( metadata_args["metadata_env_var"] = ctx.attrs.metadata_env_var if ctx.attrs.metadata_path: metadata_args["metadata_path"] = ctx.attrs.metadata_path + if ctx.attrs.remote_execution_dependencies: + metadata_args["remote_execution_dependencies"] = ctx.attrs.remote_execution_dependencies category = "genrule" if ctx.attrs.type != None: From 74abc1fa8b2dcc787b01c6fddd7db1bba6131be8 Mon Sep 17 00:00:00 2001 From: Ian Levesque Date: Tue, 9 Apr 2024 01:27:04 -0700 Subject: [PATCH 0749/1133] Add use_derived_apk option for android_bundle Summary: This new option can be used to output the universal derived APK as the default output from android_bundle rules rather than the AAB itself. The AAB is still available as a named output in this case. 
Reviewed By: IanChilds Differential Revision: D54979768 fbshipit-source-id: ce0edb05ae1c17614caaab31af3ba7a481f34060 --- prelude/android/android.bzl | 1 + prelude/android/android_bundle.bzl | 20 ++++++++++++- prelude/android/android_toolchain.bzl | 2 ++ prelude/android/bundletool_util.bzl | 42 +++++++++++++++++++++++++++ 4 files changed, 64 insertions(+), 1 deletion(-) create mode 100644 prelude/android/bundletool_util.bzl diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 380b653da..3b4a4affb 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -127,6 +127,7 @@ extra_attributes = { "native_library_merge_code_generator": attrs.option(attrs.exec_dep(), default = None), "native_library_merge_glue": attrs.option(attrs.split_transition_dep(cfg = cpu_split_transition), default = None), "native_library_merge_linker_args": attrs.option(attrs.dict(key = attrs.string(), value = attrs.list(attrs.arg())), default = None), + "use_derived_apk": attrs.bool(default = False), "_android_toolchain": toolchains_common.android(), "_cxx_toolchain": attrs.split_transition_dep(cfg = cpu_split_transition, default = "toolchains//:android-hack"), "_dex_toolchain": toolchains_common.dex(), diff --git a/prelude/android/android_bundle.bzl b/prelude/android/android_bundle.bzl index 6484af2d5..b224f9bef 100644 --- a/prelude/android/android_bundle.bzl +++ b/prelude/android/android_bundle.bzl @@ -9,6 +9,8 @@ load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_binary.bzl", "get_binary_info") load("@prelude//android:android_providers.bzl", "AndroidAabInfo", "AndroidBinaryNativeLibsInfo", "AndroidBinaryResourcesInfo", "DexFilesInfo") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//android:bundletool_util.bzl", "derive_universal_apk") +load("@prelude//java:java_providers.bzl", "KeystoreInfo") load("@prelude//java/utils:java_more_utils.bzl", 
"get_path_separator_for_exec_os") def android_bundle_impl(ctx: AnalysisContext) -> list[Provider]: @@ -25,9 +27,25 @@ def android_bundle_impl(ctx: AnalysisContext) -> list[Provider]: validation_deps_outputs = get_validation_deps_outputs(ctx), ) + sub_targets = {} + sub_targets.update(android_binary_info.sub_targets) + if ctx.attrs.use_derived_apk: + keystore = ctx.attrs.keystore[KeystoreInfo] + default_output = derive_universal_apk( + ctx, + android_toolchain = ctx.attrs._android_toolchain[AndroidToolchainInfo], + app_bundle = output_bundle, + keystore = keystore, + ) + sub_targets["aab"] = [DefaultInfo( + default_outputs = [output_bundle], + )] + else: + default_output = output_bundle + java_packaging_deps = android_binary_info.java_packaging_deps return [ - DefaultInfo(default_output = output_bundle, other_outputs = android_binary_info.materialized_artifacts, sub_targets = android_binary_info.sub_targets), + DefaultInfo(default_output = default_output, other_outputs = android_binary_info.materialized_artifacts, sub_targets = sub_targets), AndroidAabInfo(aab = output_bundle, manifest = android_binary_info.resources_info.manifest, materialized_artifacts = android_binary_info.materialized_artifacts), TemplatePlaceholderInfo( keyed_variables = { diff --git a/prelude/android/android_toolchain.bzl b/prelude/android/android_toolchain.bzl index 89059e948..663892d88 100644 --- a/prelude/android/android_toolchain.bzl +++ b/prelude/android/android_toolchain.bzl @@ -22,6 +22,7 @@ AndroidToolchainInfo = provider( "apk_builder": provider_field(typing.Any, default = None), "apk_module_graph": provider_field(typing.Any, default = None), "app_without_resources_stub": provider_field(typing.Any, default = None), + "bundle_apks_builder": provider_field(typing.Any, default = None), "bundle_builder": provider_field(typing.Any, default = None), "combine_native_library_dirs": provider_field(typing.Any, default = None), "compress_libraries": provider_field(typing.Any, default = None), @@ 
-50,6 +51,7 @@ AndroidToolchainInfo = provider( "mini_aapt": provider_field(typing.Any, default = None), "native_libs_as_assets_metadata": provider_field(typing.Any, default = None), "optimized_proguard_config": provider_field(typing.Any, default = None), + "p7zip": provider_field(typing.Any, default = None), "package_meta_inf_version_files": provider_field(typing.Any, default = None), "package_strings_as_assets": provider_field(typing.Any, default = None), "prebuilt_aar_resources_have_low_priority": provider_field(typing.Any, default = None), diff --git a/prelude/android/bundletool_util.bzl b/prelude/android/bundletool_util.bzl new file mode 100644 index 000000000..9105e1ccb --- /dev/null +++ b/prelude/android/bundletool_util.bzl @@ -0,0 +1,42 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# +# This source code is licensed under both the MIT license found in the +# LICENSE-MIT file in the root directory of this source tree and the Apache +# License, Version 2.0 found in the LICENSE-APACHE file in the root directory +# of this source tree. 
+ +load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//java:java_providers.bzl", "KeystoreInfo") # @unused used as type + +def derive_universal_apk( + ctx: AnalysisContext, + android_toolchain: AndroidToolchainInfo, + app_bundle: Artifact, + keystore: [KeystoreInfo, None]) -> Artifact: + output_apk = ctx.actions.declare_output("universal.apk") + + bundle_apks_builder_args = cmd_args([ + android_toolchain.bundle_apks_builder[RunInfo], + "--input-bundle", + app_bundle, + "--p7zip", + android_toolchain.p7zip, + "--aapt2", + android_toolchain.aapt2, + "--zipalign", + android_toolchain.zipalign[RunInfo], + "--output-apk", + output_apk.as_output(), + ]) + + if keystore: + bundle_apks_builder_args.add(cmd_args([ + "--keystore", + keystore.store, + "--keystore-properties", + keystore.properties, + ])) + + ctx.actions.run(bundle_apks_builder_args, category = "bundle_build", identifier = "build_universal_apk") + + return output_apk From 1aacb63bb955f94222828c4236d42b7fcdea3246 Mon Sep 17 00:00:00 2001 From: Neil Mitchell Date: Tue, 9 Apr 2024 01:38:52 -0700 Subject: [PATCH 0750/1133] Make zip use the standard _ convention for impl Summary: We don't want the implementation available outside the file, so hide it. 
Reviewed By: JakobDegen Differential Revision: D55893388 fbshipit-source-id: 188e1397f94a449e723b075d746159da968cdf2c --- prelude/zip_file/zip_file.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/zip_file/zip_file.bzl b/prelude/zip_file/zip_file.bzl index 9876039b8..3cf3c8017 100644 --- a/prelude/zip_file/zip_file.bzl +++ b/prelude/zip_file/zip_file.bzl @@ -8,7 +8,7 @@ load("@prelude//decls/toolchains_common.bzl", "toolchains_common") load(":zip_file_toolchain.bzl", "ZipFileToolchainInfo") -def zip_file_impl(ctx: AnalysisContext) -> list[Provider]: +def _zip_file_impl(ctx: AnalysisContext) -> list[Provider]: """ zip_file() rule implementation @@ -64,7 +64,7 @@ def zip_file_impl(ctx: AnalysisContext) -> list[Provider]: return [DefaultInfo(default_output = output)] implemented_rules = { - "zip_file": zip_file_impl, + "zip_file": _zip_file_impl, } extra_attributes = { From 368e2e3a68642efb8251663c2e6fc1aacefd5701 Mon Sep 17 00:00:00 2001 From: Luisa Vasquez Gomez Date: Tue, 9 Apr 2024 02:43:48 -0700 Subject: [PATCH 0751/1133] buck2: docs: refactor string macros docs Summary: So seems these docs already existed but were kind of unfindable, moving them to a more visible place now that these are not genrule specific. Also removed the maven_coords since nobody seems to be using that at all. 
Motivation: https://fb.workplace.com/groups/buck2users/permalink/3630651430524545/ (today) https://fb.workplace.com/groups/buck2users/permalink/3627648377491517/ (last week) Reviewed By: iguridi Differential Revision: D55877028 fbshipit-source-id: 359d77de85bc809dcf780d902a236c7bc831af30 --- prelude/decls/genrule_common.bzl | 64 -------------------------------- 1 file changed, 64 deletions(-) diff --git a/prelude/decls/genrule_common.bzl b/prelude/decls/genrule_common.bzl index aa142a558..53a98a666 100644 --- a/prelude/decls/genrule_common.bzl +++ b/prelude/decls/genrule_common.bzl @@ -88,70 +88,6 @@ def _cmd_arg(): A temporary directory which can be used for intermediate results and will not be bundled into the output. - - - ##### String parameter macros - - It is also possible to expand references to other rules within the - `cmd`, using builtin `string parameter macros` - . - All build rules expanded in the command are automatically considered - to be dependencies of the `genrule()`. - - - Note that the paths returned by these macros are *relative* paths. Using - relative paths ensures that your builds are *hermetic*, that - is, they are reproducible across different machine environments. - - - Additionally, if you embed these paths in a shell script, you should - execute that script using the `sh\\_binary()`rule and include - the targets for these paths in the `resources` argument of - that `sh_binary` rule. These are the same targets that you - pass to the string parameter macros. - - - `$(classpath //path/to:target)` - - - Expands to the transitive classpath of the specified build - rule, provided that the rule has a Java classpath. If the rule - does not have (or contribute to) a classpath, then an - exception is thrown and the build breaks. - - - `$(exe //path/to:target)` - - - Expands a build rule that results in an executable to the - commands necessary to run that executable. 
For example, - a `java_binary()` might expand to a call - to `java -jar path/to/target.jar` . Files that are - executable (perhaps generated by a `genrule()`) - are also expanded. If the build rule does not generate an - executable output, then an exception is thrown and the build - breaks. - - - `$(location //path/to:target)` - - - Expands to the location of the output of the specified build - rule. This means that you can refer to the output without - needing to be aware of how Buck is storing data on the disk - mid-build. - - - `$(maven_coords //path/to:target)` - - - Expands to the Maven coordinates for the specified build rule. - This allows you to access the Maven coordinates for - Maven-aware build rules. The format of the expansion is: - - ``` - - ``` """), } From f5e30e990f00f7fb6ee3c34e33ced2098e88cec6 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Tue, 9 Apr 2024 08:41:10 -0700 Subject: [PATCH 0752/1133] Fix: compile C/CXX files when build cgo_library Summary: I've forgot to include C/CXX on D55810151 Reviewed By: lcpoletto Differential Revision: D55918589 fbshipit-source-id: 2b09c90f7c03ca2d5da419c2ae98dabd2b8d07ac --- prelude/go/go_list.bzl | 9 +++++++-- prelude/go/package_builder.bzl | 2 +- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/prelude/go/go_list.bzl b/prelude/go/go_list.bzl index db1c46396..2b133f220 100644 --- a/prelude/go/go_list.bzl +++ b/prelude/go/go_list.bzl @@ -11,6 +11,7 @@ load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") GoListOut = record( go_files = field(list[Artifact], default = []), c_files = field(list[Artifact], default = []), + cxx_files = field(list[Artifact], default = []), cgo_files = field(list[Artifact], default = []), s_files = field(list[Artifact], default = []), test_go_files = field(list[Artifact], default = []), @@ -38,7 +39,7 @@ def go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_r ["--go", go_toolchain.go], ["--workdir", srcs_dir], 
["--output", go_list_out.as_output()], - "-json=GoFiles,CgoFiles,CFiles,SFiles,TestGoFiles,XTestGoFiles,EmbedFiles", + "-json=GoFiles,CgoFiles,CFiles,CXXFiles,SFiles,TestGoFiles,XTestGoFiles,EmbedFiles", ["-tags", ",".join(tags) if tags else []], ".", ] @@ -50,7 +51,7 @@ def go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_r def parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out: ArtifactValue) -> GoListOut: go_list = go_list_out.read_json() - go_files, cgo_files, c_files, s_files, test_go_files, x_test_go_files, embed_files = [], [], [], [], [], [], [] + go_files, cgo_files, c_files, cxx_files, s_files, test_go_files, x_test_go_files, embed_files = [], [], [], [], [], [], [], [] for src in srcs: # remove package_root prefix from src artifact path to match `go list` outout format @@ -61,6 +62,8 @@ def parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out: Arti cgo_files.append(src) if src_path in go_list.get("CFiles", []): c_files.append(src) + if src_path in go_list.get("CXXFiles", []): + cxx_files.append(src) if src_path in go_list.get("SFiles", []): s_files.append(src) if src_path in go_list.get("TestGoFiles", []): @@ -72,6 +75,8 @@ def parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out: Arti return GoListOut( go_files = go_files, + c_files = c_files, + cxx_files = cxx_files, cgo_files = cgo_files, s_files = s_files, test_go_files = test_go_files, diff --git a/prelude/go/package_builder.bzl b/prelude/go/package_builder.bzl index 08493aeca..34175026c 100644 --- a/prelude/go/package_builder.bzl +++ b/prelude/go/package_builder.bzl @@ -79,7 +79,7 @@ def build_package( symabis = _symabis(ctx, pkg_name, go_list.s_files, assembler_flags, shared) # Generate CGO and C sources. 
- cgo_go_files, cgo_o_files, cgo_gen_tmp_dir = build_cgo(ctx, go_list.cgo_files, go_list.c_files) + cgo_go_files, cgo_o_files, cgo_gen_tmp_dir = build_cgo(ctx, go_list.cgo_files, go_list.c_files + go_list.cxx_files) ctx.actions.copy_dir(outputs[cgo_gen_dir], cgo_gen_tmp_dir) go_files = go_list.go_files + cgo_go_files From e45de2b0c466e7b388e47942de11742905262b4a Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Tue, 9 Apr 2024 09:03:11 -0700 Subject: [PATCH 0753/1133] Simplify extra dep handling (take 2) Summary: A lot of the extra dep/runtime files/debuginfo handling would needlessly propagate path information separately, just to throw it away. Reviewed By: dtolnay Differential Revision: D55317139 fbshipit-source-id: c3b7f1bd758455282e1957986e0cbd1fa694c15c --- prelude/python/make_py_package.bzl | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 61db2f5cf..5f1d5fad3 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -42,7 +42,7 @@ PexModules = record( # providers. 
PexProviders = record( default_output = field(Artifact), - other_outputs = list[(ArgLike, str)], + other_outputs = list[ArgLike], other_outputs_prefix = str | None, hidden_resources = list[ArgLike], sub_targets = dict[str, list[Provider]], @@ -60,7 +60,7 @@ def make_py_package_providers( def make_default_info(pex: PexProviders) -> Provider: return DefaultInfo( default_output = pex.default_output, - other_outputs = [a for a, _ in pex.other_outputs] + pex.hidden_resources, + other_outputs = pex.other_outputs + pex.hidden_resources, sub_targets = pex.sub_targets, ) @@ -207,7 +207,7 @@ def _make_py_package_impl( shared_libraries: bool, preload_libraries: cmd_args, common_modules_args: cmd_args, - dep_artifacts: list[(ArgLike, str)], + dep_artifacts: list[ArgLike], debug_artifacts: list[(ArgLike, str)], main: EntryPoint, hidden_resources: list[ArgLike] | None, @@ -274,7 +274,7 @@ def _make_py_package_impl( # For inplace builds add local artifacts to outputs so they get properly materialized runtime_files.extend(dep_artifacts) - runtime_files.append((symlink_tree_path, symlink_tree_path.short_path)) + runtime_files.append(symlink_tree_path) # For standalone builds, or builds setting make_py_package we generate args for calling make_par.py if standalone or make_py_package_cmd != None: @@ -329,7 +329,7 @@ def _make_py_package_impl( other_outputs_prefix = symlink_tree_path.short_path if symlink_tree_path != None else None, hidden_resources = hidden_resources, sub_targets = {}, - run_cmd = cmd_args(run_args).hidden([a for a, _ in runtime_files] + hidden_resources), + run_cmd = cmd_args(run_args).hidden(runtime_files + hidden_resources), ) def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[(ArgLike, str)]) -> list[Provider]: @@ -388,7 +388,7 @@ def _pex_modules_common_args( ctx: AnalysisContext, pex_modules: PexModules, extra_manifests: list[ArgLike], - shared_libraries: dict[str, LinkedObject]) -> (cmd_args, list[(ArgLike, str)], list[(ArgLike, str)]): + 
shared_libraries: dict[str, LinkedObject]) -> (cmd_args, list[ArgLike], list[(ArgLike, str)]): srcs = [] src_artifacts = [] deps = [] @@ -408,9 +408,9 @@ def _pex_modules_common_args( if extra_manifests: srcs.extend(extra_manifests) - deps.extend(src_artifacts) + deps.extend([a[0] for a in src_artifacts]) resources = pex_modules.manifests.resource_manifests() - deps.extend(pex_modules.manifests.resource_artifacts_with_paths()) + deps.extend([a[0] for a in pex_modules.manifests.resource_artifacts_with_paths()]) src_manifests_path = ctx.actions.write( "__src_manifests.txt", @@ -471,7 +471,7 @@ def _pex_modules_common_args( debug_artifacts.extend(dwp) - deps.extend([(lib.output, name) for name, lib in shared_libraries.items()]) + deps.extend([lib.output for lib in shared_libraries.values()]) external_debug_info = project_artifacts( ctx.actions, @@ -486,7 +486,7 @@ def _pex_modules_common_args( def _pex_modules_args( ctx: AnalysisContext, common_args: cmd_args, - dep_artifacts: list[(ArgLike, str)], + dep_artifacts: list[ArgLike], debug_artifacts: list[(ArgLike, str)], symlink_tree_path: Artifact | None, manifest_module: ArgLike | None, @@ -507,7 +507,7 @@ def _pex_modules_args( if pex_modules.compile: pyc_mode = PycInvalidationMode("UNCHECKED_HASH") if symlink_tree_path == None else PycInvalidationMode("CHECKED_HASH") bytecode_manifests = pex_modules.manifests.bytecode_manifests(pyc_mode) - dep_artifacts.extend(pex_modules.manifests.bytecode_artifacts_with_paths(pyc_mode)) + dep_artifacts.extend([a[0] for a in pex_modules.manifests.bytecode_artifacts_with_paths(pyc_mode)]) bytecode_manifests_path = ctx.actions.write( "__bytecode_manifests{}.txt".format(output_suffix), @@ -524,7 +524,7 @@ def _pex_modules_args( else: # Accumulate all the artifacts we depend on. Only add them to the command # if we are not going to create symlinks. 
- cmd.hidden([a for a, _ in dep_artifacts]) + cmd.hidden(dep_artifacts) cmd.hidden([a for a, _ in debug_artifacts]) From a105447434bec992c627928c6befad0d2d87158a Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Tue, 9 Apr 2024 11:47:25 -0700 Subject: [PATCH 0754/1133] Fix adding `resources` to wheel Summary: Resources are packaged as a tuple. Previously, the code would just crash when it encountered a resource. Reviewed By: rsdcastro Differential Revision: D55926733 fbshipit-source-id: c3eade2f1f15b19153c0ad6abd639340d86c4208 --- prelude/python/python_wheel.bzl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/python/python_wheel.bzl b/prelude/python/python_wheel.bzl index 635e990bd..8ff19f3ed 100644 --- a/prelude/python/python_wheel.bzl +++ b/prelude/python/python_wheel.bzl @@ -89,7 +89,8 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: if manifests.srcs != None: srcs.append(manifests.srcs) if manifests.resources != None: - srcs.append(manifests.resources) + expect(not manifests.resources[1]) + srcs.append(manifests.resources[0]) if manifests.extensions != None: toolchain_info = get_cxx_toolchain_info(ctx) items = manifests.extensions.items() From 60d7aa39584a395c899db5188825c0ca2356776d Mon Sep 17 00:00:00 2001 From: Scott Cao Date: Tue, 9 Apr 2024 12:15:49 -0700 Subject: [PATCH 0755/1133] Delete non-toolchain rule cxx_toolchain_override a toolchain rule Summary: We currently have two variants of `cxx_toolchain_override` rule, the original one that's not a proper toolchain rule and `cxx_toolchain_override_inheriting_target_platform` that is a toolchain rule. Previous diffs in the stack removed all usages of non-toolchain rule `cxx_toolchain_override` rule, so now we can delete the non-toolchain rule variant. This diff does the following: 1. Delete non-toolchain rule `cxx_toolchain_override` 2. Rename `cxx_toolchain_override_inheriting_target_platform` rule to `cxx_toolchain_override`. 3. 
Remove `inherit_target_platform` attribute from the `cxx_toolchain_override` macros and targets. Reviewed By: iguridi Differential Revision: D55829562 fbshipit-source-id: c4a80d3c3f3f17841299f31b1640634a481bfdde --- prelude/cxx/user/cxx_toolchain_override.bzl | 6 ------ prelude/native.bzl | 8 ++------ prelude/user/all.bzl | 3 +-- 3 files changed, 3 insertions(+), 14 deletions(-) diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl index f8dbb5bff..0abe9c068 100644 --- a/prelude/cxx/user/cxx_toolchain_override.bzl +++ b/prelude/cxx/user/cxx_toolchain_override.bzl @@ -229,12 +229,6 @@ def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): cxx_toolchain_override_registration_spec = RuleRegistrationSpec( name = "cxx_toolchain_override", impl = _cxx_toolchain_override, - attrs = _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule = False), -) - -cxx_toolchain_override_inheriting_target_platform_registration_spec = RuleRegistrationSpec( - name = "cxx_toolchain_override_inheriting_target_platform", - impl = _cxx_toolchain_override, attrs = _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule = True), is_toolchain_rule = True, ) diff --git a/prelude/native.bzl b/prelude/native.bzl index b05f61cb1..84a4825df 100644 --- a/prelude/native.bzl +++ b/prelude/native.bzl @@ -399,13 +399,9 @@ def _cxx_toolchain_macro_stub(inherit_target_platform = False, **kwargs): **kwargs ) -def _cxx_toolchain_override_macro_stub(inherit_target_platform = False, **kwargs): - if inherit_target_platform: - rule = _user_rules["cxx_toolchain_override_inheriting_target_platform"] - else: - rule = _user_rules["cxx_toolchain_override"] +def _cxx_toolchain_override_macro_stub(**kwargs): cxx_toolchain_macro_impl( - cxx_toolchain_rule = rule, + cxx_toolchain_rule = _user_rules["cxx_toolchain_override"], **kwargs ) diff --git a/prelude/user/all.bzl b/prelude/user/all.bzl index 
f9bccdf2c..f3488918b 100644 --- a/prelude/user/all.bzl +++ b/prelude/user/all.bzl @@ -13,7 +13,7 @@ load("@prelude//apple/user:apple_toolchain_override.bzl", _apple_toolchain_overr load("@prelude//apple/user:apple_tools.bzl", _apple_tools_spec = "registration_spec") load("@prelude//apple/user:apple_watchos_bundle.bzl", _apple_watchos_bundle_spec = "registration_spec") load("@prelude//apple/user:resource_group_map.bzl", _resource_group_map_spec = "registration_spec") -load("@prelude//cxx/user:cxx_toolchain_override.bzl", _cxx_toolchain_override_inheriting_target_platform_spec = "cxx_toolchain_override_inheriting_target_platform_registration_spec", _cxx_toolchain_override_spec = "cxx_toolchain_override_registration_spec") +load("@prelude//cxx/user:cxx_toolchain_override.bzl", _cxx_toolchain_override_spec = "cxx_toolchain_override_registration_spec") load("@prelude//cxx/user:link_group_map.bzl", _link_group_map_spec = "registration_spec") load(":cxx_headers_bundle.bzl", _cxx_headers_bundle_spec = "registration_spec") load(":extract_archive.bzl", _extract_archive_spec = "registration_spec") @@ -30,7 +30,6 @@ _all_specs = [ _apple_toolchain_override_spec, _cxx_headers_bundle_spec, _cxx_toolchain_override_spec, - _cxx_toolchain_override_inheriting_target_platform_spec, _apple_simulators_spec, _write_file_spec, _mockingbird_mock_spec, From 993efab85f36d25d076b29ffba7eb62bff5dbd2a Mon Sep 17 00:00:00 2001 From: Scott Cao Date: Tue, 9 Apr 2024 12:15:49 -0700 Subject: [PATCH 0756/1133] Clean up inheriting target platform attrs from cxx_toolchain_override Summary: Cleanup diff as a follow-up to the previous diff. 
Deletes `_cxx_toolchain_override_inheriting_target_platform_attrs` because it is now only invoked with is_toolchain_rule=True Reviewed By: iguridi Differential Revision: D55829670 fbshipit-source-id: bea72f6396ff37f2a24b9c19b86af3af241c599d --- prelude/cxx/user/cxx_toolchain_override.bzl | 45 +++++++++------------ 1 file changed, 20 insertions(+), 25 deletions(-) diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl index 0abe9c068..de7519b4f 100644 --- a/prelude/cxx/user/cxx_toolchain_override.bzl +++ b/prelude/cxx/user/cxx_toolchain_override.bzl @@ -166,27 +166,27 @@ def _cxx_toolchain_override(ctx): split_debug_mode = SplitDebugMode(value_or(ctx.attrs.split_debug_mode, base_toolchain.split_debug_mode.value)), ) -def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): - dep_type = attrs.exec_dep if is_toolchain_rule else attrs.dep - base_dep_type = attrs.toolchain_dep if is_toolchain_rule else attrs.dep - return { +cxx_toolchain_override_registration_spec = RuleRegistrationSpec( + name = "cxx_toolchain_override", + impl = _cxx_toolchain_override, + attrs = { "additional_c_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "additional_cxx_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "archive_objects_locally": attrs.option(attrs.bool(), default = None), - "archiver": attrs.option(dep_type(providers = [RunInfo]), default = None), + "archiver": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "archiver_supports_argfiles": attrs.option(attrs.bool(), default = None), - "as_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), + "as_compiler": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "as_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "as_preprocessor_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "asm_compiler": 
attrs.option(dep_type(providers = [RunInfo]), default = None), + "asm_compiler": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "asm_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "asm_preprocessor_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "base": base_dep_type(providers = [CxxToolchainInfo]), + "base": attrs.toolchain_dep(providers = [CxxToolchainInfo]), "bolt_enabled": attrs.option(attrs.bool(), default = None), - "c_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), + "c_compiler": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "c_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "c_preprocessor_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "cxx_compiler": attrs.option(dep_type(providers = [RunInfo]), default = None), + "cxx_compiler": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "cxx_compiler_flags": attrs.option(attrs.list(attrs.arg()), default = None), "cxx_preprocessor_flags": attrs.option(attrs.list(attrs.arg()), default = None), "force_full_hybrid_if_capable": attrs.option(attrs.bool(), default = None), @@ -196,22 +196,22 @@ def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): "link_libraries_locally": attrs.option(attrs.bool(), default = None), "link_style": attrs.option(attrs.enum(LinkStyle.values()), default = None), "link_weight": attrs.option(attrs.int(), default = None), - "linker": attrs.option(dep_type(providers = [RunInfo]), default = None), + "linker": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "linker_flags": attrs.option(attrs.list(attrs.arg()), default = None), "linker_type": attrs.option(attrs.enum(LinkerType), default = None), - "llvm_link": attrs.option(dep_type(providers = [RunInfo]), default = None), + "llvm_link": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "lto_mode": 
attrs.option(attrs.enum(LtoMode.values()), default = None), - "mk_comp_db": attrs.option(dep_type(providers = [RunInfo]), default = None), - "mk_hmap": attrs.option(dep_type(providers = [RunInfo]), default = None), - "mk_shlib_intf": attrs.option(dep_type(providers = [RunInfo]), default = None), - "nm": attrs.option(dep_type(providers = [RunInfo]), default = None), - "objcopy": attrs.option(dep_type(providers = [RunInfo]), default = None), + "mk_comp_db": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "mk_hmap": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "mk_shlib_intf": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "nm": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), + "objcopy": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "object_format": attrs.enum(CxxObjectFormat.values(), default = "native"), "pic_behavior": attrs.enum(PicBehavior.values(), default = "supported"), "platform_deps_aliases": attrs.option(attrs.list(attrs.string()), default = None), "platform_name": attrs.option(attrs.string(), default = None), "post_linker_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "ranlib": attrs.option(dep_type(providers = [RunInfo]), default = None), + "ranlib": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "sanitizer_runtime_enabled": attrs.bool(default = False), "sanitizer_runtime_files": attrs.option(attrs.set(attrs.dep(), sorted = True, default = []), default = None), # Use `attrs.dep()` as it's not a tool, always propagate target platform "shared_library_interface_mode": attrs.option(attrs.enum(ShlibInterfacesMode.values()), default = None), @@ -219,16 +219,11 @@ def _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule): "shared_library_name_format": attrs.option(attrs.string(), default = None), "shared_library_versioned_name_format": attrs.option(attrs.string(), default = None), 
"split_debug_mode": attrs.option(attrs.enum(SplitDebugMode.values()), default = None), - "strip": attrs.option(dep_type(providers = [RunInfo]), default = None), + "strip": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "strip_all_flags": attrs.option(attrs.list(attrs.arg()), default = None), "strip_debug_flags": attrs.option(attrs.list(attrs.arg()), default = None), "strip_non_global_flags": attrs.option(attrs.list(attrs.arg()), default = None), "use_archiver_flags": attrs.option(attrs.bool(), default = None), - } | cxx_toolchain_allow_cache_upload_args() - -cxx_toolchain_override_registration_spec = RuleRegistrationSpec( - name = "cxx_toolchain_override", - impl = _cxx_toolchain_override, - attrs = _cxx_toolchain_override_inheriting_target_platform_attrs(is_toolchain_rule = True), + } | cxx_toolchain_allow_cache_upload_args(), is_toolchain_rule = True, ) From 6259ed32987785f2446f0efa4d9bdc01ead825d7 Mon Sep 17 00:00:00 2001 From: Scott Cao Date: Tue, 9 Apr 2024 12:15:49 -0700 Subject: [PATCH 0757/1133] Default inherit_target_platform to True on cxx_toolchain macro Summary: Set default for `inherit_target_platform` to True on cxx_toolchain macro and delete `inherit_target_platform=True` on cxx_toolchain targets. 
Sets up next diff to delete this attribute from the macros Reviewed By: iguridi Differential Revision: D55840576 fbshipit-source-id: 7484f36b86218a2f562dcfc42064adee3c59ce8e --- prelude/native.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/native.bzl b/prelude/native.bzl index 84a4825df..f2c4747a0 100644 --- a/prelude/native.bzl +++ b/prelude/native.bzl @@ -381,7 +381,7 @@ def _swift_toolchain_macro_stub(**kwargs): **kwargs ) -def _cxx_toolchain_macro_stub(inherit_target_platform = False, **kwargs): +def _cxx_toolchain_macro_stub(inherit_target_platform = True, **kwargs): if inherit_target_platform: rule = cxx_toolchain_inheriting_target_platform if is_full_meta_repo(): From 3c7423a70c2017ab88232c10201fa7200897a8d0 Mon Sep 17 00:00:00 2001 From: Scott Cao Date: Tue, 9 Apr 2024 12:15:49 -0700 Subject: [PATCH 0758/1133] Delete `inherit_target_platform` attr from cxx_toolchain macro Summary: Now every cxx_toolchain target will always use a toolchain rule. Reviewed By: iguridi Differential Revision: D55841029 fbshipit-source-id: 0a6b8ed89d34986a9acb296dd85f2b21a3556528 --- prelude/native.bzl | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/prelude/native.bzl b/prelude/native.bzl index f2c4747a0..414e00d1f 100644 --- a/prelude/native.bzl +++ b/prelude/native.bzl @@ -381,21 +381,17 @@ def _swift_toolchain_macro_stub(**kwargs): **kwargs ) -def _cxx_toolchain_macro_stub(inherit_target_platform = True, **kwargs): - if inherit_target_platform: - rule = cxx_toolchain_inheriting_target_platform - if is_full_meta_repo(): - cache_links = kwargs.get("cache_links") - kwargs["cache_links"] = select({ - "DEFAULT": cache_links, - "ovr_config//build_mode:fbcode-build-info-mode-disable": True, - "ovr_config//build_mode:fbcode-build-info-mode-full": False, - "ovr_config//build_mode:fbcode-build-info-mode-stable": True, - }) - else: - rule = __rules__["cxx_toolchain"] +def _cxx_toolchain_macro_stub(**kwargs): + if 
is_full_meta_repo(): + cache_links = kwargs.get("cache_links") + kwargs["cache_links"] = select({ + "DEFAULT": cache_links, + "ovr_config//build_mode:fbcode-build-info-mode-disable": True, + "ovr_config//build_mode:fbcode-build-info-mode-full": False, + "ovr_config//build_mode:fbcode-build-info-mode-stable": True, + }) cxx_toolchain_macro_impl( - cxx_toolchain_rule = rule, + cxx_toolchain_rule = cxx_toolchain_inheriting_target_platform, **kwargs ) From 54ef32324216027e1db8e801eaafbfd3a0579d58 Mon Sep 17 00:00:00 2001 From: Thomas David Cuvillier Date: Wed, 10 Apr 2024 04:32:25 -0700 Subject: [PATCH 0759/1133] Remove tpx label from oss Summary: Tpx labels do not belong in the open source part of buck2. Reviewed By: mapoulin Differential Revision: D55920860 fbshipit-source-id: 24429096c25be28414f95d698b0d77cbdc7f2d8a --- prelude/erlang/erlang_tests.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/erlang/erlang_tests.bzl b/prelude/erlang/erlang_tests.bzl index 4463060c3..ec592d622 100644 --- a/prelude/erlang/erlang_tests.bzl +++ b/prelude/erlang/erlang_tests.bzl @@ -231,7 +231,7 @@ def erlang_test_impl(ctx: AnalysisContext) -> list[Provider]: type = "erlang_test", command = [cmd], env = ctx.attrs.env, - labels = ["tpx-fb-test-type=16"] + ctx.attrs.labels, + labels = ctx.attrs.labels, contacts = ctx.attrs.contacts, run_from_project_root = True, use_project_relative_paths = True, From 26f345b3bd6ba5efdf3f3e37d668917d00cbac31 Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Wed, 10 Apr 2024 05:17:20 -0700 Subject: [PATCH 0760/1133] Simplify relative path calculation Summary: These launcher scripts are hard. First, with simpler scripts, it is easier to debug them. Second, scripts are not supposed to do black magic like strip three first character because we assume they are `../`). Rules are only allowed to reference paths they are given and should not assume anything about actual paths. 
Simplify it a little: use `parent=1` to obtain resources parent directory instead of doing it inside the script. I suspect this was implemented before `parent=` parameter added. Reviewed By: JakobDegen Differential Revision: D55820440 fbshipit-source-id: 7f646470cd5ce9a6c62f545e4cb56b630a370f1b --- prelude/sh_binary.bzl | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/prelude/sh_binary.bzl b/prelude/sh_binary.bzl index 679c813a4..2c35ce1f8 100644 --- a/prelude/sh_binary.bzl +++ b/prelude/sh_binary.bzl @@ -55,10 +55,7 @@ def _generate_script( "set -e", # This is awkward for two reasons: args doesn't support format strings # and will insert a newline between items and so __RESOURCES_ROOT - # is put in a bash array, and we want it to be relative to script's - # dir, not the script itself, but there's no way to do that in - # starlark. To deal with this, we strip the first 3 characters - # (`../`). + # is put in a bash array. "__RESOURCES_ROOT=(", resources_dir, ")", @@ -72,7 +69,7 @@ def _generate_script( # identify what the right format is. For now, this variable lets # callees disambiguate (see D28960177 for more context). "export BUCK_SH_BINARY_VERSION_UNSTABLE=2", - "export BUCK_PROJECT_ROOT=$__SCRIPT_DIR/\"${__RESOURCES_ROOT:3}\"", + "export BUCK_PROJECT_ROOT=$__SCRIPT_DIR/\"${__RESOURCES_ROOT}\"", # In buck1, the paths for resources that are outputs of rules have # different paths in BUCK_PROJECT_ROOT and # BUCK_DEFAULT_RUNTIME_RESOURCES, but we use the same paths. buck1's @@ -82,7 +79,7 @@ def _generate_script( # sources, the paths are the same for both. "export BUCK_DEFAULT_RUNTIME_RESOURCES=\"$BUCK_PROJECT_ROOT\"", "exec \"$BUCK_PROJECT_ROOT/{}\" \"$@\"".format(main_link), - ]).relative_to(script) + ]).relative_to(script, parent = 1) else: script_content = cmd_args([ "@echo off", @@ -96,11 +93,10 @@ def _generate_script( # Get parent folder. 
'for %%a in ("%__SRC%") do set "__SCRIPT_DIR=%%~dpa"', "set BUCK_SH_BINARY_VERSION_UNSTABLE=2", - # ':~3' strips the first 3 chars of __RESOURCES_ROOT. - "set BUCK_PROJECT_ROOT=%__SCRIPT_DIR%\\!__RESOURCES_ROOT:~3!", + "set BUCK_PROJECT_ROOT=%__SCRIPT_DIR%\\%__RESOURCES_ROOT%", "set BUCK_DEFAULT_RUNTIME_RESOURCES=%BUCK_PROJECT_ROOT%", "%BUCK_PROJECT_ROOT%\\{} %*".format(main_link), - ]).relative_to(script) + ]).relative_to(script, parent = 1) actions.write( script, script_content, From f2a94435e1ec13e56581b988ed576fe38afc9d60 Mon Sep 17 00:00:00 2001 From: Alexey Kozhevnikov Date: Wed, 10 Apr 2024 06:50:03 -0700 Subject: [PATCH 0761/1133] use flags override for codesign on copy paths Summary: Use codesign flags override for codesign on copy items if provided. Also amend incremental state accordingly and logic to default to non-incremental bundling (if flags changed for codesign on copy item). Reviewed By: milend Differential Revision: D55867562 fbshipit-source-id: 819ef4848008fb4cae5cf19f78eb58fa1f8939bf --- .../tools/bundling/assemble_bundle_types.py | 6 +- .../apple/tools/bundling/incremental_state.py | 26 +++++-- .../tools/bundling/incremental_state_test.py | 7 ++ .../apple/tools/bundling/incremental_utils.py | 9 ++- .../tools/bundling/incremental_utils_test.py | 74 +++++++++++++++++-- prelude/apple/tools/bundling/main.py | 7 +- .../newer_version_incremental_state.json | 2 +- .../valid_incremental_state.json | 6 +- 8 files changed, 121 insertions(+), 16 deletions(-) diff --git a/prelude/apple/tools/bundling/assemble_bundle_types.py b/prelude/apple/tools/bundling/assemble_bundle_types.py index 716fe26d1..3345c0177 100644 --- a/prelude/apple/tools/bundling/assemble_bundle_types.py +++ b/prelude/apple/tools/bundling/assemble_bundle_types.py @@ -49,7 +49,11 @@ def __hash__(self: BundleSpecItem) -> int: self.dst, self.codesign_on_copy, self.codesign_entitlements, - self.codesign_flags_override, + ( + tuple(self.codesign_flags_override) + if 
self.codesign_flags_override is not None + else hash(None) + ), ) ) diff --git a/prelude/apple/tools/bundling/incremental_state.py b/prelude/apple/tools/bundling/incremental_state.py index c17fe2b25..4946319d9 100644 --- a/prelude/apple/tools/bundling/incremental_state.py +++ b/prelude/apple/tools/bundling/incremental_state.py @@ -17,7 +17,7 @@ from apple.tools.code_signing.codesign_bundle import CodesignConfiguration -_VERSION = 5 +_VERSION = 6 @dataclass @@ -47,9 +47,23 @@ class CodesignedOnCopy: """ Digest of entitlements used when the given path is codesigned on copy """ + codesign_flags_override: Optional[List[str]] + """ + If present, overrides codesign arguments (which are used for root bundle) when the given path is codesigned on copy + """ def __hash__(self: CodesignedOnCopy) -> int: - return hash((self.path, self.entitlements_digest)) + return hash( + ( + self.path, + self.entitlements_digest, + ( + tuple(self.codesign_flags_override) + if self.codesign_flags_override is not None + else hash(None) + ), + ) + ) @dataclass @@ -93,11 +107,12 @@ def default(self, o: object) -> object: result["resolved_symlink"] = str(o.resolved_symlink) return result elif isinstance(o, CodesignedOnCopy): - result = { - "path": str(o.path), - } + result = {} + result["path"] = str(o.path) if o.entitlements_digest is not None: result["entitlements_digest"] = str(o.entitlements_digest) + if o.codesign_flags_override is not None: + result["codesign_flags_override"] = o.codesign_flags_override return result else: return super().default(o) @@ -127,6 +142,7 @@ def _object_hook( else: dict["path"] = Path(dict.pop("path")) dict["entitlements_digest"] = dict.pop("entitlements_digest", None) + dict["codesign_flags_override"] = dict.pop("codesign_flags_override", None) return CodesignedOnCopy(**dict) diff --git a/prelude/apple/tools/bundling/incremental_state_test.py b/prelude/apple/tools/bundling/incremental_state_test.py index a61b4d4aa..c0c197427 100644 --- 
a/prelude/apple/tools/bundling/incremental_state_test.py +++ b/prelude/apple/tools/bundling/incremental_state_test.py @@ -52,10 +52,17 @@ def test_valid_state_is_parsed_successfully(self): CodesignedOnCopy( path=Path("Resources/bar.txt"), entitlements_digest=None, + codesign_flags_override=None, ), CodesignedOnCopy( path=Path("Resources/baz.txt"), entitlements_digest="abc", + codesign_flags_override=None, + ), + CodesignedOnCopy( + path=Path("Resources/qux.txt"), + entitlements_digest=None, + codesign_flags_override=["--deep", "--force"], ), ], codesign_identity="Johny Appleseed", diff --git a/prelude/apple/tools/bundling/incremental_utils.py b/prelude/apple/tools/bundling/incremental_utils.py index 9b4de2840..0f6739080 100644 --- a/prelude/apple/tools/bundling/incremental_utils.py +++ b/prelude/apple/tools/bundling/incremental_utils.py @@ -77,6 +77,7 @@ def should_assemble_incrementally( Path(i.codesign_entitlements) if i.codesign_entitlements else None ), incremental_context=incremental_context, + codesign_flags_override=i.codesign_flags_override, ) for i in spec if i.codesign_on_copy @@ -91,7 +92,7 @@ def should_assemble_incrementally( ) if not codesign_on_copy_paths_are_compatible: logging.getLogger(__name__).info( - f"Decided not to assemble incrementally — there is at least one artifact `{list(codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build - current_codesigned_on_copy_items)[0]}` that was code signed on copy in previous build which is present in current run and not code signed on copy (or codesigned but with a different set of entitlements)." + f"Decided not to assemble incrementally — there is at least one artifact `{list(codesigned_on_copy_paths_from_previous_build_which_are_present_in_current_build - current_codesigned_on_copy_items)[0]}` that was code signed on copy in previous build which is present in current run and not code signed on copy (or codesigned but with a different set of entitlements and flags)." 
) return codesign_on_copy_paths_are_compatible @@ -191,7 +192,10 @@ def _list_directory_deterministically(directory: Path) -> List[Path]: def codesigned_on_copy_item( - path: Path, entitlements: Optional[Path], incremental_context: IncrementalContext + path: Path, + entitlements: Optional[Path], + incremental_context: IncrementalContext, + codesign_flags_override: Optional[List[str]], ) -> CodesignedOnCopy: if entitlements is not None: digest = incremental_context.metadata.get(entitlements) @@ -204,4 +208,5 @@ def codesigned_on_copy_item( return CodesignedOnCopy( path=path, entitlements_digest=digest, + codesign_flags_override=codesign_flags_override, ) diff --git a/prelude/apple/tools/bundling/incremental_utils_test.py b/prelude/apple/tools/bundling/incremental_utils_test.py index 7b5817e20..2a714c6fc 100644 --- a/prelude/apple/tools/bundling/incremental_utils_test.py +++ b/prelude/apple/tools/bundling/incremental_utils_test.py @@ -210,9 +210,15 @@ def test_run_incrementally_when_codesign_on_copy_paths_match(self): codesigned=True, codesign_configuration=None, codesigned_on_copy=[ - CodesignedOnCopy(path=Path("foo"), entitlements_digest=None), CodesignedOnCopy( - path=Path("baz"), entitlements_digest="entitlements_digest" + path=Path("foo"), + entitlements_digest=None, + codesign_flags_override=None, + ), + CodesignedOnCopy( + path=Path("baz"), + entitlements_digest="entitlements_digest", + codesign_flags_override=None, ), ], codesign_identity="same_identity", @@ -250,7 +256,11 @@ def test_not_run_incrementally_when_codesign_on_copy_paths_mismatch(self): codesign_configuration=None, # but it was codesigned in old build codesigned_on_copy=[ - CodesignedOnCopy(path=Path("foo"), entitlements_digest=None) + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest=None, + codesign_flags_override=None, + ) ], codesign_identity="same_identity", codesign_arguments=[], @@ -291,7 +301,11 @@ def test_not_run_incrementally_when_codesign_on_copy_entitlements_mismatch(self) 
codesigned=True, codesign_configuration=None, codesigned_on_copy=[ - CodesignedOnCopy(path=Path("foo"), entitlements_digest="old_digest") + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest="old_digest", + codesign_flags_override=None, + ) ], codesign_identity="same_identity", codesign_arguments=[], @@ -306,6 +320,52 @@ def test_not_run_incrementally_when_codesign_on_copy_entitlements_mismatch(self) incremental_context.metadata[Path("baz/entitlements.plist")] = "old_digest" self.assertTrue(should_assemble_incrementally(spec, incremental_context)) + def test_not_run_incrementally_when_codesign_on_copy_flags_mismatch(self): + spec = [ + BundleSpecItem( + src="src/foo", + dst="foo", + codesign_on_copy=True, + codesign_flags_override=["--force"], + ) + ] + incremental_context = IncrementalContext( + metadata={ + Path("src/foo"): "digest", + }, + state=IncrementalState( + items=[ + IncrementalStateItem( + source=Path("src/foo"), + destination_relative_to_bundle=Path("foo"), + digest="digest", + resolved_symlink=None, + ) + ], + codesigned=True, + codesign_configuration=None, + codesigned_on_copy=[ + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest=None, + codesign_flags_override=["--force", "--deep"], + ) + ], + codesign_identity="same_identity", + codesign_arguments=[], + swift_stdlib_paths=[], + ), + codesigned=True, + codesign_configuration=None, + codesign_identity="same_identity", + codesign_arguments=[], + ) + self.assertFalse(should_assemble_incrementally(spec, incremental_context)) + incremental_context.state.codesigned_on_copy[0].codesign_flags_override = [ + "--force" + ] + self.assertTrue(should_assemble_incrementally(spec, incremental_context)) + def test_not_run_incrementally_when_codesign_arguments_mismatch(self): spec = [ BundleSpecItem( @@ -365,7 +425,11 @@ def test_not_run_incrementally_when_codesign_configurations_mismatch(self): # Dry codesigned in old build codesign_configuration=CodesignConfiguration.dryRun, codesigned_on_copy=[ 
- CodesignedOnCopy(path=Path("foo"), entitlements_digest=None) + CodesignedOnCopy( + path=Path("foo"), + entitlements_digest=None, + codesign_flags_override=None, + ) ], codesign_identity="same_identity", codesign_arguments=[], diff --git a/prelude/apple/tools/bundling/main.py b/prelude/apple/tools/bundling/main.py index 758f97e31..e43fc673c 100644 --- a/prelude/apple/tools/bundling/main.py +++ b/prelude/apple/tools/bundling/main.py @@ -395,7 +395,11 @@ def _main() -> None: entitlements=( Path(i.codesign_entitlements) if i.codesign_entitlements else None ), - flags=args.codesign_args, + flags=( + i.codesign_flags_override + if (i.codesign_flags_override is not None) + else args.codesign_args + ), ) for i in spec if i.codesign_on_copy @@ -553,6 +557,7 @@ def _write_incremental_state( Path(i.codesign_entitlements) if i.codesign_entitlements else None ), incremental_context=incremental_context, + codesign_flags_override=i.codesign_flags_override, ) for i in spec if i.codesign_on_copy diff --git a/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json b/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json index 1caf23bcf..79b549518 100644 --- a/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json +++ b/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json @@ -1,5 +1,5 @@ { - "version": 6, + "version": 7, "data": { "something": [] } diff --git a/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json b/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json index c3e879c07..5cf74b2e6 100644 --- a/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json +++ b/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json @@ -25,6 +25,10 @@ { "path": "Resources/baz.txt", "entitlements_digest": "abc" + }, + { + "path": "Resources/qux.txt", + "codesign_flags_override": ["--deep", "--force"] } ], 
"codesign_identity": "Johny Appleseed", @@ -35,5 +39,5 @@ "swift_stdlib_paths": [ "Frameworks/libswiftCore.dylib" ], - "version": 5 + "version": 6 } From 474e8cd4725786507cc26d60eca0563fc24fde14 Mon Sep 17 00:00:00 2001 From: Cameron Pickett Date: Wed, 10 Apr 2024 07:29:15 -0700 Subject: [PATCH 0762/1133] Normalize crate name at prelude layer Summary: D55932781 was working around an issue with `fb_xplat_rust_library`. The issue is that `fb_xplat_rust_library` expands into multiple targets. E.g. ``` fb_xplat_rust_library( name = "foo-bar", ... ) ``` expands into ``` rust_library( name = "foo-bar", crate = "foo-bar", ) rust_library( name = "foo-barAndroid", crate = "foo-bar", ) rust_library( name = "foo-barApple", crate = "foo-bar", ) ``` In order to keep the `crate` name consistent across all `fb_xplat` macro targets, https://fburl.com/code/o7i02r3f sets the crate field directly. However, buck2 prelude does *not* currently normalize the passed in `crate`, so I either need to (a) duplicate that normalization logic into `fb_xplat_rust_library`, or (b) adjust buck2 prelude to always normalize. I opted for the latter to avoid duplication. 
Reviewed By: dtolnay Differential Revision: D55935600 fbshipit-source-id: 926947e73bfd6f1cf6cadf45fd4b19c4a2ea21e5 --- prelude/rust/link_info.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 0e6add967..65c01365e 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -587,6 +587,6 @@ def attr_crate(ctx: AnalysisContext) -> CrateName: if dynamic: dynamic = dynamic.get(DefaultInfo).default_outputs[0] return CrateName( - simple = ctx.attrs.crate or normalize_crate(ctx.label.name), + simple = normalize_crate(ctx.attrs.crate or ctx.label.name), dynamic = dynamic, ) From 77a978ee4bd308f657ff914c885ed46bdd896b87 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 10 Apr 2024 08:24:06 -0700 Subject: [PATCH 0763/1133] Drop sources splitting code from cgo_library Summary: This makes no sense anymore as we join them a few lines later Reviewed By: yarikk Differential Revision: D55882184 fbshipit-source-id: 4e54927989a49edc49bd2e4a949f78127a63a9a6 --- prelude/go/cgo_library.bzl | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index f4982f4ab..665f4db91 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -28,17 +28,6 @@ load(":packages.bzl", "go_attr_pkg_name", "merge_pkgs") def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: pkg_name = go_attr_pkg_name(ctx) - # Separate sources into C++ and CGO sources. 
- cgo_srcs = [] - cxx_srcs = [] - for src in ctx.attrs.srcs: - if src.extension == ".go": - cgo_srcs.append(src) - elif src.extension in (".c", ".cpp"): - cxx_srcs.append(src) - else: - fail("unexpected extension: {}".format(src)) - shared = ctx.attrs._compile_shared race = ctx.attrs._race coverage_mode = GoCoverageMode(ctx.attrs._coverage_mode) if ctx.attrs._coverage_mode else None @@ -47,7 +36,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: compiled_pkg = build_package( ctx, pkg_name, - ctx.attrs.go_srcs + cgo_srcs + cxx_srcs, + ctx.attrs.go_srcs + ctx.attrs.srcs, package_root = ctx.attrs.package_root, deps = ctx.attrs.deps + ctx.attrs.exported_deps, shared = shared, From 379094ef23541391cce9a3e2b8f07b1dae1c96b9 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Wed, 10 Apr 2024 09:11:17 -0700 Subject: [PATCH 0764/1133] add overrides to control remote linking Summary: Add target attribute overrides to be able to set remote linking. Reviewed By: chatura-atapattu Differential Revision: D55933539 fbshipit-source-id: bcdbe7849a4105b826f1c8e60f804cb835672687 --- prelude/apple/apple_macro_layer.bzl | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/prelude/apple/apple_macro_layer.bzl b/prelude/apple/apple_macro_layer.bzl index ffb2735f4..8cdb75bfd 100644 --- a/prelude/apple/apple_macro_layer.bzl +++ b/prelude/apple/apple_macro_layer.bzl @@ -32,6 +32,13 @@ APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE = AppleBuckConfigAttributeOverride( skip_if_false = True, ) +APPLE_LINK_LIBRARIES_REMOTELY_OVERRIDE = AppleBuckConfigAttributeOverride( + name = "link_execution_preference", + key = "link_libraries_remotely_override", + value_if_true = "remote", + skip_if_false = True, +) + APPLE_STRIPPED_DEFAULT = AppleBuckConfigAttributeOverride( name = "_stripped_default", key = "stripped_default", @@ -40,20 +47,29 @@ APPLE_STRIPPED_DEFAULT = AppleBuckConfigAttributeOverride( _APPLE_LIBRARY_LOCAL_EXECUTION_OVERRIDES = [ 
APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE, + APPLE_LINK_LIBRARIES_REMOTELY_OVERRIDE, AppleBuckConfigAttributeOverride(name = APPLE_ARCHIVE_OBJECTS_LOCALLY_OVERRIDE_ATTR_NAME, key = "archive_objects_locally_override"), ] -_APPLE_BINARY_LOCAL_EXECUTION_OVERRIDES = [ +# If both configs are set the last one wins +_APPLE_BINARY_EXECUTION_OVERRIDES = [ AppleBuckConfigAttributeOverride( name = "link_execution_preference", key = "link_binaries_locally_override", value_if_true = "local", skip_if_false = True, ), + AppleBuckConfigAttributeOverride( + name = "link_execution_preference", + key = "link_binaries_remotely_override", + value_if_true = "remote", + skip_if_false = True, + ), ] _APPLE_TEST_LOCAL_EXECUTION_OVERRIDES = [ APPLE_LINK_LIBRARIES_LOCALLY_OVERRIDE, + APPLE_LINK_LIBRARIES_REMOTELY_OVERRIDE, ] def apple_macro_layer_set_bool_override_attrs_from_config(overrides: list[AppleBuckConfigAttributeOverride]) -> dict[str, Select]: @@ -108,7 +124,7 @@ def apple_library_macro_impl(apple_library_rule = None, **kwargs): def apple_binary_macro_impl(apple_binary_rule = None, apple_universal_executable = None, **kwargs): dsym_args = apple_dsym_config() kwargs.update(dsym_args) - kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_BINARY_LOCAL_EXECUTION_OVERRIDES)) + kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config(_APPLE_BINARY_EXECUTION_OVERRIDES)) kwargs.update(apple_macro_layer_set_bool_override_attrs_from_config([APPLE_STRIPPED_DEFAULT])) original_binary_name = kwargs.pop("name") From 0c31286822abf1aefaa83ec8a23e1b673eb27269 Mon Sep 17 00:00:00 2001 From: Artem Pianykh Date: Wed, 10 Apr 2024 13:23:20 -0700 Subject: [PATCH 0765/1133] Expose 'local_enabled' prop in `remote_execution` configuration label Summary: Currently, when you set 'remote_execution' label you loose the ability to run a test locally. This diff exposes 'local_enabled' property which is set to `False` by default for backwards compatibility. 
Reviewed By: asm89 Differential Revision: D55912663 fbshipit-source-id: c5a2ea867610b4bebc7bf33a2d46b0a9b7970527 --- prelude/tests/re_utils.bzl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/prelude/tests/re_utils.bzl b/prelude/tests/re_utils.bzl index 514396604..c75dd8519 100644 --- a/prelude/tests/re_utils.bzl +++ b/prelude/tests/re_utils.bzl @@ -45,6 +45,7 @@ def get_re_executors_from_props(ctx: AnalysisContext) -> ([CommandExecutorConfig listing_capabilities = re_props_copy.pop("listing_capabilities", None) remote_cache_enabled = re_props_copy.pop("remote_cache_enabled", None) re_dependencies = re_props_copy.pop("dependencies", []) + local_enabled = re_props_copy.pop("local_enabled", False) if re_props_copy: unexpected_props = ", ".join(re_props_copy.keys()) fail("found unexpected re props: " + unexpected_props) @@ -55,7 +56,7 @@ def get_re_executors_from_props(ctx: AnalysisContext) -> ([CommandExecutorConfig remote_execution_action_key = "{}={}".format(build_mode_info.cell, build_mode_info.mode) default_executor = CommandExecutorConfig( - local_enabled = False, + local_enabled = local_enabled, remote_enabled = True, remote_execution_properties = capabilities, remote_execution_use_case = use_case or "tpx-default", @@ -66,7 +67,7 @@ def get_re_executors_from_props(ctx: AnalysisContext) -> ([CommandExecutorConfig listing_executor = default_executor if listing_capabilities: listing_executor = CommandExecutorConfig( - local_enabled = False, + local_enabled = local_enabled, remote_enabled = True, remote_execution_properties = listing_capabilities, remote_execution_use_case = use_case or "tpx-default", From 25bbe05e7ec2057389e050f02045422735d44a5c Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Wed, 10 Apr 2024 14:06:34 -0700 Subject: [PATCH 0766/1133] Add `libraries_query` to `python_wheel` Summary: Since `libraries` is non-transitive, it can make it difficult/tedious to add lots of libs to the wheel. 
This diff adds a `libraries_query` param to handle more intereseting cases (e.g. emulate transitive deps: P1209537744). Reviewed By: manav-a Differential Revision: D55830509 fbshipit-source-id: ba7a535fb281087f478c912faf9fb7f5eef3699f --- prelude/python/python_wheel.bzl | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/prelude/python/python_wheel.bzl b/prelude/python/python_wheel.bzl index 8ff19f3ed..04956e159 100644 --- a/prelude/python/python_wheel.bzl +++ b/prelude/python/python_wheel.bzl @@ -82,9 +82,17 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: for key, val in ctx.attrs.extra_metadata.items(): cmd.add("--metadata={}:{}".format(key, val)) + libraries = {} + for lib in ctx.attrs.libraries: + libraries[lib.label] = lib + if ctx.attrs.libraries_query != None: + for lib in ctx.attrs.libraries_query: + if PythonLibraryInfo in lib: + libraries[lib.label] = lib + srcs = [] extensions = {} - for dep in ctx.attrs.libraries: + for dep in libraries.values(): manifests = dep[PythonLibraryInfo].manifests.value if manifests.srcs != None: srcs.append(manifests.srcs) @@ -105,13 +113,13 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: prefer_stripped = ctx.attrs.prefer_stripped_objects, )) link_infos = get_linkable_graph_node_map_func(dep[LinkableGraph])() - for dep in _link_deps( + for ext_dep in _link_deps( link_infos, root.deps, LinkStrategy("static_pic"), toolchain_info.pic_behavior, ): - node = link_infos[dep] + node = link_infos[ext_dep] output_style = get_lib_output_style( LinkStrategy("static_pic"), node.preferred_linkage, @@ -196,6 +204,7 @@ python_wheel = rule( ), constraint_overrides = attrs.list(attrs.string(), default = []), libraries = attrs.list(attrs.dep(providers = [PythonLibraryInfo]), default = []), + libraries_query = attrs.option(attrs.query(), default = None), prefer_stripped_objects = attrs.default_only(attrs.bool(default = False)), _wheel = attrs.default_only(attrs.exec_dep(default = 
"prelude//python/tools:wheel")), _cxx_toolchain = toolchains_common.cxx(), From b1d410c0903eba80045071f178cabbf7d803f34b Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Wed, 10 Apr 2024 14:16:21 -0700 Subject: [PATCH 0767/1133] Disable selection of cgo_files for all rules except cgo_library Summary: Currently it breaks rules like go_test because they don't have cxx attributes Differential Revision: D55975943 fbshipit-source-id: d70d1f1befdd66fe036e8b54e1c81778e4a0fd5a --- prelude/go/go_binary.bzl | 2 ++ prelude/go/go_exported_library.bzl | 2 ++ prelude/go/go_test.bzl | 2 ++ 3 files changed, 6 insertions(+) diff --git a/prelude/go/go_binary.bzl b/prelude/go/go_binary.bzl index 72977df5c..4ff23d357 100644 --- a/prelude/go/go_binary.bzl +++ b/prelude/go/go_binary.bzl @@ -29,6 +29,8 @@ def go_binary_impl(ctx: AnalysisContext) -> list[Provider]: compiler_flags = ctx.attrs.compiler_flags, race = ctx.attrs._race, embedcfg = ctx.attrs.embedcfg, + # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. + force_disable_cgo = True, ) (bin, runtime_files, external_debug_info) = link( ctx, diff --git a/prelude/go/go_exported_library.bzl b/prelude/go/go_exported_library.bzl index 71e4ff4ba..b032b35cf 100644 --- a/prelude/go/go_exported_library.bzl +++ b/prelude/go/go_exported_library.bzl @@ -28,6 +28,8 @@ def go_exported_library_impl(ctx: AnalysisContext) -> list[Provider]: shared = True, race = ctx.attrs._race, embedcfg = ctx.attrs.embedcfg, + # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. 
+ force_disable_cgo = True, ) (bin, runtime_files, _external_debug_info) = link( ctx, diff --git a/prelude/go/go_test.bzl b/prelude/go/go_test.bzl index efcd0dd0a..46b530262 100644 --- a/prelude/go/go_test.bzl +++ b/prelude/go/go_test.bzl @@ -87,6 +87,8 @@ def go_test_impl(ctx: AnalysisContext) -> list[Provider]: race = ctx.attrs._race, embedcfg = ctx.attrs.embedcfg, tests = True, + # We need to set CGO_DESABLED for "pure" Go libraries, otherwise CGo files may be selected for compilation. + force_disable_cgo = True, ) if coverage_mode != None: From 06c8d3f931496bb1c962575f9050a71c633e9e11 Mon Sep 17 00:00:00 2001 From: Xiaoya Xiang Date: Wed, 10 Apr 2024 15:13:33 -0700 Subject: [PATCH 0768/1133] Default buck run to use inplace builds MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: # Motivation “buck run” is used to build and execute a target locally from a checked out repro. When users invoke 'buck run,' they typically won't need to manage the generated artifact directly, such as relocating the PAR to another location. By defaulting to an 'inplace' build, we eliminate the need and cost associated with building a standalone PAR. # Implementation we are copying RunInfo from inplace sub-target to default, so that when "buck run" is called, it always uses inplace run command. 
BuildInfo doesn't matter because it's skipped in buck run: https://www.internalfb.com/code/fbsource/[950d867aaf316b38e8e61fec6a388ba1cc15cfe2]/fbcode/buck2/app/buck2_client/src/commands/run.rs?lines=114-115 # Estimated Impact https://docs.google.com/document/d/1OFudzJWnFU6F1ycIwmww_gpqRN13AtYzc692FaFLwzI/edit#heading=h.f2352lez6lhp Reviewed By: aleivag Differential Revision: D45283624 fbshipit-source-id: 182a9228fb7d45639ecf01f14e97dd637f230b21 --- prelude/python/make_py_package.bzl | 6 +++++- prelude/python/python_binary.bzl | 4 ++-- prelude/python/tools/make_py_package_inplace.py | 4 ++++ prelude/rules_impl.bzl | 1 + 4 files changed, 12 insertions(+), 3 deletions(-) diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 5f1d5fad3..0619f9a9b 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -64,7 +64,11 @@ def make_default_info(pex: PexProviders) -> Provider: sub_targets = pex.sub_targets, ) -def make_run_info(pex: PexProviders) -> Provider: +def make_run_info(pex: PexProviders, run_with_inplace: bool = False) -> Provider: + if run_with_inplace and "inplace" in pex.sub_targets: + # If running with inplace, we want to use the RunInfo of inplace subtarget. 
+ return pex.sub_targets["inplace"][1] + return RunInfo(pex.run_cmd) def _srcs(srcs: list[typing.Any], format = "{}") -> cmd_args: diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 3e3250ee3..0283decea 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -84,7 +84,7 @@ load( "EntryPointKind", "PythonLibraryInterface", ) -load(":make_py_package.bzl", "PexModules", "PexProviders", "make_default_info", "make_py_package") +load(":make_py_package.bzl", "PexModules", "PexProviders", "make_default_info", "make_py_package", "make_run_info") load( ":manifest.bzl", "create_dep_manifest_for_source_map", @@ -771,5 +771,5 @@ def python_binary_impl(ctx: AnalysisContext) -> list[Provider]: ) return [ make_default_info(pex), - RunInfo(pex.run_cmd), + make_run_info(pex, ctx.attrs.run_with_inplace), ] diff --git a/prelude/python/tools/make_py_package_inplace.py b/prelude/python/tools/make_py_package_inplace.py index d966770e9..fbaee2029 100755 --- a/prelude/python/tools/make_py_package_inplace.py +++ b/prelude/python/tools/make_py_package_inplace.py @@ -139,6 +139,10 @@ def parse_args() -> argparse.Namespace: ) # Compatibility with existing make_par scripts parser.add_argument("--passthrough", action="append", default=[]) + # No-op, added for compatibility with existing make_par scripts + parser.add_argument( + "--omnibus-debug-info", choices=["separate", "strip", "extract"] + ) return parser.parse_args() diff --git a/prelude/rules_impl.bzl b/prelude/rules_impl.bzl index db6213548..d2c27f14f 100644 --- a/prelude/rules_impl.bzl +++ b/prelude/rules_impl.bzl @@ -315,6 +315,7 @@ def _python_executable_attrs(): "package_split_dwarf_dwp": attrs.bool(default = False), "par_style": attrs.option(attrs.string(), default = None), "resources": attrs.named_set(attrs.one_of(attrs.dep(), attrs.source(allow_directory = True)), sorted = True, default = []), + "run_with_inplace": attrs.bool(default = False), "runtime_env": 
attrs.option(attrs.dict(key = attrs.string(), value = attrs.string()), default = None), "standalone_build_args": attrs.list(attrs.arg(), default = []), "static_extension_finder": attrs.source(default = "prelude//python/tools:static_extension_finder.py"), From 2441f6a0b8eb75c642a51bf33d61de006417a35b Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Wed, 10 Apr 2024 19:17:55 -0700 Subject: [PATCH 0769/1133] Represent shared libs as a list instead of dict (take 2) Summary: This refactors internal bookkeeping of shared libs to use `list`s of `SharedLibrary` objects, instead of `dict`s mapping the libs `SONAME` to the `SharedLibrary` and defers the process of "merging" them to `dict` until needed. This helps move towards supporting shared libs where we don't know the `SONAME` at analysis time. Reviewed By: dtolnay Differential Revision: D55335442 fbshipit-source-id: 4bf5e5fad7854bcea0f0c211eba56de953dda739 --- .../android_binary_native_library_rules.bzl | 5 +- prelude/android/voltron.bzl | 6 +- prelude/cxx/cxx_executable.bzl | 49 ++++---- prelude/cxx/cxx_link_utility.bzl | 23 ++-- prelude/cxx/cxx_types.bzl | 2 +- prelude/go/link.bzl | 4 +- prelude/haskell/haskell.bzl | 20 ++-- prelude/haskell/haskell_ghci.bzl | 7 +- prelude/java/java_binary.bzl | 8 +- prelude/java/java_test.bzl | 16 ++- prelude/julia/julia_binary.bzl | 16 ++- prelude/linking/shared_libraries.bzl | 105 ++++++++++-------- prelude/python/python_binary.bzl | 14 +-- prelude/rust/build.bzl | 5 +- prelude/rust/rust_binary.bzl | 38 ++++--- 15 files changed, 180 insertions(+), 138 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 8b7a47621..dbf6d967d 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -57,6 +57,7 @@ load( "get_strip_non_global_flags", "merge_shared_libraries", "traverse_shared_library_info", + "with_unique_sonames", 
) load("@prelude//linking:strip.bzl", "strip_object") load("@prelude//linking:types.bzl", "Linkage") @@ -870,8 +871,8 @@ def get_default_shared_libs(ctx: AnalysisContext, deps: list[Dependency], shared deps = filter(None, [x.get(SharedLibraryInfo) for x in deps]), ) return { - so_name: shared_lib - for so_name, shared_lib in traverse_shared_library_info(shared_library_info).items() + soname: shared_lib + for soname, shared_lib in with_unique_sonames(traverse_shared_library_info(shared_library_info)).items() if not (shared_libraries_to_exclude and shared_libraries_to_exclude.contains(shared_lib.label.raw_target())) } diff --git a/prelude/android/voltron.bzl b/prelude/android/voltron.bzl index 5c116a9cd..d8d433e93 100644 --- a/prelude/android/voltron.bzl +++ b/prelude/android/voltron.bzl @@ -68,7 +68,7 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: ctx.actions, ctx.label, [android_packageable_info], - traversed_shared_library_info.values(), + traversed_shared_library_info, ctx.attrs._android_toolchain[AndroidToolchainInfo], ctx.attrs.application_module_configs, ctx.attrs.application_module_dependencies, @@ -86,7 +86,7 @@ def android_app_modularity_impl(ctx: AnalysisContext) -> list[Provider]: ]).hidden(targets_to_jars_args) if ctx.attrs.should_include_libraries: - targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), so_name], delimiter = " ") for so_name, shared_lib in traversed_shared_library_info.items()] + targets_to_so_names_args = [cmd_args([str(shared_lib.label.raw_target()), shared_lib.soname], delimiter = " ") for shared_lib in traversed_shared_library_info] targets_to_so_names = ctx.actions.write("targets_to_so_names.txt", targets_to_so_names_args) cmd.add([ "--targets-to-so-names", @@ -121,7 +121,7 @@ def get_target_to_module_mapping(ctx: AnalysisContext, deps_by_platform: dict[st ctx.actions, deps = filter(None, [x.get(SharedLibraryInfo) for x in deps]), ) - 
shared_libraries.extend(traverse_shared_library_info(shared_library_info).values()) + shared_libraries.extend(traverse_shared_library_info(shared_library_info)) cmd, output = _get_base_cmd_and_output( ctx.actions, diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 3e902568e..65bd2bbb6 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -69,6 +69,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", # @unused Used as a type "merge_shared_libraries", "traverse_shared_library_info", ) @@ -178,7 +179,7 @@ CxxExecutableOutput = record( # materialized when this executable is the output of a build, not when it is # used by other rules. They become other_outputs on DefaultInfo. external_debug_info_artifacts = list[TransitiveSetArgsProjection], - shared_libs = dict[str, LinkedObject], + shared_libs = list[SharedLibrary], # All link group links that were generated in the executable. auto_link_groups = field(dict[str, LinkedObject], {}), compilation_db = CxxCompilationDbInfo, @@ -428,11 +429,10 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, sub_targets[LINK_GROUP_MAP_DATABASE_SUB_TARGET] = [get_link_group_map_json(ctx, filtered_targets)] # Set up shared libraries symlink tree only when needed - shared_libs = {} + shared_libs = [] # Add in extra, rule-specific shared libs. 
- for name, shlib in impl_params.extra_shared_libs.items(): - shared_libs[name] = shlib.lib + shared_libs.extend(impl_params.extra_shared_libs) # Only setup a shared library symlink tree when shared linkage or link_groups is used gnu_use_link_groups = cxx_is_gnu(ctx) and link_group_mappings @@ -452,17 +452,18 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, labels_to_links_map = labels_to_links_map, ) - def shlib_filter(_name, shared_lib): - return not gnu_use_link_groups or is_link_group_shlib(shared_lib.label, link_group_ctx) - - for name, shared_lib in traverse_shared_library_info(shlib_info, filter_func = shlib_filter).items(): - shared_libs[name] = shared_lib.lib + for shlib in traverse_shared_library_info(shlib_info): + if not gnu_use_link_groups or is_link_group_shlib(shlib.label, link_group_ctx): + shared_libs.append(shlib) if gnu_use_link_groups: # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. for _name, link_group_lib in link_group_libs.items(): - shared_libs.update(link_group_lib.shared_libs) + shared_libs.extend([ + SharedLibrary(soname = name, lib = lib, label = ctx.label) + for name, lib in link_group_lib.shared_libs.items() + ]) toolchain_info = get_cxx_toolchain_info(ctx) linker_info = toolchain_info.linker_info @@ -500,7 +501,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, ctx, # If shlib lib tree generation is enabled, pass in the shared libs (which # will trigger the necessary link tree and link args). 
- shared_libs if impl_params.exe_shared_libs_link_tree else {}, + shared_libs if impl_params.exe_shared_libs_link_tree else [], impl_params.executable_name, linker_info.binary_extension, link_options( @@ -540,29 +541,29 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, sub_targets["rpath-tree"] = [DefaultInfo( default_output = shared_libs_symlink_tree, other_outputs = [ - lib.output - for lib in shared_libs.values() + shlib.lib.output + for shlib in shared_libs ] + [ - lib.dwp - for lib in shared_libs.values() - if lib.dwp + shlib.lib.dwp + for shlib in shared_libs + if shlib.lib.dwp ], )] sub_targets["shared-libraries"] = [DefaultInfo( default_output = ctx.actions.write_json( binary.output.basename + ".shared-libraries.json", { - "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, name) for name in shared_libs.keys()], - "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, name) for name, lib in shared_libs.items() if lib.dwp], + "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs], + "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs if shlib.lib.dwp], "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if shared_libs_symlink_tree else [], }, ), sub_targets = { - name: [DefaultInfo( - default_output = lib.output, - sub_targets = {"dwp": [DefaultInfo(default_output = lib.dwp)]} if lib.dwp else {}, + shlib.soname: [DefaultInfo( + default_output = shlib.lib.output, + sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, )] - for name, lib in shared_libs.items() + for shlib in shared_libs }, )] if link_group_mappings: @@ -650,7 +651,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, actions = ctx.actions, children = ( 
[binary.external_debug_info] + - [s.external_debug_info for s in shared_libs.values()] + + [s.lib.external_debug_info for s in shared_libs] + impl_params.additional.static_external_debug_info ), ) @@ -713,7 +714,7 @@ _CxxLinkExecutableResult = record( def _link_into_executable( ctx: AnalysisContext, - shared_libs: dict[str, LinkedObject], + shared_libs: list[SharedLibrary], executable_name: [str, None], binary_extension: str, opts: LinkOptions) -> _CxxLinkExecutableResult: diff --git a/prelude/cxx/cxx_link_utility.bzl b/prelude/cxx/cxx_link_utility.bzl index cdfb1badf..1acd47c41 100644 --- a/prelude/cxx/cxx_link_utility.bzl +++ b/prelude/cxx/cxx_link_utility.bzl @@ -14,11 +14,15 @@ load( "@prelude//linking:link_info.bzl", "LinkArgs", "LinkOrdering", # @unused Used as a type - "LinkedObject", # @unused Used as a type "unpack_link_args", "unpack_link_args_filelist", ) load("@prelude//linking:lto.bzl", "LtoMode") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", # @unused Used as a type + "create_shlib_symlink_tree", +) load("@prelude//utils:arglike.bzl", "ArgLike") # @unused Used as a type def generates_split_debug(toolchain: CxxToolchainInfo): @@ -213,7 +217,7 @@ def executable_shared_lib_arguments( ctx: AnalysisContext, cxx_toolchain: CxxToolchainInfo, output: Artifact, - shared_libs: dict[str, LinkedObject]) -> ExecutableSharedLibArguments: + shared_libs: list[SharedLibrary]) -> ExecutableSharedLibArguments: extra_link_args = [] runtime_files = [] shared_libs_symlink_tree = None @@ -222,7 +226,7 @@ def executable_shared_lib_arguments( # of a build. Do not add to runtime_files. 
external_debug_info = project_artifacts( actions = ctx.actions, - tsets = [shlib.external_debug_info for shlib in shared_libs.values()], + tsets = [shlib.lib.external_debug_info for shlib in shared_libs], ) linker_type = cxx_toolchain.linker_info.type @@ -230,16 +234,17 @@ def executable_shared_lib_arguments( if len(shared_libs) > 0: if linker_type == "windows": shared_libs_symlink_tree = [ctx.actions.symlink_file( - shlib.output.basename, - shlib.output, - ) for _, shlib in shared_libs.items()] + shlib.lib.output.basename, + shlib.lib.output, + ) for shlib in shared_libs] runtime_files.extend(shared_libs_symlink_tree) # Windows doesn't support rpath. else: - shared_libs_symlink_tree = ctx.actions.symlinked_dir( - shared_libs_symlink_tree_name(output), - {name: shlib.output for name, shlib in shared_libs.items()}, + shared_libs_symlink_tree = create_shlib_symlink_tree( + actions = ctx.actions, + out = shared_libs_symlink_tree_name(output), + shared_libs = shared_libs, ) runtime_files.append(shared_libs_symlink_tree) rpath_reference = get_rpath_origin(linker_type) diff --git a/prelude/cxx/cxx_types.bzl b/prelude/cxx/cxx_types.bzl index 1fec9301a..f74dd4956 100644 --- a/prelude/cxx/cxx_types.bzl +++ b/prelude/cxx/cxx_types.bzl @@ -178,7 +178,7 @@ CxxRuleConstructorParams = record( # shared libs to include in the symlink tree). extra_link_roots = field(list[LinkableProviders], []), # Additional shared libs to "package". - extra_shared_libs = field(dict[str, SharedLibrary], {}), + extra_shared_libs = field(list[SharedLibrary], []), auto_link_group_specs = field([list[LinkGroupLibSpec], None], None), link_group_info = field([LinkGroupInfo, None], None), # Whether to use pre-stripped objects when linking. 
diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index 361449efc..eecb5ebbf 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -79,9 +79,7 @@ def _process_shared_dependencies( ctx.actions, deps = filter(None, map_idx(SharedLibraryInfo, deps)), ) - shared_libs = {} - for name, shared_lib in traverse_shared_library_info(shlib_info).items(): - shared_libs[name] = shared_lib.lib + shared_libs = traverse_shared_library_info(shlib_info) return executable_shared_lib_arguments( ctx, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index ab8eaa794..3cf905e2d 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -131,8 +131,10 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", "SharedLibraryInfo", "create_shared_libraries", + "create_shlib_symlink_tree", "merge_shared_libraries", "traverse_shared_library_info", ) @@ -1025,7 +1027,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: deps = slis, ) - sos = {} + sos = [] if link_group_info != None: own_binary_link_flags = [] @@ -1129,15 +1131,16 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: labels_to_links_map = labels_to_links_map, ) - for name, shared_lib in traverse_shared_library_info(shlib_info).items(): + for shared_lib in traverse_shared_library_info(shlib_info): label = shared_lib.label if is_link_group_shlib(label, link_group_ctx): - sos[name] = shared_lib.lib + sos.append(shared_lib) # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - sos.update(link_group_lib.shared_libs) + for soname, lib in link_group_lib.shared_libs.items(): + sos.append(SharedLibrary(soname = soname, lib = lib, label = ctx.label)) else: nlis = [] @@ -1150,8 +1153,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: li = lib.get(MergedLinkInfo) if li != None: nlis.append(li) - for name, shared_lib in traverse_shared_library_info(shlib_info).items(): - sos[name] = shared_lib.lib + sos.extend(traverse_shared_library_info(shlib_info)) infos = get_link_args_for_strategy(ctx, nlis, to_link_strategy(link_style)) if link_style in [LinkStyle("static"), LinkStyle("static_pic")]: @@ -1188,7 +1190,11 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: rpath_ref = get_rpath_origin(get_cxx_toolchain_info(ctx).linker_info.type) rpath_ldflag = "-Wl,{}/{}".format(rpath_ref, sos_dir) link.add("-optl", "-Wl,-rpath", "-optl", rpath_ldflag) - symlink_dir = ctx.actions.symlinked_dir(sos_dir, {n: o.output for n, o in sos.items()}) + symlink_dir = create_shlib_symlink_tree( + actions = ctx.actions, + out = sos_dir, + shared_libs = sos, + ) run.hidden(symlink_dir) providers = [ diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index 053b3adcc..bde2123db 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -52,6 +52,7 @@ load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "traverse_shared_library_info", + "with_unique_sonames", ) load("@prelude//linking:types.bzl", "Linkage") load( @@ -482,10 +483,10 @@ def _build_preload_deps_root( if SharedLibraryInfo in preload_dep: slib_info = preload_dep[SharedLibraryInfo] - shlib = traverse_shared_library_info(slib_info).items() + shlib = traverse_shared_library_info(slib_info) - for shlib_name, shared_lib in shlib: - preload_symlinks[shlib_name] = shared_lib.lib.output + for soname, shared_lib in with_unique_sonames(shlib).items(): + 
preload_symlinks[soname] = shared_lib.lib.output # TODO(T150785851): build or get SO for direct preload_deps # TODO(T150785851): find out why the only SOs missing are the ones from diff --git a/prelude/java/java_binary.bzl b/prelude/java/java_binary.bzl index d80498ef9..d11069b35 100644 --- a/prelude/java/java_binary.bzl +++ b/prelude/java/java_binary.bzl @@ -22,7 +22,7 @@ load( "get_java_packaging_info", ) -def _generate_script(generate_wrapper: bool, native_libs: dict[str, SharedLibrary]) -> bool: +def _generate_script(generate_wrapper: bool, native_libs: list[SharedLibrary]) -> bool: # if `generate_wrapper` is set and no native libs then it should be a wrapper script as result, # otherwise fat jar will be generated (inner jar or script will be included inside a final fat jar) return generate_wrapper and len(native_libs) == 0 @@ -31,7 +31,7 @@ def _create_fat_jar( ctx: AnalysisContext, java_toolchain: JavaToolchainInfo, jars: cmd_args, - native_libs: dict[str, SharedLibrary], + native_libs: list[SharedLibrary], do_not_create_inner_jar: bool, generate_wrapper: bool) -> list[Artifact]: extension = "sh" if _generate_script(generate_wrapper, native_libs) else "jar" @@ -55,7 +55,7 @@ def _create_fat_jar( ) args += [ "--native_libs_file", - ctx.actions.write("native_libs", [cmd_args([so_name, native_lib.lib.output], delimiter = " ") for so_name, native_lib in native_libs.items()]), + ctx.actions.write("native_libs", [cmd_args([native_lib.soname, native_lib.lib.output], delimiter = " ") for native_lib in native_libs]), ] if do_not_create_inner_jar: args += [ @@ -107,7 +107,7 @@ def _create_fat_jar( outputs.append(classpath_args_output) fat_jar_cmd = cmd_args(args) - fat_jar_cmd.hidden(jars, [native_lib.lib.output for native_lib in native_libs.values()]) + fat_jar_cmd.hidden(jars, [native_lib.lib.output for native_lib in native_libs]) ctx.actions.run( fat_jar_cmd, diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index e2d086d47..0e1038258 100644 --- 
a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -14,7 +14,13 @@ load("@prelude//java:java_library.bzl", "build_java_library") load("@prelude//java:java_providers.bzl", "JavaLibraryInfo", "JavaPackagingInfo", "get_all_java_packaging_deps_tset") load("@prelude//java:java_toolchain.bzl", "JavaTestToolchainInfo", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") -load("@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", "merge_shared_libraries", "traverse_shared_library_info") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibraryInfo", + "create_shlib_symlink_tree", + "merge_shared_libraries", + "traverse_shared_library_info", +) load( "@prelude//tests:re_utils.bzl", "get_re_executors_from_props", @@ -182,8 +188,10 @@ def _get_native_libs_env(ctx: AnalysisContext) -> dict: deps = shared_library_infos, ) - native_linkables = traverse_shared_library_info(shared_library_info) - cxx_library_symlink_tree_dict = {so_name: shared_lib.lib.output for so_name, shared_lib in native_linkables.items()} - cxx_library_symlink_tree = ctx.actions.symlinked_dir("cxx_library_symlink_tree", cxx_library_symlink_tree_dict) + cxx_library_symlink_tree = create_shlib_symlink_tree( + actions = ctx.actions, + out = "cxx_library_symlink_tree", + shared_libs = traverse_shared_library_info(shared_library_info), + ) return {"BUCK_LD_SYMLINK_TREE": cxx_library_symlink_tree} diff --git a/prelude/julia/julia_binary.bzl b/prelude/julia/julia_binary.bzl index c1cedffee..0aeb2a016 100644 --- a/prelude/julia/julia_binary.bzl +++ b/prelude/julia/julia_binary.bzl @@ -5,7 +5,12 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info") +load( + "@prelude//linking:shared_libraries.bzl", + "create_shlib_symlink_tree", + "merge_shared_libraries", + "traverse_shared_library_info", +) load("@prelude//utils:utils.bzl", "flatten") load(":julia_info.bzl", "JuliaLibraryInfo", "JuliaLibraryTSet", "JuliaToolchainInfo") @@ -47,12 +52,13 @@ def build_jll_shlibs_mapping(ctx: AnalysisContext, json_info_file: Artifact): filter(None, [d.shared_library_info for d in deps]), )) - shared_libs_symlink_tree = ctx.actions.symlinked_dir( - "__shared_libs_symlink_tree__", - {name: shlib.lib.output for name, shlib in shlibs.items()}, + shared_libs_symlink_tree = create_shlib_symlink_tree( + actions = ctx.actions, + out = "__shared_libs_symlink_tree__", + shared_libs = shlibs, ) - shlib_label_to_soname = {shlib.label: name for name, shlib in shlibs.items()} + shlib_label_to_soname = {shlib.label: shlib.soname for shlib in shlibs} # iterate through all the jll libraries json_info = [] diff --git a/prelude/linking/shared_libraries.bzl b/prelude/linking/shared_libraries.bzl index 25c3eeca6..a93310fa8 100644 --- a/prelude/linking/shared_libraries.bzl +++ b/prelude/linking/shared_libraries.bzl @@ -19,13 +19,13 @@ SharedLibrary = record( # for downstream rules to reproduce the shared library with some modifications (for example # android relinker will link again with an added version script argument). # TODO(cjhopman): This is currently always available. - link_args = field(list[LinkArgs] | None), + link_args = field(list[LinkArgs] | None, None), # The sonames of the shared libraries that this links against. # TODO(cjhopman): This is currently always available. 
- shlib_deps = field(list[str] | None), - stripped_lib = field(Artifact | None), - can_be_asset = field(bool), - for_primary_apk = field(bool), + shlib_deps = field(list[str] | None, None), + stripped_lib = field(Artifact | None, None), + can_be_asset = field(bool, False), + for_primary_apk = field(bool, False), soname = field(str), label = field(Label), ) @@ -35,7 +35,7 @@ SharedLibraries = record( # Since the SONAME is what the dynamic loader uses to uniquely identify # libraries, using this as the key allows easily detecting conflicts from # dependencies. - libraries = field(dict[str, SharedLibrary]), + libraries = field(list[SharedLibrary]), ) # T-set of SharedLibraries @@ -62,7 +62,7 @@ def create_shared_libraries( """ cxx_toolchain = getattr(ctx.attrs, "_cxx_toolchain", None) return SharedLibraries( - libraries = {name: SharedLibrary( + libraries = [SharedLibrary( lib = shlib, stripped_lib = strip_object( ctx, @@ -76,41 +76,9 @@ def create_shared_libraries( for_primary_apk = getattr(ctx.attrs, "used_by_wrap_script", False), label = ctx.label, soname = name, - ) for (name, shlib) in libraries.items()}, + ) for (name, shlib) in libraries.items()], ) -# We do a lot of merging library maps, so don't use O(n) type annotations -def _merge_lib_map( - # dict[str, SharedLibrary] - dest_mapping, - # [dict[str, SharedLibrary] - mapping_to_merge, - filter_func) -> None: - """ - Merges a mapping_to_merge into `dest_mapping`. Fails if different libraries - map to the same name. - """ - for (name, src) in mapping_to_merge.items(): - if filter_func != None and not filter_func(name, src): - continue - existing = dest_mapping.get(name) - if existing != None and existing.lib != src.lib: - error = ( - "Duplicate library {}! Conflicting mappings:\n" + - "{} from {}\n" + - "{} from {}" - ) - fail( - error.format( - name, - existing.lib, - existing.label, - src.lib, - src.label, - ), - ) - dest_mapping[name] = src - # Merge multiple SharedLibraryInfo. 
The value in `node` represents a set of # SharedLibraries that is provided by the target being analyzed. It's optional # because that might not always exist, e.g. a Python library can pass through @@ -131,11 +99,58 @@ def merge_shared_libraries( set = actions.tset(SharedLibrariesTSet, **kwargs) if kwargs else None return SharedLibraryInfo(set = set) -def traverse_shared_library_info( - info: SharedLibraryInfo, - filter_func = None): # -> dict[str, SharedLibrary]: - libraries = {} +def traverse_shared_library_info(info: SharedLibraryInfo): # -> list[SharedLibrary]: + libraries = [] if info.set: for libs in info.set.traverse(): - _merge_lib_map(libraries, libs.libraries, filter_func) + libraries.extend(libs.libraries) return libraries + +# Helper to merge shlibs, throwing an error if more than one have the same SONAME. +def _merge_shlibs( + shared_libs: list[SharedLibrary], + resolve_soname: typing.Callable) -> dict[str, SharedLibrary]: + merged = {} + for shlib in shared_libs: + soname = resolve_soname(shlib.soname) + existing = merged.get(soname) + if existing != None and existing.lib != shlib.lib: + error = ( + "Duplicate library {}! Conflicting mappings:\n" + + "{} from {}\n" + + "{} from {}" + ) + fail( + error.format( + shlib.soname, + existing.lib, + existing.label, + shlib.lib, + shlib.label, + ), + ) + merged[soname] = shlib + return merged + +def with_unique_sonames(shared_libs: list[SharedLibrary]) -> dict[str, SharedLibrary]: + """ + Convert a list of `SharedLibrary`s to a map of unique SONAMEs to the + corresponding `SharedLibrary`. + + Will fail if the same SONAME maps to multiple `SharedLibrary`s. + """ + return _merge_shlibs( + shared_libs = shared_libs, + resolve_soname = lambda s: s, + ) + +def create_shlib_symlink_tree(actions: AnalysisActions, out: str, shared_libs: list[SharedLibrary]): + """ + Merged shared libs into a symlink tree mapping the library's SONAME to + it's artifact. 
+ """ + merged = with_unique_sonames(shared_libs = shared_libs) + return actions.symlinked_dir( + out, + {name: shlib.lib.output for name, shlib in merged.items()}, + ) diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 0283decea..9ee92ca55 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -455,8 +455,8 @@ def _convert_python_library_to_executable( # Convert preloaded deps to a set of their names to be loaded by. preload_labels = {d.label: None for d in ctx.attrs.preload_deps} preload_names = { - name: None - for name, shared_lib in library.shared_libraries().items() + shared_lib.soname: None + for shared_lib in library.shared_libraries() if shared_lib.label in preload_labels } @@ -632,8 +632,8 @@ def _convert_python_library_to_executable( extra["native-executable"] = [DefaultInfo(default_output = executable_info.binary, sub_targets = executable_info.sub_targets)] # Add sub-targets for libs. - for name, lib in executable_info.shared_libs.items(): - extra[name] = [DefaultInfo(default_output = lib.output)] + for shlib in executable_info.shared_libs: + extra[shlib.soname] = [DefaultInfo(default_output = shlib.lib.output)] for name, group in executable_info.auto_link_groups.items(): extra[name] = [DefaultInfo(default_output = group.output)] @@ -650,8 +650,8 @@ def _convert_python_library_to_executable( # Put native libraries into the runtime location, as we need to unpack # potentially all of them before startup. 
native_libs = { - paths.join("runtime", "lib", name): lib - for name, lib in executable_info.shared_libs.items() + paths.join("runtime", "lib", shlib.soname): shlib.lib + for shlib in executable_info.shared_libs } preload_names = [paths.join("runtime", "lib", n) for n in preload_names] @@ -665,7 +665,7 @@ def _convert_python_library_to_executable( extra_artifacts["static_extension_finder.py"] = ctx.attrs.static_extension_finder else: - native_libs = {name: shared_lib.lib for name, shared_lib in library.shared_libraries().items()} + native_libs = {shared_lib.soname: shared_lib.lib for shared_lib in library.shared_libraries()} if dbg_source_db: extra_artifacts["dbg-db.json"] = dbg_source_db.default_outputs[0] diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index f238cb81f..85e0306ce 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -316,14 +316,13 @@ def generate_rustdoc_test( ) # Gather and setup symlink tree of transitive shared library deps. - shared_libs = {} + shared_libs = [] if link_strategy == LinkStrategy("shared"): shlib_info = merge_shared_libraries( ctx.actions, deps = inherited_shared_libs(ctx, doc_dep_ctx), ) - for soname, shared_lib in traverse_shared_library_info(shlib_info).items(): - shared_libs[soname] = shared_lib.lib + shared_libs.extend(traverse_shared_library_info(shlib_info)) executable_args = executable_shared_lib_arguments( ctx, compile_ctx.cxx_toolchain_info, diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 62912c8e8..72b05f635 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -40,6 +40,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", "merge_shared_libraries", "traverse_shared_library_info", ) @@ -133,7 +134,7 @@ def _rust_binary_common( output = ctx.actions.declare_output(name) # Gather and setup symlink tree of transitive shared library deps. 
- shared_libs = {} + shared_libs = [] rust_cxx_link_group_info = None link_group_mappings = {} @@ -170,17 +171,18 @@ def _rust_binary_common( labels_to_links_map = labels_to_links_map, ) - def shlib_filter(_name, shared_lib): - return not rust_cxx_link_group_info or is_link_group_shlib(shared_lib.label, link_group_ctx) - - for soname, shared_lib in traverse_shared_library_info(shlib_info, filter_func = shlib_filter).items(): - shared_libs[soname] = shared_lib.lib + for shlib in traverse_shared_library_info(shlib_info): + if not rust_cxx_link_group_info or is_link_group_shlib(shlib.label, link_group_ctx): + shared_libs.append(shlib) if rust_cxx_link_group_info: # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. for _name, link_group_lib in link_group_libs.items(): - shared_libs.update(link_group_lib.shared_libs) + shared_libs.extend([ + SharedLibrary(soname = name, lib = lib, label = ctx.label) + for name, lib in link_group_lib.shared_libs.items() + ]) # link groups shared libraries link args are directly added to the link command, # we don't have to add them here @@ -239,17 +241,17 @@ def _rust_binary_common( default_output = ctx.actions.write_json( name + ".shared-libraries.json", { - "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, name) for name in shared_libs.keys()], - "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, name) for name, lib in shared_libs.items() if lib.dwp], + "libraries": ["{}:{}[shared-libraries][{}]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs], + "librariesdwp": ["{}:{}[shared-libraries][{}][dwp]".format(ctx.label.path, ctx.label.name, shlib.soname) for shlib in shared_libs if shlib.lib.dwp], "rpathtree": ["{}:{}[rpath-tree]".format(ctx.label.path, ctx.label.name)] if executable_args.shared_libs_symlink_tree else [], }, ), sub_targets = { - name: 
[DefaultInfo( - default_output = lib.output, - sub_targets = {"dwp": [DefaultInfo(default_output = lib.dwp)]} if lib.dwp else {}, + shlib.soname: [DefaultInfo( + default_output = shlib.lib.output, + sub_targets = {"dwp": [DefaultInfo(default_output = shlib.lib.dwp)]} if shlib.lib.dwp else {}, )] - for name, lib in shared_libs.items() + for shlib in shared_libs }, )] @@ -257,12 +259,12 @@ def _rust_binary_common( sub_targets_for_link_strategy["rpath-tree"] = [DefaultInfo( default_output = executable_args.shared_libs_symlink_tree, other_outputs = [ - lib.output - for lib in shared_libs.values() + shlib.lib.output + for shlib in shared_libs ] + [ - lib.dwp - for lib in shared_libs.values() - if lib.dwp + shlib.lib.dwp + for shlib in shared_libs + if shlib.lib.dwp ], )] From 7fcc699542b977c7b2434a1e280dd86445bfa205 Mon Sep 17 00:00:00 2001 From: Michael Podtserkovskii Date: Thu, 11 Apr 2024 07:05:34 -0700 Subject: [PATCH 0770/1133] Use header files from `go list` output Summary: cgo_library should work the same as before, but this change is a key step on the way of enabling building cgo libraries with go_library rule Reviewed By: yarikk Differential Revision: D55925502 fbshipit-source-id: a2fa3e7b55b3a5c73f85d48231a6cc0c046197d5 --- prelude/go/cgo_builder.bzl | 27 +++++++++++++-------------- prelude/go/cgo_library.bzl | 2 +- prelude/go/go_list.bzl | 8 ++++++-- prelude/go/package_builder.bzl | 2 +- 4 files changed, 21 insertions(+), 18 deletions(-) diff --git a/prelude/go/cgo_builder.bzl b/prelude/go/cgo_builder.bzl index 855695664..7d377e623 100644 --- a/prelude/go/cgo_builder.bzl +++ b/prelude/go/cgo_builder.bzl @@ -6,10 +6,6 @@ # of this source tree. 
load("@prelude//:paths.bzl", "paths") -load( - "@prelude//apple:xcode.bzl", - "get_project_root_file", -) load( "@prelude//cxx:compile.bzl", "CxxSrcWithFlags", # @unused Used as a type @@ -20,7 +16,7 @@ load( "@prelude//cxx:cxx_types.bzl", "CxxRuleConstructorParams", # @unused Used as a type ) -load("@prelude//cxx:headers.bzl", "cxx_get_regular_cxx_headers_layout", "prepare_headers") +load("@prelude//cxx:headers.bzl", "cxx_attr_header_namespace", "cxx_get_regular_cxx_headers_layout", "prepare_headers") load( "@prelude//cxx:preprocessor.bzl", "CPreprocessor", @@ -28,7 +24,6 @@ load( "CPreprocessorInfo", "cxx_inherited_preprocessor_infos", "cxx_merge_cpreprocessors", - "cxx_private_preprocessor_info", ) load( "@prelude//linking:link_info.bzl", @@ -113,18 +108,22 @@ def _cxx_wrapper(ctx: AnalysisContext, own_pre: list[CPreprocessor], inherited_p os = ScriptOs("windows" if ctx.attrs._exec_os_type[OsLookup].platform == "windows" else "unix"), ) -def build_cgo(ctx: AnalysisContext, cgo_files: list[Artifact], c_files: list[Artifact]) -> (list[Artifact], list[Artifact], Artifact): +# build CPreprocessor similar as cxx_private_preprocessor_info does, but with our filtered headers +def _own_pre(ctx: AnalysisContext, h_files: list[Artifact]) -> CPreprocessor: + namespace = cxx_attr_header_namespace(ctx) + header_map = {paths.join(namespace, h.short_path): h for h in h_files} + header_root = prepare_headers(ctx, header_map, "h_files-private-headers", None) + + return CPreprocessor( + relative_args = CPreprocessorArgs(args = ["-I", header_root.include_path] if header_root != None else []), + ) + +def build_cgo(ctx: AnalysisContext, cgo_files: list[Artifact], h_files: list[Artifact], c_files: list[Artifact]) -> (list[Artifact], list[Artifact], Artifact): if len(cgo_files) == 0: return [], [], ctx.actions.copied_dir("cgo_gen_tmp", {}) - project_root_file = get_project_root_file(ctx) - # Gather preprocessor inputs. 
- (own_pre, _) = cxx_private_preprocessor_info( - ctx, - cxx_get_regular_cxx_headers_layout(ctx), - project_root_file = project_root_file, - ) + own_pre = _own_pre(ctx, h_files) inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) # Separate sources into C++ and GO sources. diff --git a/prelude/go/cgo_library.bzl b/prelude/go/cgo_library.bzl index 665f4db91..33157caad 100644 --- a/prelude/go/cgo_library.bzl +++ b/prelude/go/cgo_library.bzl @@ -36,7 +36,7 @@ def cgo_library_impl(ctx: AnalysisContext) -> list[Provider]: compiled_pkg = build_package( ctx, pkg_name, - ctx.attrs.go_srcs + ctx.attrs.srcs, + ctx.attrs.go_srcs + ctx.attrs.srcs + ctx.attrs.headers, package_root = ctx.attrs.package_root, deps = ctx.attrs.deps + ctx.attrs.exported_deps, shared = shared, diff --git a/prelude/go/go_list.bzl b/prelude/go/go_list.bzl index 2b133f220..dc63379ef 100644 --- a/prelude/go/go_list.bzl +++ b/prelude/go/go_list.bzl @@ -10,6 +10,7 @@ load(":toolchain.bzl", "GoToolchainInfo", "get_toolchain_env_vars") GoListOut = record( go_files = field(list[Artifact], default = []), + h_files = field(list[Artifact], default = []), c_files = field(list[Artifact], default = []), cxx_files = field(list[Artifact], default = []), cgo_files = field(list[Artifact], default = []), @@ -39,7 +40,7 @@ def go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_r ["--go", go_toolchain.go], ["--workdir", srcs_dir], ["--output", go_list_out.as_output()], - "-json=GoFiles,CgoFiles,CFiles,CXXFiles,SFiles,TestGoFiles,XTestGoFiles,EmbedFiles", + "-json=GoFiles,CgoFiles,HFiles,CFiles,CXXFiles,SFiles,TestGoFiles,XTestGoFiles,EmbedFiles", ["-tags", ",".join(tags) if tags else []], ".", ] @@ -51,7 +52,7 @@ def go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_r def parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out: ArtifactValue) -> GoListOut: go_list = go_list_out.read_json() - go_files, cgo_files, c_files, cxx_files, s_files, 
test_go_files, x_test_go_files, embed_files = [], [], [], [], [], [], [], [] + go_files, cgo_files, h_files, c_files, cxx_files, s_files, test_go_files, x_test_go_files, embed_files = [], [], [], [], [], [], [], [], [] for src in srcs: # remove package_root prefix from src artifact path to match `go list` outout format @@ -60,6 +61,8 @@ def parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out: Arti go_files.append(src) if src_path in go_list.get("CgoFiles", []): cgo_files.append(src) + if src_path in go_list.get("HFiles", []): + h_files.append(src) if src_path in go_list.get("CFiles", []): c_files.append(src) if src_path in go_list.get("CXXFiles", []): @@ -75,6 +78,7 @@ def parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out: Arti return GoListOut( go_files = go_files, + h_files = h_files, c_files = c_files, cxx_files = cxx_files, cgo_files = cgo_files, diff --git a/prelude/go/package_builder.bzl b/prelude/go/package_builder.bzl index 34175026c..55a96efee 100644 --- a/prelude/go/package_builder.bzl +++ b/prelude/go/package_builder.bzl @@ -79,7 +79,7 @@ def build_package( symabis = _symabis(ctx, pkg_name, go_list.s_files, assembler_flags, shared) # Generate CGO and C sources. 
- cgo_go_files, cgo_o_files, cgo_gen_tmp_dir = build_cgo(ctx, go_list.cgo_files, go_list.c_files + go_list.cxx_files) + cgo_go_files, cgo_o_files, cgo_gen_tmp_dir = build_cgo(ctx, go_list.cgo_files, go_list.h_files, go_list.c_files + go_list.cxx_files) ctx.actions.copy_dir(outputs[cgo_gen_dir], cgo_gen_tmp_dir) go_files = go_list.go_files + cgo_go_files From bf440e72d31574d26a473084d54a69f501f1a6a3 Mon Sep 17 00:00:00 2001 From: Maximilian Gerlach Date: Thu, 11 Apr 2024 08:00:03 -0700 Subject: [PATCH 0771/1133] Better error message when app id can't be parsed from entitlements Summary: Provide a more elaborate error message in case we can't successfully parse the app id to make debugging faster for next time Reviewed By: blackm00n Differential Revision: D55891451 fbshipit-source-id: a0821c63867d6ec9bb2a1b4e8590d4c62f3e1c55 --- prelude/apple/tools/code_signing/app_id.py | 22 ++++++++---- .../apple/tools/code_signing/app_id_test.py | 36 +++++++++++++++---- 2 files changed, 45 insertions(+), 13 deletions(-) diff --git a/prelude/apple/tools/code_signing/app_id.py b/prelude/apple/tools/code_signing/app_id.py index deac46eba..d657a4da9 100644 --- a/prelude/apple/tools/code_signing/app_id.py +++ b/prelude/apple/tools/code_signing/app_id.py @@ -36,7 +36,12 @@ class _ReGroupName(str, Enum): def from_string(cls, string: str) -> AppId: match = re.match(cls._re_pattern, string) if not match: - raise RuntimeError("Malformed app ID string: {}".format(string)) + raise RuntimeError( + "Malformed app ID string: '{}'. " + "We expected a prefix of a ten-character alphanumeric sequence and a Bundle ID which may be a fully-qualified name or a wildcard ending in '*'.".format( + string + ) + ) return AppId( match.group(cls._ReGroupName.team_id), match.group(cls._ReGroupName.bundle_id), @@ -45,8 +50,13 @@ def from_string(cls, string: str) -> AppId: # Returns the App ID if it can be inferred from keys in the entitlement. Otherwise, it returns `None`. 
@staticmethod def infer_from_entitlements(entitlements: Dict[str, Any]) -> Optional[AppId]: - keychain_access_groups = entitlements.get("keychain-access-groups") - if not keychain_access_groups: - return None - app_id_string = keychain_access_groups[0] - return AppId.from_string(app_id_string) + try: + keychain_access_groups = entitlements.get("keychain-access-groups") + if not keychain_access_groups: + return None + app_id_string = keychain_access_groups[0] + return AppId.from_string(app_id_string) + except Exception as e: + raise RuntimeError( + "Error when parsing the entitlements for the app ID: {}".format(e) + ) diff --git a/prelude/apple/tools/code_signing/app_id_test.py b/prelude/apple/tools/code_signing/app_id_test.py index 2875a129a..ab1c32bb8 100644 --- a/prelude/apple/tools/code_signing/app_id_test.py +++ b/prelude/apple/tools/code_signing/app_id_test.py @@ -23,14 +23,36 @@ def test_string_parsing(self): expected = AppId("ABCDE12345", "*") self.assertEqual(expected, result) - with self.assertRaisesRegex(RuntimeError, "Malformed app ID string: invalid."): + with self.assertRaisesRegex( + RuntimeError, + "Malformed app ID string: 'invalid.'. 
We expected a prefix of a ten-character alphanumeric sequence and a Bundle ID which may be a fully-qualified name or a wildcard ending in '*'.", + ): _ = AppId.from_string("invalid.") def test_entitlements_parsing(self): - file = pkg_resources.resource_stream( + with pkg_resources.resource_stream( __name__, "test_resources/Entitlements.plist" - ) - entitlements = plistlib.load(file) - result = AppId.infer_from_entitlements(entitlements) - expected = AppId("ABCDE12345", "com.example.TestApp") - self.assertEqual(expected, result) + ) as file: + entitlements = plistlib.load(file) + result = AppId.infer_from_entitlements(entitlements) + expected = AppId("ABCDE12345", "com.example.TestApp") + self.assertEqual(expected, result) + + invalid_file = b""" + + + + keychain-access-groups + + com.facebook.CommonTestHost + p + + """ + + invalid_entitlement = plistlib.loads(invalid_file) + with self.assertRaisesRegex( + RuntimeError, + "Error when parsing the entitlements for the app ID: Malformed app ID string: 'com.facebook.CommonTestHost'. " + "We expected a prefix of a ten-character alphanumeric sequence and a Bundle ID which may be a fully-qualified name or a wildcard ending in '*'.", + ): + AppId.infer_from_entitlements(invalid_entitlement) From 8d710897b41dad5a758f89de0977b52e1739b310 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Thu, 11 Apr 2024 08:12:29 -0700 Subject: [PATCH 0772/1133] don't use MergedLinkInfo for shared interface propagation Summary: Use a SharedInterfaceInfo provider to propagate partial shared interface files instead of relying on MergedLinkInfo. The latter is not used during link group builds, and provides de-duplication functionality that is not required for TBD interfaces. 
Reviewed By: blackm00n Differential Revision: D55889687 fbshipit-source-id: de05070b0a39501a411de974c8dbef37ca71f7d5 --- prelude/cxx/cxx_library.bzl | 57 +++++++++++------------- prelude/cxx/shared_library_interface.bzl | 22 ++++++++- prelude/linking/link_info.bzl | 41 ++++------------- 3 files changed, 55 insertions(+), 65 deletions(-) diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index eb18a2c87..f4d8909c7 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -67,7 +67,6 @@ load( "LinkStrategy", "LinkedObject", # @unused Used as a type "ObjectsLinkable", - "SharedInterfaceLinkable", "SharedLibLinkable", "SwiftRuntimeLinkable", # @unused Used as a type "SwiftmoduleLinkable", # @unused Used as a type @@ -198,6 +197,8 @@ load( ) load( ":shared_library_interface.bzl", + "SharedInterfaceInfo", # @unused Used as a type + "create_shared_interface_info", "create_tbd", "merge_tbds", "shared_library_interface", @@ -441,7 +442,6 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc providers.append(comp_db_info) # TBD generation is done per-target for stub_from_headers mode and collected at link time. 
- shared_interface_linkable = None tbd_outputs = impl_params.extra_shared_library_interfaces if impl_params.extra_shared_library_interfaces else [] if impl_params.shared_library_interface_target and \ cxx_use_shlib_intfs_mode(ctx, ShlibInterfacesMode("stub_from_headers")): @@ -454,13 +454,6 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc ) tbd_outputs.append(cxx_tbd_output) sub_targets["tbd"] = [DefaultInfo(default_output = cxx_tbd_output)] - shared_interface_linkable = SharedInterfaceLinkable( - interfaces = make_artifact_tset( - actions = ctx.actions, - label = ctx.label, - artifacts = tbd_outputs, - ), - ) # Link Groups link_group = get_link_group(ctx) @@ -491,7 +484,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc frameworks_linkable = apple_create_frameworks_linkable(ctx) swiftmodule_linkable = impl_params.swiftmodule_linkable swift_runtime_linkable = create_swift_runtime_linkable(ctx) - dep_infos, link_group_map, link_execution_preference = _get_shared_library_links( + dep_infos, link_group_map, link_execution_preference, shared_interface_info = _get_shared_library_links( ctx, get_linkable_graph_node_map_func(deps_linkable_graph), link_group, @@ -505,6 +498,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc swiftmodule_linkable, force_static_follows_dependents = impl_params.link_groups_force_static_follows_dependents, swift_runtime_linkable = swift_runtime_linkable, + tbd_outputs = tbd_outputs, ) if impl_params.generate_sub_targets.link_group_map and link_group_map: sub_targets[LINK_GROUP_MAP_DATABASE_SUB_TARGET] = [link_group_map] @@ -526,7 +520,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc extra_static_linkables = extra_static_linkables, gnu_use_link_groups = cxx_is_gnu(ctx) and bool(link_group_mappings), link_execution_preference = link_execution_preference, - tbd_outputs = tbd_outputs, + shared_interface_info 
= shared_interface_info, ) solib_as_dict = {library_outputs.solib[0]: library_outputs.solib[1]} if library_outputs.solib else {} @@ -561,6 +555,11 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # Add any subtargets for this output style. output_style_sub_targets.update(output.sub_targets) + # TBD outputs are collected for each link unit, so propagate whenever + # a library is being linked statically. + if output_style != LibOutputStyle("shared_lib") and shared_interface_info != None: + output_style_providers.append(shared_interface_info) + if impl_params.generate_sub_targets.link_style_outputs: if output: sub_targets[subtarget_for_output_style(output_style)] = [DefaultInfo( @@ -622,7 +621,6 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # Export link info from out (exported) deps. exported_deps = inherited_exported_link, frameworks_linkable = frameworks_linkable, - shared_interfaces_linkable = shared_interface_linkable, swiftmodule_linkable = swiftmodule_linkable, swift_runtime_linkable = swift_runtime_linkable, ) @@ -660,7 +658,6 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc pic_behavior = pic_behavior, preferred_linkage = Linkage("static"), frameworks_linkable = frameworks_linkable, - shared_interfaces_linkable = shared_interface_linkable, swiftmodule_linkable = swiftmodule_linkable, ), LinkGroupLibInfo(libs = {}), SharedLibraryInfo(set = None)] + additional_providers @@ -849,6 +846,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc pass default_output = unknown() + default_info = DefaultInfo( default_output = default_output.default if default_output != None else None, other_outputs = default_output.other if default_output != None else [], @@ -965,7 +963,7 @@ def _form_library_outputs( extra_static_linkables: list[[FrameworksLinkable, SwiftmoduleLinkable, SwiftRuntimeLinkable]], gnu_use_link_groups: bool, 
link_execution_preference: LinkExecutionPreference, - tbd_outputs: list[Artifact]) -> _CxxAllLibraryOutputs: + shared_interface_info: [SharedInterfaceInfo, None]) -> _CxxAllLibraryOutputs: # Build static/shared libs and the link info we use to export them to dependents. outputs = {} solib = None @@ -1061,7 +1059,7 @@ def _form_library_outputs( extra_linker_flags = extra_linker_flags, link_ordering = map_val(LinkOrdering, ctx.attrs.link_ordering), link_execution_preference = link_execution_preference, - tbd_outputs = tbd_outputs, + shared_interface_info = shared_interface_info, ) shlib = result.link_result.linked_object info = result.info @@ -1163,8 +1161,9 @@ def _get_shared_library_links( force_link_group_linking, frameworks_linkable: [FrameworksLinkable, None], swiftmodule_linkable: [SwiftmoduleLinkable, None], + tbd_outputs: list[Artifact], force_static_follows_dependents: bool = True, - swift_runtime_linkable: [SwiftRuntimeLinkable, None] = None) -> (LinkArgs, [DefaultInfo, None], LinkExecutionPreference): + swift_runtime_linkable: [SwiftRuntimeLinkable, None] = None) -> (LinkArgs, [DefaultInfo, None], LinkExecutionPreference, [SharedInterfaceInfo, None]): """ Returns LinkArgs with the content to link, and a link group map json output if applicable. @@ -1179,7 +1178,8 @@ def _get_shared_library_links( # If we're not filtering for link groups, link against the shared dependencies if not link_group_mappings and not force_link_group_linking: - deps_merged_link_infos = cxx_inherited_link_info(dedupe(flatten([non_exported_deps, exported_deps]))) + deps = dedupe(flatten([non_exported_deps, exported_deps])) + deps_merged_link_infos = cxx_inherited_link_info(deps) link_strategy = cxx_attr_link_strategy(ctx.attrs) @@ -1189,6 +1189,10 @@ def _get_shared_library_links( # Not all rules calling `cxx_library_parameterized` have `link_execution_preference`. Notably `cxx_python_extension`. 
link_execution_preference = get_link_execution_preference(ctx, []) if hasattr(ctx.attrs, "link_execution_preference") else LinkExecutionPreference("any") + # Collect the TBD interface providers for this link unit and strategy. + # These are merged when linking shared library output. + shared_interface_info = create_shared_interface_info(ctx, tbd_outputs, deps) + return apple_build_link_args_with_deduped_flags( ctx, deps_merged_link_infos, @@ -1200,7 +1204,7 @@ def _get_shared_library_links( process_link_strategy_for_pic_behavior(link_strategy, pic_behavior), swiftmodule_linkable, swift_runtime_linkable = swift_runtime_linkable, - ), None, link_execution_preference + ), None, link_execution_preference, shared_interface_info # Else get filtered link group links prefer_stripped = cxx_is_gnu(ctx) and ctx.attrs.prefer_stripped_objects @@ -1238,7 +1242,7 @@ def _get_shared_library_links( if additional_links: filtered_links.append(additional_links) - return LinkArgs(infos = filtered_links), get_link_group_map_json(ctx, filtered_targets), link_execution_preference + return LinkArgs(infos = filtered_links), get_link_group_map_json(ctx, filtered_targets), link_execution_preference, None def _use_pic(output_style: LibOutputStyle) -> bool: """ @@ -1391,7 +1395,7 @@ def _shared_library( extra_linker_flags: list[ArgLike], link_execution_preference: LinkExecutionPreference, link_ordering: [LinkOrdering, None], - tbd_outputs: list[Artifact]) -> _CxxSharedLibraryResult: + shared_interface_info: [SharedInterfaceInfo, None]) -> _CxxSharedLibraryResult: """ Generate a shared library and the associated native link info used by dependents to link against it. @@ -1473,18 +1477,9 @@ def _shared_library( # to wait for dependent libraries to link. # If the tbd output is missing this is a non apple_library target, # so skip producing the interface. 
- if len(tbd_outputs) > 0: + if shared_interface_info != None: # collect tbd output from providers and merge - all_deps = dedupe(cxx_attr_deps(ctx) + cxx_attr_exported_deps(ctx)) - deps_merged_link_infos = cxx_inherited_link_info(all_deps) - children = [li.shared_interfaces[LinkStrategy("shared")].interfaces for li in deps_merged_link_infos] - tbd_set = make_artifact_tset( - actions = ctx.actions, - label = ctx.label, - artifacts = tbd_outputs, - children = children, - ) - exported_shlib = merge_tbds(ctx, soname, tbd_set) + exported_shlib = merge_tbds(ctx, soname, shared_interface_info.interfaces) elif not gnu_use_link_groups: # TODO(agallagher): There's a bug in shlib intfs interacting with link # groups, where we don't include the symbols we're meant to export from diff --git a/prelude/cxx/shared_library_interface.bzl b/prelude/cxx/shared_library_interface.bzl index 74a0d3aba..f742c6c0e 100644 --- a/prelude/cxx/shared_library_interface.bzl +++ b/prelude/cxx/shared_library_interface.bzl @@ -5,13 +5,19 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//:artifact_tset.bzl", "ArtifactTSet", "project_artifacts") +load("@prelude//:artifact_tset.bzl", "ArtifactTSet", "make_artifact_tset", "project_artifacts") load("@prelude//:paths.bzl", "paths") load("@prelude//cxx:preprocessor.bzl", "CPreprocessor", "CPreprocessorInfo") load(":cxx_context.bzl", "get_cxx_toolchain_info") load(":cxx_toolchain_types.bzl", "CxxToolchainInfo") load(":headers.bzl", "CHeader") +# The transitive artifacts of partial shared interface for a library. +# These need to be collected and merged to produce the final shared interface. 
+SharedInterfaceInfo = provider(fields = { + "interfaces": provider_field(ArtifactTSet), +}) + def _shared_library_interface( ctx: AnalysisContext, output: str, @@ -156,3 +162,17 @@ def merge_tbds(ctx: AnalysisContext, soname: str, tbd_set: ArtifactTSet) -> Arti identifier = ctx.attrs.name, ) return tbd_file + +def create_shared_interface_info(ctx: AnalysisContext, tbd_outputs: list[Artifact], deps: list[Dependency]) -> [SharedInterfaceInfo, None]: + children = [d[SharedInterfaceInfo].interfaces for d in deps if SharedInterfaceInfo in d] + if len(tbd_outputs) == 0 and len(children) == 0: + return None + + return SharedInterfaceInfo( + interfaces = make_artifact_tset( + actions = ctx.actions, + label = ctx.label, + artifacts = tbd_outputs, + children = children, + ), + ) diff --git a/prelude/linking/link_info.bzl b/prelude/linking/link_info.bzl index 6062a4aab..64c56380b 100644 --- a/prelude/linking/link_info.bzl +++ b/prelude/linking/link_info.bzl @@ -123,12 +123,6 @@ FrameworksLinkable = record( library_names = field(list[str], []), ) -# The transitive artifacts of partial shared interface for a linkable. -# These need to be collected and merged to produce the final shared interface. 
-SharedInterfaceLinkable = record( - interfaces = field(ArtifactTSet, ArtifactTSet()), -) - SwiftmoduleLinkable = record( swiftmodules = field(ArtifactTSet, ArtifactTSet()), ) @@ -140,7 +134,14 @@ SwiftRuntimeLinkable = record( runtime_required = field(bool, False), ) -LinkableTypes = [ArchiveLinkable, SharedLibLinkable, ObjectsLinkable, FrameworksLinkable, SwiftRuntimeLinkable, SwiftmoduleLinkable] +LinkableTypes = [ + ArchiveLinkable, + SharedLibLinkable, + ObjectsLinkable, + FrameworksLinkable, + SwiftRuntimeLinkable, + SwiftmoduleLinkable, +] LinkerFlags = record( flags = field(list[typing.Any], []), @@ -252,7 +253,6 @@ def append_linkable_args(args: cmd_args, linkable: LinkableTypes): else: args.add(linkable.objects) elif isinstance(linkable, FrameworksLinkable) or \ - isinstance(linkable, SharedInterfaceLinkable) or \ isinstance(linkable, SwiftRuntimeLinkable) or \ isinstance(linkable, SwiftmoduleLinkable): # These flags are handled separately so they can be deduped. @@ -295,7 +295,6 @@ def link_info_filelist(value: LinkInfo) -> list[Artifact]: if linkable.linker_type == "darwin": filelists += linkable.objects elif isinstance(linkable, FrameworksLinkable) or \ - isinstance(linkable, SharedInterfaceLinkable) or \ isinstance(linkable, SwiftRuntimeLinkable) or \ isinstance(linkable, SwiftmoduleLinkable): pass @@ -446,7 +445,6 @@ MergedLinkInfo = provider(fields = [ # To save on repeated computation of transitive LinkInfos, we store a dedupped # structure, based on the link-style. "frameworks", # dict[LinkStrategy, FrameworksLinkable | None] - "shared_interfaces", # dict[LinkStrategy, SharedInterfaceLinkable | None] "swiftmodules", # dict[LinkStrategy, SwiftmoduleLinkable | None] "swift_runtime", # dict[LinkStrategy, SwiftRuntimeLinkable | None] ]) @@ -491,7 +489,6 @@ def create_merged_link_info( # Link info to always propagate from exported deps. 
exported_deps: list[MergedLinkInfo] = [], frameworks_linkable: [FrameworksLinkable, None] = None, - shared_interfaces_linkable: [SharedInterfaceLinkable, None] = None, swiftmodule_linkable: [SwiftmoduleLinkable, None] = None, swift_runtime_linkable: [SwiftRuntimeLinkable, None] = None) -> MergedLinkInfo: """ @@ -501,7 +498,6 @@ def create_merged_link_info( infos = {} external_debug_info = {} frameworks = {} - shared_interfaces = {} swift_runtime = {} swiftmodules = {} @@ -514,7 +510,6 @@ def create_merged_link_info( children = [] external_debug_info_children = [] framework_linkables = [] - shared_interface_linkables = [] swift_runtime_linkables = [] swiftmodule_linkables = [] @@ -528,9 +523,6 @@ def create_merged_link_info( framework_linkables.append(frameworks_linkable) framework_linkables += [dep_info.frameworks[link_strategy] for dep_info in exported_deps] - shared_interface_linkables.append(shared_interfaces_linkable) - shared_interface_linkables += [dep_info.shared_interfaces[link_strategy] for dep_info in exported_deps] - swiftmodule_linkables.append(swiftmodule_linkable) swiftmodule_linkables += [dep_info.swiftmodules[link_strategy] for dep_info in exported_deps] @@ -548,7 +540,6 @@ def create_merged_link_info( external_debug_info_children.append(value) framework_linkables.append(dep_info.frameworks[link_strategy]) - shared_interface_linkables.append(dep_info.shared_interfaces[link_strategy]) swiftmodule_linkables.append(dep_info.swiftmodules[link_strategy]) swift_runtime_linkables.append(dep_info.swift_runtime[link_strategy]) @@ -562,7 +553,6 @@ def create_merged_link_info( external_debug_info_children.append(value) frameworks[link_strategy] = merge_framework_linkables(framework_linkables) - shared_interfaces[link_strategy] = merge_shared_interface_linkables(ctx, shared_interface_linkables) swift_runtime[link_strategy] = merge_swift_runtime_linkables(swift_runtime_linkables) swiftmodules[link_strategy] = merge_swiftmodule_linkables(ctx, 
swiftmodule_linkables) @@ -589,7 +579,6 @@ def create_merged_link_info( _infos = infos, _external_debug_info = external_debug_info, frameworks = frameworks, - shared_interfaces = shared_interfaces, swift_runtime = swift_runtime, swiftmodules = swiftmodules, ) @@ -605,7 +594,6 @@ def create_merged_link_info_for_propagation( merged = {} merged_external_debug_info = {} frameworks = {} - shared_interfaces = {} swift_runtime = {} swiftmodules = {} for link_strategy in LinkStrategy: @@ -619,14 +607,12 @@ def create_merged_link_info_for_propagation( children = filter(None, [x._external_debug_info.get(link_strategy) for x in xs]), ) frameworks[link_strategy] = merge_framework_linkables([x.frameworks[link_strategy] for x in xs]) - shared_interfaces[link_strategy] = merge_shared_interface_linkables(ctx, [x.shared_interfaces[link_strategy] for x in xs]) swift_runtime[link_strategy] = merge_swift_runtime_linkables([x.swift_runtime[link_strategy] for x in xs]) swiftmodules[link_strategy] = merge_swiftmodule_linkables(ctx, [x.swiftmodules[link_strategy] for x in xs]) return MergedLinkInfo( _infos = merged, _external_debug_info = merged_external_debug_info, frameworks = frameworks, - shared_interfaces = shared_interfaces, swift_runtime = swift_runtime, swiftmodules = swiftmodules, ) @@ -885,17 +871,6 @@ def merge_framework_linkables(linkables: list[[FrameworksLinkable, None]]) -> Fr library_names = unique_library_names.keys(), ) -def merge_shared_interface_linkables(ctx: AnalysisContext, linkables: list[[SharedInterfaceLinkable, None]]) -> SharedInterfaceLinkable: - return SharedInterfaceLinkable(interfaces = make_artifact_tset( - actions = ctx.actions, - label = ctx.label, - children = [ - linkable.interfaces - for linkable in linkables - if linkable != None - ], - )) - def merge_swiftmodule_linkables(ctx: AnalysisContext, linkables: list[[SwiftmoduleLinkable, None]]) -> SwiftmoduleLinkable: return SwiftmoduleLinkable(swiftmodules = make_artifact_tset( actions = ctx.actions, 
From 163bdde3fbc2ec6c78c85b8af952d65f31a90024 Mon Sep 17 00:00:00 2001 From: Richard Howell Date: Thu, 11 Apr 2024 08:12:29 -0700 Subject: [PATCH 0773/1133] create SharedInterfaceInfo providers with link groups Summary: Generate the SharedInterfaceInfo provider when building with link groups. Reviewed By: blackm00n Differential Revision: D55889920 fbshipit-source-id: e78804873191cea19cd501549a5a1f4fd14f238f --- prelude/cxx/cxx_library.bzl | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index f4d8909c7..c29cef41a 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -1242,7 +1242,12 @@ def _get_shared_library_links( if additional_links: filtered_links.append(additional_links) - return LinkArgs(infos = filtered_links), get_link_group_map_json(ctx, filtered_targets), link_execution_preference, None + # Collect the TBD providers from the targets in this link group, these will + # be merged when linking shared library output. + link_group_deps = [d for d in dedupe(non_exported_deps + exported_deps) if d.label in filtered_labels_to_links_map] + shared_interface_info = create_shared_interface_info(ctx, tbd_outputs, link_group_deps) + + return LinkArgs(infos = filtered_links), get_link_group_map_json(ctx, filtered_targets), link_execution_preference, shared_interface_info def _use_pic(output_style: LibOutputStyle) -> bool: """ From 6cf3cbed9fbf586bcfa0185b0cf98f7d02722fa7 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Thu, 11 Apr 2024 08:32:40 -0700 Subject: [PATCH 0774/1133] Use `SharedLibrary` to represent shlibs (take 2) Summary: Make consistent with `SharedLibraryInfo` and use `SharedLibrary` objects to hold the node shlibs instead of `LinkedObject`. 
Reviewed By: dtolnay Differential Revision: D55335565 fbshipit-source-id: 921c1a02784f0f95e22552cdad4be54cff727b35 --- .../android_binary_native_library_rules.bzl | 8 +++-- prelude/cxx/cxx.bzl | 5 +-- prelude/cxx/cxx_executable.bzl | 5 +-- prelude/cxx/cxx_library.bzl | 7 +++-- prelude/cxx/link_groups.bzl | 9 +++++- prelude/cxx/omnibus.bzl | 31 ++++++++++++++----- prelude/cxx/prebuilt_cxx_library_group.bzl | 5 +-- prelude/haskell/haskell.bzl | 14 ++++----- prelude/haskell/haskell_ghci.bzl | 4 +-- prelude/linking/link_groups.bzl | 9 ++++-- prelude/linking/linkable_graph.bzl | 9 ++++-- prelude/python/python_binary.bzl | 2 +- prelude/rust/rust_binary.bzl | 6 +--- prelude/rust/rust_library.bzl | 5 +-- 14 files changed, 73 insertions(+), 46 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index dbf6d967d..4f6340c9a 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -1045,7 +1045,7 @@ def _shared_lib_for_prebuilt_shared( transitive_linkable_cache: dict[Label, bool], platform: [str, None] = None) -> SharedLibrary: expect( - len(node_data.shared_libs) == 1, + len(node_data.shared_libs.libraries) == 1, "unexpected shared_libs length for somerge of {} ({})".format(target, node_data.shared_libs), ) @@ -1063,7 +1063,9 @@ def _shared_lib_for_prebuilt_shared( "prebuilt shared library `{}` with exported_deps not supported by somerge".format(target), ) - soname, shlib = node_data.shared_libs.items()[0] + shlib = node_data.shared_libs.libraries[0] + soname = shlib.soname + shlib = shlib.lib output_path = _platform_output_path(soname, platform) return SharedLibrary( lib = shlib, @@ -1270,7 +1272,7 @@ def _get_merged_linkables_for_platform( expect(target_to_link_group[key] == group) node = linkable_nodes[key] - default_solibs = list(node.shared_libs.keys()) + default_solibs = list([shlib.soname for shlib in 
node.shared_libs.libraries]) if not default_solibs and node.preferred_linkage == Linkage("static"): default_solibs = [node.default_soname] diff --git a/prelude/cxx/cxx.bzl b/prelude/cxx/cxx.bzl index 4783ab695..dea2b33c4 100644 --- a/prelude/cxx/cxx.bzl +++ b/prelude/cxx/cxx.bzl @@ -556,9 +556,10 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: )) # Propagate shared libraries up the tree. + shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, filter(None, [x.get(SharedLibraryInfo) for x in exported_first_order_deps]), )) @@ -603,7 +604,7 @@ def prebuilt_cxx_library_impl(ctx: AnalysisContext) -> list[Provider]: # If we don't have link input for this link style, we pass in `None` so # that omnibus knows to avoid it. link_infos = libraries, - shared_libs = solibs, + shared_libs = shared_libs, linker_flags = linker_flags, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, ), diff --git a/prelude/cxx/cxx_executable.bzl b/prelude/cxx/cxx_executable.bzl index 65bd2bbb6..7ed994a87 100644 --- a/prelude/cxx/cxx_executable.bzl +++ b/prelude/cxx/cxx_executable.bzl @@ -460,10 +460,7 @@ def cxx_executable(ctx: AnalysisContext, impl_params: CxxRuleConstructorParams, # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - shared_libs.extend([ - SharedLibrary(soname = name, lib = lib, label = ctx.label) - for name, lib in link_group_lib.shared_libs.items() - ]) + shared_libs.extend(link_group_lib.shared_libs.libraries) toolchain_info = get_cxx_toolchain_info(ctx) linker_info = toolchain_info.linker_info diff --git a/prelude/cxx/cxx_library.bzl b/prelude/cxx/cxx_library.bzl index c29cef41a..9dcc67be5 100644 --- a/prelude/cxx/cxx_library.bzl +++ b/prelude/cxx/cxx_library.bzl @@ -523,6 +523,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc shared_interface_info = shared_interface_info, ) solib_as_dict = {library_outputs.solib[0]: library_outputs.solib[1]} if library_outputs.solib else {} + shared_libs = create_shared_libraries(ctx, solib_as_dict) for _, link_style_output in library_outputs.outputs.items(): for key in link_style_output.sub_targets.keys(): @@ -635,7 +636,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc if impl_params.generate_providers.shared_libraries: providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solib_as_dict), + shared_libs, filter(None, [x.get(SharedLibraryInfo) for x in non_exported_deps]) + filter(None, [x.get(SharedLibraryInfo) for x in exported_deps]), )) @@ -744,7 +745,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc # that omnibus knows to avoid it. 
include_in_android_mergemap = getattr(ctx.attrs, "include_in_android_merge_map_output", True) and default_output != None, link_infos = library_outputs.link_infos, - shared_libs = solib_as_dict, + shared_libs = shared_libs, linker_flags = linker_flags, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, ), @@ -861,7 +862,7 @@ def cxx_library_parameterized(ctx: AnalysisContext, impl_params: CxxRuleConstruc merge_link_group_lib_info( label = ctx.label, name = link_group, - shared_libs = solib_as_dict, + shared_libs = shared_libs, shared_link_infos = library_outputs.link_infos.get(LibOutputStyle("shared_lib")), deps = exported_deps + non_exported_deps, ), diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index c317deaf0..933648fb7 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -45,6 +45,10 @@ load( "get_linkable_graph_node_map_func", "get_transitive_deps", ) +load( + "@prelude//linking:shared_libraries.bzl", + "create_shared_libraries", +) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") @@ -906,7 +910,10 @@ def create_link_groups( linked_link_groups[link_group_spec.group.name] = _LinkedLinkGroup( artifact = link_group_lib, library = None if not link_group_spec.is_shared_lib else LinkGroupLib( - shared_libs = {link_group_spec.name: link_group_lib}, + shared_libs = create_shared_libraries( + ctx = ctx, + libraries = {link_group_spec.name: link_group_lib}, + ), shared_link_infos = LinkInfos( default = wrap_link_info( link_info, diff --git a/prelude/cxx/omnibus.bzl b/prelude/cxx/omnibus.bzl index 15e2f52f2..283ad6239 100644 --- a/prelude/cxx/omnibus.bzl +++ b/prelude/cxx/omnibus.bzl @@ -37,6 +37,10 @@ load( "linkable_deps", "linkable_graph", ) +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", +) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:expect.bzl", "expect") load( @@ 
-118,7 +122,7 @@ OmnibusRootProduct = record( # The result of the omnibus link. OmnibusSharedLibraries = record( omnibus = field([CxxLinkResult, None], None), - libraries = field(dict[str, LinkedObject], {}), + libraries = field(list[SharedLibrary], []), roots = field(dict[Label, OmnibusRootProduct], {}), exclusion_roots = field(list[Label]), excluded = field(list[Label]), @@ -512,9 +516,9 @@ def _create_omnibus( root_products.values(), # ... and the shared libs from excluded nodes. [ - shared_lib.output + shared_lib.lib.output for label in spec.excluded - for shared_lib in spec.link_infos[label].shared_libs.values() + for shared_lib in spec.link_infos[label].shared_libs.libraries ], # Extract explicit global symbol names from flags in all body link args. global_symbols_link_args, @@ -685,7 +689,7 @@ def create_omnibus_libraries( # Create dummy omnibus dummy_omnibus = create_dummy_omnibus(ctx, extra_ldflags) - libraries = {} + libraries = [] root_products = {} # Link all root nodes against the dummy libomnibus lib. @@ -704,7 +708,13 @@ def create_omnibus_libraries( allow_cache_upload = True, ) if root.name != None: - libraries[root.name] = product.shared_library + libraries.append( + SharedLibrary( + soname = root.name, + lib = product.shared_library, + label = label, + ), + ) root_products[label] = product # If we have body nodes, then link them into the monolithic libomnibus.so. @@ -719,12 +729,17 @@ def create_omnibus_libraries( prefer_stripped_objects, allow_cache_upload = True, ) - libraries[_omnibus_soname(ctx)] = omnibus.linked_object + libraries.append( + SharedLibrary( + soname = _omnibus_soname(ctx), + lib = omnibus.linked_object, + label = ctx.label, + ), + ) # For all excluded nodes, just add their regular shared libs. 
for label in spec.excluded: - for name, lib in spec.link_infos[label].shared_libs.items(): - libraries[name] = lib + libraries.extend(spec.link_infos[label].shared_libs.libraries) return OmnibusSharedLibraries( omnibus = omnibus, diff --git a/prelude/cxx/prebuilt_cxx_library_group.bzl b/prelude/cxx/prebuilt_cxx_library_group.bzl index 2cfbdae5f..2effc957f 100644 --- a/prelude/cxx/prebuilt_cxx_library_group.bzl +++ b/prelude/cxx/prebuilt_cxx_library_group.bzl @@ -335,9 +335,10 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: )) # Propagate shared libraries up the tree. + shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, filter(None, [x.get(SharedLibraryInfo) for x in deps + exported_deps]), )) @@ -352,7 +353,7 @@ def prebuilt_cxx_library_group_impl(ctx: AnalysisContext) -> list[Provider]: exported_deps = exported_deps, preferred_linkage = preferred_linkage, link_infos = libraries, - shared_libs = solibs, + shared_libs = shared_libs, can_be_asset = getattr(ctx.attrs, "can_be_asset", False) or False, # TODO(cjhopman): this should be set to non-None default_soname = None, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 3cf905e2d..9e83b8ae2 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -131,7 +131,6 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", "SharedLibraryInfo", "create_shared_libraries", "create_shlib_symlink_tree", @@ -334,6 +333,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: solibs = {} for soname, lib in ctx.attrs.shared_libs.items(): solibs[soname] = LinkedObject(output = lib, unstripped_output = lib) + shared_libs = create_shared_libraries(ctx, solibs) linkable_graph = create_linkable_graph( ctx, @@ -343,7 +343,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: ctx = 
ctx, exported_deps = ctx.attrs.deps, link_infos = {_to_lib_output_style(s): v for s, v in link_infos.items()}, - shared_libs = solibs, + shared_libs = shared_libs, default_soname = None, ), ), @@ -361,7 +361,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: cxx_merge_cpreprocessors(ctx, [own_pp_info], inherited_pp_info), merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, shared_library_infos, ), merge_link_group_lib_info(deps = ctx.attrs.deps), @@ -758,6 +758,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: preferred_linkage, pic_behavior, ) + shared_libs = create_shared_libraries(ctx, solibs) # TODO(cjhopman): this haskell implementation does not consistently handle LibOutputStyle # and LinkStrategy as expected and it's hard to tell what the intent of the existing code is @@ -800,7 +801,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: preferred_linkage = preferred_linkage, exported_deps = ctx.attrs.deps, link_infos = {_to_lib_output_style(s): v for s, v in link_infos.items()}, - shared_libs = solibs, + shared_libs = shared_libs, # TODO(cjhopman): this should be set to non-None default_soname = None, ), @@ -846,7 +847,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: cxx_merge_cpreprocessors(ctx, pp, inherited_pp_info), merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, shared_library_infos, ), haskell_haddock_lib(ctx, pkgname), @@ -1139,8 +1140,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - for soname, lib in link_group_lib.shared_libs.items(): - sos.append(SharedLibrary(soname = soname, lib = lib, label = ctx.label)) + sos.extend(link_group_lib.shared_libs.libraries) else: nlis = [] diff --git a/prelude/haskell/haskell_ghci.bzl b/prelude/haskell/haskell_ghci.bzl index bde2123db..70042d8a8 100644 --- a/prelude/haskell/haskell_ghci.bzl +++ b/prelude/haskell/haskell_ghci.bzl @@ -288,8 +288,8 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: shared_li = node.link_infos.get(output_style, None) if shared_li != None: tp_deps_shared_link_infos[node_label] = shared_li.default - for libname, linkObject in node.shared_libs.items(): - so_symlinks[libname] = linkObject.output + for shlib in node.shared_libs.libraries: + so_symlinks[shlib.soname] = shlib.lib.output # Create symlinks to the TP dependencies' SOs so_symlinks_root_path = ctx.label.name + ".so-symlinks" diff --git a/prelude/linking/link_groups.bzl b/prelude/linking/link_groups.bzl index 6d6bdbef3..a3fb2e1c5 100644 --- a/prelude/linking/link_groups.bzl +++ b/prelude/linking/link_groups.bzl @@ -12,7 +12,10 @@ load( load( ":link_info.bzl", "LinkInfos", - "LinkedObject", +) +load( + ":shared_libraries.bzl", + "SharedLibraries", ) # Information about a linkable node which explicitly sets `link_group`. @@ -20,7 +23,7 @@ LinkGroupLib = record( # The label of the owning target (if any). label = field([Label, None], None), # The shared libs to package for this link group. - shared_libs = field(dict[str, LinkedObject]), + shared_libs = field(SharedLibraries), # The link info to link against this link group. 
shared_link_infos = field(LinkInfos), ) @@ -48,7 +51,7 @@ def gather_link_group_libs( def merge_link_group_lib_info( label: [Label, None] = None, name: [str, None] = None, - shared_libs: [dict[str, LinkedObject], None] = None, + shared_libs: [SharedLibraries, None] = None, shared_link_infos: [LinkInfos, None] = None, deps: list[Dependency] = [], children: list[LinkGroupLibInfo] = []) -> LinkGroupLibInfo: diff --git a/prelude/linking/linkable_graph.bzl b/prelude/linking/linkable_graph.bzl index 3f026e8b4..4f252002a 100644 --- a/prelude/linking/linkable_graph.bzl +++ b/prelude/linking/linkable_graph.bzl @@ -25,13 +25,16 @@ load( "LinkInfo", # @unused Used as a type "LinkInfos", "LinkStrategy", - "LinkedObject", "LinkerFlags", "MergedLinkInfo", "get_lib_output_style", "get_output_styles_for_linkage", _get_link_info = "get_link_info", ) +load( + ":shared_libraries.bzl", + "SharedLibraries", +) # A provider with information used to link a rule into a shared library. # Potential omnibus roots must provide this so that omnibus can link them @@ -80,7 +83,7 @@ LinkableNode = record( # Shared libraries provided by this target. Used if this target is # excluded. - shared_libs = field(dict[str, LinkedObject], {}), + shared_libs = field(SharedLibraries, SharedLibraries(libraries = [])), # The soname this node would use in default link strategies. May be used by non-default # link strategies as a lib's soname. 
@@ -160,7 +163,7 @@ def create_linkable_node( deps: list[Dependency | LinkableGraph] = [], exported_deps: list[Dependency | LinkableGraph] = [], link_infos: dict[LibOutputStyle, LinkInfos] = {}, - shared_libs: dict[str, LinkedObject] = {}, + shared_libs: SharedLibraries = SharedLibraries(libraries = []), can_be_asset: bool = True, include_in_android_mergemap: bool = True, linker_flags: [LinkerFlags, None] = None, diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 9ee92ca55..89c92cebd 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -497,7 +497,7 @@ def _convert_python_library_to_executable( dest: (omnibus_libs.roots[label].shared_library, label) for dest, (_, label) in extensions.items() } - native_libs = omnibus_libs.libraries + native_libs = {shlib.soname: shlib.lib for shlib in omnibus_libs.libraries} omnibus_providers = [] diff --git a/prelude/rust/rust_binary.bzl b/prelude/rust/rust_binary.bzl index 72b05f635..2364a5b6d 100644 --- a/prelude/rust/rust_binary.bzl +++ b/prelude/rust/rust_binary.bzl @@ -40,7 +40,6 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", "merge_shared_libraries", "traverse_shared_library_info", ) @@ -179,10 +178,7 @@ def _rust_binary_common( # When there are no matches for a pattern based link group, # `link_group_mappings` will not have an entry associated with the lib. 
for _name, link_group_lib in link_group_libs.items(): - shared_libs.extend([ - SharedLibrary(soname = name, lib = lib, label = ctx.label) - for name, lib in link_group_lib.shared_libs.items() - ]) + shared_libs.extend(link_group_lib.shared_libs.libraries) # link groups shared libraries link args are directly added to the link command, # we don't have to add them here diff --git a/prelude/rust/rust_library.bzl b/prelude/rust/rust_library.bzl index 885286b4b..89917b34c 100644 --- a/prelude/rust/rust_library.bzl +++ b/prelude/rust/rust_library.bzl @@ -851,9 +851,10 @@ def _native_providers( ) # Native shared library provider. + shared_libs = create_shared_libraries(ctx, solibs) providers.append(merge_shared_libraries( ctx.actions, - create_shared_libraries(ctx, solibs), + shared_libs, inherited_shlibs, )) @@ -890,7 +891,7 @@ def _native_providers( deps = inherited_link_graphs, exported_deps = inherited_exported_deps, link_infos = link_infos, - shared_libs = solibs, + shared_libs = shared_libs, default_soname = shlib_name, include_in_android_mergemap = False, ), From b796f45e99f8c5550e70627b3eabe1c14e6b6712 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Thu, 11 Apr 2024 08:34:23 -0700 Subject: [PATCH 0775/1133] Use different categories for different kinds of dexing Summary: We want to make it clearer what kind of dexing we are doing when we are looking at data. 
Reviewed By: navidqar Differential Revision: D55967442 fbshipit-source-id: ae56a8d763f29bc33842a665d47b6dfdab4597f2 --- prelude/android/dex_rules.bzl | 4 ++-- prelude/java/dex.bzl | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/prelude/android/dex_rules.bzl b/prelude/android/dex_rules.bzl index 9e0b15c27..3031a82f6 100644 --- a/prelude/android/dex_rules.bzl +++ b/prelude/android/dex_rules.bzl @@ -110,7 +110,7 @@ def get_single_primary_dex( if not is_optimized: d8_cmd.add("--no-optimize") - ctx.actions.run(d8_cmd, category = "d8", identifier = "{}:{}".format(ctx.label.package, ctx.label.name)) + ctx.actions.run(d8_cmd, category = "get_single_primary_dex", identifier = "{}:{}".format(ctx.label.package, ctx.label.name)) return DexFilesInfo( primary_dex = output_dex_file, @@ -620,7 +620,7 @@ def _merge_dexes( ctx.actions.run( d8_cmd, - category = "d8", + category = "merge_dexes", identifier = "{}:{} {}".format(ctx.label.package, ctx.label.name, output_dex_file.short_path), ) diff --git a/prelude/java/dex.bzl b/prelude/java/dex.bzl index d2fd2f6dd..0d53812fe 100644 --- a/prelude/java/dex.bzl +++ b/prelude/java/dex.bzl @@ -71,7 +71,7 @@ def get_dex_produced_from_java_library( identifier = "{}:{} {}".format(ctx.label.package, ctx.label.name, output_dex_file.short_path) ctx.actions.run( d8_cmd, - category = "d8", + category = "pre_dex", identifier = identifier, ) From 8a0a39933fb1b218a7d8f4ffbde25dddaacd7b99 Mon Sep 17 00:00:00 2001 From: Alexander Kampmann Date: Thu, 11 Apr 2024 09:12:45 -0700 Subject: [PATCH 0776/1133] fix test, disable pre-dex if preprocessing is used Summary: In android instrumentation tests, preprocessing only works if pre-dex is disabled. This diff makes sure that predex is not used when we are using preprocessing. Also, I am fixing the unit test for this. 
Reviewed By: IanChilds Differential Revision: D56012546 fbshipit-source-id: 3e2bc0c3f4158d85f63c2363faf4310ff5464551 --- prelude/android/android_instrumentation_apk.bzl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/prelude/android/android_instrumentation_apk.bzl b/prelude/android/android_instrumentation_apk.bzl index a9cacf4a7..79e44ede4 100644 --- a/prelude/android/android_instrumentation_apk.bzl +++ b/prelude/android/android_instrumentation_apk.bzl @@ -21,6 +21,9 @@ load("@prelude//utils:expect.bzl", "expect") def android_instrumentation_apk_impl(ctx: AnalysisContext): _verify_params(ctx) + # jar preprocessing cannot be used when the jars were dexed already, so we have to disable predex when we want to preprocess the jars. + disable_pre_dex = ctx.attrs.disable_pre_dex or ctx.attrs.preprocess_java_classes_bash + apk_under_test_info = ctx.attrs.apk[AndroidApkUnderTestInfo] # android_instrumentation_apk uses the same platforms as the APK-under-test @@ -68,7 +71,7 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): enhance_ctx = create_enhancement_context(ctx) materialized_artifacts = [] - if not ctx.attrs.disable_pre_dex: + if not disable_pre_dex: pre_dexed_libs = [java_packaging_dep.dex for java_packaging_dep in java_packaging_deps] if ctx.attrs.use_split_dex: dex_merge_config = get_split_dex_merge_config(ctx, android_toolchain) @@ -100,7 +103,6 @@ def android_instrumentation_apk_impl(ctx: AnalysisContext): jars_to_owners.keys(), ) - enhance_ctx = create_enhancement_context(ctx) native_library_info = get_android_binary_native_library_info( enhance_ctx, android_packageable_info, From 192501d41bbef476e592c4df26dd60de378b2acb Mon Sep 17 00:00:00 2001 From: Alexander Kampmann Date: Thu, 11 Apr 2024 09:12:45 -0700 Subject: [PATCH 0777/1133] provide a map from input jar name to target Summary: This diff provides preprocessors with a map that maps jar names to target names. Within the preprocessor, this can be used to e.g. 
exclude third-party dependencies. Reviewed By: IanChilds Differential Revision: D55963900 fbshipit-source-id: 48bda9e75bc140ed7f02539f319ca012bdf5c069 --- prelude/android/preprocess_java_classes.bzl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/prelude/android/preprocess_java_classes.bzl b/prelude/android/preprocess_java_classes.bzl index c23e5116c..c5b841cf6 100644 --- a/prelude/android/preprocess_java_classes.bzl +++ b/prelude/android/preprocess_java_classes.bzl @@ -20,15 +20,18 @@ def get_preprocessed_java_classes(enhance_ctx: EnhancementContext, input_jars: d input_srcs = {} output_jars_to_owners = {} output_dir = ctx.actions.declare_output("preprocessed_java_classes/output_dir") + input_jars_to_owners = {} for i, (input_jar, target_label) in enumerate(input_jars.items()): expect(input_jar.extension == ".jar", "Expected {} to have extension .jar!".format(input_jar)) jar_name = "{}_{}".format(i, input_jar.basename) input_srcs[jar_name] = input_jar + input_jars_to_owners[jar_name] = target_label output_jar = output_dir.project(jar_name) output_jars_to_owners[output_jar] = target_label input_dir = ctx.actions.symlinked_dir("preprocessed_java_classes/input_dir", input_srcs) + input_jars_map = ctx.actions.write_json("preprocessed_java_classes/input_jars_map.json", input_jars_to_owners) materialized_artifacts_dir = ctx.actions.declare_output("preprocessed_java_classes/materialized_artifacts") env = { @@ -37,6 +40,7 @@ def get_preprocessed_java_classes(enhance_ctx: EnhancementContext, input_jars: d delimiter = get_path_separator_for_exec_os(ctx), ), "IN_JARS_DIR": cmd_args(input_dir), + "IN_JARS_MAP": cmd_args(input_jars_map), "MATERIALIZED_ARTIFACTS_DIR": materialized_artifacts_dir.as_output(), "OUT_JARS_DIR": output_dir.as_output(), "PREPROCESS": ctx.attrs.preprocess_java_classes_bash, @@ -64,5 +68,6 @@ def get_preprocessed_java_classes(enhance_ctx: EnhancementContext, input_jars: d ctx.actions.run(preprocess_cmd, env = env, category = 
"preprocess_java_classes") enhance_ctx.debug_output("preprocess_java_classes_input_dir", input_dir) + enhance_ctx.debug_output("preprocess_java_classes_input_jars_map", input_jars_map) return output_jars_to_owners, materialized_artifacts_dir From 45604ff1c687ee78ec6e4817f3cd80f391867a78 Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Thu, 11 Apr 2024 10:13:40 -0700 Subject: [PATCH 0778/1133] Move merge_class_to_source_maps to java_toolchain Summary: We want to use this in `android_apk` too so that we can support Android instrumentation tests. So, move it to the `java_toolchain`, which is where all the other tools relating to class-to-source map processing live anyway. Reviewed By: navidqar Differential Revision: D55964552 fbshipit-source-id: 5b9e1afac9cb68d5839c5bf0436ca8234d745627 --- prelude/java/class_to_srcs.bzl | 5 ++--- prelude/java/java_test.bzl | 5 +++-- prelude/java/java_toolchain.bzl | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/prelude/java/class_to_srcs.bzl b/prelude/java/class_to_srcs.bzl index 5da9ef1d3..f4538738b 100644 --- a/prelude/java/class_to_srcs.bzl +++ b/prelude/java/class_to_srcs.bzl @@ -7,7 +7,6 @@ load( "@prelude//java:java_toolchain.bzl", - "JavaTestToolchainInfo", # @unused Used as a type "JavaToolchainInfo", # @unused Used as a type ) @@ -109,11 +108,11 @@ def maybe_create_class_to_source_map_debuginfo( def merge_class_to_source_map_from_jar( actions: AnalysisActions, name: str, - java_test_toolchain: JavaTestToolchainInfo, + java_toolchain: JavaToolchainInfo, relative_to: [CellRoot, None], deps: list[JavaClassToSourceMapInfo]) -> Artifact: output = actions.declare_output(name) - cmd = cmd_args(java_test_toolchain.merge_class_to_source_maps[RunInfo]) + cmd = cmd_args(java_toolchain.merge_class_to_source_maps[RunInfo]) cmd.add(cmd_args(output.as_output(), format = "--output={}")) if relative_to != None: cmd.add(cmd_args(str(relative_to), format = "--relative-to={}")) diff --git a/prelude/java/java_test.bzl 
b/prelude/java/java_test.bzl index 0e1038258..dfc1949f3 100644 --- a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -52,8 +52,9 @@ def build_junit_test( extra_cmds: list = [], extra_classpath_entries: list[Artifact] = []) -> ExternalRunnerTestInfo: java_test_toolchain = ctx.attrs._java_test_toolchain[JavaTestToolchainInfo] + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo] - java = ctx.attrs.java[RunInfo] if ctx.attrs.java else ctx.attrs._java_toolchain[JavaToolchainInfo].java_for_tests + java = ctx.attrs.java[RunInfo] if ctx.attrs.java else java_toolchain.java_for_tests cmd = [java] + extra_cmds + ctx.attrs.vm_args + ["-XX:-MaxFDLimit"] if java_test_toolchain.jvm_args: @@ -153,7 +154,7 @@ def build_junit_test( transitive_class_to_src_map = merge_class_to_source_map_from_jar( actions = ctx.actions, name = ctx.label.name + ".transitive_class_to_src.json", - java_test_toolchain = java_test_toolchain, + java_toolchain = java_toolchain, relative_to = ctx.label.cell_root if run_from_cell_root else None, deps = [tests_class_to_source_info], ) diff --git a/prelude/java/java_toolchain.bzl b/prelude/java/java_toolchain.bzl index fbd474fa3..b27227ac6 100644 --- a/prelude/java/java_toolchain.bzl +++ b/prelude/java/java_toolchain.bzl @@ -49,6 +49,7 @@ JavaToolchainInfo = provider( "javacd_jvm_args_target": provider_field(typing.Any, default = None), "javacd_main_class": provider_field(typing.Any, default = None), "javacd_worker": provider_field(typing.Any, default = None), + "merge_class_to_source_maps": provider_field(typing.Any, default = None), "nullsafe": provider_field(typing.Any, default = None), "nullsafe_extra_args": provider_field(typing.Any, default = None), "nullsafe_signatures": provider_field(typing.Any, default = None), @@ -72,7 +73,6 @@ JavaTestToolchainInfo = provider( "junit_test_runner_main_class_args": provider_field(typing.Any, default = None), "jvm_args": provider_field(typing.Any, default = None), "list_class_names": 
provider_field(typing.Any, default = None), - "merge_class_to_source_maps": provider_field(typing.Any, default = None), "test_runner_library_jar": provider_field(typing.Any, default = None), "testng_test_runner_main_class_args": provider_field(typing.Any, default = None), "use_java_custom_class_loader": provider_field(typing.Any, default = None), From 15350b88344b98f786ebc4406a55b34ce6a3909d Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Thu, 11 Apr 2024 10:13:40 -0700 Subject: [PATCH 0779/1133] Create class_to_source_map_info for Android binaries too Summary: `android_instrumentation_test`s use Android binaries, so we need to create this mapping so that we can support those tests too. Reviewed By: navidqar Differential Revision: D55964840 fbshipit-source-id: 9bb1558f8741574ce5d7c7eb74b8546f735e2059 --- prelude/java/utils/java_utils.bzl | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/prelude/java/utils/java_utils.bzl b/prelude/java/utils/java_utils.bzl index 2e7a24534..b1629768d 100644 --- a/prelude/java/utils/java_utils.bzl +++ b/prelude/java/utils/java_utils.bzl @@ -123,14 +123,13 @@ def get_class_to_source_map_info( class_to_srcs_debuginfo = None if outputs != None: name = ctx.label.name - if not ctx.attrs._is_building_android_binary: - class_to_srcs = create_class_to_source_map_from_jar( - actions = ctx.actions, - java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], - name = name + ".class_to_srcs.json", - jar = outputs.classpath_entry.full_library, - srcs = ctx.attrs.srcs, - ) + class_to_srcs = create_class_to_source_map_from_jar( + actions = ctx.actions, + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], + name = name + ".class_to_srcs.json", + jar = outputs.classpath_entry.full_library, + srcs = ctx.attrs.srcs, + ) class_to_srcs_debuginfo = maybe_create_class_to_source_map_debuginfo( actions = ctx.actions, java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], From 
a7346c72c8ec0fc5567c7dffb63a5cc7d4dc47cc Mon Sep 17 00:00:00 2001 From: Ian Childs Date: Thu, 11 Apr 2024 10:13:40 -0700 Subject: [PATCH 0780/1133] Add transitive_class_to_src_map subtarget to android_apk Summary: `android_instrumentation_test` uses an Android binary, so add this subtarget to enable code coverage to be done properly. Reviewed By: navidqar Differential Revision: D55916845 fbshipit-source-id: 74831329a5888412ace47489ab00b3eeb9a190e9 --- prelude/android/android_apk.bzl | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/prelude/android/android_apk.bzl b/prelude/android/android_apk.bzl index afc461e5b..b22c3387f 100644 --- a/prelude/android/android_apk.bzl +++ b/prelude/android/android_apk.bzl @@ -9,7 +9,9 @@ load("@prelude//:validation_deps.bzl", "get_validation_deps_outputs") load("@prelude//android:android_binary.bzl", "get_binary_info") load("@prelude//android:android_providers.bzl", "AndroidApkInfo", "AndroidApkUnderTestInfo", "AndroidBinaryNativeLibsInfo", "AndroidBinaryResourcesInfo", "DexFilesInfo", "ExopackageInfo") load("@prelude//android:android_toolchain.bzl", "AndroidToolchainInfo") +load("@prelude//java:class_to_srcs.bzl", "merge_class_to_source_map_from_jar") load("@prelude//java:java_providers.bzl", "KeystoreInfo") +load("@prelude//java:java_toolchain.bzl", "JavaToolchainInfo") load("@prelude//java/utils:java_more_utils.bzl", "get_path_separator_for_exec_os") load("@prelude//java/utils:java_utils.bzl", "get_class_to_source_map_info") load("@prelude//utils:set.bzl", "set") @@ -52,6 +54,14 @@ def android_apk_impl(ctx: AnalysisContext) -> list[Provider]: outputs = None, deps = android_binary_info.deps_by_platform[android_binary_info.primary_platform], ) + transitive_class_to_src_map = merge_class_to_source_map_from_jar( + actions = ctx.actions, + name = ctx.label.name + ".transitive_class_to_src.json", + java_toolchain = ctx.attrs._java_toolchain[JavaToolchainInfo], + relative_to = None, + deps = [class_to_srcs], + ) + 
sub_targets["transitive_class_to_src_map"] = [DefaultInfo(default_output = transitive_class_to_src_map)] # We can only be sure that an APK has native libs if it has any shared libraries. Prebuilt native libraries dirs can exist but be empty. definitely_has_native_libs = bool(native_library_info.shared_libraries) From 6e4589d3bfc4fa31dd9951489afc61634844b090 Mon Sep 17 00:00:00 2001 From: Ian Levesque Date: Thu, 11 Apr 2024 10:52:28 -0700 Subject: [PATCH 0781/1133] Improve apk_genrule for AAB rules with named outputs and clearer inputs Summary: apk_genrule had "support" for AABs (it could take in an "aab" parameter instead of "apk"). Other than that it was pretty inadequate. To support this more completely: 1. Add the missing parameters from normal genrules: "default_outs" and "outs". That makes this a drop-in replacement in more places and gives more control over how you want to handle the output(s) name(s). 2. The default output name for an AAB should not end in ".apk", and it makes little sense for the input to be in an env var "$APK." Introduce "$AAB" as the input and default the output name to end in ".aab". This also has the side effect of catching genrules that really shouldn't be run on AABs, because they will fail on the missing $APK env var. 
Reviewed By: IanChilds Differential Revision: D55904373 fbshipit-source-id: cdd23f6812909ac30972d5f7c028a7e5726cac2b --- prelude/android/android.bzl | 2 ++ prelude/android/apk_genrule.bzl | 19 ++++++++++++++----- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/prelude/android/android.bzl b/prelude/android/android.bzl index 3b4a4affb..9bf6f53ec 100644 --- a/prelude/android/android.bzl +++ b/prelude/android/android.bzl @@ -204,6 +204,8 @@ extra_attributes = { "_build_only_native_code": attrs.default_only(attrs.bool(default = is_build_only_native_code())), }, "apk_genrule": genrule_attributes() | { + "default_outs": attrs.option(attrs.set(attrs.string(), sorted = False), default = None), + "outs": attrs.option(attrs.dict(key = attrs.string(), value = attrs.set(attrs.string(), sorted = False), sorted = False), default = None), "type": attrs.string(default = "apk"), "_android_toolchain": toolchains_common.android(), "_exec_os_type": buck.exec_os_type_arg(), diff --git a/prelude/android/apk_genrule.bzl b/prelude/android/apk_genrule.bzl index 0c50c05d8..934b6838b 100644 --- a/prelude/android/apk_genrule.bzl +++ b/prelude/android/apk_genrule.bzl @@ -22,6 +22,10 @@ def apk_genrule_impl(ctx: AnalysisContext) -> list[Provider]: input_manifest = input_android_apk_info.manifest input_materialized_artifacts = input_android_apk_info.materialized_artifacts input_android_apk_under_test_info = ctx.attrs.apk[AndroidApkUnderTestInfo] + + env_vars = { + "APK": cmd_args(input_apk), + } else: input_android_aab_info = ctx.attrs.aab[AndroidAabInfo] expect(input_android_aab_info != None, "'aab' attribute must be an Android Bundle!") @@ -31,14 +35,19 @@ def apk_genrule_impl(ctx: AnalysisContext) -> list[Provider]: input_manifest = input_android_aab_info.manifest input_materialized_artifacts = input_android_aab_info.materialized_artifacts - env_vars = { - "APK": cmd_args(input_apk), - } + env_vars = { + "AAB": cmd_args(input_apk), + } # Like buck1, we ignore the 'out' attribute 
and construct the output path ourselves. - output_apk_name = "{}.apk".format(ctx.label.name) + if ctx.attrs.outs: + genrule_output_name = None + elif ctx.attrs.aab: + genrule_output_name = "{}.aab".format(ctx.label.name) + else: + genrule_output_name = "{}.apk".format(ctx.label.name) - genrule_providers = process_genrule(ctx, output_apk_name, None, env_vars, other_outputs = input_materialized_artifacts) + genrule_providers = process_genrule(ctx, genrule_output_name, ctx.attrs.outs, env_vars, other_outputs = input_materialized_artifacts) expect( len(genrule_providers) == 1 and isinstance(genrule_providers[0], DefaultInfo), From dc0e57ec6fbe3af829115d68902cf8d4a6c295d8 Mon Sep 17 00:00:00 2001 From: Neil Mitchell Date: Thu, 11 Apr 2024 11:13:58 -0700 Subject: [PATCH 0782/1133] Hide unused symbols Summary: These aren't used outside the file, so make them private. Reviewed By: samkevich Differential Revision: D56014554 fbshipit-source-id: 5048d6a9150abe63285f678bfbfede409cb08b9c --- prelude/java/gwt_binary.bzl | 4 ++-- prelude/java/java_library.bzl | 8 ++++---- prelude/java/javacd_jar_creator.bzl | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/prelude/java/gwt_binary.bzl b/prelude/java/gwt_binary.bzl index 2c13c70a8..78829e8e7 100644 --- a/prelude/java/gwt_binary.bzl +++ b/prelude/java/gwt_binary.bzl @@ -14,7 +14,7 @@ load( "get_all_java_packaging_deps", ) -GWT_COMPILER_CLASS = "com.google.gwt.dev.Compiler" +_GWT_COMPILER_CLASS = "com.google.gwt.dev.Compiler" def gwt_binary_impl(ctx: AnalysisContext) -> list[Provider]: expect(ctx.attrs.local_workers > 0, "local workers must be greater than zero") @@ -35,7 +35,7 @@ def gwt_binary_impl(ctx: AnalysisContext) -> list[Provider]: ctx.attrs.vm_args, "-classpath", cmd_args(module_deps_classpath + deps_classpath, delimiter = get_path_separator_for_exec_os(ctx)), - GWT_COMPILER_CLASS, + _GWT_COMPILER_CLASS, "-war", output.as_output(), "-style", diff --git a/prelude/java/java_library.bzl 
b/prelude/java/java_library.bzl index a5e1ab59a..8f093766c 100644 --- a/prelude/java/java_library.bzl +++ b/prelude/java/java_library.bzl @@ -59,7 +59,7 @@ def _process_classpath( # add classpath args file to cmd cmd.add(option_name, classpath_args_file) -def classpath_args(ctx: AnalysisContext, args): +def _classpath_args(ctx: AnalysisContext, args): return cmd_args(args, delimiter = get_path_separator_for_exec_os(ctx)) def _process_plugins( @@ -110,7 +110,7 @@ def _process_plugins( processors_classpath_tset = None if processors_classpath_tset: - processors_classpath = classpath_args(ctx, processors_classpath_tset.project_as_args("full_jar_args")) + processors_classpath = _classpath_args(ctx, processors_classpath_tset.project_as_args("full_jar_args")) _process_classpath( ctx.actions, processors_classpath, @@ -171,7 +171,7 @@ def _append_javac_params( if compiling_classpath: _process_classpath( ctx.actions, - classpath_args(ctx, compiling_classpath), + _classpath_args(ctx, compiling_classpath), cmd, declare_prefixed_name("classpath_args", actions_identifier), "--javac_classpath_file", @@ -188,7 +188,7 @@ def _append_javac_params( if bootclasspath_list: _process_classpath( ctx.actions, - classpath_args(ctx, bootclasspath_list), + _classpath_args(ctx, bootclasspath_list), cmd, declare_prefixed_name("bootclasspath_args", actions_identifier), "--javac_bootclasspath_file", diff --git a/prelude/java/javacd_jar_creator.bzl b/prelude/java/javacd_jar_creator.bzl index c13152edd..ed8bdcec7 100644 --- a/prelude/java/javacd_jar_creator.bzl +++ b/prelude/java/javacd_jar_creator.bzl @@ -46,7 +46,7 @@ load( ) load("@prelude//utils:expect.bzl", "expect") -base_command_params = struct( +_base_command_params = struct( withDownwardApi = True, spoolMode = "DIRECT_TO_JAR", ) @@ -136,7 +136,7 @@ def create_jar_artifact_javacd( ) return struct( - baseCommandParams = base_command_params, + _baseCommandParams = _base_command_params, libraryJarCommand = struct( baseJarCommand = 
base_jar_command, libraryJarBaseCommand = struct( @@ -182,7 +182,7 @@ def create_jar_artifact_javacd( ) return struct( - baseCommandParams = base_command_params, + _baseCommandParams = _base_command_params, abiJarCommand = abi_command, ) From 30e699e041372862ac512e59cd360c2b0401cc16 Mon Sep 17 00:00:00 2001 From: Neil Mitchell Date: Thu, 11 Apr 2024 11:13:58 -0700 Subject: [PATCH 0783/1133] Delete unused file Summary: I introduced this for the type checker, but never ended up using it. Delete. Reviewed By: samkevich Differential Revision: D56014557 fbshipit-source-id: 0e019f5a7e85de5f7dccd31a58d40e1f8b8da0e3 --- prelude/builtin.bzl | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 prelude/builtin.bzl diff --git a/prelude/builtin.bzl b/prelude/builtin.bzl deleted file mode 100644 index ef1dd61fc..000000000 --- a/prelude/builtin.bzl +++ /dev/null @@ -1,12 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -# Definitions we have builtin to Buck. -# Useful for running the Starlark checker on the files. - -def DefaultInfo(): - pass From f9e005a256354dcbb5ff7bffedafc82c4cbdd4b1 Mon Sep 17 00:00:00 2001 From: Neil Mitchell Date: Thu, 11 Apr 2024 11:13:58 -0700 Subject: [PATCH 0784/1133] Delete unused file Summary: Not used, so should go away. 
Reviewed By: samkevich Differential Revision: D56014553 fbshipit-source-id: 8d3a8c4f1eadfa3bbdb806d83c5b8106f3babc91 --- prelude/platforms/apple/attr.bzl | 15 --------------- 1 file changed, 15 deletions(-) delete mode 100644 prelude/platforms/apple/attr.bzl diff --git a/prelude/platforms/apple/attr.bzl b/prelude/platforms/apple/attr.bzl deleted file mode 100644 index 1ca4e368e..000000000 --- a/prelude/platforms/apple/attr.bzl +++ /dev/null @@ -1,15 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -load("@prelude//platforms/apple:constants.bzl", "APPLE_PLATFORMS_KEY") - -def add_apple_platforms_attr(attributes): - # Add _apple_platforms to all rules so that we may query the target platform to use until we support configuration - # modifiers and can use them to set the configuration to use for operations. - # Map of string identifer to platform. 
- attributes[APPLE_PLATFORMS_KEY] = attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}) - return attributes From 5ed7b9ddc103917d346eff0009cd78f881446aa7 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Thu, 11 Apr 2024 11:27:03 -0700 Subject: [PATCH 0785/1133] Fix spelling Reviewed By: scottcao Differential Revision: D55998698 fbshipit-source-id: b88cf6865f5a3dfd45fd41bc18acd40431e2007a --- prelude/apple/apple_library.bzl | 4 ++-- prelude/apple/apple_test.bzl | 2 +- prelude/apple/debug.bzl | 2 +- prelude/apple/swift/swift_compilation.bzl | 2 +- prelude/apple/tools/make_modulemap.py | 2 +- prelude/cxx/compile.bzl | 2 +- prelude/cxx/link_types.bzl | 2 +- prelude/cxx/user/cxx_toolchain_override.bzl | 2 +- prelude/debugging/labels.bzl | 2 +- prelude/decls/erlang_rules.bzl | 20 +++++++++---------- prelude/decls/go_common.bzl | 2 +- .../common/src/buck_ct_provider.erl | 4 ++-- .../test_binary/src/test_runner.erl | 2 +- .../common_test/test_cli_lib/src/test.erl | 2 +- .../test_exec/src/ct_daemon_hooks.erl | 2 +- prelude/erlang/toolchain/edoc_cli.escript | 2 +- prelude/export_exe.bzl | 2 +- prelude/go/go_list.bzl | 4 ++-- prelude/go/link.bzl | 2 +- prelude/go/packages.bzl | 2 +- prelude/go/toolchain.bzl | 4 ++-- prelude/go/transitions/defs.bzl | 8 ++++---- prelude/haskell/ide/ide.bxl | 4 ++-- prelude/java/java_test.bzl | 2 +- prelude/java/tools/gen_class_to_source_map.py | 4 ++-- prelude/rules.bzl | 2 +- prelude/rust/build.bzl | 2 +- prelude/rust/link_info.bzl | 2 +- prelude/sh_test.bzl | 2 +- prelude/toolchains/conan/conan_common.py | 2 +- prelude/utils/buckconfig.bzl | 2 +- prelude/utils/type_defs.bzl | 4 ++-- 32 files changed, 51 insertions(+), 51 deletions(-) diff --git a/prelude/apple/apple_library.bzl b/prelude/apple/apple_library.bzl index 659fb7752..5d8cec599 100644 --- a/prelude/apple/apple_library.bzl +++ b/prelude/apple/apple_library.bzl @@ -86,9 +86,9 @@ load(":xcode.bzl", "apple_populate_xcode_attributes") 
load(":xctest_swift_support.bzl", "xctest_swift_support_info") AppleSharedLibraryMachOFileType = enum( - # dynamicly bound shared library file + # dynamically bound shared library file "dylib", - # dynamicly bound bundle file aka Mach-O bundle + # dynamically bound bundle file aka Mach-O bundle "bundle", ) diff --git a/prelude/apple/apple_test.bzl b/prelude/apple/apple_test.bzl index 59c8cb09b..c1b5fbe34 100644 --- a/prelude/apple/apple_test.bzl +++ b/prelude/apple/apple_test.bzl @@ -70,7 +70,7 @@ def apple_test_impl(ctx: AnalysisContext) -> [list[Provider], Promise]: # any xctests altogether, provided the test dylib is adhoc signed shared_library_flags += entitlements_link_flags(ctx) - # The linker will incluide adhoc signature for ARM64 by default, lets + # The linker will include adhoc signature for ARM64 by default, lets # ensure we always have an adhoc signature regardless of arch/linker logic. shared_library_flags += ["-Wl,-adhoc_codesign"] diff --git a/prelude/apple/debug.bzl b/prelude/apple/debug.bzl index d3385d68a..e7a91d114 100644 --- a/prelude/apple/debug.bzl +++ b/prelude/apple/debug.bzl @@ -26,7 +26,7 @@ AppleDebuggableInfo = provider( # a. the owning library target to artifacts, or # b. 
the owning bundle target to filtered artifacts "debug_info_tset": provider_field(ArtifactTSet), - # In the case of b above, contians the map of library target to artifacts, else None + # In the case of b above, contains the map of library target to artifacts, else None "filtered_map": provider_field([dict[Label, list[Artifact]], None], default = None), }, ) diff --git a/prelude/apple/swift/swift_compilation.bzl b/prelude/apple/swift/swift_compilation.bzl index d350bab84..4fa4e0b90 100644 --- a/prelude/apple/swift/swift_compilation.bzl +++ b/prelude/apple/swift/swift_compilation.bzl @@ -822,7 +822,7 @@ def get_swift_debug_infos( ctx: AnalysisContext, swift_dependency_info: [SwiftDependencyInfo, None], swift_output: [SwiftCompilationOutput, None]) -> SwiftDebugInfo: - # When determing the debug info for shared libraries, if the shared library is a link group, we rely on the link group links to + # When determining the debug info for shared libraries, if the shared library is a link group, we rely on the link group links to # obtain the debug info for linked libraries and only need to provide any swift debug info for this library itself. Otherwise # if linking standard shared, we need to obtain the transitive debug info. if get_link_group(ctx): diff --git a/prelude/apple/tools/make_modulemap.py b/prelude/apple/tools/make_modulemap.py index f92f037df..b73d43e2e 100755 --- a/prelude/apple/tools/make_modulemap.py +++ b/prelude/apple/tools/make_modulemap.py @@ -81,7 +81,7 @@ def _write_submodules( module = root_module for i, component in enumerate(h.split(os.sep)): if i == 0 and component == name: - # The common case is we have a singe header path prefix that matches the module name. + # The common case is we have a single header path prefix that matches the module name. # In this case we add the headers directly to the root module. 
pass else: diff --git a/prelude/cxx/compile.bzl b/prelude/cxx/compile.bzl index 628df680e..c7b0e4a26 100644 --- a/prelude/cxx/compile.bzl +++ b/prelude/cxx/compile.bzl @@ -572,7 +572,7 @@ def _dep_file_type(ext: CxxExtension) -> [DepFileType, None]: # Should it? return None - # Return the file type aswell + # Return the file type as well if ext.value in (".cpp", ".cc", ".mm", ".cxx", ".c++", ".h", ".hpp", ".hh", ".h++", ".hxx"): return DepFileType("cpp") elif ext.value in (".c", ".m"): diff --git a/prelude/cxx/link_types.bzl b/prelude/cxx/link_types.bzl index 8e065a8a6..677e32e9d 100644 --- a/prelude/cxx/link_types.bzl +++ b/prelude/cxx/link_types.bzl @@ -72,7 +72,7 @@ def link_options( __private_use_link_options_function_to_construct = None, ) -# A marker instance to differentiate explicitly-passed None and a field tha +# A marker instance to differentiate explicitly-passed None and a field that # isn't provided in merge_link_options. _NotProvided = record() _NOT_PROVIDED = _NotProvided() diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/prelude/cxx/user/cxx_toolchain_override.bzl index de7519b4f..da9416a2b 100644 --- a/prelude/cxx/user/cxx_toolchain_override.bzl +++ b/prelude/cxx/user/cxx_toolchain_override.bzl @@ -70,7 +70,7 @@ def _cxx_toolchain_override(ctx): linker_type = ctx.attrs.linker_type if ctx.attrs.linker_type != None else base_linker_info.type pdb_expected = is_pdb_generated(linker_type, ctx.attrs.linker_flags) if ctx.attrs.linker_flags != None else base_linker_info.is_pdb_generated - # This handles case when linker type is overriden to non-windows from + # This handles case when linker type is overridden to non-windows from # windows but linker flags are inherited. # When it's changed from non-windows to windows but flags are not changed, # we can't inspect base linker flags and disable PDB subtargets. 
diff --git a/prelude/debugging/labels.bzl b/prelude/debugging/labels.bzl index adc5589dc..5364e5d60 100644 --- a/prelude/debugging/labels.bzl +++ b/prelude/debugging/labels.bzl @@ -12,7 +12,7 @@ # For example: # Running "buck run //another:target" (or via using [RunInfo]) should produce `ExecInfo` as its stdout -# 3. If target has a label `dbg:info:ref=//another:target` we assume a presense of //another:target which we can inspect for the presense of relevant providers (see fdb.bxl) +# 3. If target has a label `dbg:info:ref=//another:target` we assume a presence of //another:target which we can inspect for the presence of relevant providers (see fdb.bxl) # This label indicates where to locate "[RunInfo]" which would output `ExecInfo` -compatible output DBG_INFO_EXEC = "dbg:info:exec" diff --git a/prelude/decls/erlang_rules.bzl b/prelude/decls/erlang_rules.bzl index 01d726411..f4207d879 100644 --- a/prelude/decls/erlang_rules.bzl +++ b/prelude/decls/erlang_rules.bzl @@ -88,7 +88,7 @@ rules_attributes = { This attribute controls if the output of the builds also create edoc chunks. """), "env": attrs.option(attrs.dict(key = attrs.string(), value = attrs.string()), default = None, doc = """ - The `env` field allows to set the application env variables. The key value pairs will materialise in tha applications `.app` + The `env` field allows to set the application env variables. The key value pairs will materialise in the application's `.app` file and can then be accessed by [`application:get_env/2`](https://www.erlang.org/doc/man/application.html#get_env-2). """), "erl_opts": attrs.option(attrs.list(attrs.string()), default = None, doc = """ @@ -122,7 +122,7 @@ rules_attributes = { applications `.app` file and can be accessed by `file:consult/1`. """), "include_src": attrs.bool(default = True, doc = """ - This field controlls if the generated application directory contains a src/ directory with the Erlang code or not. 
+ This field controls if the generated application directory contains a src/ directory with the Erlang code or not. """), "includes": attrs.list(attrs.source(), default = [], doc = """ The public header files accessible via `-include_lib("appname/include/header.hrl")` from other erlang files. @@ -133,9 +133,9 @@ rules_attributes = { of the corresponding Erlang terms. """), "peek_private_includes": attrs.bool(default = False, doc = """ - This attribute allows you to use the private includes of the applictions dependencies. This can be useful for + This attribute allows you to use the private includes of the application's dependencies. This can be useful for test applications, to create shared abstractions for tests. It's not advisable to use this attribute for prodution - code. All private inclues transitively must be non-ambiguous. + code. All private includes transitively must be non-ambiguous. """), "resources": attrs.list(attrs.dep(), default = [], doc = """ The `resources` field specifies targets whose default output are placed in the applications `priv/` directory. For @@ -176,7 +176,7 @@ rules_attributes = { `resources` field, the `priv` folders files can then be accessed by `escript"extract/2`. """), "main_module": attrs.option(attrs.string(), default = None, doc = """ - Overrides the default main module. Instead of defering the main module from the scripts filename, the specified module + Overrides the default main module. Instead of deferring the main module from the scripts filename, the specified module is used. That module needs to export a `main/1` function that is called as entry point. """), "resources": attrs.list(attrs.dep(), default = [], doc = """ @@ -209,11 +209,11 @@ rules_attributes = { [`permanent`](https://www.erlang.org/doc/man/application.html#type-restart_type). """), "include_erts": attrs.bool(default = False, doc = """ - This field controls wether OTP applications and the Erlang runtime system should be included as part of the release. 
+ This field controls whether OTP applications and the Erlang runtime system should be included as part of the release. Please note, that at the moment the erts folder is just `erts/`. """), "multi_toolchain": attrs.option(attrs.list(attrs.dep()), default = None, doc = """ - This field controls wether the release should be built with a single toolchain, or multiple toolchains. In the + This field controls whether the release should be built with a single toolchain, or multiple toolchains. In the latter case, all output paths are prefixed with the toolchain name. """), "overlays": attrs.dict(key = attrs.string(), value = attrs.list(attrs.dep()), default = {}, doc = """ @@ -272,7 +272,7 @@ rules_attributes = { "suite": attrs.source(doc = """ The source file for the test suite. If you are using the macro, you should use the `suites` attribute instead. - The suites attribtue specify which erlang_test targets should be generated. For each suite "path_to_suite/suite_SUITE.erl" an + The suites attribute specifies which erlang_test targets should be generated. For each suite "path_to_suite/suite_SUITE.erl" an implicit 'erlang_test' target suite_SUITE will be generated. """), "_artifact_annotation_mfa": attrs.string(default = "artifact_annotations:default_annotation/1"), @@ -528,7 +528,7 @@ erlang_test = prelude_rule( For each suite `_SUITE.erl`, if a data_dir `_SUITE_data` is present along the suite, (as per [the data_dir naming scheme for ct](https://www.erlang.org/doc/apps/common_test/write_test_chapter.html#data-and-private-directories)), - it will automatically adds the coresponding resource target to the generated test target of the suite. + it will automatically adds the corresponding resource target to the generated test target of the suite. Resources will be placed in the [Data directory (data_dir)](https://www.erlang.org/doc/apps/common_test/write_test_chapter.html#data_priv_dir) of each of the suite. @@ -544,7 +544,7 @@ erlang_test = prelude_rule( of the tests. 
One can call - - `buck2 build //my_app:test_SUITE` to compile the test files together with its depedencies. + - `buck2 build //my_app:test_SUITE` to compile the test files together with its dependencies. - `buck2 test //my_app:other_test_SUITE` to run the test. - `buck2 run //my_app:other_test_SUITE` to open an interactive test shell, where tests can be run iteratively. diff --git a/prelude/decls/go_common.bzl b/prelude/decls/go_common.bzl index a3a58a4e0..d9bcec48a 100644 --- a/prelude/decls/go_common.bzl +++ b/prelude/decls/go_common.bzl @@ -138,7 +138,7 @@ def _cgo_enabled_arg(): return { "cgo_enabled": attrs.option(attrs.bool(), default = None, doc = """ Experimental: Analog of CGO_ENABLED environment-variable. - None will be coverted to True if cxx_toolchain availabe for current configuration, otherwiese False. + None will be converted to True if cxx_toolchain available for current configuration, otherwise False. """), } diff --git a/prelude/erlang/common_test/common/src/buck_ct_provider.erl b/prelude/erlang/common_test/common/src/buck_ct_provider.erl index 2d04eb4dd..3807f1123 100644 --- a/prelude/erlang/common_test/common/src/buck_ct_provider.erl +++ b/prelude/erlang/common_test/common/src/buck_ct_provider.erl @@ -115,7 +115,7 @@ execute_method_on_provider(Method, ProviderName, ProviderState, Args) -> {error, Reason} -> ErrorMsg = unicode:characters_to_list( io_lib:format( - "Method ~p on provider ~p with sate ~p ~n returned with error ~p ~n", [ + "Method ~p on provider ~p with state ~p ~n returned with error ~p ~n", [ Method, ProviderName, ProviderState, Reason ] ) @@ -138,7 +138,7 @@ execute_method_on_provider(Method, ProviderName, ProviderState, Args) -> catch Class:Reason:StackTrace -> ErrorMsg = unicode:characters_to_list( - io_lib:format("Method ~p on provider ~p with sate ~p ~n ~s ~n", [ + io_lib:format("Method ~p on provider ~p with state ~p ~n ~s ~n", [ Method, ProviderName, ProviderState, diff --git 
a/prelude/erlang/common_test/test_binary/src/test_runner.erl b/prelude/erlang/common_test/test_binary/src/test_runner.erl index a022ec418..6a8233f38 100644 --- a/prelude/erlang/common_test/test_binary/src/test_runner.erl +++ b/prelude/erlang/common_test/test_binary/src/test_runner.erl @@ -172,7 +172,7 @@ provide_output_file( Tests, Suite, "test binary internal crash", ResultExec, OutLog ); Other when Other =:= passed orelse Other =:= timeout -> - % Here we either pased or timeout. + % Here we either passed or timeout. case file:read_file(ResultsFile) of {ok, JsonFile} -> TreeResults = binary_to_term(JsonFile), diff --git a/prelude/erlang/common_test/test_cli_lib/src/test.erl b/prelude/erlang/common_test/test_cli_lib/src/test.erl index 28f069f94..5bf567920 100644 --- a/prelude/erlang/common_test/test_cli_lib/src/test.erl +++ b/prelude/erlang/common_test/test_cli_lib/src/test.erl @@ -191,7 +191,7 @@ run(RegExOrId) -> ChangedCount -> io:format("reloaded ~p modules ~P~n", [ChangedCount, Loaded, 10]), % There were some changes, so list the tests again, then run but without recompiling changes - % Note that if called with the RegEx insted of ToRun test list like above, do_plain_test_run/1 will list the tests again + % Note that if called with the RegEx instead of ToRun test list like above, do_plain_test_run/1 will list the tests again do_plain_test_run(RegExOrId) end; Error -> diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl index d1026c559..913edb8ee 100644 --- a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl +++ b/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl @@ -167,7 +167,7 @@ initialize_hooks() -> end || {Mod, Opts, Prio} <- NormalizedConfiguredHooks ], - %% according to documentation, if two hooks have the same ID, the latter one get's dropped + %% according to documentation, if two hooks have the same ID, the latter one gets dropped PreInitHooks0 = 
lists:ukeysort(2, HooksWithId), %% now sort with configured prio the inits (default prio being 0) PreInitHooks1 = lists:keysort(1, PreInitHooks0), diff --git a/prelude/erlang/toolchain/edoc_cli.escript b/prelude/erlang/toolchain/edoc_cli.escript index d61997d9d..ab7cf6cdd 100644 --- a/prelude/erlang/toolchain/edoc_cli.escript +++ b/prelude/erlang/toolchain/edoc_cli.escript @@ -85,7 +85,7 @@ verify_files_exist(#{files := Files, out_dir := OutputDir}) -> true -> true; false -> - io:format(standard_error, "error: coudn't generate ~s~n", [ChunkPath]), + io:format(standard_error, "error: couldn't generate ~s~n", [ChunkPath]), false end end, diff --git a/prelude/export_exe.bzl b/prelude/export_exe.bzl index af37ff6b0..d1134c29f 100644 --- a/prelude/export_exe.bzl +++ b/prelude/export_exe.bzl @@ -45,7 +45,7 @@ export_exe = rule( src = "bin/script.sh", ) - The latter form allows executing checked in binaries with required resouces (eg. runtime shared libraries) + The latter form allows executing checked in binaries with required resources (eg. runtime shared libraries) without unnecessary indirection via another rule which allows args, like command_alias. Eg. 
instead of export_file( diff --git a/prelude/go/go_list.bzl b/prelude/go/go_list.bzl index dc63379ef..e13192c73 100644 --- a/prelude/go/go_list.bzl +++ b/prelude/go/go_list.bzl @@ -27,7 +27,7 @@ def go_list(ctx: AnalysisContext, pkg_name: str, srcs: list[Artifact], package_r go_list_out = ctx.actions.declare_output(paths.basename(pkg_name) + "_go_list.json") - # Create file sructure that `go list` can recognize + # Create file structure that `go list` can recognize # Use copied_dir, because embed doesn't work with symlinks srcs_dir = ctx.actions.copied_dir( "__{}_srcs_dir__".format(paths.basename(pkg_name)), @@ -55,7 +55,7 @@ def parse_go_list_out(srcs: list[Artifact], package_root: str, go_list_out: Arti go_files, cgo_files, h_files, c_files, cxx_files, s_files, test_go_files, x_test_go_files, embed_files = [], [], [], [], [], [], [], [], [] for src in srcs: - # remove package_root prefix from src artifact path to match `go list` outout format + # remove package_root prefix from src artifact path to match `go list` output format src_path = src.short_path.removeprefix(package_root).lstrip("/") if src_path in go_list.get("GoFiles", []): go_files.append(src) diff --git a/prelude/go/link.bzl b/prelude/go/link.bzl index eecb5ebbf..8c767c0d8 100644 --- a/prelude/go/link.bzl +++ b/prelude/go/link.bzl @@ -165,7 +165,7 @@ def link( # Delegate to C++ linker... # TODO: It feels a bit inefficient to generate a wrapper file for every # link. Is there some way to etract the first arg of `RunInfo`? Or maybe - # we can generate te platform-specific stuff once and re-use? + # we can generate the platform-specific stuff once and re-use? cxx_link_cmd = cmd_args( [ cxx_toolchain.linker_info.linker, diff --git a/prelude/go/packages.bzl b/prelude/go/packages.bzl index 67bfa892d..0fc12828d 100644 --- a/prelude/go/packages.bzl +++ b/prelude/go/packages.bzl @@ -66,7 +66,7 @@ def make_importcfg( # Hack: we use cmd_args get "artifact" valid path and write it to a file. 
content.append(cmd_args("packagefile ", name_, "=", pkg_, delimiter = "")) - # Future work: support importmap in buck rules insted of hacking here. + # Future work: support importmap in buck rules instead of hacking here. if with_importmap and name_.startswith("third-party-source/go/"): real_name_ = name_.removeprefix("third-party-source/go/") content.append(cmd_args("importmap ", real_name_, "=", name_, delimiter = "")) diff --git a/prelude/go/toolchain.bzl b/prelude/go/toolchain.bzl index adf2f7c53..4501187aa 100644 --- a/prelude/go/toolchain.bzl +++ b/prelude/go/toolchain.bzl @@ -40,7 +40,7 @@ GoToolchainInfo = provider( def get_toolchain_env_vars(toolchain: GoToolchainInfo, force_disable_cgo = False) -> dict[str, str | cmd_args]: env = { "GOARCH": toolchain.env_go_arch, - # opt-out from Go1.20 coverage redisign + # opt-out from Go1.20 coverage redesign "GOEXPERIMENT": "nocoverageredesign", "GOOS": toolchain.env_go_os, } @@ -75,5 +75,5 @@ def evaluate_cgo_enabled(toolchain: GoToolchainInfo, cgo_enabled: [bool, None]) if cgo_enabled != None: return cgo_enabled - # Return True if cxx_toolchain availabe for current configuration, otherwiese to False. + # Return True if cxx_toolchain available for current configuration, otherwise to False. 
return cxx_toolchain_available diff --git a/prelude/go/transitions/defs.bzl b/prelude/go/transitions/defs.bzl index 1ad7f390c..d56a7b13f 100644 --- a/prelude/go/transitions/defs.bzl +++ b/prelude/go/transitions/defs.bzl @@ -12,7 +12,7 @@ def _cgo_enabled_transition(platform, refs, attrs): constraints = platform.configuration.constraints # Cancel transition if the value already set - # to enable using configuration modifiers for overiding this option + # to enable using configuration modifiers for overriding this option cgo_enabled_setting = refs.cgo_enabled_auto[ConstraintValueInfo].setting if cgo_enabled_setting.label in constraints: return platform @@ -55,7 +55,7 @@ def _race_transition(platform, refs, attrs): constraints = platform.configuration.constraints # Cancel transition if the value already set - # to enable using configuration modifiers for overiding this option + # to enable using configuration modifiers for overriding this option race_setting = refs.race_false[ConstraintValueInfo].setting if race_setting.label in constraints: return platform @@ -82,7 +82,7 @@ def _coverage_mode_transition(platform, refs, attrs): constraints = platform.configuration.constraints # Cancel transition if the value already set - # to enable using configuration modifiers for overiding this option + # to enable using configuration modifiers for overriding this option coverage_mode_setting = refs.coverage_mode_set[ConstraintValueInfo].setting if coverage_mode_setting.label in constraints: return platform @@ -116,7 +116,7 @@ def _tags_transition(platform, refs, attrs): for tag in attrs.tags: ref_name = "tag_{}__value".format(tag) if not hasattr(refs, ref_name): - fail("Add tag '{}' to .buckconfig attrubute `go.allowed_tags` to allow using it".format(tag)) + fail("Add tag '{}' to .buckconfig attribute `go.allowed_tags` to allow using it".format(tag)) tag_value = getattr(refs, ref_name)[ConstraintValueInfo] constraints[tag_value.setting.label] = tag_value diff --git 
a/prelude/haskell/ide/ide.bxl b/prelude/haskell/ide/ide.bxl index a583d292a..1607d45d3 100644 --- a/prelude/haskell/ide/ide.bxl +++ b/prelude/haskell/ide/ide.bxl @@ -28,7 +28,7 @@ load("@prelude//paths.bzl", "paths") # 1. Finding its owner target, if the input is a file # 2. Finding the target's "project", which involves a rdeps search # 3. Computing the project solution (flags, sources and dependencies) -# 4. Outputing the solution as JSON +# 4. Outputting the solution as JSON _HASKELL_BIN = "prelude//rules.bzl:haskell_binary" _HASKELL_IDE = "prelude//rules.bzl:haskell_ide" @@ -170,7 +170,7 @@ def _solution_for_haskell_lib(ctx, target, exclude): import_dirs = {} root = ctx.root() for key, item in target_srcs.items(): - # because BXL wont give you the path of an ensured artifact + # because BXL won't give you the path of an ensured artifact sp = get_path_without_materialization(item, ctx) (_, ext) = paths.split_extension(sp) diff = sp.removesuffix(paths.replace_extension(key, ext)) diff --git a/prelude/java/java_test.bzl b/prelude/java/java_test.bzl index dfc1949f3..f117b4aa2 100644 --- a/prelude/java/java_test.bzl +++ b/prelude/java/java_test.bzl @@ -87,7 +87,7 @@ def build_junit_test( re_executor, executor_overrides = get_re_executors_from_props(ctx) # We implicitly make the target run from the project root if remote - # excution options were specified. + # execution options were specified. 
run_from_cell_root = "buck2_run_from_cell_root" in labels uses_java8 = "run_with_java8" in labels diff --git a/prelude/java/tools/gen_class_to_source_map.py b/prelude/java/tools/gen_class_to_source_map.py index 85af85916..3c01a8d24 100644 --- a/prelude/java/tools/gen_class_to_source_map.py +++ b/prelude/java/tools/gen_class_to_source_map.py @@ -64,9 +64,9 @@ def main(argv): break if not found: - # If the class is not present in the sources, we stil want to + # If the class is not present in the sources, we still want to # include it if it has a prefix that we are interested in. - # certain classes in "androidx.databinding.*" are generated and its useful to know their presense in jars + # certain classes in "androidx.databinding.*" are generated and it's useful to know their presence in jars for prefix in args.include_classes_prefixes: if classname.startswith(prefix): classes.append( diff --git a/prelude/rules.bzl b/prelude/rules.bzl index 64ab5e5f3..4852dc8c4 100644 --- a/prelude/rules.bzl +++ b/prelude/rules.bzl @@ -53,7 +53,7 @@ def _mk_rule(rule_spec: typing.Any, extra_attrs: dict[str, typing.Any] = dict(), # Add _apple_platforms to all rules so that we may query the target platform to use until we support configuration # modifiers and can use them to set the configuration to use for operations. - # Map of string identifer to platform. + # Map of string identifier to platform. attributes["_apple_platforms"] = attrs.dict(key = attrs.string(), value = attrs.dep(), sorted = False, default = {}) extra_args = dict(kwargs) diff --git a/prelude/rust/build.bzl b/prelude/rust/build.bzl index 85e0306ce..182cec2df 100644 --- a/prelude/rust/build.bzl +++ b/prelude/rust/build.bzl @@ -872,7 +872,7 @@ def _compute_common_args( # don't contain any generated code. 
Rustc can't distinguish these # from real rlibs, and so doesn't throw an error # - # The benefit of doing this is that there's no requirment that the + # The benefit of doing this is that there's no requirement that the # dependency's generated code be provided to the linker via an rlib. It # could be provided by other means, say, a link group dep_metadata_kind = dep_metadata_of_emit(emit) diff --git a/prelude/rust/link_info.bzl b/prelude/rust/link_info.bzl index 65c01365e..4f9984ed0 100644 --- a/prelude/rust/link_info.bzl +++ b/prelude/rust/link_info.bzl @@ -230,7 +230,7 @@ def enable_link_groups( specified_link_strategy: LinkStrategy, is_binary: bool): if not (cxx_is_gnu(ctx) and is_binary): - # check minium requirements + # check minimum requirements return False if link_strategy == LinkStrategy("shared") or link_strategy != specified_link_strategy: # check whether we should run link groups analysis for the given link strategy diff --git a/prelude/sh_test.bzl b/prelude/sh_test.bzl index d51eeed8e..b95bc0cd7 100644 --- a/prelude/sh_test.bzl +++ b/prelude/sh_test.bzl @@ -45,7 +45,7 @@ def sh_test_impl(ctx: AnalysisContext) -> list[Provider]: re_executor, executor_overrides = get_re_executors_from_props(ctx) # We implicitly make the target run from the project root if remote - # excution options were specified + # execution options were specified run_from_project_root = "buck2_run_from_project_root" in (ctx.attrs.labels or []) or re_executor != None # TODO support default info and runinfo properly by writing a sh script that invokes the command properly diff --git a/prelude/toolchains/conan/conan_common.py b/prelude/toolchains/conan/conan_common.py index 7abe794c7..7f324df0c 100644 --- a/prelude/toolchains/conan/conan_common.py +++ b/prelude/toolchains/conan/conan_common.py @@ -161,7 +161,7 @@ def conan_env(user_home=None, trace_log=None): # env["CONAN_REVISIONS_ENABLED"] = "1" # Prevent over-allocation. 
- # TODO[AH] Support parallized package builds and set an appropriate action + # TODO[AH] Support parallelized package builds and set an appropriate action # weight using the `weight` parameter to `ctx.actions.run`. # Note that not all Conan packages respect the `CONAN_CPU_COUNT` setting. env["CONAN_CPU_COUNT"] = "1" diff --git a/prelude/utils/buckconfig.bzl b/prelude/utils/buckconfig.bzl index bc03a75c4..8d747bd09 100644 --- a/prelude/utils/buckconfig.bzl +++ b/prelude/utils/buckconfig.bzl @@ -188,4 +188,4 @@ def resolve_alias(alias): return target else: alias = target - fail("This should never happen - either the alias exists or it doesnt") + fail("This should never happen - either the alias exists or it doesn't") diff --git a/prelude/utils/type_defs.bzl b/prelude/utils/type_defs.bzl index 79ee96ba9..3ab5d21b5 100644 --- a/prelude/utils/type_defs.bzl +++ b/prelude/utils/type_defs.bzl @@ -88,7 +88,7 @@ def is_bool(arg): """Checks if provided instance is a boolean value. Args: - arg: An instance ot check. type: Any + arg: An instance of check. type: Any Returns: True for boolean values, False otherwise. rtype: bool @@ -101,7 +101,7 @@ def is_number(arg): """Checks if provided instance is a number value. Args: - arg: An instance ot check. type: Any + arg: An instance of check. type: Any Returns: True for number values, False otherwise. rtype: bool From e1520832030244a1644922564fca115176d951c5 Mon Sep 17 00:00:00 2001 From: Neil Mitchell Date: Thu, 11 Apr 2024 12:27:36 -0700 Subject: [PATCH 0786/1133] Delete entirely unused file Summary: Doesn't seem to have been used or meaningfully modified in a while. 
Reviewed By: samkevich Differential Revision: D56014552 fbshipit-source-id: 51b864a8dc72ffaa77770ea7fc59e04964a26832 --- prelude/tests/tpx_re_legacy.bzl | 57 --------------------------------- 1 file changed, 57 deletions(-) delete mode 100644 prelude/tests/tpx_re_legacy.bzl diff --git a/prelude/tests/tpx_re_legacy.bzl b/prelude/tests/tpx_re_legacy.bzl deleted file mode 100644 index a47c911eb..000000000 --- a/prelude/tests/tpx_re_legacy.bzl +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright (c) Meta Platforms, Inc. and affiliates. -# -# This source code is licensed under both the MIT license found in the -# LICENSE-MIT file in the root directory of this source tree and the Apache -# License, Version 2.0 found in the LICENSE-APACHE file in the root directory -# of this source tree. - -load("@prelude//utils:expect.bzl", "expect") - -_RE_ENABLED = "supports_remote_execution" -_RE_OPTS_LABEL_PREFIX = "re_opts_capabilities=" -_RE_OPTS_KEYS = ["platform", "subplatform", "gpu_name"] - -def _parse_re_opts(labels: list[str]) -> [dict[str, str], None]: - """ - Parse out JSON-embedded RE options like: - 're_opts_capabilities={"platform": gpu-remote-execution, "gpu_name": "A100"}' - """ - - for label in labels: - if label.startswith(_RE_OPTS_LABEL_PREFIX): - result = json.decode(label[len(_RE_OPTS_LABEL_PREFIX):]) - for key in result.keys(): - expect(key in _RE_OPTS_KEYS, "unexpected key in RE options label: {}", key) - return result - - return None - -# TODO(agallagher): Parsing RE options via JSON embedded in labels isn't a great -# UI, and we just do it here to support existing use cases. Ideally, though, we'd -# present a better UI (e.g. an `re_opts` param for tests) and use that instead. -def get_re_executor_from_labels(labels: list[str]) -> [CommandExecutorConfig, None]: - """ - Parse legacy RE-enablement test labels and use them to configure a test RE - executor to run the test with. 
- - The UI is best documented at: - https://www.internalfb.com/intern/wiki/Remote_Execution/Users/GPU_RE_Contbuild_Migration/ - """ - - # If the special "RE enabled" label isn't present, abort. - if _RE_ENABLED not in labels: - return None - - # If there's no options found in labels, don't use RE. This diverges from - # v1 behavior, but v2+tpx needs some platform to be set and so we probably - # want to the toolchain tp provide some exec-platform compatible platform. - re_opts = _parse_re_opts(labels) - if re_opts == None: - return None - - return CommandExecutorConfig( - local_enabled = False, - remote_enabled = True, - remote_execution_properties = re_opts, - remote_execution_use_case = "tpx-default", - ) From ea9f1df5cb0ee9f28ce240805812fea7f87d96ef Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Thu, 11 Apr 2024 18:52:57 -0700 Subject: [PATCH 0787/1133] Defer resolving shared lib sonames (take 2) Summary: This pushes shared lib SONAME resolution down into Python package handling, to where the SONAMEs are actually used. This will facilitate later changes to support libraries with SONAMEs which aren't known at analysis time. 
Reviewed By: dtolnay Differential Revision: D55369283 fbshipit-source-id: a87c26a43712777a0ae5b1fb851aad9d04e61b0d --- prelude/cxx/link_groups.bzl | 15 +++- prelude/python/make_py_package.bzl | 89 +++++++++++++-------- prelude/python/python_binary.bzl | 121 ++++++++++++++++++----------- 3 files changed, 144 insertions(+), 81 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index 933648fb7..a3f0cb1fe 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -47,7 +47,8 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "create_shared_libraries", + "SharedLibraries", + "SharedLibrary", ) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") @@ -141,6 +142,7 @@ LinkGroupLibSpec = record( root = field([LinkableRootInfo, None], None), # The link group to link. group = field(Group), + label = field(Label | None, None), ) _LinkedLinkGroup = record( @@ -910,9 +912,14 @@ def create_link_groups( linked_link_groups[link_group_spec.group.name] = _LinkedLinkGroup( artifact = link_group_lib, library = None if not link_group_spec.is_shared_lib else LinkGroupLib( - shared_libs = create_shared_libraries( - ctx = ctx, - libraries = {link_group_spec.name: link_group_lib}, + shared_libs = SharedLibraries( + libraries = [ + SharedLibrary( + label = link_group_spec.label or ctx.label, + soname = link_group_spec.name, + lib = link_group_lib, + ), + ], ), shared_link_infos = LinkInfos( default = wrap_link_info( diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 0619f9a9b..0822ca27d 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -12,13 +12,14 @@ execution load("@prelude//:artifact_tset.bzl", "project_artifacts") load("@prelude//:local_only.bzl", "package_python_locally") +load("@prelude//:paths.bzl", "paths") load( "@prelude//cxx:cxx_library_utility.bzl", "cxx_is_gnu", ) load( - 
"@prelude//linking:link_info.bzl", - "LinkedObject", # @unused Used as a type + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", # @unused Used as a type ) load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//utils:arglike.bzl", "ArgLike") @@ -34,7 +35,6 @@ PexModules = record( manifests = field(PythonLibraryManifestsInterface), extensions = field(ManifestInfo | None, None), extra_manifests = field(ManifestInfo | None, None), - debuginfo_manifest = field(ManifestInfo | None, None), compile = field(bool, False), ) @@ -122,10 +122,11 @@ def make_py_package( package_style: PackageStyle, build_args: list[ArgLike], pex_modules: PexModules, - shared_libraries: dict[str, (LinkedObject, bool)], + shared_libraries: list[(str, SharedLibrary, bool)], main: EntryPoint, hidden_resources: list[ArgLike] | None, - allow_cache_upload: bool) -> PexProviders: + allow_cache_upload: bool, + debuginfo_files: list[(str | (str, SharedLibrary, str), Artifact)] = []) -> PexProviders: """ Passes a standardized set of flags to a `make_py_package` binary to create a python "executable". 
@@ -148,14 +149,22 @@ def make_py_package( if pex_modules.extensions: srcs.append(pex_modules.extensions.manifest) - preload_libraries = _preload_libraries_args(ctx, shared_libraries) + preload_libraries = _preload_libraries_args( + ctx = ctx, + shared_libraries = [ + (libdir, shlib) + for libdir, shlib, preload in shared_libraries + if preload + ], + ) startup_function = generate_startup_function_loader(ctx) manifest_module = generate_manifest_module(ctx, python_toolchain, srcs) common_modules_args, dep_artifacts, debug_artifacts = _pex_modules_common_args( ctx, pex_modules, [startup_function] if startup_function else [], - {name: lib for name, (lib, _) in shared_libraries.items()}, + [(shlib, libdir) for libdir, shlib, _ in shared_libraries], + debuginfo_files = debuginfo_files, ) default = _make_py_package_impl( @@ -212,7 +221,7 @@ def _make_py_package_impl( preload_libraries: cmd_args, common_modules_args: cmd_args, dep_artifacts: list[ArgLike], - debug_artifacts: list[(ArgLike, str)], + debug_artifacts: list[(str, ArgLike)], main: EntryPoint, hidden_resources: list[ArgLike] | None, manifest_module: ArgLike | None, @@ -336,17 +345,16 @@ def _make_py_package_impl( run_cmd = cmd_args(run_args).hidden(runtime_files + hidden_resources), ) -def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[(ArgLike, str)]) -> list[Provider]: - out = ctx.actions.write_json("debuginfo.manifest.json", debug_artifacts) - return [DefaultInfo(default_output = out, other_outputs = [a for a, _ in debug_artifacts])] +def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[(str, ArgLike)]) -> list[Provider]: + out = ctx.actions.write_json("debuginfo.manifest.json", [(s, d) for d, s in debug_artifacts]) + return [DefaultInfo(default_output = out, other_outputs = [d for d, _ in debug_artifacts])] -def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: dict[str, (LinkedObject, bool)]) -> cmd_args: +def _preload_libraries_args(ctx: AnalysisContext, 
shared_libraries: list[(str, SharedLibrary)]) -> cmd_args: preload_libraries_path = ctx.actions.write( "__preload_libraries.txt", cmd_args([ - "--preload={}".format(name) - for name, (_, preload) in shared_libraries.items() - if preload + "--preload={}".format(paths.join(libdir, shlib.soname)) + for libdir, shlib in shared_libraries ]), ) return cmd_args(preload_libraries_path, format = "@{}") @@ -392,7 +400,8 @@ def _pex_modules_common_args( ctx: AnalysisContext, pex_modules: PexModules, extra_manifests: list[ArgLike], - shared_libraries: dict[str, LinkedObject]) -> (cmd_args, list[ArgLike], list[(ArgLike, str)]): + shared_libraries: list[(SharedLibrary, str)], + debuginfo_files: list[(str | (str, SharedLibrary, str), Artifact)]) -> (cmd_args, list[ArgLike], list[(str, ArgLike)]): srcs = [] src_artifacts = [] deps = [] @@ -425,14 +434,17 @@ def _pex_modules_common_args( _srcs(resources, format = "--resource-manifest={}"), ) - native_libraries = [s.output for s in shared_libraries.values()] + native_libraries = [shlib.lib.output for shlib, _ in shared_libraries] native_library_srcs_path = ctx.actions.write( "__native_libraries___srcs.txt", _srcs(native_libraries, format = "--native-library-src={}"), ) native_library_dests_path = ctx.actions.write( "__native_libraries___dests.txt", - ["--native-library-dest={}".format(lib) for lib in shared_libraries], + [ + "--native-library-dest={}".format(paths.join(libdir, shlib.soname)) + for shlib, libdir in shared_libraries + ], ) src_manifest_args = cmd_args(src_manifests_path).hidden(srcs) @@ -445,29 +457,40 @@ def _pex_modules_common_args( cmd.add(cmd_args(native_library_srcs_args, format = "@{}")) cmd.add(cmd_args(native_library_dests_path, format = "@{}")) - if pex_modules.debuginfo_manifest: - debuginfo_files = pex_modules.debuginfo_manifest.artifacts + if debuginfo_files: debuginfo_srcs_path = ctx.actions.write( "__debuginfo___srcs.txt", - _srcs([src for src, _ in debuginfo_files], format = "--debuginfo-src={}"), + 
_srcs([src for _, src in debuginfo_files], format = "--debuginfo-src={}"), ) debuginfo_srcs_args = cmd_args(debuginfo_srcs_path) cmd.add(cmd_args(debuginfo_srcs_args, format = "@{}")) - debug_artifacts.extend(debuginfo_files) + for name, artifact in debuginfo_files: + if type(name) != type(""): + libdir, shlib, ext = name + name = paths.join(libdir, shlib.soname + ext) + debug_artifacts.append((name, artifact)) if ctx.attrs.package_split_dwarf_dwp: if ctx.attrs.strip_libpar == "extract" and get_package_style(ctx) == PackageStyle("standalone") and cxx_is_gnu(ctx): # rename to match extracted debuginfo package - dwp = [(s.dwp, "{}.debuginfo.dwp".format(n)) for n, s in shared_libraries.items() if s.dwp != None] + dwp = [ + (paths.join(libdir, "{}.debuginfo.dwp".format(shlib.soname)), shlib.lib.dwp) + for shlib, libdir in shared_libraries + if shlib.lib.dwp != None + ] else: - dwp = [(s.dwp, "{}.dwp".format(n)) for n, s in shared_libraries.items() if s.dwp != None] + dwp = [ + (paths.join(libdir, "{}.dwp".format(shlib.soname)), shlib.lib.dwp) + for shlib, libdir in shared_libraries + if shlib.lib.dwp != None + ] dwp_srcs_path = ctx.actions.write( "__dwp___srcs.txt", - _srcs([src for src, _ in dwp], format = "--dwp-src={}"), + _srcs([src for _, src in dwp], format = "--dwp-src={}"), ) dwp_dests_path = ctx.actions.write( "__dwp___dests.txt", - _srcs([dest for _, dest in dwp], format = "--dwp-dest={}"), + _srcs([dest for dest, _ in dwp], format = "--dwp-dest={}"), ) dwp_srcs_args = cmd_args(dwp_srcs_path) cmd.add(cmd_args(dwp_srcs_args, format = "@{}")) @@ -475,15 +498,19 @@ def _pex_modules_common_args( debug_artifacts.extend(dwp) - deps.extend([lib.output for lib in shared_libraries.values()]) + for shlib, _ in shared_libraries: + deps.append(shlib.lib.output) external_debug_info = project_artifacts( ctx.actions, - [lib.external_debug_info for lib in shared_libraries.values()], + [ + shlib.lib.external_debug_info + for shlib, _ in shared_libraries + ], ) # HACK: 
external_debug_info has an empty path - debug_artifacts.extend([(d, "") for d in external_debug_info]) + debug_artifacts.extend([("", d) for d in external_debug_info]) return (cmd, deps, debug_artifacts) @@ -491,7 +518,7 @@ def _pex_modules_args( ctx: AnalysisContext, common_args: cmd_args, dep_artifacts: list[ArgLike], - debug_artifacts: list[(ArgLike, str)], + debug_artifacts: list[(str, ArgLike)], symlink_tree_path: Artifact | None, manifest_module: ArgLike | None, pex_modules: PexModules, @@ -530,7 +557,7 @@ def _pex_modules_args( # if we are not going to create symlinks. cmd.hidden(dep_artifacts) - cmd.hidden([a for a, _ in debug_artifacts]) + cmd.hidden([s for _, s in debug_artifacts]) return cmd diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 89c92cebd..14a343e58 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -71,7 +71,12 @@ load( "LinkableProviders", # @unused Used as a type "linkables", ) -load("@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info") +load( + "@prelude//linking:shared_libraries.bzl", + "SharedLibrary", + "merge_shared_libraries", + "traverse_shared_library_info", +) load("@prelude//linking:strip.bzl", "strip_debug_with_gnu_debuglink") load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:utils.bzl", "flatten", "value_or") @@ -164,6 +169,7 @@ def _get_root_link_group_specs( name = dep.linkable_root_info.name, is_shared_lib = True, root = dep.linkable_root_info, + label = dep.linkable_graph.nodes.value.label, group = Group( name = dep.linkable_root_info.name, mappings = [ @@ -205,15 +211,6 @@ def _get_root_link_group_specs( return specs -def _split_debuginfo(ctx, data: dict[str, (typing.Any, Label | bool)]) -> (dict[str, (LinkedObject, Label | bool)], dict[str, Artifact]): - debuginfo_artifacts = {} - transformed = {} - for name, (artifact, extra) in data.items(): - stripped_binary, debuginfo = 
strip_debug_with_gnu_debuglink(ctx, name, artifact.unstripped_output) - transformed[name] = LinkedObject(output = stripped_binary, unstripped_output = artifact.unstripped_output, dwp = artifact.dwp), extra - debuginfo_artifacts[name + ".debuginfo"] = debuginfo - return transformed, debuginfo_artifacts - def _get_shared_only_groups(shared_only_libs: list[LinkableProviders]) -> list[Group]: """ Create link group mappings for shared-only libs that'll force the link to @@ -454,11 +451,6 @@ def _convert_python_library_to_executable( # Convert preloaded deps to a set of their names to be loaded by. preload_labels = {d.label: None for d in ctx.attrs.preload_deps} - preload_names = { - shared_lib.soname: None - for shared_lib in library.shared_libraries() - if shared_lib.label in preload_labels - } extensions = {} extra_artifacts = {} @@ -497,7 +489,7 @@ def _convert_python_library_to_executable( dest: (omnibus_libs.roots[label].shared_library, label) for dest, (_, label) in extensions.items() } - native_libs = {shlib.soname: shlib.lib for shlib in omnibus_libs.libraries} + shared_libs = [("", shlib) for shlib in omnibus_libs.libraries] omnibus_providers = [] @@ -649,23 +641,29 @@ def _convert_python_library_to_executable( # Put native libraries into the runtime location, as we need to unpack # potentially all of them before startup. 
- native_libs = { - paths.join("runtime", "lib", shlib.soname): shlib.lib - for shlib in executable_info.shared_libs - } - preload_names = [paths.join("runtime", "lib", n) for n in preload_names] + shared_libs = [("runtime/lib", s) for s in executable_info.shared_libs] # TODO expect(len(executable_info.runtime_files) == 0, "OH NO THERE ARE RUNTIME FILES") extra_artifacts.update(dict(extension_info.artifacts)) - native_libs["runtime/bin/{}".format(ctx.attrs.executable_name)] = LinkedObject( - output = executable_info.binary, - unstripped_output = executable_info.binary, - dwp = executable_info.dwp, - ) + shared_libs.append(( + "runtime/bin", + SharedLibrary( + soname = ctx.attrs.executable_name, + label = ctx.label, + lib = LinkedObject( + output = executable_info.binary, + unstripped_output = executable_info.binary, + dwp = executable_info.dwp, + ), + ), + )) extra_artifacts["static_extension_finder.py"] = ctx.attrs.static_extension_finder else: - native_libs = {shared_lib.soname: shared_lib.lib for shared_lib in library.shared_libraries()} + shared_libs = [ + ("", shared_lib) + for shared_lib in library.shared_libraries() + ] if dbg_source_db: extra_artifacts["dbg-db.json"] = dbg_source_db.default_outputs[0] @@ -675,28 +673,58 @@ def _convert_python_library_to_executable( extra_manifests = create_manifest_for_source_map(ctx, "extra_manifests", extra_artifacts) - shared_libraries = {} - debuginfo_artifacts = {} - # Create the map of native libraries to their artifacts and whether they # need to be preloaded. Note that we merge preload deps into regular deps # above, before gathering up all native libraries, so we're guaranteed to # have all preload libraries (and their transitive deps) here. 
- for name, lib in native_libs.items(): - shared_libraries[name] = lib, name in preload_names + shared_libs = [ + (libdir, shlib, shlib.label in preload_labels) + for libdir, shlib in shared_libs + ] # Strip native libraries and extensions and update the .gnu_debuglink references if we are extracting # debug symbols from the par + debuginfo_files = [] if ctx.attrs.strip_libpar == "extract" and package_style == PackageStyle("standalone") and cxx_is_gnu(ctx): - shared_libraries, library_debuginfo = _split_debuginfo(ctx, shared_libraries) - extensions, extension_debuginfo = _split_debuginfo(ctx, extensions) - debuginfo_artifacts = library_debuginfo | extension_debuginfo + stripped_shlibs = [] + for libdir, shlib, preload in shared_libs: + stripped, debuginfo = strip_debug_with_gnu_debuglink( + ctx = ctx, + name = shlib.lib.unstripped_output.basename, + obj = shlib.lib.unstripped_output, + ) + shlib = SharedLibrary( + soname = shlib.soname, + label = shlib.label, + lib = LinkedObject( + output = stripped, + unstripped_output = shlib.lib.unstripped_output, + dwp = shlib.lib.dwp, + ), + ) + stripped_shlibs.append((libdir, shlib, preload)) + debuginfo_files.append(((libdir, shlib, ".debuginfo"), debuginfo)) + shared_libs = stripped_shlibs + for name, (extension, label) in extensions.items(): + stripped, debuginfo = strip_debug_with_gnu_debuglink( + ctx = ctx, + name = name, + obj = extension.unstripped_output, + ) + extensions[name] = ( + LinkedObject( + output = stripped, + unstripped_output = extension.unstripped_output, + dwp = extension.dwp, + ), + label, + ) + debuginfo_files.append((name + ".debuginfo", debuginfo)) # Combine sources and extensions into a map of all modules. 
pex_modules = PexModules( manifests = library.manifests(), extra_manifests = extra_manifests, - debuginfo_manifest = create_manifest_for_source_map(ctx, "debuginfo", debuginfo_artifacts) if debuginfo_artifacts else None, compile = compile, extensions = create_manifest_for_extensions( ctx, @@ -709,16 +737,17 @@ def _convert_python_library_to_executable( # Build the PEX. pex = make_py_package( - ctx, - python_toolchain, - ctx.attrs.make_py_package[RunInfo] if ctx.attrs.make_py_package != None else None, - package_style, - ctx.attrs.build_args, - pex_modules, - shared_libraries, - main, - hidden_resources, - allow_cache_upload, + ctx = ctx, + python_toolchain = python_toolchain, + make_py_package_cmd = ctx.attrs.make_py_package[RunInfo] if ctx.attrs.make_py_package != None else None, + package_style = package_style, + build_args = ctx.attrs.build_args, + pex_modules = pex_modules, + shared_libraries = shared_libs, + main = main, + hidden_resources = hidden_resources, + allow_cache_upload = allow_cache_upload, + debuginfo_files = debuginfo_files, ) pex.sub_targets.update(extra) From 7635a1835da05f4589186b6e443b8602baeba79c Mon Sep 17 00:00:00 2001 From: Yulong Zhang Date: Fri, 12 Apr 2024 00:41:53 -0700 Subject: [PATCH 0788/1133] Revert D55369283: Defer resolving shared lib sonames (take 2) Differential Revision: D55369283 Original commit changeset: a87c26a43712 Original Phabricator Diff: D55369283 fbshipit-source-id: bb0206dfd13ec82e86026599a4381a6322c79718 --- prelude/cxx/link_groups.bzl | 15 +--- prelude/python/make_py_package.bzl | 89 ++++++++------------- prelude/python/python_binary.bzl | 121 +++++++++++------------------ 3 files changed, 81 insertions(+), 144 deletions(-) diff --git a/prelude/cxx/link_groups.bzl b/prelude/cxx/link_groups.bzl index a3f0cb1fe..933648fb7 100644 --- a/prelude/cxx/link_groups.bzl +++ b/prelude/cxx/link_groups.bzl @@ -47,8 +47,7 @@ load( ) load( "@prelude//linking:shared_libraries.bzl", - "SharedLibraries", - "SharedLibrary", + 
"create_shared_libraries", ) load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:arglike.bzl", "ArgLike") @@ -142,7 +141,6 @@ LinkGroupLibSpec = record( root = field([LinkableRootInfo, None], None), # The link group to link. group = field(Group), - label = field(Label | None, None), ) _LinkedLinkGroup = record( @@ -912,14 +910,9 @@ def create_link_groups( linked_link_groups[link_group_spec.group.name] = _LinkedLinkGroup( artifact = link_group_lib, library = None if not link_group_spec.is_shared_lib else LinkGroupLib( - shared_libs = SharedLibraries( - libraries = [ - SharedLibrary( - label = link_group_spec.label or ctx.label, - soname = link_group_spec.name, - lib = link_group_lib, - ), - ], + shared_libs = create_shared_libraries( + ctx = ctx, + libraries = {link_group_spec.name: link_group_lib}, ), shared_link_infos = LinkInfos( default = wrap_link_info( diff --git a/prelude/python/make_py_package.bzl b/prelude/python/make_py_package.bzl index 0822ca27d..0619f9a9b 100644 --- a/prelude/python/make_py_package.bzl +++ b/prelude/python/make_py_package.bzl @@ -12,14 +12,13 @@ execution load("@prelude//:artifact_tset.bzl", "project_artifacts") load("@prelude//:local_only.bzl", "package_python_locally") -load("@prelude//:paths.bzl", "paths") load( "@prelude//cxx:cxx_library_utility.bzl", "cxx_is_gnu", ) load( - "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", # @unused Used as a type + "@prelude//linking:link_info.bzl", + "LinkedObject", # @unused Used as a type ) load("@prelude//os_lookup:defs.bzl", "OsLookup") load("@prelude//utils:arglike.bzl", "ArgLike") @@ -35,6 +34,7 @@ PexModules = record( manifests = field(PythonLibraryManifestsInterface), extensions = field(ManifestInfo | None, None), extra_manifests = field(ManifestInfo | None, None), + debuginfo_manifest = field(ManifestInfo | None, None), compile = field(bool, False), ) @@ -122,11 +122,10 @@ def make_py_package( package_style: PackageStyle, build_args: list[ArgLike], 
pex_modules: PexModules, - shared_libraries: list[(str, SharedLibrary, bool)], + shared_libraries: dict[str, (LinkedObject, bool)], main: EntryPoint, hidden_resources: list[ArgLike] | None, - allow_cache_upload: bool, - debuginfo_files: list[(str | (str, SharedLibrary, str), Artifact)] = []) -> PexProviders: + allow_cache_upload: bool) -> PexProviders: """ Passes a standardized set of flags to a `make_py_package` binary to create a python "executable". @@ -149,22 +148,14 @@ def make_py_package( if pex_modules.extensions: srcs.append(pex_modules.extensions.manifest) - preload_libraries = _preload_libraries_args( - ctx = ctx, - shared_libraries = [ - (libdir, shlib) - for libdir, shlib, preload in shared_libraries - if preload - ], - ) + preload_libraries = _preload_libraries_args(ctx, shared_libraries) startup_function = generate_startup_function_loader(ctx) manifest_module = generate_manifest_module(ctx, python_toolchain, srcs) common_modules_args, dep_artifacts, debug_artifacts = _pex_modules_common_args( ctx, pex_modules, [startup_function] if startup_function else [], - [(shlib, libdir) for libdir, shlib, _ in shared_libraries], - debuginfo_files = debuginfo_files, + {name: lib for name, (lib, _) in shared_libraries.items()}, ) default = _make_py_package_impl( @@ -221,7 +212,7 @@ def _make_py_package_impl( preload_libraries: cmd_args, common_modules_args: cmd_args, dep_artifacts: list[ArgLike], - debug_artifacts: list[(str, ArgLike)], + debug_artifacts: list[(ArgLike, str)], main: EntryPoint, hidden_resources: list[ArgLike] | None, manifest_module: ArgLike | None, @@ -345,16 +336,17 @@ def _make_py_package_impl( run_cmd = cmd_args(run_args).hidden(runtime_files + hidden_resources), ) -def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[(str, ArgLike)]) -> list[Provider]: - out = ctx.actions.write_json("debuginfo.manifest.json", [(s, d) for d, s in debug_artifacts]) - return [DefaultInfo(default_output = out, other_outputs = [d for d, _ in 
debug_artifacts])] +def _debuginfo_subtarget(ctx: AnalysisContext, debug_artifacts: list[(ArgLike, str)]) -> list[Provider]: + out = ctx.actions.write_json("debuginfo.manifest.json", debug_artifacts) + return [DefaultInfo(default_output = out, other_outputs = [a for a, _ in debug_artifacts])] -def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: list[(str, SharedLibrary)]) -> cmd_args: +def _preload_libraries_args(ctx: AnalysisContext, shared_libraries: dict[str, (LinkedObject, bool)]) -> cmd_args: preload_libraries_path = ctx.actions.write( "__preload_libraries.txt", cmd_args([ - "--preload={}".format(paths.join(libdir, shlib.soname)) - for libdir, shlib in shared_libraries + "--preload={}".format(name) + for name, (_, preload) in shared_libraries.items() + if preload ]), ) return cmd_args(preload_libraries_path, format = "@{}") @@ -400,8 +392,7 @@ def _pex_modules_common_args( ctx: AnalysisContext, pex_modules: PexModules, extra_manifests: list[ArgLike], - shared_libraries: list[(SharedLibrary, str)], - debuginfo_files: list[(str | (str, SharedLibrary, str), Artifact)]) -> (cmd_args, list[ArgLike], list[(str, ArgLike)]): + shared_libraries: dict[str, LinkedObject]) -> (cmd_args, list[ArgLike], list[(ArgLike, str)]): srcs = [] src_artifacts = [] deps = [] @@ -434,17 +425,14 @@ def _pex_modules_common_args( _srcs(resources, format = "--resource-manifest={}"), ) - native_libraries = [shlib.lib.output for shlib, _ in shared_libraries] + native_libraries = [s.output for s in shared_libraries.values()] native_library_srcs_path = ctx.actions.write( "__native_libraries___srcs.txt", _srcs(native_libraries, format = "--native-library-src={}"), ) native_library_dests_path = ctx.actions.write( "__native_libraries___dests.txt", - [ - "--native-library-dest={}".format(paths.join(libdir, shlib.soname)) - for shlib, libdir in shared_libraries - ], + ["--native-library-dest={}".format(lib) for lib in shared_libraries], ) src_manifest_args = 
cmd_args(src_manifests_path).hidden(srcs) @@ -457,40 +445,29 @@ def _pex_modules_common_args( cmd.add(cmd_args(native_library_srcs_args, format = "@{}")) cmd.add(cmd_args(native_library_dests_path, format = "@{}")) - if debuginfo_files: + if pex_modules.debuginfo_manifest: + debuginfo_files = pex_modules.debuginfo_manifest.artifacts debuginfo_srcs_path = ctx.actions.write( "__debuginfo___srcs.txt", - _srcs([src for _, src in debuginfo_files], format = "--debuginfo-src={}"), + _srcs([src for src, _ in debuginfo_files], format = "--debuginfo-src={}"), ) debuginfo_srcs_args = cmd_args(debuginfo_srcs_path) cmd.add(cmd_args(debuginfo_srcs_args, format = "@{}")) - for name, artifact in debuginfo_files: - if type(name) != type(""): - libdir, shlib, ext = name - name = paths.join(libdir, shlib.soname + ext) - debug_artifacts.append((name, artifact)) + debug_artifacts.extend(debuginfo_files) if ctx.attrs.package_split_dwarf_dwp: if ctx.attrs.strip_libpar == "extract" and get_package_style(ctx) == PackageStyle("standalone") and cxx_is_gnu(ctx): # rename to match extracted debuginfo package - dwp = [ - (paths.join(libdir, "{}.debuginfo.dwp".format(shlib.soname)), shlib.lib.dwp) - for shlib, libdir in shared_libraries - if shlib.lib.dwp != None - ] + dwp = [(s.dwp, "{}.debuginfo.dwp".format(n)) for n, s in shared_libraries.items() if s.dwp != None] else: - dwp = [ - (paths.join(libdir, "{}.dwp".format(shlib.soname)), shlib.lib.dwp) - for shlib, libdir in shared_libraries - if shlib.lib.dwp != None - ] + dwp = [(s.dwp, "{}.dwp".format(n)) for n, s in shared_libraries.items() if s.dwp != None] dwp_srcs_path = ctx.actions.write( "__dwp___srcs.txt", - _srcs([src for _, src in dwp], format = "--dwp-src={}"), + _srcs([src for src, _ in dwp], format = "--dwp-src={}"), ) dwp_dests_path = ctx.actions.write( "__dwp___dests.txt", - _srcs([dest for dest, _ in dwp], format = "--dwp-dest={}"), + _srcs([dest for _, dest in dwp], format = "--dwp-dest={}"), ) dwp_srcs_args = 
cmd_args(dwp_srcs_path) cmd.add(cmd_args(dwp_srcs_args, format = "@{}")) @@ -498,19 +475,15 @@ def _pex_modules_common_args( debug_artifacts.extend(dwp) - for shlib, _ in shared_libraries: - deps.append(shlib.lib.output) + deps.extend([lib.output for lib in shared_libraries.values()]) external_debug_info = project_artifacts( ctx.actions, - [ - shlib.lib.external_debug_info - for shlib, _ in shared_libraries - ], + [lib.external_debug_info for lib in shared_libraries.values()], ) # HACK: external_debug_info has an empty path - debug_artifacts.extend([("", d) for d in external_debug_info]) + debug_artifacts.extend([(d, "") for d in external_debug_info]) return (cmd, deps, debug_artifacts) @@ -518,7 +491,7 @@ def _pex_modules_args( ctx: AnalysisContext, common_args: cmd_args, dep_artifacts: list[ArgLike], - debug_artifacts: list[(str, ArgLike)], + debug_artifacts: list[(ArgLike, str)], symlink_tree_path: Artifact | None, manifest_module: ArgLike | None, pex_modules: PexModules, @@ -557,7 +530,7 @@ def _pex_modules_args( # if we are not going to create symlinks. 
cmd.hidden(dep_artifacts) - cmd.hidden([s for _, s in debug_artifacts]) + cmd.hidden([a for a, _ in debug_artifacts]) return cmd diff --git a/prelude/python/python_binary.bzl b/prelude/python/python_binary.bzl index 14a343e58..89c92cebd 100644 --- a/prelude/python/python_binary.bzl +++ b/prelude/python/python_binary.bzl @@ -71,12 +71,7 @@ load( "LinkableProviders", # @unused Used as a type "linkables", ) -load( - "@prelude//linking:shared_libraries.bzl", - "SharedLibrary", - "merge_shared_libraries", - "traverse_shared_library_info", -) +load("@prelude//linking:shared_libraries.bzl", "merge_shared_libraries", "traverse_shared_library_info") load("@prelude//linking:strip.bzl", "strip_debug_with_gnu_debuglink") load("@prelude//linking:types.bzl", "Linkage") load("@prelude//utils:utils.bzl", "flatten", "value_or") @@ -169,7 +164,6 @@ def _get_root_link_group_specs( name = dep.linkable_root_info.name, is_shared_lib = True, root = dep.linkable_root_info, - label = dep.linkable_graph.nodes.value.label, group = Group( name = dep.linkable_root_info.name, mappings = [ @@ -211,6 +205,15 @@ def _get_root_link_group_specs( return specs +def _split_debuginfo(ctx, data: dict[str, (typing.Any, Label | bool)]) -> (dict[str, (LinkedObject, Label | bool)], dict[str, Artifact]): + debuginfo_artifacts = {} + transformed = {} + for name, (artifact, extra) in data.items(): + stripped_binary, debuginfo = strip_debug_with_gnu_debuglink(ctx, name, artifact.unstripped_output) + transformed[name] = LinkedObject(output = stripped_binary, unstripped_output = artifact.unstripped_output, dwp = artifact.dwp), extra + debuginfo_artifacts[name + ".debuginfo"] = debuginfo + return transformed, debuginfo_artifacts + def _get_shared_only_groups(shared_only_libs: list[LinkableProviders]) -> list[Group]: """ Create link group mappings for shared-only libs that'll force the link to @@ -451,6 +454,11 @@ def _convert_python_library_to_executable( # Convert preloaded deps to a set of their names to be 
loaded by. preload_labels = {d.label: None for d in ctx.attrs.preload_deps} + preload_names = { + shared_lib.soname: None + for shared_lib in library.shared_libraries() + if shared_lib.label in preload_labels + } extensions = {} extra_artifacts = {} @@ -489,7 +497,7 @@ def _convert_python_library_to_executable( dest: (omnibus_libs.roots[label].shared_library, label) for dest, (_, label) in extensions.items() } - shared_libs = [("", shlib) for shlib in omnibus_libs.libraries] + native_libs = {shlib.soname: shlib.lib for shlib in omnibus_libs.libraries} omnibus_providers = [] @@ -641,29 +649,23 @@ def _convert_python_library_to_executable( # Put native libraries into the runtime location, as we need to unpack # potentially all of them before startup. - shared_libs = [("runtime/lib", s) for s in executable_info.shared_libs] + native_libs = { + paths.join("runtime", "lib", shlib.soname): shlib.lib + for shlib in executable_info.shared_libs + } + preload_names = [paths.join("runtime", "lib", n) for n in preload_names] # TODO expect(len(executable_info.runtime_files) == 0, "OH NO THERE ARE RUNTIME FILES") extra_artifacts.update(dict(extension_info.artifacts)) - shared_libs.append(( - "runtime/bin", - SharedLibrary( - soname = ctx.attrs.executable_name, - label = ctx.label, - lib = LinkedObject( - output = executable_info.binary, - unstripped_output = executable_info.binary, - dwp = executable_info.dwp, - ), - ), - )) + native_libs["runtime/bin/{}".format(ctx.attrs.executable_name)] = LinkedObject( + output = executable_info.binary, + unstripped_output = executable_info.binary, + dwp = executable_info.dwp, + ) extra_artifacts["static_extension_finder.py"] = ctx.attrs.static_extension_finder else: - shared_libs = [ - ("", shared_lib) - for shared_lib in library.shared_libraries() - ] + native_libs = {shared_lib.soname: shared_lib.lib for shared_lib in library.shared_libraries()} if dbg_source_db: extra_artifacts["dbg-db.json"] = dbg_source_db.default_outputs[0] @@ -673,58 
+675,28 @@ def _convert_python_library_to_executable( extra_manifests = create_manifest_for_source_map(ctx, "extra_manifests", extra_artifacts) + shared_libraries = {} + debuginfo_artifacts = {} + # Create the map of native libraries to their artifacts and whether they # need to be preloaded. Note that we merge preload deps into regular deps # above, before gathering up all native libraries, so we're guaranteed to # have all preload libraries (and their transitive deps) here. - shared_libs = [ - (libdir, shlib, shlib.label in preload_labels) - for libdir, shlib in shared_libs - ] + for name, lib in native_libs.items(): + shared_libraries[name] = lib, name in preload_names # Strip native libraries and extensions and update the .gnu_debuglink references if we are extracting # debug symbols from the par - debuginfo_files = [] if ctx.attrs.strip_libpar == "extract" and package_style == PackageStyle("standalone") and cxx_is_gnu(ctx): - stripped_shlibs = [] - for libdir, shlib, preload in shared_libs: - stripped, debuginfo = strip_debug_with_gnu_debuglink( - ctx = ctx, - name = shlib.lib.unstripped_output.basename, - obj = shlib.lib.unstripped_output, - ) - shlib = SharedLibrary( - soname = shlib.soname, - label = shlib.label, - lib = LinkedObject( - output = stripped, - unstripped_output = shlib.lib.unstripped_output, - dwp = shlib.lib.dwp, - ), - ) - stripped_shlibs.append((libdir, shlib, preload)) - debuginfo_files.append(((libdir, shlib, ".debuginfo"), debuginfo)) - shared_libs = stripped_shlibs - for name, (extension, label) in extensions.items(): - stripped, debuginfo = strip_debug_with_gnu_debuglink( - ctx = ctx, - name = name, - obj = extension.unstripped_output, - ) - extensions[name] = ( - LinkedObject( - output = stripped, - unstripped_output = extension.unstripped_output, - dwp = extension.dwp, - ), - label, - ) - debuginfo_files.append((name + ".debuginfo", debuginfo)) + shared_libraries, library_debuginfo = _split_debuginfo(ctx, shared_libraries) + 
extensions, extension_debuginfo = _split_debuginfo(ctx, extensions) + debuginfo_artifacts = library_debuginfo | extension_debuginfo # Combine sources and extensions into a map of all modules. pex_modules = PexModules( manifests = library.manifests(), extra_manifests = extra_manifests, + debuginfo_manifest = create_manifest_for_source_map(ctx, "debuginfo", debuginfo_artifacts) if debuginfo_artifacts else None, compile = compile, extensions = create_manifest_for_extensions( ctx, @@ -737,17 +709,16 @@ def _convert_python_library_to_executable( # Build the PEX. pex = make_py_package( - ctx = ctx, - python_toolchain = python_toolchain, - make_py_package_cmd = ctx.attrs.make_py_package[RunInfo] if ctx.attrs.make_py_package != None else None, - package_style = package_style, - build_args = ctx.attrs.build_args, - pex_modules = pex_modules, - shared_libraries = shared_libs, - main = main, - hidden_resources = hidden_resources, - allow_cache_upload = allow_cache_upload, - debuginfo_files = debuginfo_files, + ctx, + python_toolchain, + ctx.attrs.make_py_package[RunInfo] if ctx.attrs.make_py_package != None else None, + package_style, + ctx.attrs.build_args, + pex_modules, + shared_libraries, + main, + hidden_resources, + allow_cache_upload, ) pex.sub_targets.update(extra) From 9e880aaab8adc69d28c76e38577aa0e4cec3528b Mon Sep 17 00:00:00 2001 From: Jason Sendros-Keshka Date: Fri, 12 Apr 2024 01:40:38 -0700 Subject: [PATCH 0789/1133] Allow targets to specify a debug port for JavaCD/KotlinCD Summary: Creates a way to specify debug port through attrs instead of toolchain. This lets us debug multiple targets per run by specifying the debug port per-target. 
Reviewed By: IanChilds Differential Revision: D55969833 fbshipit-source-id: bceb2d4b80d9062cb5efac5652f6045ba7a08b17 --- prelude/java/java_library.bzl | 8 ++++++-- prelude/java/javacd_jar_creator.bzl | 8 +++++--- prelude/jvm/cd_jar_creator_util.bzl | 14 +++++++++++--- prelude/kotlin/kotlin_library.bzl | 1 + prelude/kotlin/kotlincd_jar_creator.bzl | 8 +++++--- 5 files changed, 28 insertions(+), 11 deletions(-) diff --git a/prelude/java/java_library.bzl b/prelude/java/java_library.bzl index 8f093766c..ec6c74d19 100644 --- a/prelude/java/java_library.bzl +++ b/prelude/java/java_library.bzl @@ -294,7 +294,8 @@ def compile_to_jar( additional_classpath_entries: [list[Artifact], None] = None, additional_compiled_srcs: Artifact | None = None, bootclasspath_entries: [list[Artifact], None] = None, - is_creating_subtarget: bool = False) -> JavaCompileOutputs: + is_creating_subtarget: bool = False, + debug_port: [int, None] = None) -> JavaCompileOutputs: if not additional_classpath_entries: additional_classpath_entries = [] if not bootclasspath_entries: @@ -348,6 +349,7 @@ def compile_to_jar( bootclasspath_entries, is_building_android_binary, is_creating_subtarget, + debug_port, ) def _create_jar_artifact( @@ -375,7 +377,8 @@ def _create_jar_artifact( additional_compiled_srcs: Artifact | None, bootclasspath_entries: list[Artifact], _is_building_android_binary: bool, - _is_creating_subtarget: bool = False) -> JavaCompileOutputs: + _is_creating_subtarget: bool = False, + _debug_port: [int, None] = None) -> JavaCompileOutputs: """ Creates jar artifact. 
@@ -570,6 +573,7 @@ def build_java_library( "additional_compiled_srcs": additional_compiled_srcs, "annotation_processor_properties": annotation_processor_properties, "bootclasspath_entries": bootclasspath_entries, + "debug_port": getattr(ctx.attrs, "debug_port", None), "deps": first_order_deps, "javac_tool": derive_javac(ctx.attrs.javac) if ctx.attrs.javac else None, "manifest_file": manifest_file, diff --git a/prelude/java/javacd_jar_creator.bzl b/prelude/java/javacd_jar_creator.bzl index ed8bdcec7..730ca0f5e 100644 --- a/prelude/java/javacd_jar_creator.bzl +++ b/prelude/java/javacd_jar_creator.bzl @@ -76,7 +76,8 @@ def create_jar_artifact_javacd( additional_compiled_srcs: Artifact | None, bootclasspath_entries: list[Artifact], is_building_android_binary: bool, - is_creating_subtarget: bool = False) -> JavaCompileOutputs: + is_creating_subtarget: bool = False, + debug_port: [int, None] = None) -> JavaCompileOutputs: if javac_tool != None: # TODO(cjhopman): We can probably handle this better. I think we should be able to just use the non-javacd path. 
fail("cannot set explicit javac on library when using javacd") @@ -213,8 +214,9 @@ def create_jar_artifact_javacd( compiler = compiler, main_class = java_toolchain.javacd_main_class, worker = java_toolchain.javacd_worker[WorkerInfo], - debug_port = java_toolchain.javacd_debug_port, - debug_target = java_toolchain.javacd_debug_target, + target_specified_debug_port = debug_port, + toolchain_specified_debug_port = java_toolchain.javacd_debug_port, + toolchain_specified_debug_target = java_toolchain.javacd_debug_target, extra_jvm_args = java_toolchain.javacd_jvm_args, extra_jvm_args_target = java_toolchain.javacd_jvm_args_target, ) diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/prelude/jvm/cd_jar_creator_util.bzl index a1ca8e531..0fcd2d49a 100644 --- a/prelude/jvm/cd_jar_creator_util.bzl +++ b/prelude/jvm/cd_jar_creator_util.bzl @@ -401,8 +401,9 @@ def prepare_cd_exe( compiler: Artifact, main_class: str, worker: WorkerInfo, - debug_port: [int, None], - debug_target: [Label, None], + target_specified_debug_port: [int, None], + toolchain_specified_debug_port: [int, None], + toolchain_specified_debug_target: [Label, None], extra_jvm_args: list[str], extra_jvm_args_target: list[Label]) -> tuple: local_only = False @@ -436,7 +437,14 @@ def prepare_cd_exe( "--add-opens=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED", ] - if debug_port and qualified_name == qualified_name_with_subtarget(debug_target): + if target_specified_debug_port: + debug_port = target_specified_debug_port + elif toolchain_specified_debug_port and qualified_name == qualified_name_with_subtarget(toolchain_specified_debug_target): + debug_port = toolchain_specified_debug_port + else: + debug_port = None + + if debug_port: # Do not use a worker when debugging is enabled local_only = True jvm_args.extend(["-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address={}".format(debug_port)]) diff --git a/prelude/kotlin/kotlin_library.bzl b/prelude/kotlin/kotlin_library.bzl index 417d3001c..f034f59da 
100644 --- a/prelude/kotlin/kotlin_library.bzl +++ b/prelude/kotlin/kotlin_library.bzl @@ -357,6 +357,7 @@ def build_kotlin_library( annotation_processor_params = annotation_processor_properties.annotation_processor_params + ksp_annotation_processor_properties.annotation_processor_params, ), "bootclasspath_entries": bootclasspath_entries, + "debug_port": getattr(ctx.attrs, "debug_port", None), "deps": deps, "extra_kotlinc_arguments": ctx.attrs.extra_kotlinc_arguments, "friend_paths": ctx.attrs.friend_paths, diff --git a/prelude/kotlin/kotlincd_jar_creator.bzl b/prelude/kotlin/kotlincd_jar_creator.bzl index 521b8c766..c3df00a28 100644 --- a/prelude/kotlin/kotlincd_jar_creator.bzl +++ b/prelude/kotlin/kotlincd_jar_creator.bzl @@ -78,7 +78,8 @@ def create_jar_artifact_kotlincd( k2: bool, is_creating_subtarget: bool = False, optional_dirs: list[OutputArtifact] = [], - jar_postprocessor: [RunInfo, None] = None) -> JavaCompileOutputs: + jar_postprocessor: [RunInfo, None] = None, + debug_port: [int, None] = None) -> JavaCompileOutputs: resources_map = get_resources_map( java_toolchain = java_toolchain, package = label.package, @@ -260,8 +261,9 @@ def create_jar_artifact_kotlincd( compiler = compiler, main_class = kotlin_toolchain.kotlincd_main_class, worker = kotlin_toolchain.kotlincd_worker[WorkerInfo], - debug_port = kotlin_toolchain.kotlincd_debug_port, - debug_target = kotlin_toolchain.kotlincd_debug_target, + target_specified_debug_port = debug_port, + toolchain_specified_debug_port = kotlin_toolchain.kotlincd_debug_port, + toolchain_specified_debug_target = kotlin_toolchain.kotlincd_debug_target, extra_jvm_args = kotlin_toolchain.kotlincd_jvm_args, extra_jvm_args_target = kotlin_toolchain.kotlincd_jvm_args_target, ) From fd1d8ecd4748d576ccb6997c297f3a752b9cc162 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 15 Apr 2024 17:07:27 +0200 Subject: [PATCH 0790/1133] dynamic_output(outputs) must be output artifact --- prelude/haskell/compile.bzl | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 39d3d1396..eb777fa01 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -564,7 +564,7 @@ def compile( ctx.actions.dynamic_output( dynamic = [md_file], inputs = ctx.attrs.srcs, - outputs = interfaces + objects + stub_dirs, + outputs = [o.as_output() for o in interfaces + objects + stub_dirs], f = do_compile) stubs_dir = ctx.actions.declare_output("stubs-" + artifact_suffix, dir=True) From 69243fdea305f102ce042794129aef0406708200 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 16 Apr 2024 17:37:56 +0200 Subject: [PATCH 0791/1133] [buck2] Use the real library package db when calling haddock --- prelude/haskell/compile.bzl | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index eb777fa01..d25688c50 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -284,7 +284,8 @@ def _common_compile_args( enable_th: bool, pkgname: str | None, modname: str | None = None, - transitive_deps: [None, dict[str, list[str]]] = None) -> cmd_args: + transitive_deps: [None, dict[str, list[str]]] = None, + use_empty_lib = True) -> cmd_args: toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] compile_args = cmd_args() @@ -310,7 +311,7 @@ def _common_compile_args( link_style, specify_pkg_version = False, enable_profiling = enable_profiling, - use_empty_lib = True, + use_empty_lib = use_empty_lib, transitive_deps = transitive_deps, pkgname = pkgname, ) @@ -342,6 +343,7 @@ def _common_compile_args( return compile_args +# NOTE this function is currently only used by `haskell_haddock_lib` def compile_args( ctx: AnalysisContext, link_style: LinkStyle, @@ -358,7 +360,8 @@ def compile_args( # be parsed when inside an argsfile. 
compile_cmd.add(ctx.attrs.compiler_flags) - compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname) + # TODO[CB] use the empty lib once using hi haddock + compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, use_empty_lib = False) if getattr(ctx.attrs, "main", None) != None: compile_args.add(["-main-is", ctx.attrs.main]) From 914f9839ae261b3808af0b4d76fbc6a736214baa Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 16 Apr 2024 17:50:13 +0200 Subject: [PATCH 0792/1133] [buck2] Use `value` field of tset, remove `root` reduction for `HaskellLibraryInfoTSet` --- prelude/haskell/haskell.bzl | 4 ++-- prelude/haskell/library_info.bzl | 6 ------ 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 9e83b8ae2..c1e86b870 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -542,7 +542,7 @@ def _build_haskell_lib( linfos = [x.prof_info if enable_profiling else x.info for x in hlis] - uniq_infos = [x[link_style].reduce("root") for x in linfos] + uniq_infos = [x[link_style].value for x in linfos] toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] if link_style == LinkStyle("shared"): @@ -1159,7 +1159,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: if link_style in [LinkStyle("static"), LinkStyle("static_pic")]: hlis = attr_deps_haskell_link_infos_sans_template_deps(ctx) linfos = [x.prof_info if enable_profiling else x.info for x in hlis] - uniq_infos = [x[link_style].reduce("root") for x in linfos] + uniq_infos = [x[link_style].value for x in linfos] pkgname = ctx.label.name + "-link" linkable_artifacts = [ diff --git a/prelude/haskell/library_info.bzl b/prelude/haskell/library_info.bzl index 0a87443bc..6b920a7ae 100644 --- a/prelude/haskell/library_info.bzl +++ b/prelude/haskell/library_info.bzl @@ -53,15 +53,9 @@ def 
_project_as_package_db(lib: HaskellLibraryInfo): def _project_as_empty_package_db(lib: HaskellLibraryInfo): return cmd_args("-package-db", lib.empty_db) -def _direct_deps(_children: list[HaskellLibraryInfo | None], lib: HaskellLibraryInfo | None) -> HaskellLibraryInfo | None: - return lib - HaskellLibraryInfoTSet = transitive_set( args_projections = { "package_db": _project_as_package_db, "empty_package_db": _project_as_empty_package_db, }, - reductions = { - "root": _direct_deps, - } ) From 46f76f554537c49c84fc91d052df1e4ce4234bd7 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 17 Apr 2024 09:05:43 +0200 Subject: [PATCH 0793/1133] Do not use `--reflink=auto` option for `cp` When using isolation, the default `cp` command will be used. On Darwin, the standard cp command does not support any option starting with double dashes. --- prelude/haskell/haskell_haddock.bzl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/haskell_haddock.bzl b/prelude/haskell/haskell_haddock.bzl index c0db8ecc9..f1a7fda42 100644 --- a/prelude/haskell/haskell_haddock.bzl +++ b/prelude/haskell/haskell_haddock.bzl @@ -140,7 +140,8 @@ def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: cmd_args(cmd, delimiter = " ", quote = "shell"), [ cmd_args( - ["cp", "-Rf", "--reflink=auto", cmd_args(dir, format = "{}/*"), out.as_output()], + # NOTE could use --reflink=auto if cp command supports it + ["cp", "-Rf", cmd_args(dir, format = "{}/*"), out.as_output()], delimiter = " ", ) for dir in dep_htmls ], From e6e02ea057bcf0dd472beb3b1a9ebd5ee2a3e5cd Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 15 Apr 2024 17:13:34 +0200 Subject: [PATCH 0794/1133] Track DynamicCompileResultInfo (empty for now) --- prelude/haskell/compile.bzl | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index d25688c50..d1aedfa1d 100644 --- a/prelude/haskell/compile.bzl +++ 
b/prelude/haskell/compile.bzl @@ -39,12 +39,16 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//utils:graph_utils.bzl", "post_order_traversal", "breadth_first_traversal") load("@prelude//utils:strings.bzl", "strip_prefix") +DynamicCompileResultInfo = record( +) + # The type of the return value of the `_compile()` function. CompileResultInfo = record( objects = field(list[Artifact]), hi = field(list[Artifact]), stubs = field(Artifact), producing_indices = field(bool), + dynamic = field(None | DynamicValue), ) CompileArgsInfo = record( @@ -405,6 +409,7 @@ def compile_args( hi = [hi], stubs = stubs, producing_indices = producing_indices, + dynamic = None, ), srcs = srcs, args_for_cmd = compile_cmd, @@ -461,6 +466,7 @@ def _compile_module_args( hi = his, stubs = stubs, producing_indices = producing_indices, + dynamic = None, ), srcs = srcs, args_for_cmd = compile_cmd, @@ -564,7 +570,7 @@ def compile( objects = [object for module in modules.values() for object in module.objects] stub_dirs = [module.stub_dir for module in modules.values()] - ctx.actions.dynamic_output( + dynamic = ctx.actions.dynamic_output( dynamic = [md_file], inputs = ctx.attrs.srcs, outputs = [o.as_output() for o in interfaces + objects + stub_dirs], @@ -595,4 +601,5 @@ def compile( hi = interfaces, stubs = stubs_dir, producing_indices = False, + dynamic = dynamic, ) From 03ab91e4fc09bf64a7878b0cbefe9bc37288f2be Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 15 Apr 2024 17:27:26 +0200 Subject: [PATCH 0795/1133] Forward dynamic into HaskellLibraryInfo --- prelude/haskell/haskell.bzl | 10 ++++++++++ prelude/haskell/library_info.bzl | 2 ++ 2 files changed, 12 insertions(+) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index c1e86b870..8c5fb9831 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -239,6 +239,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: import_dirs = {}, stub_dirs = [], id = 
ctx.attrs.id, + dynamic = None, libs = libs, version = ctx.attrs.version, is_prebuilt = True, @@ -250,6 +251,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: import_dirs = {}, stub_dirs = [], id = ctx.attrs.id, + dynamic = None, libs = prof_libs, version = ctx.attrs.version, is_prebuilt = True, @@ -603,6 +605,10 @@ def _build_haskell_lib( if not non_profiling_hlib: fail("Non-profiling HaskellLibBuildOutput wasn't provided when building profiling lib") + dynamic = { + True: compiled.dynamic, + False: non_profiling_hlib.compiled.dynamic, + } import_artifacts = { True: compiled.hi, False: non_profiling_hlib.compiled.hi, @@ -614,6 +620,9 @@ def _build_haskell_lib( all_libs = libs + non_profiling_hlib.libs stub_dirs = [compiled.stubs] + [non_profiling_hlib.compiled.stubs] else: + dynamic = { + False: compiled.dynamic, + } import_artifacts = { False: compiled.hi, } @@ -649,6 +658,7 @@ def _build_haskell_lib( db = db, empty_db = empty_db, id = pkgname, + dynamic = dynamic, # TODO(ah) refine with dynamic projections import_dirs = import_artifacts, objects = object_artifacts, stub_dirs = stub_dirs, diff --git a/prelude/haskell/library_info.bzl b/prelude/haskell/library_info.bzl index 6b920a7ae..c66b559c3 100644 --- a/prelude/haskell/library_info.bzl +++ b/prelude/haskell/library_info.bzl @@ -28,6 +28,8 @@ HaskellLibraryInfo = record( empty_db = Artifact, # e.g. 
"base-4.13.0.0" id = str, + # dynamic dependency information + dynamic = None | dict[bool, DynamicValue], # Import dirs indexed by profiling enabled/disabled import_dirs = dict[bool, list[Artifact]], # Object files indexed by profiling enabled/disabled From 5018847115c71c894724ea37e2cac6175ba33cc7 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 15 Apr 2024 17:43:31 +0200 Subject: [PATCH 0796/1133] Access the dynamic value --- prelude/haskell/compile.bzl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index d1aedfa1d..1e95a3bc6 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -216,6 +216,11 @@ def get_packages_info( for lib in direct_deps_link_info ]) + for lib in direct_deps_link_info: + info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + direct = info.reduce("root") + dynamic = direct.dynamic + # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) exposed_package_imports = [] From b38f17924f6ef627f490391c1405f783f897827d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 15 Apr 2024 18:02:00 +0200 Subject: [PATCH 0797/1133] expose dynamic information --- prelude/haskell/compile.bzl | 31 +++++++++++++++++++++---------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 1e95a3bc6..637c0e0f9 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -39,8 +39,9 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//utils:graph_utils.bzl", "post_order_traversal", "breadth_first_traversal") load("@prelude//utils:strings.bzl", "strip_prefix") -DynamicCompileResultInfo = record( -) +DynamicCompileResultInfo = provider(fields = { + "value": typing.Any, +}) # The type of the return value of the `_compile()` function. 
CompileResultInfo = record( @@ -48,7 +49,7 @@ CompileResultInfo = record( hi = field(list[Artifact]), stubs = field(Artifact), producing_indices = field(bool), - dynamic = field(None | DynamicValue), + dynamic = field(typing.Any | DynamicValue), ) CompileArgsInfo = record( @@ -294,7 +295,7 @@ def _common_compile_args( pkgname: str | None, modname: str | None = None, transitive_deps: [None, dict[str, list[str]]] = None, - use_empty_lib = True) -> cmd_args: + use_empty_lib = True) -> (typing.Any, cmd_args): toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] compile_args = cmd_args() @@ -350,7 +351,12 @@ def _common_compile_args( if pkgname: compile_args.add(["-this-unit-id", pkgname]) - return compile_args + dynamic = struct( + imports = packages_info.exposed_package_imports, + objects = packages_info.exposed_package_objects, + ) + + return dynamic, compile_args # NOTE this function is currently only used by `haskell_haddock_lib` def compile_args( @@ -370,7 +376,7 @@ def compile_args( compile_cmd.add(ctx.attrs.compiler_flags) # TODO[CB] use the empty lib once using hi haddock - compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, use_empty_lib = False) + _, compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, use_empty_lib = False) if getattr(ctx.attrs, "main", None) != None: compile_args.add(["-main-is", ctx.attrs.main]) @@ -440,7 +446,7 @@ def _compile_module_args( compile_cmd.add(ctx.attrs.compiler_flags) compile_cmd.add("-c") - compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), transitive_deps = transitive_deps) + dynamic, compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), transitive_deps = transitive_deps) objects = [outputs[obj] for obj 
in module.objects] his = [outputs[hi] for hi in module.interfaces] @@ -471,7 +477,7 @@ def _compile_module_args( hi = his, stubs = stubs, producing_indices = producing_indices, - dynamic = None, + dynamic = dynamic, ), srcs = srcs, args_for_cmd = compile_cmd, @@ -493,7 +499,7 @@ def _compile_module( outputs: dict[Artifact, Artifact], artifact_suffix: str, pkgname: str | None = None, -) -> None: +) -> typing.Any: module = modules[module_name] haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -533,6 +539,8 @@ def _compile_module( ctx.actions.run(compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name) + # TODO(ah) attach intra-package deps + return args.result.dynamic # Compile all the context's sources. @@ -554,9 +562,10 @@ def compile( transitive_deps = md["transitive_deps"] mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } + dynamic = {} for module_name in post_order_traversal(graph): - _compile_module( + dynamic[module_name] = _compile_module( ctx, link_style = link_style, enable_profiling = enable_profiling, @@ -571,6 +580,8 @@ def compile( pkgname = pkgname, ) + return [DynamicCompileResultInfo(value = dynamic)] + interfaces = [interface for module in modules.values() for interface in module.interfaces] objects = [object for module in modules.values() for object in module.objects] stub_dirs = [module.stub_dir for module in modules.values()] From 527ef99259ba5c76853f54f063767a3fcd9a52fd Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 15 Apr 2024 18:16:11 +0200 Subject: [PATCH 0798/1133] propagate resolved dict --- prelude/haskell/compile.bzl | 24 ++++++++++++++++++------ 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 637c0e0f9..298a03090 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -205,6 +205,7 @@ def get_packages_info( specify_pkg_version: bool, 
enable_profiling: bool, use_empty_lib: bool, + resolved: None | dict[DynamicValue, typing.Any] = None, transitive_deps: [None, dict[str, list[str]]] = None, pkgname: str | None = None) -> PackagesInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -217,10 +218,14 @@ def get_packages_info( for lib in direct_deps_link_info ]) - for lib in direct_deps_link_info: - info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] - direct = info.reduce("root") - dynamic = direct.dynamic + if resolved != None: + for lib in direct_deps_link_info: + info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + direct = info.reduce("root") + dynamic = direct.dynamic[enable_profiling] + # TODO(ah) look up + # resolved = resolved[dynamic] + # print("!!!", resolved) # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) @@ -294,6 +299,7 @@ def _common_compile_args( enable_th: bool, pkgname: str | None, modname: str | None = None, + resolved: None | dict[DynamicValue, typing.Any] = None, transitive_deps: [None, dict[str, list[str]]] = None, use_empty_lib = True) -> (typing.Any, cmd_args): toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] @@ -322,6 +328,7 @@ def _common_compile_args( specify_pkg_version = False, enable_profiling = enable_profiling, use_empty_lib = use_empty_lib, + resolved = resolved, transitive_deps = transitive_deps, pkgname = pkgname, ) @@ -434,6 +441,7 @@ def _compile_module_args( enable_profiling: bool, enable_th: bool, outputs: dict[Artifact, Artifact], + resolved: dict[DynamicValue, typing.Any], pkgname = None, transitive_deps: [None, dict[str, list[str]]] = None) -> CompileArgsInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -446,7 +454,7 @@ def _compile_module_args( compile_cmd.add(ctx.attrs.compiler_flags) compile_cmd.add("-c") - dynamic, compile_args = 
_common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), transitive_deps = transitive_deps) + dynamic, compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), resolved = resolved, transitive_deps = transitive_deps) objects = [outputs[obj] for obj in module.objects] his = [outputs[hi] for hi in module.interfaces] @@ -497,6 +505,7 @@ def _compile_module( graph: dict[str, list[str]], transitive_deps: dict[str, list[str]], outputs: dict[Artifact, Artifact], + resolved: dict[DynamicValue, typing.Any], artifact_suffix: str, pkgname: str | None = None, ) -> typing.Any: @@ -505,7 +514,7 @@ def _compile_module( haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] compile_cmd = cmd_args(haskell_toolchain.compiler) - args = _compile_module_args(ctx, module, link_style, enable_profiling, enable_th, outputs, pkgname, transitive_deps = transitive_deps) + args = _compile_module_args(ctx, module, link_style, enable_profiling, enable_th, outputs, resolved, pkgname, transitive_deps = transitive_deps) if args.args_for_file: if haskell_toolchain.use_argsfile: @@ -555,6 +564,7 @@ def compile( modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) def do_compile(ctx, artifacts, outputs, md_file=md_file, modules=modules): + resolved = {} md = artifacts[md_file].read_json() th_modules = md["th_modules"] module_map = md["module_mapping"] @@ -575,6 +585,7 @@ def compile( graph = graph, transitive_deps = transitive_deps[module_name], outputs = outputs, + resolved = resolved, md_file=md_file, artifact_suffix = artifact_suffix, pkgname = pkgname, @@ -588,6 +599,7 @@ def compile( dynamic = ctx.actions.dynamic_output( dynamic = [md_file], + promises = [], inputs = ctx.attrs.srcs, outputs = [o.as_output() for o in interfaces + 
objects + stub_dirs], f = do_compile) From 7897e3ae5583a0b755238ecaa0320f16631ee5c3 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 15 Apr 2024 18:19:32 +0200 Subject: [PATCH 0799/1133] pass promises to dynamic_output --- prelude/haskell/compile.bzl | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 298a03090..9984f64b7 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -599,7 +599,15 @@ def compile( dynamic = ctx.actions.dynamic_output( dynamic = [md_file], - promises = [], + promises = [ + info.reduce("root").dynamic[enable_profiling] + for lib in attr_deps_haskell_link_infos(ctx) + for info in [ + lib.prof_info[link_style] + if enable_profiling else + lib.info[link_style] + ] + ], inputs = ctx.attrs.srcs, outputs = [o.as_output() for o in interfaces + objects + stub_dirs], f = do_compile) From 5e65985965af4559d8836c6e9c905f81f9862bc3 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Apr 2024 16:25:38 +0200 Subject: [PATCH 0800/1133] add `resolved` argument to haskell --- prelude/haskell/compile.bzl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 9984f64b7..55be66c67 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -563,8 +563,7 @@ def compile( modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) - def do_compile(ctx, artifacts, outputs, md_file=md_file, modules=modules): - resolved = {} + def do_compile(ctx, artifacts, resolved, outputs, md_file=md_file, modules=modules): md = artifacts[md_file].read_json() th_modules = md["th_modules"] module_map = md["module_mapping"] From a179b2bd6f814ddceb722ad3ca08567b244df239 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Apr 2024 16:25:57 +0200 Subject: [PATCH 
0801/1133] update dynamic_output use-sites --- prelude/android/android_binary_native_library_rules.bzl | 6 +++--- prelude/android/android_binary_resources_rules.bzl | 4 ++-- prelude/android/dex_rules.bzl | 6 +++--- prelude/apple/user/apple_selective_debugging.bzl | 2 +- prelude/cxx/dist_lto/dist_lto.bzl | 8 ++++---- prelude/erlang/erlang_build.bzl | 2 +- prelude/ocaml/ocaml.bzl | 2 +- 7 files changed, 15 insertions(+), 15 deletions(-) diff --git a/prelude/android/android_binary_native_library_rules.bzl b/prelude/android/android_binary_native_library_rules.bzl index 4f6340c9a..a26cca22c 100644 --- a/prelude/android/android_binary_native_library_rules.bzl +++ b/prelude/android/android_binary_native_library_rules.bzl @@ -241,7 +241,7 @@ def get_android_binary_native_library_info( ), ) - def dynamic_native_libs_info(ctx: AnalysisContext, artifacts, outputs): + def dynamic_native_libs_info(ctx: AnalysisContext, artifacts, resolved, outputs): get_module_from_target = all_targets_in_root_module if apk_module_graph_file: get_module_from_target = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts).target_to_module_mapping_function @@ -1713,7 +1713,7 @@ def extract_provided_symbols(ctx: AnalysisContext, toolchain: CxxToolchainInfo, return extract_global_syms(ctx, toolchain, lib, "relinker_extract_provided_symbols") def create_relinker_version_script(actions: AnalysisActions, relinker_allowlist: list[regex], output: Artifact, provided_symbols: Artifact, needed_symbols: list[Artifact]): - def create_version_script(ctx, artifacts, outputs): + def create_version_script(ctx, artifacts, resolved, outputs): all_needed_symbols = {} for symbols_file in needed_symbols: for line in artifacts[symbols_file].read_string().strip().split("\n"): @@ -1752,7 +1752,7 @@ def extract_undefined_symbols(ctx: AnalysisContext, toolchain: CxxToolchainInfo, return extract_undefined_syms(ctx, toolchain, lib, "relinker_extract_undefined_symbols") def union_needed_symbols(actions: 
AnalysisActions, output: Artifact, needed_symbols: list[Artifact]): - def compute_union(ctx, artifacts, outputs): + def compute_union(ctx, artifacts, resolved, outputs): unioned_symbols = {} for symbols_file in needed_symbols: for line in artifacts[symbols_file].read_string().strip().split("\n"): diff --git a/prelude/android/android_binary_resources_rules.bzl b/prelude/android/android_binary_resources_rules.bzl index dbe23e0cc..526b1c0fc 100644 --- a/prelude/android/android_binary_resources_rules.bzl +++ b/prelude/android/android_binary_resources_rules.bzl @@ -499,7 +499,7 @@ def _get_module_manifests( module_manifests_dir = ctx.actions.declare_output("module_manifests_dir", dir = True) - def get_manifests_modular(ctx: AnalysisContext, artifacts, outputs): + def get_manifests_modular(ctx: AnalysisContext, artifacts, resolved, outputs): apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) merged_module_manifests = {} @@ -591,7 +591,7 @@ def _merge_assets( else: module_assets_apks_dir = None - def merge_assets_modular(ctx: AnalysisContext, artifacts, outputs): + def merge_assets_modular(ctx: AnalysisContext, artifacts, resolved, outputs): apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) module_to_assets_dirs = {} diff --git a/prelude/android/dex_rules.bzl b/prelude/android/dex_rules.bzl index 3031a82f6..23f34bc00 100644 --- a/prelude/android/dex_rules.bzl +++ b/prelude/android/dex_rules.bzl @@ -144,7 +144,7 @@ def get_multi_dex( inputs = [apk_module_graph_file] if apk_module_graph_file else [ctx.actions.write("empty_artifact_for_multi_dex_dynamic_action", [])] outputs = [primary_dex_file, primary_dex_class_names, root_module_secondary_dex_output_dir, secondary_dex_dir] - def do_multi_dex(ctx: AnalysisContext, artifacts, outputs): + def do_multi_dex(ctx: AnalysisContext, artifacts, resolved, outputs): apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) if 
apk_module_graph_file else get_root_module_only_apk_module_graph_info() target_to_module_mapping_function = apk_module_graph_info.target_to_module_mapping_function module_to_jars = {} @@ -436,7 +436,7 @@ def merge_to_split_dex( outputs = [primary_dex_output, primary_dex_artifact_list, primary_dex_class_names_list, root_module_secondary_dexes_dir, non_root_module_secondary_dexes_dir] - def merge_pre_dexed_libs(ctx: AnalysisContext, artifacts, outputs): + def merge_pre_dexed_libs(ctx: AnalysisContext, artifacts, resolved, outputs): apk_module_graph_info = get_apk_module_graph_info(ctx, apk_module_graph_file, artifacts) if apk_module_graph_file else get_root_module_only_apk_module_graph_info() module_to_canary_class_name_function = apk_module_graph_info.module_to_canary_class_name_function sorted_pre_dexed_inputs = _sort_pre_dexed_files( @@ -541,7 +541,7 @@ def merge_to_split_dex( secondary_dexes_for_symlinking[_get_secondary_dex_subdir(module)] = secondary_dex_subdir if metadata_dot_txt_files_by_module: - def write_metadata_dot_txts(ctx: AnalysisContext, artifacts, outputs): + def write_metadata_dot_txts(ctx: AnalysisContext, artifacts, resolved, outputs): for voltron_module, metadata_dot_txt in metadata_dot_txt_files_by_module.items(): metadata_line_artifacts = metadata_line_artifacts_by_module[voltron_module] expect(metadata_line_artifacts != None, "Should have metadata lines!") diff --git a/prelude/apple/user/apple_selective_debugging.bzl b/prelude/apple/user/apple_selective_debugging.bzl index 69f49a5f4..c3981944d 100644 --- a/prelude/apple/user/apple_selective_debugging.bzl +++ b/prelude/apple/user/apple_selective_debugging.bzl @@ -110,7 +110,7 @@ def _impl(ctx: AnalysisContext) -> list[Provider]: if json_type != _SelectiveDebuggingJsonType("targets"): return inner_ctx.actions.write(output_name, sorted(set(package_names).list())) - def scrub_selected_debug_paths_action(dynamic_ctx: AnalysisContext, artifacts, outputs): + def 
scrub_selected_debug_paths_action(dynamic_ctx: AnalysisContext, artifacts, resolved, outputs): packages = [ # "cell//path/to/some/thing:target" -> "path/to/some/thing" target.split("//")[1].split(":")[0] diff --git a/prelude/cxx/dist_lto/dist_lto.bzl b/prelude/cxx/dist_lto/dist_lto.bzl index 710d94747..08ec963fa 100644 --- a/prelude/cxx/dist_lto/dist_lto.bzl +++ b/prelude/cxx/dist_lto/dist_lto.bzl @@ -284,7 +284,7 @@ def cxx_dist_link( final_link_index = ctx.actions.declare_output(output.basename + ".final_link_index") def dynamic_plan(link_plan: Artifact, index_argsfile_out: Artifact, final_link_index: Artifact): - def plan(ctx: AnalysisContext, artifacts, outputs): + def plan(ctx: AnalysisContext, artifacts, resolved, outputs): # buildifier: disable=uninitialized def add_pre_flags(idx: int): if idx in pre_post_flags: @@ -424,7 +424,7 @@ def cxx_dist_link( # produced it re-runs. And so, with a single dynamic_output, we'd need to # re-run all actions when any of the plans changed. def dynamic_optimize(name: str, initial_object: Artifact, bc_file: Artifact, plan: Artifact, opt_object: Artifact): - def optimize_object(ctx: AnalysisContext, artifacts, outputs): + def optimize_object(ctx: AnalysisContext, artifacts, resolved, outputs): plan_json = artifacts[plan].read_json() # If the object was not compiled with thinlto flags, then there @@ -470,7 +470,7 @@ def cxx_dist_link( ctx.actions.dynamic_output(dynamic = [plan], inputs = [], outputs = [opt_object.as_output()], f = optimize_object) def dynamic_optimize_archive(archive: _ArchiveLinkData): - def optimize_archive(ctx: AnalysisContext, artifacts, outputs): + def optimize_archive(ctx: AnalysisContext, artifacts, resolved, outputs): plan_json = artifacts[archive.plan].read_json() if "objects" not in plan_json or not plan_json["objects"] or lazy.is_all(lambda e: not e["is_bc"], plan_json["objects"]): # Nothing in this directory was lto-able; let's just copy the archive. 
@@ -552,7 +552,7 @@ def cxx_dist_link( linker_argsfile_out = ctx.actions.declare_output(output.basename + ".thinlto.link.argsfile") - def thin_lto_final_link(ctx: AnalysisContext, artifacts, outputs): + def thin_lto_final_link(ctx: AnalysisContext, artifacts, resolved, outputs): plan = artifacts[link_plan_out].read_json() link_args = cmd_args() plan_index = {int(k): v for k, v in plan["index"].items()} diff --git a/prelude/erlang/erlang_build.bzl b/prelude/erlang/erlang_build.bzl index 7cef3a8a3..1d731a786 100644 --- a/prelude/erlang/erlang_build.bzl +++ b/prelude/erlang/erlang_build.bzl @@ -476,7 +476,7 @@ def _build_erl( identifier = action_identifier(toolchain, src.basename), ) - def dynamic_lambda(ctx: AnalysisContext, artifacts, outputs): + def dynamic_lambda(ctx: AnalysisContext, artifacts, resolved, outputs): erl_opts = _get_erl_opts(ctx, toolchain, src) erlc_cmd = cmd_args( [ diff --git a/prelude/ocaml/ocaml.bzl b/prelude/ocaml/ocaml.bzl index d7137a87d..c0a8aa8a9 100644 --- a/prelude/ocaml/ocaml.bzl +++ b/prelude/ocaml/ocaml.bzl @@ -462,7 +462,7 @@ def _compile(ctx: AnalysisContext, compiler: cmd_args, build_mode: BuildMode) -> # includes the compiler include path. global_include_paths = _include_paths_in_context(ctx, build_mode) - def f(ctx: AnalysisContext, artifacts, outputs): + def f(ctx: AnalysisContext, artifacts, resolved, outputs): # A pair of mappings that detail which source files depend on which. See # [Note: Dynamic dependencies] in 'makefile.bzl'. 
makefile, makefile2 = parse_makefile(artifacts[depends_output].read_string(), srcs, opaque_enabled) From 2b7e86046e20ccd44f1f0ffb58966bf05413d175 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Apr 2024 16:29:40 +0200 Subject: [PATCH 0802/1133] Extract resolved DynamicCompileResultInfo --- prelude/haskell/compile.bzl | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 55be66c67..e6376fc6d 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -223,9 +223,7 @@ def get_packages_info( info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] direct = info.reduce("root") dynamic = direct.dynamic[enable_profiling] - # TODO(ah) look up - # resolved = resolved[dynamic] - # print("!!!", resolved) + dynamic_info = resolved[dynamic][DynamicCompileResultInfo] # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) From 49f8fcd4b41ca0583e9d521582ca46843f48beec Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Apr 2024 16:44:12 +0200 Subject: [PATCH 0803/1133] Collect from direct dependencies --- prelude/haskell/compile.bzl | 33 ++++++++++++++++++--------------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index e6376fc6d..e77ef2c0e 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -218,13 +218,6 @@ def get_packages_info( for lib in direct_deps_link_info ]) - if resolved != None: - for lib in direct_deps_link_info: - info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] - direct = info.reduce("root") - dynamic = direct.dynamic[enable_profiling] - dynamic_info = resolved[dynamic][DynamicCompileResultInfo] - # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) exposed_package_imports = [] @@ -234,25 +227,35 @@ def 
get_packages_info( packagedb_args = cmd_args() - if transitive_deps != None: + if resolved != None and transitive_deps != None: lib_objects = {} lib_interfaces = {} - for lib in libs.traverse(): - lib_objects[lib.name] = {} - lib_interfaces[lib.name] = {} - for o in lib.objects[enable_profiling]: + for lib in direct_deps_link_info: + info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + direct = info.reduce("root") + dynamic = direct.dynamic[enable_profiling] + dynamic_info = resolved[dynamic][DynamicCompileResultInfo] + + lib_objects[direct.name] = {} + lib_interfaces[direct.name] = {} + + for o in direct.objects[enable_profiling]: # this should prefer the dyn_o -- since it is used for TH - lib_objects[lib.name][src_to_module_name(o.short_path)] = o + lib_objects[direct.name][src_to_module_name(o.short_path)] = o - for hi in lib.import_dirs[enable_profiling]: + for hi in direct.import_dirs[enable_profiling]: mod_name = src_to_module_name(hi.short_path) - lib_interfaces[lib.name].setdefault(mod_name, []).append(hi) + lib_interfaces[direct.name].setdefault(mod_name, []).append(hi) for pkg, mods in transitive_deps.items(): if pkg == pkgname: # Skip dependencies from the same package. 
continue + if pkg not in lib_objects: + # Skip transitive dependencies + # TODO(ah) only iterate over direct dependencies + continue for mod in mods: exposed_package_objects.append(lib_objects[pkg][mod]) exposed_package_imports.extend(lib_interfaces[pkg][mod]) From 15c2ff040b319800aa3d41dfc5d85dd2bc1a6656 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Apr 2024 16:59:12 +0200 Subject: [PATCH 0804/1133] Compiled module info and tset type --- prelude/haskell/compile.bzl | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index e77ef2c0e..a1bfbd195 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -39,6 +39,24 @@ load("@prelude//:paths.bzl", "paths") load("@prelude//utils:graph_utils.bzl", "post_order_traversal", "breadth_first_traversal") load("@prelude//utils:strings.bzl", "strip_prefix") +CompiledModuleInfo = provider(fields = { + "interface": field(Artifact), + "object": field(Artifact), +}) + +def _compiled_module_project_as_interfaces(mod: CompiledModuleInfo) -> cmd_args: + return cmd_args(mod.interface) + +def _compiled_module_project_as_objects(mod: CompiledModuleInfo) -> cmd_args: + return cmd_args(mod.object) + +CompiledModuleTSet = transitive_set( + args_projections = { + "interfaces": _compiled_module_project_as_interfaces, + "objects": _compiled_module_project_as_objects, + }, +) + DynamicCompileResultInfo = provider(fields = { "value": typing.Any, }) From b3b1ffddd4d9de91082a1f19c5d0fe1b8fe8093e Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Apr 2024 17:52:14 +0200 Subject: [PATCH 0805/1133] Track transitive module deps in tset --- prelude/haskell/compile.bzl | 93 ++++++++++++++++++++----------------- 1 file changed, 51 insertions(+), 42 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index a1bfbd195..97dc69c88 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ 
-40,8 +40,8 @@ load("@prelude//utils:graph_utils.bzl", "post_order_traversal", "breadth_first_t load("@prelude//utils:strings.bzl", "strip_prefix") CompiledModuleInfo = provider(fields = { - "interface": field(Artifact), - "object": field(Artifact), + "interface": provider_field(Artifact), + "object": provider_field(Artifact), }) def _compiled_module_project_as_interfaces(mod: CompiledModuleInfo) -> cmd_args: @@ -58,7 +58,7 @@ CompiledModuleTSet = transitive_set( ) DynamicCompileResultInfo = provider(fields = { - "value": typing.Any, + "modules": dict[str, CompiledModuleTSet], }) # The type of the return value of the `_compile()` function. @@ -78,6 +78,7 @@ CompileArgsInfo = record( ) PackagesInfo = record( + exposed_package_modules = field(None | list[CompiledModuleTSet]), exposed_package_imports = field(list[Artifact]), exposed_package_objects = field(list[Artifact]), exposed_package_libs = cmd_args, @@ -238,6 +239,7 @@ def get_packages_info( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) + exposed_package_modules = None exposed_package_imports = [] exposed_package_objects = [] exposed_package_libs = cmd_args() @@ -246,8 +248,7 @@ def get_packages_info( packagedb_args = cmd_args() if resolved != None and transitive_deps != None: - lib_objects = {} - lib_interfaces = {} + exposed_package_modules = [] for lib in direct_deps_link_info: info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] @@ -255,28 +256,13 @@ def get_packages_info( dynamic = direct.dynamic[enable_profiling] dynamic_info = resolved[dynamic][DynamicCompileResultInfo] - lib_objects[direct.name] = {} - lib_interfaces[direct.name] = {} - - for o in direct.objects[enable_profiling]: - # this should prefer the dyn_o -- since it is used for TH - lib_objects[direct.name][src_to_module_name(o.short_path)] = o - - for hi in direct.import_dirs[enable_profiling]: - mod_name = src_to_module_name(hi.short_path) - 
lib_interfaces[direct.name].setdefault(mod_name, []).append(hi) - - for pkg, mods in transitive_deps.items(): - if pkg == pkgname: - # Skip dependencies from the same package. - continue - if pkg not in lib_objects: - # Skip transitive dependencies - # TODO(ah) only iterate over direct dependencies + # TODO(ah) only track direct package deps + if direct.name not in transitive_deps: + # We don't depend on this package continue - for mod in mods: - exposed_package_objects.append(lib_objects[pkg][mod]) - exposed_package_imports.extend(lib_interfaces[pkg][mod]) + + for mod in transitive_deps.get(direct.name, []): + exposed_package_modules.append(dynamic_info.modules[mod]) else: for lib in libs.traverse(): exposed_package_imports.extend(lib.import_dirs[enable_profiling]) @@ -302,6 +288,7 @@ def get_packages_info( exposed_package_args.add(package_flag, pkg_name) return PackagesInfo( + exposed_package_modules = exposed_package_modules, exposed_package_imports = exposed_package_imports, exposed_package_objects = exposed_package_objects, exposed_package_libs = exposed_package_libs, @@ -353,11 +340,13 @@ def _common_compile_args( ) compile_args.add(packages_info.exposed_package_args) - compile_args.hidden(packages_info.exposed_package_imports) + if packages_info.exposed_package_modules == None: + compile_args.hidden(packages_info.exposed_package_imports) compile_args.add(packages_info.packagedb_args) if enable_th: compile_args.add(packages_info.exposed_package_libs) - if modname: + if modname and packages_info.exposed_package_modules == None: + # TODO(ah) remove this for o in packages_info.exposed_package_objects: if o.extension != ".o": prefix = o.owner.name + "-" + modname @@ -377,10 +366,7 @@ def _common_compile_args( if pkgname: compile_args.add(["-this-unit-id", pkgname]) - dynamic = struct( - imports = packages_info.exposed_package_imports, - objects = packages_info.exposed_package_objects, - ) + dynamic = packages_info.exposed_package_modules return dynamic, 
compile_args @@ -520,6 +506,7 @@ def _compile_module( enable_th: bool, module_name: str, modules: dict[str, _Module], + module_tsets: dict[str, CompiledModuleTSet], md_file: Artifact, graph: dict[str, list[str]], transitive_deps: dict[str, list[str]], @@ -559,16 +546,36 @@ def _compile_module( ) ) - for dep_name in breadth_first_traversal(graph, [module_name])[1:]: - dep = modules[dep_name] - compile_cmd.hidden(dep.interfaces) - if enable_th: - compile_cmd.hidden(dep.objects) + # Transitive module dependencies from other packages. + cross_package_modules = args.result.dynamic + # Transitive module dependencies from the same package. + this_package_modules = [ + module_tsets[dep_name] + for dep_name in graph[module_name] + ] + + dependency_modules = ctx.actions.tset( + CompiledModuleTSet, + children = cross_package_modules + this_package_modules, + ) + + compile_cmd.hidden(dependency_modules.project_as_args("interfaces")) + if enable_th: + # TODO(ah) perform the `.dyn_o` to `.o` dance. + compile_cmd.hidden(dependency_modules.project_as_args("objects")) ctx.actions.run(compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name) - # TODO(ah) attach intra-package deps - return args.result.dynamic + module_tset = ctx.actions.tset( + CompiledModuleTSet, + value = CompiledModuleInfo( + interface = module.interfaces[0], + object = module.objects[0], + ), + children = cross_package_modules + this_package_modules, + ) + + return module_tset # Compile all the context's sources. 
@@ -590,16 +597,17 @@ def compile( transitive_deps = md["transitive_deps"] mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } - dynamic = {} + module_tsets = {} for module_name in post_order_traversal(graph): - dynamic[module_name] = _compile_module( + module_tsets[module_name] = _compile_module( ctx, link_style = link_style, enable_profiling = enable_profiling, enable_th = module_name in th_modules, module_name = module_name, modules = mapped_modules, + module_tsets = module_tsets, graph = graph, transitive_deps = transitive_deps[module_name], outputs = outputs, @@ -608,8 +616,9 @@ def compile( artifact_suffix = artifact_suffix, pkgname = pkgname, ) + print("\n\n!!!", ctx.label.name, module_name, list(module_tsets[module_name].traverse())) - return [DynamicCompileResultInfo(value = dynamic)] + return [DynamicCompileResultInfo(modules = module_tsets)] interfaces = [interface for module in modules.values() for interface in module.interfaces] objects = [object for module in modules.values() for object in module.objects] From e65e76119ed1326ee0dbdda9e2c731eae75f4f0d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Apr 2024 18:01:53 +0200 Subject: [PATCH 0806/1133] Add .o -> .dyn_o symlinks if needed --- prelude/haskell/compile.bzl | 30 +++++++++++++++++++++++------- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 97dc69c88..d5350881a 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -42,6 +42,7 @@ load("@prelude//utils:strings.bzl", "strip_prefix") CompiledModuleInfo = provider(fields = { "interface": provider_field(Artifact), "object": provider_field(Artifact), + "object_dot_o": provider_field(Artifact), }) def _compiled_module_project_as_interfaces(mod: CompiledModuleInfo) -> cmd_args: @@ -50,10 +51,14 @@ def _compiled_module_project_as_interfaces(mod: CompiledModuleInfo) -> cmd_args: def 
_compiled_module_project_as_objects(mod: CompiledModuleInfo) -> cmd_args: return cmd_args(mod.object) +def _compiled_module_project_as_objects_dot_o(mod: CompiledModuleInfo) -> cmd_args: + return cmd_args(mod.object_dot_o) + CompiledModuleTSet = transitive_set( args_projections = { "interfaces": _compiled_module_project_as_interfaces, "objects": _compiled_module_project_as_objects, + "objects_dot_o": _compiled_module_project_as_objects_dot_o, }, ) @@ -547,7 +552,10 @@ def _compile_module( ) # Transitive module dependencies from other packages. - cross_package_modules = args.result.dynamic + cross_package_modules = ctx.actions.tset( + CompiledModuleTSet, + children = args.result.dynamic, + ) # Transitive module dependencies from the same package. this_package_modules = [ module_tsets[dep_name] @@ -556,23 +564,32 @@ def _compile_module( dependency_modules = ctx.actions.tset( CompiledModuleTSet, - children = cross_package_modules + this_package_modules, + children = [cross_package_modules] + this_package_modules, ) compile_cmd.hidden(dependency_modules.project_as_args("interfaces")) if enable_th: - # TODO(ah) perform the `.dyn_o` to `.o` dance. 
compile_cmd.hidden(dependency_modules.project_as_args("objects")) + compile_cmd.add(cross_package_modules.project_as_args("objects_dot_o")) ctx.actions.run(compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name) + interface = module.interfaces[0] + object = module.objects[0] + if object.extension == ".o": + object_dot_o = object + else: + object_dot_o = ctx.actions.declare_output("dot-o", paths.replace_extension(object.short_path, ".o")) + ctx.actions.symlink_file(object_dot_o, object) + module_tset = ctx.actions.tset( CompiledModuleTSet, value = CompiledModuleInfo( - interface = module.interfaces[0], - object = module.objects[0], + interface = interface, + object = object, + object_dot_o = object_dot_o, ), - children = cross_package_modules + this_package_modules, + children = [cross_package_modules] + this_package_modules, ) return module_tset @@ -616,7 +633,6 @@ def compile( artifact_suffix = artifact_suffix, pkgname = pkgname, ) - print("\n\n!!!", ctx.label.name, module_name, list(module_tsets[module_name].traverse())) return [DynamicCompileResultInfo(modules = module_tsets)] From fbf3a7b22bb7078e207fe5f54bb047801a4cdb70 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 17 Apr 2024 10:05:41 +0200 Subject: [PATCH 0807/1133] Remove old .o -> .dyn_o symlinks --- prelude/haskell/compile.bzl | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index d5350881a..09905e169 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -345,21 +345,12 @@ def _common_compile_args( ) compile_args.add(packages_info.exposed_package_args) - if packages_info.exposed_package_modules == None: + if not modname: compile_args.hidden(packages_info.exposed_package_imports) compile_args.add(packages_info.packagedb_args) if enable_th: compile_args.add(packages_info.exposed_package_libs) - if modname and 
packages_info.exposed_package_modules == None: - # TODO(ah) remove this - for o in packages_info.exposed_package_objects: - if o.extension != ".o": - prefix = o.owner.name + "-" + modname - o_copy = ctx.actions.declare_output(prefix, paths.replace_extension(o.short_path, ".o")) - compile_args.add(ctx.actions.symlink_file(o_copy, o)) - else: - compile_args.add(o) - else: + if not modname: compile_args.hidden(packages_info.exposed_package_objects) # Add args from preprocess-able inputs. From 6322393c7ada4b9b5960072f5475b9a50f2d0557 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 17 Apr 2024 10:09:56 +0200 Subject: [PATCH 0808/1133] More specific name and types --- prelude/haskell/compile.bzl | 22 +++++++++++----------- prelude/haskell/haskell.bzl | 6 +++--- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 09905e169..d8f37fe3f 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -72,7 +72,7 @@ CompileResultInfo = record( hi = field(list[Artifact]), stubs = field(Artifact), producing_indices = field(bool), - dynamic = field(typing.Any | DynamicValue), + module_tsets = field(None | list[CompiledModuleTSet] | DynamicValue), ) CompileArgsInfo = record( @@ -312,7 +312,7 @@ def _common_compile_args( modname: str | None = None, resolved: None | dict[DynamicValue, typing.Any] = None, transitive_deps: [None, dict[str, list[str]]] = None, - use_empty_lib = True) -> (typing.Any, cmd_args): + use_empty_lib = True) -> (None | list[CompiledModuleTSet], cmd_args): toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] compile_args = cmd_args() @@ -362,9 +362,9 @@ def _common_compile_args( if pkgname: compile_args.add(["-this-unit-id", pkgname]) - dynamic = packages_info.exposed_package_modules + module_tsets = packages_info.exposed_package_modules - return dynamic, compile_args + return module_tsets, compile_args 
# NOTE this function is currently only used by `haskell_haddock_lib` def compile_args( @@ -428,7 +428,7 @@ def compile_args( hi = [hi], stubs = stubs, producing_indices = producing_indices, - dynamic = None, + module_tsets = None, ), srcs = srcs, args_for_cmd = compile_cmd, @@ -455,7 +455,7 @@ def _compile_module_args( compile_cmd.add(ctx.attrs.compiler_flags) compile_cmd.add("-c") - dynamic, compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), resolved = resolved, transitive_deps = transitive_deps) + module_tsets, compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), resolved = resolved, transitive_deps = transitive_deps) objects = [outputs[obj] for obj in module.objects] his = [outputs[hi] for hi in module.interfaces] @@ -486,7 +486,7 @@ def _compile_module_args( hi = his, stubs = stubs, producing_indices = producing_indices, - dynamic = dynamic, + module_tsets = module_tsets, ), srcs = srcs, args_for_cmd = compile_cmd, @@ -510,7 +510,7 @@ def _compile_module( resolved: dict[DynamicValue, typing.Any], artifact_suffix: str, pkgname: str | None = None, -) -> typing.Any: +) -> CompiledModuleTSet: module = modules[module_name] haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -545,7 +545,7 @@ def _compile_module( # Transitive module dependencies from other packages. cross_package_modules = ctx.actions.tset( CompiledModuleTSet, - children = args.result.dynamic, + children = args.result.module_tsets, ) # Transitive module dependencies from the same package. 
this_package_modules = [ @@ -631,7 +631,7 @@ def compile( objects = [object for module in modules.values() for object in module.objects] stub_dirs = [module.stub_dir for module in modules.values()] - dynamic = ctx.actions.dynamic_output( + dyn_module_tsets = ctx.actions.dynamic_output( dynamic = [md_file], promises = [ info.reduce("root").dynamic[enable_profiling] @@ -671,5 +671,5 @@ def compile( hi = interfaces, stubs = stubs_dir, producing_indices = False, - dynamic = dynamic, + module_tsets = dyn_module_tsets, ) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 8c5fb9831..6629532a6 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -606,8 +606,8 @@ def _build_haskell_lib( fail("Non-profiling HaskellLibBuildOutput wasn't provided when building profiling lib") dynamic = { - True: compiled.dynamic, - False: non_profiling_hlib.compiled.dynamic, + True: compiled.module_tsets, + False: non_profiling_hlib.compiled.module_tsets, } import_artifacts = { True: compiled.hi, @@ -621,7 +621,7 @@ def _build_haskell_lib( stub_dirs = [compiled.stubs] + [non_profiling_hlib.compiled.stubs] else: dynamic = { - False: compiled.dynamic, + False: compiled.module_tsets, } import_artifacts = { False: compiled.hi, From 6e37a5820e74a2b91e8cb5e9c850d39d7720545c Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 17 Apr 2024 10:26:50 +0200 Subject: [PATCH 0809/1133] Expose cross-package deps in md --- prelude/haskell/tools/generate_target_metadata.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index e60a20c47..228472178 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -11,6 +11,7 @@ * `th_modules`: List of modules that require Template Haskell. * `module_mapping`: Mapping from source inferred module name to actual module name, if different. 
* `module_graph`: Intra-package module dependencies, `dict[modname, list[modname]]`. +* `package_deps`": Cross-package module dependencies, `dict[modname, dict[pkgname, list[modname]]`. * `transitive_deps`: Cross-package module dependencies in topological order starting at the leafs, `dict[modname, dict[pkgname, list[modname]]]`. """ @@ -89,6 +90,7 @@ def obtain_target_metadata(args): "th_modules": th_modules, "module_mapping": module_mapping, "module_graph": module_graph, + "package_deps": package_deps, "transitive_deps": transitive_deps, } From 08666067a41851ce3a45a8702fbddfc14de014a4 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 17 Apr 2024 10:34:41 +0200 Subject: [PATCH 0810/1133] Use the `package_deps` md field --- prelude/haskell/compile.bzl | 27 +++++++++++---------------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index d8f37fe3f..0cc07407e 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -230,7 +230,7 @@ def get_packages_info( enable_profiling: bool, use_empty_lib: bool, resolved: None | dict[DynamicValue, typing.Any] = None, - transitive_deps: [None, dict[str, list[str]]] = None, + package_deps: None | dict[str, list[str]] = None, pkgname: str | None = None) -> PackagesInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -252,7 +252,7 @@ def get_packages_info( packagedb_args = cmd_args() - if resolved != None and transitive_deps != None: + if resolved != None and package_deps != None: exposed_package_modules = [] for lib in direct_deps_link_info: @@ -261,12 +261,7 @@ def get_packages_info( dynamic = direct.dynamic[enable_profiling] dynamic_info = resolved[dynamic][DynamicCompileResultInfo] - # TODO(ah) only track direct package deps - if direct.name not in transitive_deps: - # We don't depend on this package - continue - - for mod in transitive_deps.get(direct.name, []): + for mod in 
package_deps.get(direct.name, []): exposed_package_modules.append(dynamic_info.modules[mod]) else: for lib in libs.traverse(): @@ -311,7 +306,7 @@ def _common_compile_args( pkgname: str | None, modname: str | None = None, resolved: None | dict[DynamicValue, typing.Any] = None, - transitive_deps: [None, dict[str, list[str]]] = None, + package_deps: None | dict[str, list[str]] = None, use_empty_lib = True) -> (None | list[CompiledModuleTSet], cmd_args): toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] @@ -340,7 +335,7 @@ def _common_compile_args( enable_profiling = enable_profiling, use_empty_lib = use_empty_lib, resolved = resolved, - transitive_deps = transitive_deps, + package_deps = package_deps, pkgname = pkgname, ) @@ -444,7 +439,7 @@ def _compile_module_args( outputs: dict[Artifact, Artifact], resolved: dict[DynamicValue, typing.Any], pkgname = None, - transitive_deps: [None, dict[str, list[str]]] = None) -> CompileArgsInfo: + package_deps: None | dict[str, list[str]] = None) -> CompileArgsInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] compile_cmd = cmd_args() @@ -455,7 +450,7 @@ def _compile_module_args( compile_cmd.add(ctx.attrs.compiler_flags) compile_cmd.add("-c") - module_tsets, compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), resolved = resolved, transitive_deps = transitive_deps) + module_tsets, compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), resolved = resolved, package_deps = package_deps) objects = [outputs[obj] for obj in module.objects] his = [outputs[hi] for hi in module.interfaces] @@ -505,7 +500,7 @@ def _compile_module( module_tsets: dict[str, CompiledModuleTSet], md_file: Artifact, graph: dict[str, list[str]], - transitive_deps: dict[str, list[str]], + 
package_deps: dict[str, list[str]], outputs: dict[Artifact, Artifact], resolved: dict[DynamicValue, typing.Any], artifact_suffix: str, @@ -516,7 +511,7 @@ def _compile_module( haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] compile_cmd = cmd_args(haskell_toolchain.compiler) - args = _compile_module_args(ctx, module, link_style, enable_profiling, enable_th, outputs, resolved, pkgname, transitive_deps = transitive_deps) + args = _compile_module_args(ctx, module, link_style, enable_profiling, enable_th, outputs, resolved, pkgname, package_deps = package_deps) if args.args_for_file: if haskell_toolchain.use_argsfile: @@ -602,7 +597,7 @@ def compile( th_modules = md["th_modules"] module_map = md["module_mapping"] graph = md["module_graph"] - transitive_deps = md["transitive_deps"] + package_deps = md["package_deps"] mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } module_tsets = {} @@ -617,7 +612,7 @@ def compile( modules = mapped_modules, module_tsets = module_tsets, graph = graph, - transitive_deps = transitive_deps[module_name], + package_deps = package_deps.get(module_name, {}), outputs = outputs, resolved = resolved, md_file=md_file, From 5d929e7da5a312a732e83c422eed2bcd87f93c97 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 17 Apr 2024 10:36:43 +0200 Subject: [PATCH 0811/1133] Remove transitive deps generation in md This information is now tracked in Buck2 using tsets. Generating it in the metadata files is redundant. --- .../haskell/tools/generate_target_metadata.py | 41 +------------------ 1 file changed, 1 insertion(+), 40 deletions(-) diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 228472178..81dd61de4 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -4,7 +4,7 @@ * The mapping from module source file to actual module name. 
* The intra-package module dependency graph. -* The transitive cross-package module dependency graph. +* The cross-package module dependencies. * Which modules require Template Haskell. The result is a JSON object with the following fields: @@ -12,7 +12,6 @@ * `module_mapping`: Mapping from source inferred module name to actual module name, if different. * `module_graph`: Intra-package module dependencies, `dict[modname, list[modname]]`. * `package_deps`": Cross-package module dependencies, `dict[modname, dict[pkgname, list[modname]]`. -* `transitive_deps`: Cross-package module dependencies in topological order starting at the leafs, `dict[modname, dict[pkgname, list[modname]]]`. """ import argparse @@ -82,8 +81,6 @@ def obtain_target_metadata(args): package_prefixes = calc_package_prefixes(deps_md) module_mapping, module_graph, package_deps = interpret_ghc_depends( ghc_depends, args.source_prefix, package_prefixes) - transitive_deps = calc_transitive_deps( - args.pkgname, module_graph, package_deps, deps_md) return { "pkgname": args.pkgname, "output_prefix": output_prefix, @@ -91,7 +88,6 @@ def obtain_target_metadata(args): "module_mapping": module_mapping, "module_graph": module_graph, "package_deps": package_deps, - "transitive_deps": transitive_deps, } @@ -232,41 +228,6 @@ def parse_module_deps(module_deps, package_prefixes): return internal_deps, external_deps -def calc_transitive_deps(pkgname, module_graph, package_deps, deps_md): - result = {} - - topo_modules = graphlib.TopologicalSorter(module_graph).static_order() - - for modname in topo_modules: - result[modname] = {} - - for dep_pkg, dep_pkg_mods in package_deps.get(modname, {}).items(): - dep_pkg_trans_deps = deps_md[dep_pkg]["transitive_deps"] - for dep_pkg_mod in dep_pkg_mods: - for trans_pkg, trans_mods in dep_pkg_trans_deps[dep_pkg_mod].items(): - if trans_mods: - result[modname].setdefault(trans_pkg, {}).update((m, None) for m in trans_mods) - - for dep_pkg, dep_pkg_mods in 
package_deps.get(modname, {}).items(): - if dep_pkg_mods: - result[modname].setdefault(dep_pkg, {}).update((m, None) for m in dep_pkg_mods) - - for dep_mod in module_graph[modname]: - for trans_pkg, trans_mods in result[dep_mod].items(): - if trans_mods: - result[modname].setdefault(trans_pkg, {}).update((m, None) for m in trans_mods) - - if module_graph[modname]: - result[modname].setdefault(pkgname, {}).update((m, None) for m in module_graph[modname]) - - for modname in result: - for dep_pkg in result[modname]: - dep_mods = list(result[modname][dep_pkg].keys()) - result[modname][dep_pkg] = dep_mods - - return result - - def src_to_module_name(x): base, _ = os.path.splitext(x) return base.replace("/", ".") From c7b2549bb85deecce43080960513a54a524fedec Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 17 Apr 2024 10:39:48 +0200 Subject: [PATCH 0812/1133] TODO: remove md files from library providers They are now only needed to determine the package prefix. Determine the package prefix in some other way. --- prelude/haskell/library_info.bzl | 1 + prelude/haskell/tools/generate_target_metadata.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/prelude/haskell/library_info.bzl b/prelude/haskell/library_info.bzl index c66b559c3..4161f0742 100644 --- a/prelude/haskell/library_info.bzl +++ b/prelude/haskell/library_info.bzl @@ -12,6 +12,7 @@ # indirect dependencies for the purposes of module visibility. 
HaskellLibraryProvider = provider( fields = { + # TODO(ah) remove the metadata file field "metadata": provider_field(typing.Any, default = None), # Artifact "lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] "prof_lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index 81dd61de4..a4ec0a38c 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -59,6 +59,7 @@ def main(): type=str, action="append", help="Haskell module source files of the current package.") + # TODO(ah) do not depend on other md files parser.add_argument( "--dependency-metadata", required=False, @@ -144,6 +145,7 @@ def calc_package_prefixes(dependencies_metadata): Package names are stored under the marker key `//pkgname`. This is unambiguous since path components may not contain `/` characters. """ + # TODO(ah) determine package prefixes without depending on md files. 
result = {} for pkgname, md in dependencies_metadata.items(): path = Path(md["output_prefix"]) From 849589c6d18b7b7e65c50c788af6ce2a7f307fb2 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 17 Apr 2024 10:50:43 +0200 Subject: [PATCH 0813/1133] Explicit ResolvedDynamicValue type --- prelude/haskell/compile.bzl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 0cc07407e..76dd6a9c9 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -229,7 +229,7 @@ def get_packages_info( specify_pkg_version: bool, enable_profiling: bool, use_empty_lib: bool, - resolved: None | dict[DynamicValue, typing.Any] = None, + resolved: None | dict[DynamicValue, ResolvedDynamicValue] = None, package_deps: None | dict[str, list[str]] = None, pkgname: str | None = None) -> PackagesInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -305,7 +305,7 @@ def _common_compile_args( enable_th: bool, pkgname: str | None, modname: str | None = None, - resolved: None | dict[DynamicValue, typing.Any] = None, + resolved: None | dict[DynamicValue, ResolvedDynamicValue] = None, package_deps: None | dict[str, list[str]] = None, use_empty_lib = True) -> (None | list[CompiledModuleTSet], cmd_args): toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] @@ -437,7 +437,7 @@ def _compile_module_args( enable_profiling: bool, enable_th: bool, outputs: dict[Artifact, Artifact], - resolved: dict[DynamicValue, typing.Any], + resolved: dict[DynamicValue, ResolvedDynamicValue], pkgname = None, package_deps: None | dict[str, list[str]] = None) -> CompileArgsInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -502,7 +502,7 @@ def _compile_module( graph: dict[str, list[str]], package_deps: dict[str, list[str]], outputs: dict[Artifact, Artifact], - resolved: dict[DynamicValue, typing.Any], + 
resolved: dict[DynamicValue, ResolvedDynamicValue], artifact_suffix: str, pkgname: str | None = None, ) -> CompiledModuleTSet: From 7eeabfde0ea9314751b54cc21cc972f9943519be Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 17 Apr 2024 18:28:52 +0200 Subject: [PATCH 0814/1133] "root" reduction was removed in favor of .value --- prelude/haskell/compile.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 76dd6a9c9..70b4a7e7a 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -257,7 +257,7 @@ def get_packages_info( for lib in direct_deps_link_info: info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] - direct = info.reduce("root") + direct = info.value dynamic = direct.dynamic[enable_profiling] dynamic_info = resolved[dynamic][DynamicCompileResultInfo] @@ -629,7 +629,7 @@ def compile( dyn_module_tsets = ctx.actions.dynamic_output( dynamic = [md_file], promises = [ - info.reduce("root").dynamic[enable_profiling] + info.value.dynamic[enable_profiling] for lib in attr_deps_haskell_link_infos(ctx) for info in [ lib.prof_info[link_style] From 505ff0e162b59db01364c5bb9b238424be0f654a Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 18 Apr 2024 14:06:17 +0200 Subject: [PATCH 0815/1133] Provide both .(dyn_)hi and .(dyn_)o Needed as we're building with `-dynamic-too` to resolve ``` backend/src/Culture/GitHub/Types.hs:8:1: error: Failed to load dynamic interface file for Culture.Display: Bad interface file: buck-out/v2/gen/root/c1257425379aabf2/backend/src/__backend_infra__/mod-static/Culture/Display.dyn_hi buck-out/v2/gen/root/c1257425379aabf2/backend/src/__backend_infra__/mod-static/Culture/Display.dyn_hi: withBinaryFile: does not exist (No such file or directory) ``` --- prelude/haskell/compile.bzl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/prelude/haskell/compile.bzl 
b/prelude/haskell/compile.bzl index 70b4a7e7a..9ac1bfd7f 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -40,16 +40,16 @@ load("@prelude//utils:graph_utils.bzl", "post_order_traversal", "breadth_first_t load("@prelude//utils:strings.bzl", "strip_prefix") CompiledModuleInfo = provider(fields = { - "interface": provider_field(Artifact), - "object": provider_field(Artifact), + "interfaces": provider_field(list[Artifact]), + "objects": provider_field(list[Artifact]), "object_dot_o": provider_field(Artifact), }) def _compiled_module_project_as_interfaces(mod: CompiledModuleInfo) -> cmd_args: - return cmd_args(mod.interface) + return cmd_args(mod.interfaces) def _compiled_module_project_as_objects(mod: CompiledModuleInfo) -> cmd_args: - return cmd_args(mod.object) + return cmd_args(mod.objects) def _compiled_module_project_as_objects_dot_o(mod: CompiledModuleInfo) -> cmd_args: return cmd_args(mod.object_dot_o) @@ -571,8 +571,8 @@ def _compile_module( module_tset = ctx.actions.tset( CompiledModuleTSet, value = CompiledModuleInfo( - interface = interface, - object = object, + interfaces = module.interfaces, + objects = module.objects, object_dot_o = object_dot_o, ), children = [cross_package_modules] + this_package_modules, From 11624f4b4656f09194bf81fa21e2edff6d42e3d4 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 18 Apr 2024 14:25:49 +0200 Subject: [PATCH 0816/1133] dyn_object_dot_o - use dyn_o and rename --- prelude/haskell/compile.bzl | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 9ac1bfd7f..416c0fa6d 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -42,7 +42,7 @@ load("@prelude//utils:strings.bzl", "strip_prefix") CompiledModuleInfo = provider(fields = { "interfaces": provider_field(list[Artifact]), "objects": provider_field(list[Artifact]), - "object_dot_o": provider_field(Artifact), + 
"dyn_object_dot_o": provider_field(Artifact), }) def _compiled_module_project_as_interfaces(mod: CompiledModuleInfo) -> cmd_args: @@ -51,14 +51,14 @@ def _compiled_module_project_as_interfaces(mod: CompiledModuleInfo) -> cmd_args: def _compiled_module_project_as_objects(mod: CompiledModuleInfo) -> cmd_args: return cmd_args(mod.objects) -def _compiled_module_project_as_objects_dot_o(mod: CompiledModuleInfo) -> cmd_args: - return cmd_args(mod.object_dot_o) +def _compiled_module_project_as_dyn_objects_dot_o(mod: CompiledModuleInfo) -> cmd_args: + return cmd_args(mod.dyn_object_dot_o) CompiledModuleTSet = transitive_set( args_projections = { "interfaces": _compiled_module_project_as_interfaces, "objects": _compiled_module_project_as_objects, - "objects_dot_o": _compiled_module_project_as_objects_dot_o, + "dyn_objects_dot_o": _compiled_module_project_as_dyn_objects_dot_o, }, ) @@ -556,24 +556,23 @@ def _compile_module( compile_cmd.hidden(dependency_modules.project_as_args("interfaces")) if enable_th: compile_cmd.hidden(dependency_modules.project_as_args("objects")) - compile_cmd.add(cross_package_modules.project_as_args("objects_dot_o")) + compile_cmd.add(cross_package_modules.project_as_args("dyn_objects_dot_o")) ctx.actions.run(compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name) - interface = module.interfaces[0] - object = module.objects[0] + object = module.objects[-1] if object.extension == ".o": - object_dot_o = object + dyn_object_dot_o = object else: - object_dot_o = ctx.actions.declare_output("dot-o", paths.replace_extension(object.short_path, ".o")) - ctx.actions.symlink_file(object_dot_o, object) + dyn_object_dot_o = ctx.actions.declare_output("dot-o", paths.replace_extension(object.short_path, ".o")) + ctx.actions.symlink_file(dyn_object_dot_o, object) module_tset = ctx.actions.tset( CompiledModuleTSet, value = CompiledModuleInfo( interfaces = module.interfaces, objects = module.objects, - object_dot_o = 
object_dot_o, + dyn_object_dot_o = dyn_object_dot_o, ), children = [cross_package_modules] + this_package_modules, ) From b9cadadb559bb43b3bdb34fbdd1c3cc080ad7dee Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 18 Apr 2024 17:29:56 +0200 Subject: [PATCH 0817/1133] Calculate package paths from package-db --- prelude/haskell/compile.bzl | 21 +++++++++++++++++++ .../haskell/tools/generate_target_metadata.py | 20 ++++++++++++------ 2 files changed, 35 insertions(+), 6 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 416c0fa6d..0d56f0012 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -197,6 +197,10 @@ def target_metadata( format="--dependency-metadata={}", )) + md_args.add( + _attr_deps_haskell_lib_package_name_and_prefix(ctx), + ) + ctx.actions.run(md_args, category = "haskell_metadata") return md_file @@ -217,6 +221,23 @@ def _attr_deps_haskell_lib_metadata_files(ctx: AnalysisContext) -> list[Artifact return result +def _attr_deps_haskell_lib_package_name_and_prefix(ctx: AnalysisContext) -> cmd_args: + args = cmd_args(prepend = "--package") + + for dep in attr_deps(ctx) + ctx.attrs.template_deps: + lib = dep.get(HaskellLibraryProvider) + if lib == None: + continue + + lib_info = lib.lib.values()[0] + args.add(cmd_args( + lib_info.name, + cmd_args(lib_info.db, parent = 1), + delimiter = ":", + )) + + return args + def _package_flag(toolchain: HaskellToolchainInfo) -> str: if toolchain.support_expose_package: return "-expose-package" diff --git a/prelude/haskell/tools/generate_target_metadata.py b/prelude/haskell/tools/generate_target_metadata.py index a4ec0a38c..e9934dca3 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -59,6 +59,13 @@ def main(): type=str, action="append", help="Haskell module source files of the current package.") + parser.add_argument( + "--package", + required=False, + type=str, + 
action="append", + default=[], + help="Package dependencies formated as `NAME:PREFIX_PATH`.") # TODO(ah) do not depend on other md files parser.add_argument( "--dependency-metadata", @@ -79,7 +86,7 @@ def obtain_target_metadata(args): ghc_depends, ghc_options = run_ghc_depends(args.ghc, args.ghc_arg, args.source) th_modules = determine_th_modules(ghc_options, args.source_prefix) deps_md = load_dependencies_metadata(args.dependency_metadata) - package_prefixes = calc_package_prefixes(deps_md) + package_prefixes = calc_package_prefixes(args.package) module_mapping, module_graph, package_deps = interpret_ghc_depends( ghc_depends, args.source_prefix, package_prefixes) return { @@ -89,6 +96,9 @@ def obtain_target_metadata(args): "module_mapping": module_mapping, "module_graph": module_graph, "package_deps": package_deps, + "ghc_args": args.ghc_arg, + "packages": args.package, + "ghc_depends": ghc_depends, } @@ -139,18 +149,16 @@ def load_dependencies_metadata(fnames): return result -def calc_package_prefixes(dependencies_metadata): +def calc_package_prefixes(package_specs): """Creates a trie to look up modules in dependency packages. Package names are stored under the marker key `//pkgname`. This is unambiguous since path components may not contain `/` characters. """ - # TODO(ah) determine package prefixes without depending on md files. 
result = {} - for pkgname, md in dependencies_metadata.items(): - path = Path(md["output_prefix"]) + for pkgname, path in (spec.split(":", 1) for spec in package_specs): layer = result - for part in path.parts: + for part in Path(path).parts: layer = layer.setdefault(part, {}) layer["//pkgname"] = pkgname return result From ccda6c5ddc9bed9f6535fbcc5dabe699356ca5d5 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 18 Apr 2024 17:33:22 +0200 Subject: [PATCH 0818/1133] Remove package-md dependency --- prelude/haskell/compile.bzl | 22 ------------- .../haskell/tools/generate_target_metadata.py | 32 ------------------- 2 files changed, 54 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 0d56f0012..4b21725bd 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -182,7 +182,6 @@ def target_metadata( ghc_args.add(ctx.attrs.compiler_flags) md_args = cmd_args(md_gen) - md_args.add("--pkgname", pkgname) md_args.add("--output", md_file.as_output()) md_args.add("--ghc", haskell_toolchain.compiler) md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) @@ -192,11 +191,6 @@ def target_metadata( ) md_args.add(cmd_args(sources, format="--source={}")) - md_args.add(cmd_args( - _attr_deps_haskell_lib_metadata_files(ctx), - format="--dependency-metadata={}", - )) - md_args.add( _attr_deps_haskell_lib_package_name_and_prefix(ctx), ) @@ -205,22 +199,6 @@ def target_metadata( return md_file -def _attr_deps_haskell_lib_metadata_files(ctx: AnalysisContext) -> list[Artifact]: - result = [] - - for dep in attr_deps(ctx) + ctx.attrs.template_deps: - lib = dep.get(HaskellLibraryProvider) - if lib == None: - continue - - md = lib.metadata - if md == None: - continue - - result.append(md) - - return result - def _attr_deps_haskell_lib_package_name_and_prefix(ctx: AnalysisContext) -> cmd_args: args = cmd_args(prepend = "--package") diff --git a/prelude/haskell/tools/generate_target_metadata.py 
b/prelude/haskell/tools/generate_target_metadata.py index e9934dca3..4e3eff04d 100755 --- a/prelude/haskell/tools/generate_target_metadata.py +++ b/prelude/haskell/tools/generate_target_metadata.py @@ -15,7 +15,6 @@ """ import argparse -import graphlib import json import os from pathlib import Path @@ -27,11 +26,6 @@ def main(): parser = argparse.ArgumentParser( description=__doc__, fromfile_prefix_chars="@") - parser.add_argument( - "--pkgname", - required=True, - type=str, - help="The name of the current package.") parser.add_argument( "--output", required=True, @@ -66,14 +60,6 @@ def main(): action="append", default=[], help="Package dependencies formated as `NAME:PREFIX_PATH`.") - # TODO(ah) do not depend on other md files - parser.add_argument( - "--dependency-metadata", - required=False, - default=[], - type=str, - action="append", - help="Path to the JSON metadata file of a package dependency.") args = parser.parse_args() result = obtain_target_metadata(args) @@ -82,23 +68,16 @@ def main(): def obtain_target_metadata(args): - output_prefix = os.path.dirname(args.output.name) ghc_depends, ghc_options = run_ghc_depends(args.ghc, args.ghc_arg, args.source) th_modules = determine_th_modules(ghc_options, args.source_prefix) - deps_md = load_dependencies_metadata(args.dependency_metadata) package_prefixes = calc_package_prefixes(args.package) module_mapping, module_graph, package_deps = interpret_ghc_depends( ghc_depends, args.source_prefix, package_prefixes) return { - "pkgname": args.pkgname, - "output_prefix": output_prefix, "th_modules": th_modules, "module_mapping": module_mapping, "module_graph": module_graph, "package_deps": package_deps, - "ghc_args": args.ghc_arg, - "packages": args.package, - "ghc_depends": ghc_depends, } @@ -138,17 +117,6 @@ def run_ghc_depends(ghc, ghc_args, sources): return json.load(f), json.load(o) -def load_dependencies_metadata(fnames): - result = {} - - for fname in fnames: - with open(fname) as f: - md = json.load(f) - 
result[md["pkgname"]] = md - - return result - - def calc_package_prefixes(package_specs): """Creates a trie to look up modules in dependency packages. From 4c732a93ebb5c9c92588d2e709334946db990166 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 18 Apr 2024 17:35:03 +0200 Subject: [PATCH 0819/1133] Remove unused metadata provider field --- prelude/haskell/haskell.bzl | 2 -- prelude/haskell/library_info.bzl | 2 -- 2 files changed, 4 deletions(-) diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 6629532a6..90beea5e4 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -304,7 +304,6 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: prof_info = prof_hlinkinfos, ) haskell_lib_provider = HaskellLibraryProvider( - metadata = None, lib = hlibinfos, prof_lib = prof_hlibinfos, ) @@ -841,7 +840,6 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: sub_targets = sub_targets, ), HaskellLibraryProvider( - metadata = md_file, lib = hlib_infos, prof_lib = prof_hlib_infos, ), diff --git a/prelude/haskell/library_info.bzl b/prelude/haskell/library_info.bzl index 4161f0742..d8cff43e2 100644 --- a/prelude/haskell/library_info.bzl +++ b/prelude/haskell/library_info.bzl @@ -12,8 +12,6 @@ # indirect dependencies for the purposes of module visibility. 
HaskellLibraryProvider = provider( fields = { - # TODO(ah) remove the metadata file field - "metadata": provider_field(typing.Any, default = None), # Artifact "lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] "prof_lib": provider_field(typing.Any, default = None), # dict[LinkStyle, HaskellLibraryInfo] }, From a632e295c031be0698c4f8d06eec79e487cc96f2 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Fri, 26 Apr 2024 12:48:53 -0700 Subject: [PATCH 0820/1133] remove non-prelude things --- .../0004-Reduce-max-batch-size.patch | 28 ------------------- toolchains/nix_bash_env.sh | 11 -------- 2 files changed, 39 deletions(-) delete mode 100644 nix/overlays/bazel-remote-worker/0004-Reduce-max-batch-size.patch delete mode 100755 toolchains/nix_bash_env.sh diff --git a/nix/overlays/bazel-remote-worker/0004-Reduce-max-batch-size.patch b/nix/overlays/bazel-remote-worker/0004-Reduce-max-batch-size.patch deleted file mode 100644 index 245a2f6fb..000000000 --- a/nix/overlays/bazel-remote-worker/0004-Reduce-max-batch-size.patch +++ /dev/null @@ -1,28 +0,0 @@ -commit e47452fb67065fa05410c831bc9a5de9065541f6 -Author: Claudio Bley -Date: Tue Feb 27 10:19:20 2024 +0100 - - Configure the default max batch size to leave room for headers - - The default gRPC max transport message (including headers) is 4MiB and may be - enforced by the client when receiving a response. - - This can lead to problems when the response of a batch read request is larger - than this limit. - - Leave some room for the headers by setting the max batch size to 4MB. 
- -diff --git a/src/tools/remote/src/main/java/com/google/devtools/build/remote/worker/CasServer.java b/src/tools/remote/src/main/java/com/google/devtools/build/remote/worker/CasServer.java -index d874b879af..6347bb669f 100644 ---- a/src/tools/remote/src/main/java/com/google/devtools/build/remote/worker/CasServer.java -+++ b/src/tools/remote/src/main/java/com/google/devtools/build/remote/worker/CasServer.java -@@ -42,7 +42,7 @@ import java.util.Set; - /** A basic implementation of a {@link ContentAddressableStorageImplBase} service. */ - final class CasServer extends ContentAddressableStorageImplBase { - private static final GoogleLogger logger = GoogleLogger.forEnclosingClass(); -- static final long MAX_BATCH_SIZE_BYTES = 1024 * 1024 * 4; -+ static final long MAX_BATCH_SIZE_BYTES = 1000 * 1000 * 4; - private final OnDiskBlobStoreCache cache; - - public CasServer(OnDiskBlobStoreCache cache) { - diff --git a/toolchains/nix_bash_env.sh b/toolchains/nix_bash_env.sh deleted file mode 100755 index 700401b6e..000000000 --- a/toolchains/nix_bash_env.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -cat > "$1" < Date: Wed, 24 Apr 2024 14:25:58 +0200 Subject: [PATCH 0821/1133] [buck2] Create ABI hash of interface files --- prelude/haskell/compile.bzl | 34 ++++++++++++++++++++++++++++++++-- prelude/haskell/haskell.bzl | 1 + 2 files changed, 33 insertions(+), 2 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 4b21725bd..ebb1afbf5 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -71,6 +71,7 @@ CompileResultInfo = record( objects = field(list[Artifact]), hi = field(list[Artifact]), stubs = field(Artifact), + hashes = field(list[Artifact]), producing_indices = field(bool), module_tsets = field(None | list[CompiledModuleTSet] | DynamicValue), ) @@ -95,6 +96,7 @@ PackagesInfo = record( _Module = record( source = field(Artifact), interfaces = field(list[Artifact]), + hashes = 
field(list[Artifact]), objects = field(list[Artifact]), stub_dir = field(Artifact), prefix_dir = field(str), @@ -123,6 +125,7 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl object_path = paths.replace_extension(src.short_path, "." + osuf) object = ctx.actions.declare_output("mod-" + suffix, object_path) objects = [object] + hashes = [ctx.actions.declare_output("mod-" + suffix, interface_path + ".hash")] if link_style in [LinkStyle("static"), LinkStyle("static_pic")]: dyn_osuf, dyn_hisuf = output_extensions(LinkStyle("shared"), enable_profiling) @@ -132,9 +135,16 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl object_path = paths.replace_extension(src.short_path, "." + dyn_osuf) object = ctx.actions.declare_output("mod-" + suffix, object_path) objects.append(object) + hashes.append(ctx.actions.declare_output("mod-" + suffix, interface_path + ".hash")) stub_dir = ctx.actions.declare_output("stub-" + suffix + "-" + module_name, dir=True) - modules[module_name] = _Module(source = src, interfaces = interfaces, objects = objects, stub_dir = stub_dir, prefix_dir = "mod-" + suffix) + modules[module_name] = _Module( + source = src, + interfaces = interfaces, + hashes = hashes, + objects = objects, + stub_dir = stub_dir, + prefix_dir = "mod-" + suffix) return modules @@ -420,6 +430,7 @@ def compile_args( result = CompileResultInfo( objects = [objects], hi = [hi], + hashes = [], stubs = stubs, producing_indices = producing_indices, module_tsets = None, @@ -478,6 +489,7 @@ def _compile_module_args( result = CompileResultInfo( objects = objects, hi = his, + hashes = [], stubs = stubs, producing_indices = producing_indices, module_tsets = module_tsets, @@ -566,6 +578,22 @@ def _compile_module( dyn_object_dot_o = ctx.actions.declare_output("dot-o", paths.replace_extension(object.short_path, ".o")) ctx.actions.symlink_file(dyn_object_dot_o, object) + ctx.actions.run( + cmd_args( + "bash", "-c", + cmd_args( + 
haskell_toolchain.compiler, + "--show-iface", + outputs[module.interfaces[0]], + "| grep 'ABI hash:' >", + outputs[module.hashes[0]].as_output(), + delimiter=" ", + ), + ), + category = "haskell_compile_hash_" + artifact_suffix.replace("-", "_"), + identifier = module_name, + ) + module_tset = ctx.actions.tset( CompiledModuleTSet, value = CompiledModuleInfo( @@ -623,6 +651,7 @@ def compile( interfaces = [interface for module in modules.values() for interface in module.interfaces] objects = [object for module in modules.values() for object in module.objects] stub_dirs = [module.stub_dir for module in modules.values()] + abi_hashes = [hash for module in modules.values() for hash in module.hashes] dyn_module_tsets = ctx.actions.dynamic_output( dynamic = [md_file], @@ -636,7 +665,7 @@ def compile( ] ], inputs = ctx.attrs.srcs, - outputs = [o.as_output() for o in interfaces + objects + stub_dirs], + outputs = [o.as_output() for o in interfaces + objects + stub_dirs + abi_hashes], f = do_compile) stubs_dir = ctx.actions.declare_output("stubs-" + artifact_suffix, dir=True) @@ -662,6 +691,7 @@ def compile( return CompileResultInfo( objects = objects, hi = interfaces, + hashes = abi_hashes, stubs = stubs_dir, producing_indices = False, module_tsets = dyn_module_tsets, diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 90beea5e4..49e0274a8 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -739,6 +739,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: prof_hlink_infos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlib, children = [li.prof_info[link_style] for li in hlis]) prof_link_infos[link_style] = hlib_build_out.link_infos else: + sub_targets['hashes'] = [DefaultInfo(default_outputs = compiled.hashes)] hlib_infos[link_style] = hlib hlink_infos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlib, children = [li.info[link_style] for li in hlis]) link_infos[link_style] 
= hlib_build_out.link_infos From 7cbba3906d8caeb1f13de0e9c6a22b729ed323b9 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 24 Apr 2024 15:30:49 +0200 Subject: [PATCH 0822/1133] [buck2] Add incremental_ghc.py script --- prelude/decls/haskell_common.bzl | 4 ++ prelude/haskell/compile.bzl | 12 +++-- prelude/haskell/tools/BUCK.v2 | 6 +++ prelude/haskell/tools/incremental_ghc.py | 56 ++++++++++++++++++++++++ 4 files changed, 75 insertions(+), 3 deletions(-) create mode 100755 prelude/haskell/tools/incremental_ghc.py diff --git a/prelude/decls/haskell_common.bzl b/prelude/decls/haskell_common.bzl index 20c58910c..b2f040487 100644 --- a/prelude/decls/haskell_common.bzl +++ b/prelude/decls/haskell_common.bzl @@ -46,6 +46,10 @@ def _scripts_arg(): providers = [RunInfo], default = "prelude//haskell/tools:generate_target_metadata", ), + "_incremental_ghc": attrs.dep( + providers = [RunInfo], + default = "prelude//haskell/tools:incremental_ghc", + ), } haskell_common = struct( diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index ebb1afbf5..afb541488 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -476,7 +476,7 @@ def _compile_module_args( compile_args.add("-dyno", objects[1].as_output()) compile_args.add("-dynohi", his[1].as_output()) - srcs = cmd_args(module.source) + srcs = cmd_args("--source", module.source) for (path, src) in srcs_to_pairs(ctx.attrs.srcs): # hs-boot files aren't expected to be an argument to compiler but does need # to be included in the directory of the associated src file @@ -520,7 +520,8 @@ def _compile_module( module = modules[module_name] haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - compile_cmd = cmd_args(haskell_toolchain.compiler) + compile_cmd = cmd_args(ctx.attrs._incremental_ghc[RunInfo]) + compile_cmd.add("--ghc", haskell_toolchain.compiler) args = _compile_module_args(ctx, module, link_style, enable_profiling, enable_th, outputs, resolved, pkgname, 
package_deps = package_deps) @@ -569,7 +570,12 @@ def _compile_module( compile_cmd.hidden(dependency_modules.project_as_args("objects")) compile_cmd.add(cross_package_modules.project_as_args("dyn_objects_dot_o")) - ctx.actions.run(compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name) + ctx.actions.run( + compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, + metadata_env_var = "ACTION_METADATA", + metadata_path = "ghc_{}.json".format(module_name), + no_outputs_cleanup = True, + ) object = module.objects[-1] if object.extension == ".o": diff --git a/prelude/haskell/tools/BUCK.v2 b/prelude/haskell/tools/BUCK.v2 index c1a362652..8796d96ef 100644 --- a/prelude/haskell/tools/BUCK.v2 +++ b/prelude/haskell/tools/BUCK.v2 @@ -11,3 +11,9 @@ prelude.python_bootstrap_binary( main = "generate_target_metadata.py", visibility = ["PUBLIC"], ) + +prelude.python_bootstrap_binary( + name = "incremental_ghc", + main = "incremental_ghc.py", + visibility = ["PUBLIC"], +) diff --git a/prelude/haskell/tools/incremental_ghc.py b/prelude/haskell/tools/incremental_ghc.py new file mode 100755 index 000000000..25350ec0b --- /dev/null +++ b/prelude/haskell/tools/incremental_ghc.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python3 + +"""Helper script to compile haskell modules incrementally + +""" + +import argparse +import graphlib +import json +import os +from pathlib import Path +import subprocess +import tempfile +import sys + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, + add_help=False, + fromfile_prefix_chars="@") + parser.add_argument( + "--ghc", + required=True, + type=str, + help="Path to the Haskell compiler GHC.") + parser.add_argument( + "--abi", + type=str, + action="append", + help="File with ABI hash for a interface file.") + parser.add_argument( + "--source", + required=True, + type=str, + help="Haskell module source file.") + + args, ghc_args = 
parser.parse_known_args() + + metadata_file = os.environ.get('ACTION_METADATA') + + if metadata_file: + # open metadata file as json + with open(metadata_file) as f: + digests = json.load(f) + #print(digests, file=sys.stderr) + + cmd = [ + args.ghc, + args.source, + ] + ghc_args + + subprocess.check_call(cmd) + + +if __name__ == "__main__": + main() From ce0ec4127d08104f66c53cf37493c7d09247c537 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 24 Apr 2024 17:06:46 +0200 Subject: [PATCH 0823/1133] [buck2] Pass ABI hashes to incremental ghc script --- prelude/haskell/compile.bzl | 7 +++++++ prelude/haskell/tools/incremental_ghc.py | 10 +++++++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index afb541488..c00275a6a 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -40,11 +40,15 @@ load("@prelude//utils:graph_utils.bzl", "post_order_traversal", "breadth_first_t load("@prelude//utils:strings.bzl", "strip_prefix") CompiledModuleInfo = provider(fields = { + "abi": provider_field(list[Artifact]), "interfaces": provider_field(list[Artifact]), "objects": provider_field(list[Artifact]), "dyn_object_dot_o": provider_field(Artifact), }) +def _compiled_module_project_as_abi(mod: CompiledModuleInfo) -> cmd_args: + return cmd_args(mod.abi) + def _compiled_module_project_as_interfaces(mod: CompiledModuleInfo) -> cmd_args: return cmd_args(mod.interfaces) @@ -56,6 +60,7 @@ def _compiled_module_project_as_dyn_objects_dot_o(mod: CompiledModuleInfo) -> cm CompiledModuleTSet = transitive_set( args_projections = { + "abi": _compiled_module_project_as_abi, "interfaces": _compiled_module_project_as_interfaces, "objects": _compiled_module_project_as_objects, "dyn_objects_dot_o": _compiled_module_project_as_dyn_objects_dot_o, @@ -565,6 +570,7 @@ def _compile_module( children = [cross_package_modules] + this_package_modules, ) + 
compile_cmd.hidden(dependency_modules.project_as_args("abi")) compile_cmd.hidden(dependency_modules.project_as_args("interfaces")) if enable_th: compile_cmd.hidden(dependency_modules.project_as_args("objects")) @@ -603,6 +609,7 @@ def _compile_module( module_tset = ctx.actions.tset( CompiledModuleTSet, value = CompiledModuleInfo( + abi = module.hashes, interfaces = module.interfaces, objects = module.objects, dyn_object_dot_o = dyn_object_dot_o, diff --git a/prelude/haskell/tools/incremental_ghc.py b/prelude/haskell/tools/incremental_ghc.py index 25350ec0b..e676bfdb3 100755 --- a/prelude/haskell/tools/incremental_ghc.py +++ b/prelude/haskell/tools/incremental_ghc.py @@ -38,11 +38,19 @@ def main(): metadata_file = os.environ.get('ACTION_METADATA') + needs_recompilation = True + if metadata_file: # open metadata file as json with open(metadata_file) as f: digests = json.load(f) - #print(digests, file=sys.stderr) + # check version + assert digests.get('version') == 1 + + json.dump(digests, sys.stderr, indent=True) + print(file=sys.stderr) + + if not needs_recompilation: return cmd = [ args.ghc, From 3df240723bab9219f23b53063bcbae8fdd61eafe Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 25 Apr 2024 08:45:58 +0200 Subject: [PATCH 0824/1133] [buck2] Create set of digests from metadata file --- prelude/haskell/compile.bzl | 2 +- prelude/haskell/tools/incremental_ghc.py | 36 ++++++++++++++++++++---- 2 files changed, 32 insertions(+), 6 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index c00275a6a..07766b528 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -570,7 +570,7 @@ def _compile_module( children = [cross_package_modules] + this_package_modules, ) - compile_cmd.hidden(dependency_modules.project_as_args("abi")) + compile_cmd.add(cmd_args(dependency_modules.project_as_args("abi"), format="--abi={}")) compile_cmd.hidden(dependency_modules.project_as_args("interfaces")) if enable_th: 
compile_cmd.hidden(dependency_modules.project_as_args("objects")) diff --git a/prelude/haskell/tools/incremental_ghc.py b/prelude/haskell/tools/incremental_ghc.py index e676bfdb3..f20014400 100755 --- a/prelude/haskell/tools/incremental_ghc.py +++ b/prelude/haskell/tools/incremental_ghc.py @@ -9,10 +9,34 @@ import json import os from pathlib import Path +from pprint import pprint import subprocess import tempfile import sys +# this class keeps track of a path of a file and its corresponding digest +class FileDigest: + def __init__(self, path, digest): + self.path = path + self.digest = digest + + def __hash__(self): + return hash((self.path, self.digest)) + + def __eq__(self, other): + return self.path == other.path and self.digest == other.digest + + def __repr__(self): + return f"FileDigest({self.path}, {self.digest})" + + @staticmethod + def from_dict(d): + return FileDigest(d['path'], d['digest']) + + def to_dict(self): + return {'path': self.path, 'digest': self.digest} + + def main(): parser = argparse.ArgumentParser( description=__doc__, @@ -26,6 +50,7 @@ def main(): parser.add_argument( "--abi", type=str, + default=[], action="append", help="File with ABI hash for a interface file.") parser.add_argument( @@ -41,14 +66,15 @@ def main(): needs_recompilation = True if metadata_file: - # open metadata file as json with open(metadata_file) as f: - digests = json.load(f) + metadata = json.load(f) + # check version - assert digests.get('version') == 1 + assert metadata.get('version') == 1 + + digests = set([FileDigest.from_dict(entry) for entry in metadata['digests']]) - json.dump(digests, sys.stderr, indent=True) - print(file=sys.stderr) + pprint(digests, stream=sys.stderr) if not needs_recompilation: return From 81e87622a2c18877c988c0aa8e2d622c51d414dd Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 25 Apr 2024 09:04:41 +0200 Subject: [PATCH 0825/1133] [buck2] Read and write state file --- prelude/haskell/compile.bzl | 2 ++ 
prelude/haskell/tools/incremental_ghc.py | 37 ++++++++++++++++++------ 2 files changed, 30 insertions(+), 9 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 07766b528..12a0202a0 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -576,6 +576,8 @@ def _compile_module( compile_cmd.hidden(dependency_modules.project_as_args("objects")) compile_cmd.add(cross_package_modules.project_as_args("dyn_objects_dot_o")) + state = ctx.actions.declare_output("state-{}_{}.json".format(module_name, artifact_suffix)) + compile_cmd.add("--state", state.as_output()) ctx.actions.run( compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, metadata_env_var = "ACTION_METADATA", diff --git a/prelude/haskell/tools/incremental_ghc.py b/prelude/haskell/tools/incremental_ghc.py index f20014400..21afd46a0 100755 --- a/prelude/haskell/tools/incremental_ghc.py +++ b/prelude/haskell/tools/incremental_ghc.py @@ -28,11 +28,11 @@ def __eq__(self, other): def __repr__(self): return f"FileDigest({self.path}, {self.digest})" - + @staticmethod def from_dict(d): return FileDigest(d['path'], d['digest']) - + def to_dict(self): return {'path': self.path, 'digest': self.digest} @@ -42,6 +42,11 @@ def main(): description=__doc__, add_help=False, fromfile_prefix_chars="@") + parser.add_argument( + "--state", + required=True, + help="Path to the state file.", + ) parser.add_argument( "--ghc", required=True, @@ -65,10 +70,21 @@ def main(): needs_recompilation = True + if os.path.exists(args.state): + with open(args.state) as f: + old_state = json.load(f) + + print(old_state, file=sys.stderr) + + # 1. 
delete file + os.remove(args.state) + else: + old_state = {'digests': []} + if metadata_file: with open(metadata_file) as f: metadata = json.load(f) - + # check version assert metadata.get('version') == 1 @@ -76,14 +92,17 @@ def main(): pprint(digests, stream=sys.stderr) - if not needs_recompilation: return + if needs_recompilation: + cmd = [ + args.ghc, + args.source, + ] + ghc_args - cmd = [ - args.ghc, - args.source, - ] + ghc_args + subprocess.check_call(cmd) - subprocess.check_call(cmd) + # 2. write file + with open(args.state, 'w') as f: + json.dump(old_state, f) if __name__ == "__main__": From 95ef5545155054acf0d13fc5e2c1444748788e50 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 25 Apr 2024 09:31:04 +0200 Subject: [PATCH 0826/1133] [buck2] Filter files from the state that have a corresponding hash file --- prelude/haskell/tools/incremental_ghc.py | 51 ++++++++++++++++-------- 1 file changed, 34 insertions(+), 17 deletions(-) diff --git a/prelude/haskell/tools/incremental_ghc.py b/prelude/haskell/tools/incremental_ghc.py index 21afd46a0..b67aaabc3 100755 --- a/prelude/haskell/tools/incremental_ghc.py +++ b/prelude/haskell/tools/incremental_ghc.py @@ -28,13 +28,22 @@ def __eq__(self, other): def __repr__(self): return f"FileDigest({self.path}, {self.digest})" - + @staticmethod def from_dict(d): - return FileDigest(d['path'], d['digest']) - + return FileDigest(Path(d['path']), d['digest']) + def to_dict(self): - return {'path': self.path, 'digest': self.digest} + return {'path': str(self.path), 'digest': self.digest} + + +class FileDigestEncoder(json.JSONEncoder): + def default(self, o): + if isinstance(o, FileDigest): + return o.to_dict() + elif isinstance(o, set): + return [self.default(e) for e in o] + return super().default(o) def main(): @@ -54,7 +63,7 @@ def main(): help="Path to the Haskell compiler GHC.") parser.add_argument( "--abi", - type=str, + type=Path, default=[], action="append", help="File with ABI hash for a interface file.") @@ -66,7 
+75,7 @@ def main(): args, ghc_args = parser.parse_known_args() - metadata_file = os.environ.get('ACTION_METADATA') + metadata_file = os.environ['ACTION_METADATA'] needs_recompilation = True @@ -79,19 +88,23 @@ def main(): # 1. delete file os.remove(args.state) else: - old_state = {'digests': []} + old_state = [] + + with open(metadata_file) as f: + metadata = json.load(f) - if metadata_file: - with open(metadata_file) as f: - metadata = json.load(f) + # check version + assert metadata.get('version') == 1 - # check version - assert metadata.get('version') == 1 + digests = set([FileDigest.from_dict(entry) for entry in metadata['digests']]) - digests = set([FileDigest.from_dict(entry) for entry in metadata['digests']]) + pprint(digests, stream=sys.stderr) - pprint(digests, stream=sys.stderr) + # filter out all files that have a corresponding ABI hash file, remove the `.hash` extension + hi_files = set([abi.with_suffix('') for abi in args.abi]) + digests = set([d for d in digests if d.path not in hi_files]) + if needs_recompilation: cmd = [ args.ghc, @@ -101,9 +114,13 @@ def main(): subprocess.check_call(cmd) # 2. 
write file - with open(args.state, 'w') as f: - json.dump(old_state, f) - + try: + with open(args.state, 'w') as f: + json.dump(digests, f, cls=FileDigestEncoder, indent=2) + except Exception as e: + # remove incomplete state file + os.remove(args.state) + raise e if __name__ == "__main__": main() From 4b93ebfdc8f2e447542fcc7fcee3093a8f91099b Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 25 Apr 2024 10:44:13 +0200 Subject: [PATCH 0827/1133] [buck2] Re-compile only when inputs changed, ignoring interface files --- prelude/haskell/tools/incremental_ghc.py | 42 ++++++++++++++---------- 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/prelude/haskell/tools/incremental_ghc.py b/prelude/haskell/tools/incremental_ghc.py index b67aaabc3..28abd1f1a 100755 --- a/prelude/haskell/tools/incremental_ghc.py +++ b/prelude/haskell/tools/incremental_ghc.py @@ -5,7 +5,6 @@ """ import argparse -import graphlib import json import os from pathlib import Path @@ -28,11 +27,11 @@ def __eq__(self, other): def __repr__(self): return f"FileDigest({self.path}, {self.digest})" - + @staticmethod def from_dict(d): return FileDigest(Path(d['path']), d['digest']) - + def to_dict(self): return {'path': str(self.path), 'digest': self.digest} @@ -77,34 +76,39 @@ def main(): metadata_file = os.environ['ACTION_METADATA'] - needs_recompilation = True + with open(metadata_file) as f: + metadata = json.load(f) + + # check version + version = metadata.get('version') + if version != 1: + sys.exit("version of metadata file not supported: {}".format(version)) + + digests = set([FileDigest.from_dict(entry) for entry in metadata['digests']]) if os.path.exists(args.state): with open(args.state) as f: old_state = json.load(f) - print(old_state, file=sys.stderr) + old_state = set([FileDigest.from_dict(entry) for entry in old_state]) - # 1. 
delete file + # delete file os.remove(args.state) else: - old_state = [] - - with open(metadata_file) as f: - metadata = json.load(f) - - # check version - assert metadata.get('version') == 1 - - digests = set([FileDigest.from_dict(entry) for entry in metadata['digests']]) - - pprint(digests, stream=sys.stderr) + old_state = set() # filter out all files that have a corresponding ABI hash file, remove the `.hash` extension hi_files = set([abi.with_suffix('') for abi in args.abi]) digests = set([d for d in digests if d.path not in hi_files]) - + + diff = digests ^ old_state # changed, newly added, removed + if diff: + print("Files that changed:", file=sys.stderr) + pprint(diff, stream=sys.stderr) + + needs_recompilation = digests != old_state + if needs_recompilation: cmd = [ args.ghc, @@ -112,6 +116,8 @@ def main(): ] + ghc_args subprocess.check_call(cmd) + else: + print("No recompilation needed", file=sys.stderr) # 2. write file try: From fb64a383e23cdad7eec146ccf0bcbba26c1033f7 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 25 Apr 2024 10:45:03 +0200 Subject: [PATCH 0828/1133] [buck2] Format with black --- prelude/haskell/tools/incremental_ghc.py | 38 +++++++++++------------- 1 file changed, 18 insertions(+), 20 deletions(-) diff --git a/prelude/haskell/tools/incremental_ghc.py b/prelude/haskell/tools/incremental_ghc.py index 28abd1f1a..68b90a7b1 100755 --- a/prelude/haskell/tools/incremental_ghc.py +++ b/prelude/haskell/tools/incremental_ghc.py @@ -13,6 +13,7 @@ import tempfile import sys + # this class keeps track of a path of a file and its corresponding digest class FileDigest: def __init__(self, path, digest): @@ -30,10 +31,10 @@ def __repr__(self): @staticmethod def from_dict(d): - return FileDigest(Path(d['path']), d['digest']) + return FileDigest(Path(d["path"]), d["digest"]) def to_dict(self): - return {'path': str(self.path), 'digest': self.digest} + return {"path": str(self.path), "digest": self.digest} class FileDigestEncoder(json.JSONEncoder): 
@@ -47,44 +48,40 @@ def default(self, o): def main(): parser = argparse.ArgumentParser( - description=__doc__, - add_help=False, - fromfile_prefix_chars="@") + description=__doc__, add_help=False, fromfile_prefix_chars="@" + ) parser.add_argument( "--state", required=True, help="Path to the state file.", ) parser.add_argument( - "--ghc", - required=True, - type=str, - help="Path to the Haskell compiler GHC.") + "--ghc", required=True, type=str, help="Path to the Haskell compiler GHC." + ) parser.add_argument( "--abi", type=Path, default=[], action="append", - help="File with ABI hash for a interface file.") + help="File with ABI hash for a interface file.", + ) parser.add_argument( - "--source", - required=True, - type=str, - help="Haskell module source file.") + "--source", required=True, type=str, help="Haskell module source file." + ) args, ghc_args = parser.parse_known_args() - metadata_file = os.environ['ACTION_METADATA'] + metadata_file = os.environ["ACTION_METADATA"] with open(metadata_file) as f: metadata = json.load(f) # check version - version = metadata.get('version') + version = metadata.get("version") if version != 1: sys.exit("version of metadata file not supported: {}".format(version)) - digests = set([FileDigest.from_dict(entry) for entry in metadata['digests']]) + digests = set([FileDigest.from_dict(entry) for entry in metadata["digests"]]) if os.path.exists(args.state): with open(args.state) as f: @@ -98,11 +95,11 @@ def main(): old_state = set() # filter out all files that have a corresponding ABI hash file, remove the `.hash` extension - hi_files = set([abi.with_suffix('') for abi in args.abi]) + hi_files = set([abi.with_suffix("") for abi in args.abi]) digests = set([d for d in digests if d.path not in hi_files]) - diff = digests ^ old_state # changed, newly added, removed + diff = digests ^ old_state # changed, newly added, removed if diff: print("Files that changed:", file=sys.stderr) pprint(diff, stream=sys.stderr) @@ -121,12 +118,13 @@ def 
main(): # 2. write file try: - with open(args.state, 'w') as f: + with open(args.state, "w") as f: json.dump(digests, f, cls=FileDigestEncoder, indent=2) except Exception as e: # remove incomplete state file os.remove(args.state) raise e + if __name__ == "__main__": main() From 86ac671e0de8f7f1e6c7cd9135c7f3588980d32b Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 25 Apr 2024 13:34:45 +0200 Subject: [PATCH 0829/1133] [buck2] Do not create an ABI hash for dynamic-too outputs --- prelude/haskell/compile.bzl | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 12a0202a0..7d1696784 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -40,7 +40,7 @@ load("@prelude//utils:graph_utils.bzl", "post_order_traversal", "breadth_first_t load("@prelude//utils:strings.bzl", "strip_prefix") CompiledModuleInfo = provider(fields = { - "abi": provider_field(list[Artifact]), + "abi": provider_field(Artifact), "interfaces": provider_field(list[Artifact]), "objects": provider_field(list[Artifact]), "dyn_object_dot_o": provider_field(Artifact), @@ -101,7 +101,7 @@ PackagesInfo = record( _Module = record( source = field(Artifact), interfaces = field(list[Artifact]), - hashes = field(list[Artifact]), + hash = field(Artifact), objects = field(list[Artifact]), stub_dir = field(Artifact), prefix_dir = field(str), @@ -130,7 +130,7 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl object_path = paths.replace_extension(src.short_path, "." 
+ osuf) object = ctx.actions.declare_output("mod-" + suffix, object_path) objects = [object] - hashes = [ctx.actions.declare_output("mod-" + suffix, interface_path + ".hash")] + hash = ctx.actions.declare_output("mod-" + suffix, interface_path + ".hash") if link_style in [LinkStyle("static"), LinkStyle("static_pic")]: dyn_osuf, dyn_hisuf = output_extensions(LinkStyle("shared"), enable_profiling) @@ -140,13 +140,12 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl object_path = paths.replace_extension(src.short_path, "." + dyn_osuf) object = ctx.actions.declare_output("mod-" + suffix, object_path) objects.append(object) - hashes.append(ctx.actions.declare_output("mod-" + suffix, interface_path + ".hash")) stub_dir = ctx.actions.declare_output("stub-" + suffix + "-" + module_name, dir=True) modules[module_name] = _Module( source = src, interfaces = interfaces, - hashes = hashes, + hash = hash, objects = objects, stub_dir = stub_dir, prefix_dir = "mod-" + suffix) @@ -494,7 +493,7 @@ def _compile_module_args( result = CompileResultInfo( objects = objects, hi = his, - hashes = [], + hashes = [module.hash], stubs = stubs, producing_indices = producing_indices, module_tsets = module_tsets, @@ -600,7 +599,7 @@ def _compile_module( "--show-iface", outputs[module.interfaces[0]], "| grep 'ABI hash:' >", - outputs[module.hashes[0]].as_output(), + outputs[module.hash].as_output(), delimiter=" ", ), ), @@ -611,7 +610,7 @@ def _compile_module( module_tset = ctx.actions.tset( CompiledModuleTSet, value = CompiledModuleInfo( - abi = module.hashes, + abi = module.hash, interfaces = module.interfaces, objects = module.objects, dyn_object_dot_o = dyn_object_dot_o, @@ -666,7 +665,7 @@ def compile( interfaces = [interface for module in modules.values() for interface in module.interfaces] objects = [object for module in modules.values() for object in module.objects] stub_dirs = [module.stub_dir for module in modules.values()] - abi_hashes = [hash for 
module in modules.values() for hash in module.hashes] + abi_hashes = [module.hash for module in modules.values()] dyn_module_tsets = ctx.actions.dynamic_output( dynamic = [md_file], From 48d358f71ab2a983357f004e7988a738c0f1dac3 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 29 Apr 2024 16:16:30 +0200 Subject: [PATCH 0830/1133] [buck2] Use dep files instead of incremental actions Marking the interface files as unused, will make buck2 skip compilation when the abi files haven't changed. --- prelude/haskell/compile.bzl | 21 ++++-- prelude/haskell/tools/incremental_ghc.py | 85 ++++-------------------- 2 files changed, 29 insertions(+), 77 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 7d1696784..b37d8a0e8 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -480,7 +480,7 @@ def _compile_module_args( compile_args.add("-dyno", objects[1].as_output()) compile_args.add("-dynohi", his[1].as_output()) - srcs = cmd_args("--source", module.source) + srcs = cmd_args(module.source) for (path, src) in srcs_to_pairs(ctx.attrs.srcs): # hs-boot files aren't expected to be an argument to compiler but does need # to be included in the directory of the associated src file @@ -569,19 +569,28 @@ def _compile_module( children = [cross_package_modules] + this_package_modules, ) - compile_cmd.add(cmd_args(dependency_modules.project_as_args("abi"), format="--abi={}")) - compile_cmd.hidden(dependency_modules.project_as_args("interfaces")) + abi_tag = ctx.actions.artifact_tag() + + compile_cmd.hidden( + abi_tag.tag_artifacts(dependency_modules.project_as_args("interfaces"))) if enable_th: compile_cmd.hidden(dependency_modules.project_as_args("objects")) compile_cmd.add(cross_package_modules.project_as_args("dyn_objects_dot_o")) - state = ctx.actions.declare_output("state-{}_{}.json".format(module_name, artifact_suffix)) - compile_cmd.add("--state", state.as_output()) + dep_file = 
ctx.actions.declare_output("dep-{}_{}".format(module_name, artifact_suffix)).as_output() + + tagged_dep_file = abi_tag.tag_artifacts(dep_file) + + compile_cmd.add(abi_tag.tag_artifacts(cmd_args(dependency_modules.project_as_args("abi"), format="--abi={}"))) + compile_cmd.add("--buck2-dep", tagged_dep_file) + ctx.actions.run( compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, metadata_env_var = "ACTION_METADATA", metadata_path = "ghc_{}.json".format(module_name), - no_outputs_cleanup = True, + dep_files = { + "abi": abi_tag, + } ) object = module.objects[-1] diff --git a/prelude/haskell/tools/incremental_ghc.py b/prelude/haskell/tools/incremental_ghc.py index 68b90a7b1..ca778c228 100755 --- a/prelude/haskell/tools/incremental_ghc.py +++ b/prelude/haskell/tools/incremental_ghc.py @@ -14,46 +14,14 @@ import sys -# this class keeps track of a path of a file and its corresponding digest -class FileDigest: - def __init__(self, path, digest): - self.path = path - self.digest = digest - - def __hash__(self): - return hash((self.path, self.digest)) - - def __eq__(self, other): - return self.path == other.path and self.digest == other.digest - - def __repr__(self): - return f"FileDigest({self.path}, {self.digest})" - - @staticmethod - def from_dict(d): - return FileDigest(Path(d["path"]), d["digest"]) - - def to_dict(self): - return {"path": str(self.path), "digest": self.digest} - - -class FileDigestEncoder(json.JSONEncoder): - def default(self, o): - if isinstance(o, FileDigest): - return o.to_dict() - elif isinstance(o, set): - return [self.default(e) for e in o] - return super().default(o) - - def main(): parser = argparse.ArgumentParser( description=__doc__, add_help=False, fromfile_prefix_chars="@" ) parser.add_argument( - "--state", + "--buck2-dep", required=True, - help="Path to the state file.", + help="Path to the dep file.", ) parser.add_argument( "--ghc", required=True, type=str, help="Path to the Haskell 
compiler GHC." @@ -65,9 +33,6 @@ def main(): action="append", help="File with ABI hash for a interface file.", ) - parser.add_argument( - "--source", required=True, type=str, help="Haskell module source file." - ) args, ghc_args = parser.parse_known_args() @@ -81,48 +46,26 @@ def main(): if version != 1: sys.exit("version of metadata file not supported: {}".format(version)) - digests = set([FileDigest.from_dict(entry) for entry in metadata["digests"]]) - - if os.path.exists(args.state): - with open(args.state) as f: - old_state = json.load(f) - - old_state = set([FileDigest.from_dict(entry) for entry in old_state]) + inputs = set(Path(entry["path"]) for entry in metadata["digests"]) - # delete file - os.remove(args.state) - else: - old_state = set() - - # filter out all files that have a corresponding ABI hash file, remove the `.hash` extension + # get interface files that have a corresponding ABI hash file hi_files = set([abi.with_suffix("") for abi in args.abi]) - digests = set([d for d in digests if d.path not in hi_files]) - - diff = digests ^ old_state # changed, newly added, removed - if diff: - print("Files that changed:", file=sys.stderr) - pprint(diff, stream=sys.stderr) + # all inputs are used *except* the hi files + used_inputs = inputs - hi_files - needs_recompilation = digests != old_state + cmd = [args.ghc] + ghc_args - if needs_recompilation: - cmd = [ - args.ghc, - args.source, - ] + ghc_args + subprocess.check_call(cmd) - subprocess.check_call(cmd) - else: - print("No recompilation needed", file=sys.stderr) - - # 2. 
write file + # write the dep file try: - with open(args.state, "w") as f: - json.dump(digests, f, cls=FileDigestEncoder, indent=2) + with open(args.buck2_dep, "w") as f: + f.write("\n".join(map(str, used_inputs))) + except Exception as e: - # remove incomplete state file - os.remove(args.state) + # remove incomplete dep file + os.remove(args.buck2_dep) raise e From 0ccb56f2f5b45dd175964b61eeef509d33c8ca0d Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 30 Apr 2024 08:41:03 +0200 Subject: [PATCH 0831/1133] [buck2] Rename script to ghc_wrapper.py --- prelude/decls/haskell_common.bzl | 4 ++-- prelude/haskell/compile.bzl | 2 +- prelude/haskell/tools/BUCK.v2 | 4 ++-- .../haskell/tools/{incremental_ghc.py => ghc_wrapper.py} | 6 +++++- 4 files changed, 10 insertions(+), 6 deletions(-) rename prelude/haskell/tools/{incremental_ghc.py => ghc_wrapper.py} (87%) diff --git a/prelude/decls/haskell_common.bzl b/prelude/decls/haskell_common.bzl index b2f040487..f2b8ea43a 100644 --- a/prelude/decls/haskell_common.bzl +++ b/prelude/decls/haskell_common.bzl @@ -46,9 +46,9 @@ def _scripts_arg(): providers = [RunInfo], default = "prelude//haskell/tools:generate_target_metadata", ), - "_incremental_ghc": attrs.dep( + "_ghc_wrapper": attrs.dep( providers = [RunInfo], - default = "prelude//haskell/tools:incremental_ghc", + default = "prelude//haskell/tools:ghc_wrapper", ), } diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index b37d8a0e8..c69ab50da 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -524,7 +524,7 @@ def _compile_module( module = modules[module_name] haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - compile_cmd = cmd_args(ctx.attrs._incremental_ghc[RunInfo]) + compile_cmd = cmd_args(ctx.attrs._ghc_wrapper[RunInfo]) compile_cmd.add("--ghc", haskell_toolchain.compiler) args = _compile_module_args(ctx, module, link_style, enable_profiling, enable_th, outputs, resolved, pkgname, package_deps = 
package_deps) diff --git a/prelude/haskell/tools/BUCK.v2 b/prelude/haskell/tools/BUCK.v2 index 8796d96ef..610e419c8 100644 --- a/prelude/haskell/tools/BUCK.v2 +++ b/prelude/haskell/tools/BUCK.v2 @@ -13,7 +13,7 @@ prelude.python_bootstrap_binary( ) prelude.python_bootstrap_binary( - name = "incremental_ghc", - main = "incremental_ghc.py", + name = "ghc_wrapper", + main = "ghc_wrapper.py", visibility = ["PUBLIC"], ) diff --git a/prelude/haskell/tools/incremental_ghc.py b/prelude/haskell/tools/ghc_wrapper.py similarity index 87% rename from prelude/haskell/tools/incremental_ghc.py rename to prelude/haskell/tools/ghc_wrapper.py index ca778c228..ee961b465 100755 --- a/prelude/haskell/tools/incremental_ghc.py +++ b/prelude/haskell/tools/ghc_wrapper.py @@ -1,6 +1,10 @@ #!/usr/bin/env python3 -"""Helper script to compile haskell modules incrementally +"""Wrapper script to call ghc. + +It accepts a dep file where all used inputs are written to. For any passed ABI +hash file, the corresponding interface is marked as unused, so these can change +without triggering compilation actions. 
""" From b052801dc6653e2e3a29c730a91e7c6e07ced740 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 30 Apr 2024 09:40:38 +0200 Subject: [PATCH 0832/1133] [buck2] Move ABI hash computation into ghc_wrapper script - remove gnugrep from buck2 build inputs again --- prelude/haskell/compile.bzl | 17 +---------------- prelude/haskell/haskell.bzl | 1 - prelude/haskell/tools/ghc_wrapper.py | 22 ++++++++++++++++++++++ 3 files changed, 23 insertions(+), 17 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index c69ab50da..7865b23e1 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -583,6 +583,7 @@ def _compile_module( compile_cmd.add(abi_tag.tag_artifacts(cmd_args(dependency_modules.project_as_args("abi"), format="--abi={}"))) compile_cmd.add("--buck2-dep", tagged_dep_file) + compile_cmd.add("--abi-out", outputs[module.hash].as_output()) ctx.actions.run( compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, @@ -600,22 +601,6 @@ def _compile_module( dyn_object_dot_o = ctx.actions.declare_output("dot-o", paths.replace_extension(object.short_path, ".o")) ctx.actions.symlink_file(dyn_object_dot_o, object) - ctx.actions.run( - cmd_args( - "bash", "-c", - cmd_args( - haskell_toolchain.compiler, - "--show-iface", - outputs[module.interfaces[0]], - "| grep 'ABI hash:' >", - outputs[module.hash].as_output(), - delimiter=" ", - ), - ), - category = "haskell_compile_hash_" + artifact_suffix.replace("-", "_"), - identifier = module_name, - ) - module_tset = ctx.actions.tset( CompiledModuleTSet, value = CompiledModuleInfo( diff --git a/prelude/haskell/haskell.bzl b/prelude/haskell/haskell.bzl index 49e0274a8..90beea5e4 100644 --- a/prelude/haskell/haskell.bzl +++ b/prelude/haskell/haskell.bzl @@ -739,7 +739,6 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: prof_hlink_infos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlib, children 
= [li.prof_info[link_style] for li in hlis]) prof_link_infos[link_style] = hlib_build_out.link_infos else: - sub_targets['hashes'] = [DefaultInfo(default_outputs = compiled.hashes)] hlib_infos[link_style] = hlib hlink_infos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlib, children = [li.info[link_style] for li in hlis]) link_infos[link_style] = hlib_build_out.link_infos diff --git a/prelude/haskell/tools/ghc_wrapper.py b/prelude/haskell/tools/ghc_wrapper.py index ee961b465..1e6e9ddbe 100755 --- a/prelude/haskell/tools/ghc_wrapper.py +++ b/prelude/haskell/tools/ghc_wrapper.py @@ -37,6 +37,12 @@ def main(): action="append", help="File with ABI hash for a interface file.", ) + parser.add_argument( + "--abi-out", + required=True, + type=Path, + help="Output path of the abi file to create.", + ) args, ghc_args = parser.parse_known_args() @@ -62,6 +68,8 @@ def main(): subprocess.check_call(cmd) + recompute_abi_hash(args.ghc, args.abi_out) + # write the dep file try: with open(args.buck2_dep, "w") as f: @@ -73,5 +81,19 @@ def main(): raise e +def recompute_abi_hash(ghc, abi_out): + """Call ghc on the hi file and write the ABI hash to abi_out.""" + hi_file = abi_out.with_suffix("") + + cmd = [ghc, "--show-iface", hi_file] + for line in subprocess.check_output(cmd, text=True).splitlines(): + if "ABI hash:" in line: + hash = line.split(":", 1)[1] + with open(abi_out, "w") as outfile: + print(hash, file=outfile) + return + raise "ABI hash not found in ghc output" + + if __name__ == "__main__": main() From 8071051a4e726845e443d7c80ab74b12a51dfbac Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 30 Apr 2024 12:09:06 +0200 Subject: [PATCH 0833/1133] [buck2] Simplify dependency file handling - do not tag ABI hash files - that means only interface files are tagged, which should all marked as unused - remove metadata processing --- prelude/haskell/compile.bzl | 4 +--- prelude/haskell/tools/ghc_wrapper.py | 31 ++-------------------------- 2 files changed, 3 
insertions(+), 32 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index 7865b23e1..c6ddf2a8a 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -581,14 +581,12 @@ def _compile_module( tagged_dep_file = abi_tag.tag_artifacts(dep_file) - compile_cmd.add(abi_tag.tag_artifacts(cmd_args(dependency_modules.project_as_args("abi"), format="--abi={}"))) compile_cmd.add("--buck2-dep", tagged_dep_file) compile_cmd.add("--abi-out", outputs[module.hash].as_output()) + compile_cmd.hidden(dependency_modules.project_as_args("abi")) ctx.actions.run( compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, - metadata_env_var = "ACTION_METADATA", - metadata_path = "ghc_{}.json".format(module_name), dep_files = { "abi": abi_tag, } diff --git a/prelude/haskell/tools/ghc_wrapper.py b/prelude/haskell/tools/ghc_wrapper.py index 1e6e9ddbe..fcfb8ad36 100755 --- a/prelude/haskell/tools/ghc_wrapper.py +++ b/prelude/haskell/tools/ghc_wrapper.py @@ -12,9 +12,7 @@ import json import os from pathlib import Path -from pprint import pprint import subprocess -import tempfile import sys @@ -30,13 +28,6 @@ def main(): parser.add_argument( "--ghc", required=True, type=str, help="Path to the Haskell compiler GHC." 
) - parser.add_argument( - "--abi", - type=Path, - default=[], - action="append", - help="File with ABI hash for a interface file.", - ) parser.add_argument( "--abi-out", required=True, @@ -46,34 +37,16 @@ def main(): args, ghc_args = parser.parse_known_args() - metadata_file = os.environ["ACTION_METADATA"] - - with open(metadata_file) as f: - metadata = json.load(f) - - # check version - version = metadata.get("version") - if version != 1: - sys.exit("version of metadata file not supported: {}".format(version)) - - inputs = set(Path(entry["path"]) for entry in metadata["digests"]) - - # get interface files that have a corresponding ABI hash file - hi_files = set([abi.with_suffix("") for abi in args.abi]) - - # all inputs are used *except* the hi files - used_inputs = inputs - hi_files - cmd = [args.ghc] + ghc_args subprocess.check_call(cmd) recompute_abi_hash(args.ghc, args.abi_out) - # write the dep file + # write an empty dep file, to signal that all tagged files are unused try: with open(args.buck2_dep, "w") as f: - f.write("\n".join(map(str, used_inputs))) + f.write("\n") except Exception as e: # remove incomplete dep file From 8f2b14294d64f7f9ffa9af8231ee70f7f0cda435 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 30 Apr 2024 17:26:49 +0200 Subject: [PATCH 0834/1133] List in-package deps module objects on CLI Otherwise cross-package module dependencies incurred through dependency modules from within the same package are not taken into account. 
--- prelude/haskell/compile.bzl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index c6ddf2a8a..e472df915 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -574,8 +574,7 @@ def _compile_module( compile_cmd.hidden( abi_tag.tag_artifacts(dependency_modules.project_as_args("interfaces"))) if enable_th: - compile_cmd.hidden(dependency_modules.project_as_args("objects")) - compile_cmd.add(cross_package_modules.project_as_args("dyn_objects_dot_o")) + compile_cmd.add(dependency_modules.project_as_args("dyn_objects_dot_o")) dep_file = ctx.actions.declare_output("dep-{}_{}".format(module_name, artifact_suffix)).as_output() From c32edeebdd3ebea2c477e17d16ec7dbe88041de7 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 30 Apr 2024 17:55:55 +0200 Subject: [PATCH 0835/1133] Maintain objects as explicit hidden inputs --- prelude/haskell/compile.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/prelude/haskell/compile.bzl b/prelude/haskell/compile.bzl index e472df915..e6ea50e2e 100644 --- a/prelude/haskell/compile.bzl +++ b/prelude/haskell/compile.bzl @@ -574,6 +574,7 @@ def _compile_module( compile_cmd.hidden( abi_tag.tag_artifacts(dependency_modules.project_as_args("interfaces"))) if enable_th: + compile_cmd.hidden(dependency_modules.project_as_args("objects")) compile_cmd.add(dependency_modules.project_as_args("dyn_objects_dot_o")) dep_file = ctx.actions.declare_output("dep-{}_{}".format(module_name, artifact_suffix)).as_output() From 12e711f5785b70e77aa315920040593178d39cb2 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Tue, 30 Apr 2024 16:30:52 -0700 Subject: [PATCH 0836/1133] move prelude to top-level --- prelude/.buckconfig => .buckconfig | 0 prelude/.gitignore => .gitignore | 0 prelude/BUCK => BUCK | 0 prelude/CHANGELOG.md => CHANGELOG.md | 0 prelude/CODE_OF_CONDUCT.md => CODE_OF_CONDUCT.md | 0 prelude/CONTRIBUTING.md => CONTRIBUTING.md | 0 
prelude/LICENSE-APACHE => LICENSE-APACHE | 0 prelude/LICENSE-MIT => LICENSE-MIT | 0 prelude/README.md => README.md | 0 {prelude/abi => abi}/BUCK.v2 | 0 {prelude/abi => abi}/constraints/BUCK.v2 | 0 prelude/alias.bzl => alias.bzl | 0 {prelude/android => android}/aapt2_link.bzl | 0 {prelude/android => android}/android.bzl | 0 {prelude/android => android}/android_aar.bzl | 0 {prelude/android => android}/android_apk.bzl | 0 {prelude/android => android}/android_binary.bzl | 0 .../android_binary_native_library_rules.bzl | 0 .../android_binary_resources_rules.bzl | 0 .../android => android}/android_build_config.bzl | 0 {prelude/android => android}/android_bundle.bzl | 0 .../android_instrumentation_apk.bzl | 0 .../android_instrumentation_test.bzl | 0 {prelude/android => android}/android_library.bzl | 0 {prelude/android => android}/android_manifest.bzl | 0 .../android => android}/android_prebuilt_aar.bzl | 0 {prelude/android => android}/android_providers.bzl | 0 {prelude/android => android}/android_resource.bzl | 0 {prelude/android => android}/android_toolchain.bzl | 0 {prelude/android => android}/apk_genrule.bzl | 0 .../android => android}/build_only_native_code.bzl | 0 {prelude/android => android}/bundletool_util.bzl | 0 {prelude/android => android}/configuration.bzl | 0 {prelude/android => android}/constraints/BUCK.v2 | 0 {prelude/android => android}/cpu_filters.bzl | 0 {prelude/android => android}/dex_rules.bzl | 0 {prelude/android => android}/exopackage.bzl | 0 {prelude/android => android}/gen_aidl.bzl | 0 {prelude/android => android}/min_sdk_version.bzl | 0 .../android => android}/prebuilt_native_library.bzl | 0 .../android => android}/preprocess_java_classes.bzl | 0 {prelude/android => android}/proguard.bzl | 0 {prelude/android => android}/r_dot_java.bzl | 0 {prelude/android => android}/robolectric_test.bzl | 0 {prelude/android => android}/tools/BUCK.v2 | 0 .../buck_generated/AppWithoutResourcesStub.java | 0 .../tools/combine_native_library_dirs.py | 0 
{prelude/android => android}/tools/filter_dex.py | 0 .../tools/filter_extra_resources.py | 0 .../tools/filter_prebuilt_native_library_dir.py | 0 .../android => android}/tools/merge_sequence.py | 0 .../tools/native_libs_as_assets_metadata.py | 0 {prelude/android => android}/tools/unpack_aar.py | 0 {prelude/android => android}/util.bzl | 0 {prelude/android => android}/voltron.bzl | 0 {prelude/apple => apple}/apple_asset_catalog.bzl | 0 .../apple_asset_catalog_compilation_options.bzl | 0 .../apple => apple}/apple_asset_catalog_types.bzl | 0 {prelude/apple => apple}/apple_binary.bzl | 0 {prelude/apple => apple}/apple_bundle.bzl | 0 {prelude/apple => apple}/apple_bundle_attrs.bzl | 0 {prelude/apple => apple}/apple_bundle_config.bzl | 0 .../apple => apple}/apple_bundle_destination.bzl | 0 {prelude/apple => apple}/apple_bundle_part.bzl | 0 {prelude/apple => apple}/apple_bundle_resources.bzl | 0 {prelude/apple => apple}/apple_bundle_types.bzl | 0 {prelude/apple => apple}/apple_bundle_utility.bzl | 0 .../apple => apple}/apple_code_signing_types.bzl | 0 {prelude/apple => apple}/apple_common.bzl | 0 {prelude/apple => apple}/apple_core_data.bzl | 0 {prelude/apple => apple}/apple_core_data_types.bzl | 0 {prelude/apple => apple}/apple_dsym.bzl | 0 {prelude/apple => apple}/apple_dsym_config.bzl | 0 {prelude/apple => apple}/apple_entitlements.bzl | 0 .../apple => apple}/apple_framework_versions.bzl | 0 {prelude/apple => apple}/apple_frameworks.bzl | 0 {prelude/apple => apple}/apple_info_plist.bzl | 0 .../apple_info_plist_substitutions_parsing.bzl | 0 {prelude/apple => apple}/apple_library.bzl | 0 {prelude/apple => apple}/apple_macro_layer.bzl | 0 {prelude/apple => apple}/apple_modular_utility.bzl | 0 {prelude/apple => apple}/apple_native.bzl | 0 {prelude/apple => apple}/apple_package.bzl | 0 {prelude/apple => apple}/apple_package_config.bzl | 0 {prelude/apple => apple}/apple_resource.bzl | 0 {prelude/apple => apple}/apple_resource_bundle.bzl | 0 {prelude/apple => 
apple}/apple_resource_types.bzl | 0 {prelude/apple => apple}/apple_resource_utility.bzl | 0 {prelude/apple => apple}/apple_rules_impl.bzl | 0 .../apple => apple}/apple_rules_impl_utility.bzl | 0 {prelude/apple => apple}/apple_sdk.bzl | 0 {prelude/apple => apple}/apple_sdk_metadata.bzl | 0 {prelude/apple => apple}/apple_stripping.bzl | 0 {prelude/apple => apple}/apple_swift_stdlib.bzl | 0 .../apple => apple}/apple_target_sdk_version.bzl | 0 {prelude/apple => apple}/apple_test.bzl | 0 {prelude/apple => apple}/apple_toolchain.bzl | 0 {prelude/apple => apple}/apple_toolchain_types.bzl | 0 .../apple => apple}/apple_universal_binaries.bzl | 0 .../apple => apple}/apple_universal_executable.bzl | 0 {prelude/apple => apple}/apple_utility.bzl | 0 {prelude/apple => apple}/apple_xcuitest.bzl | 0 {prelude/apple => apple}/debug.bzl | 0 .../mockingbird/mockingbird_mock.bzl | 0 .../mockingbird/mockingbird_types.bzl | 0 {prelude/apple => apple}/modulemap.bzl | 0 .../apple => apple}/prebuilt_apple_framework.bzl | 0 {prelude/apple => apple}/resource_groups.bzl | 0 {prelude/apple => apple}/scene_kit_assets.bzl | 0 {prelude/apple => apple}/scene_kit_assets_types.bzl | 0 .../swift/apple_sdk_clang_module.bzl | 0 .../swift/apple_sdk_modules_utility.bzl | 0 .../swift/apple_sdk_swift_module.bzl | 0 .../apple => apple}/swift/swift_compilation.bzl | 0 .../swift/swift_debug_info_utils.bzl | 0 .../swift/swift_incremental_support.bzl | 0 {prelude/apple => apple}/swift/swift_module_map.bzl | 0 .../apple => apple}/swift/swift_pcm_compilation.bzl | 0 .../swift/swift_pcm_compilation_types.bzl | 0 {prelude/apple => apple}/swift/swift_runtime.bzl | 0 .../swift/swift_sdk_pcm_compilation.bzl | 0 .../swift/swift_sdk_swiftinterface_compilation.bzl | 0 {prelude/apple => apple}/swift/swift_toolchain.bzl | 0 .../swift/swift_toolchain_macro_layer.bzl | 0 .../apple => apple}/swift/swift_toolchain_types.bzl | 0 {prelude/apple => apple}/swift/swift_types.bzl | 0 {prelude/apple => apple}/tools/BUCK.v2 | 0 
{prelude/apple => apple}/tools/bundling/BUCK.v2 | 0 .../tools/bundling/action_metadata.py | 0 .../tools/bundling/action_metadata_test.py | 0 .../tools/bundling/assemble_bundle.py | 0 .../tools/bundling/assemble_bundle_types.py | 0 .../tools/bundling/incremental_state.py | 0 .../tools/bundling/incremental_state_test.py | 0 .../tools/bundling/incremental_utils.py | 0 .../tools/bundling/incremental_utils_test.py | 0 {prelude/apple => apple}/tools/bundling/main.py | 0 .../apple => apple}/tools/bundling/swift_support.py | 0 .../newer_version_action_metadata.json | 0 .../newer_version_incremental_state.json | 0 .../tools/bundling/test_resources/the.broken_json | 0 .../test_resources/valid_action_metadata.json | 0 .../test_resources/valid_incremental_state.json | 0 {prelude/apple => apple}/tools/code_signing/BUCK.v2 | 0 .../apple => apple}/tools/code_signing/app_id.py | 0 .../tools/code_signing/app_id_test.py | 0 .../tools/code_signing/apple_platform.py | 0 .../tools/code_signing/codesign_bundle.py | 0 .../tools/code_signing/codesign_command_factory.py | 0 .../tools/code_signing/fast_adhoc.py | 0 .../apple => apple}/tools/code_signing/identity.py | 0 .../tools/code_signing/identity_test.py | 0 .../tools/code_signing/info_plist_metadata.py | 0 .../tools/code_signing/info_plist_metadata_test.py | 0 .../tools/code_signing/list_codesign_identities.py | 0 {prelude/apple => apple}/tools/code_signing/main.py | 0 .../prepare_code_signing_entitlements.py | 0 .../prepare_code_signing_entitlements_test.py | 0 .../tools/code_signing/prepare_info_plist.py | 0 .../tools/code_signing/prepare_info_plist_test.py | 0 .../provisioning_profile_diagnostics.py | 0 .../code_signing/provisioning_profile_metadata.py | 0 .../provisioning_profile_metadata_test.py | 0 .../code_signing/provisioning_profile_selection.py | 0 .../provisioning_profile_selection_test.py | 0 .../read_provisioning_profile_command_factory.py | 0 .../code_signing/test_resources/Entitlements.plist | 0 
.../test_resources/qualified_sample.mobileprovision | 0 .../test_resources/sample.mobileprovision | 0 {prelude/apple => apple}/tools/defs.bzl | 0 {prelude/apple => apple}/tools/dry_codesign_tool.py | 0 .../tools/info_plist_processor/BUCK.v2 | 0 .../tools/info_plist_processor/main.py | 0 .../tools/info_plist_processor/preprocess.py | 0 .../tools/info_plist_processor/preprocess_test.py | 0 .../tools/info_plist_processor/process.py | 0 .../tools/info_plist_processor/process_test.py | 0 {prelude/apple => apple}/tools/ipa_package_maker.py | 0 {prelude/apple => apple}/tools/linker_wrapper.py | 0 {prelude/apple => apple}/tools/make_modulemap.py | 0 .../apple => apple}/tools/make_swift_comp_db.py | 0 .../apple => apple}/tools/make_swift_interface.py | 0 {prelude/apple => apple}/tools/make_vfsoverlay.py | 0 {prelude/apple => apple}/tools/plistlib_utils.py | 0 .../tools/re_compatibility_utils/BUCK | 0 .../tools/re_compatibility_utils/writable.py | 0 .../apple => apple}/tools/resource_broker/BUCK.v2 | 0 .../tools/resource_broker/idb_companion.py | 0 .../tools/resource_broker/idb_target.py | 0 .../apple => apple}/tools/resource_broker/ios.py | 0 .../apple => apple}/tools/resource_broker/macos.py | 0 .../apple => apple}/tools/resource_broker/main.py | 0 .../tools/resource_broker/simctl_runtime.py | 0 .../tools/resource_broker/timeouts.py | 0 .../apple => apple}/tools/resource_broker/utils.py | 0 .../tools/selective_debugging/BUCK.v2 | 0 .../tools/selective_debugging/macho.py | 0 .../tools/selective_debugging/macho_parser.py | 0 .../tools/selective_debugging/main.py | 0 .../tools/selective_debugging/scrubber.py | 0 .../tools/selective_debugging/scrubber_test.py | 0 .../tools/selective_debugging/spec.py | 0 .../tools/selective_debugging/spec_test.py | 0 .../selective_debugging/test_resources/HelloWorld | Bin .../test_resources/focused_spec.json | 0 .../test_resources/focused_targets.json | 0 .../test_resources/focused_targets_empty.json | 0 .../tools/selective_debugging/utils.py 
| 0 .../tools/split_arch_combine_dsym_bundles_tool.py | 0 {prelude/apple => apple}/tools/swift_exec.sh | 0 .../tools/swift_objc_header_postprocess.py | 0 .../apple => apple}/user/apple_resource_bundle.bzl | 0 .../user/apple_resource_transition.bzl | 0 .../user/apple_selected_debug_path_file.bzl | 0 .../user/apple_selective_debugging.bzl | 0 {prelude/apple => apple}/user/apple_simulators.bzl | 0 .../user/apple_toolchain_override.bzl | 0 {prelude/apple => apple}/user/apple_tools.bzl | 0 .../apple => apple}/user/apple_watchos_bundle.bzl | 0 .../apple => apple}/user/cpu_split_transition.bzl | 0 .../apple => apple}/user/resource_group_map.bzl | 0 {prelude/apple => apple}/user/watch_transition.bzl | 0 {prelude/apple => apple}/xcode.bzl | 0 {prelude/apple => apple}/xcode_postbuild_script.bzl | 0 {prelude/apple => apple}/xcode_prebuild_script.bzl | 0 {prelude/apple => apple}/xctest_swift_support.bzl | 0 prelude/artifact_tset.bzl => artifact_tset.bzl | 0 prelude/artifacts.bzl => artifacts.bzl | 0 prelude/asserts.bzl => asserts.bzl | 0 prelude/attributes.bzl => attributes.bzl | 0 ...ck2_compatibility.bzl => buck2_compatibility.bzl | 0 prelude/build_mode.bzl => build_mode.bzl | 0 prelude/cache_mode.bzl => cache_mode.bzl | 0 prelude/command_alias.bzl => command_alias.bzl | 0 .../configurations => configurations}/rules.bzl | 0 {prelude/configurations => configurations}/util.bzl | 0 {prelude/cpu => cpu}/BUCK.v2 | 0 {prelude/cpu => cpu}/constraints/BUCK.v2 | 0 {prelude/csharp => csharp}/csharp.bzl | 0 {prelude/csharp => csharp}/csharp_providers.bzl | 0 {prelude/csharp => csharp}/toolchain.bzl | 0 {prelude/cxx => cxx}/anon_link.bzl | 0 {prelude/cxx => cxx}/archive.bzl | 0 {prelude/cxx => cxx}/argsfiles.bzl | 0 {prelude/cxx => cxx}/attr_selection.bzl | 0 {prelude/cxx => cxx}/bitcode.bzl | 0 {prelude/cxx => cxx}/comp_db.bzl | 0 {prelude/cxx => cxx}/compile.bzl | 0 {prelude/cxx => cxx}/compiler.bzl | 0 {prelude/cxx => cxx}/cxx.bzl | 0 {prelude/cxx => cxx}/cxx_bolt.bzl | 0 
{prelude/cxx => cxx}/cxx_context.bzl | 0 {prelude/cxx => cxx}/cxx_executable.bzl | 0 {prelude/cxx => cxx}/cxx_library.bzl | 0 {prelude/cxx => cxx}/cxx_library_utility.bzl | 0 {prelude/cxx => cxx}/cxx_link_utility.bzl | 0 {prelude/cxx => cxx}/cxx_sources.bzl | 0 {prelude/cxx => cxx}/cxx_toolchain.bzl | 0 {prelude/cxx => cxx}/cxx_toolchain_macro_layer.bzl | 0 {prelude/cxx => cxx}/cxx_toolchain_types.bzl | 0 {prelude/cxx => cxx}/cxx_types.bzl | 0 {prelude/cxx => cxx}/cxx_utility.bzl | 0 {prelude/cxx => cxx}/debug.bzl | 0 {prelude/cxx => cxx}/dist_lto/README.md | 0 {prelude/cxx => cxx}/dist_lto/dist_lto.bzl | 0 {prelude/cxx => cxx}/dist_lto/tools.bzl | 0 {prelude/cxx => cxx}/dist_lto/tools/BUCK.v2 | 0 {prelude/cxx => cxx}/dist_lto/tools/__init__.py | 0 .../cxx => cxx}/dist_lto/tools/dist_lto_copy.py | 0 {prelude/cxx => cxx}/dist_lto/tools/dist_lto_opt.py | 0 .../cxx => cxx}/dist_lto/tools/dist_lto_planner.py | 0 .../cxx => cxx}/dist_lto/tools/dist_lto_prepare.py | 0 .../dist_lto/tools/tests/test_dist_lto_opt.py | 0 {prelude/cxx => cxx}/dwp.bzl | 0 {prelude/cxx => cxx}/groups.bzl | 0 {prelude/cxx => cxx}/groups_types.bzl | 0 {prelude/cxx => cxx}/headers.bzl | 0 {prelude/cxx => cxx}/link.bzl | 0 {prelude/cxx => cxx}/link_groups.bzl | 0 {prelude/cxx => cxx}/link_groups_types.bzl | 0 {prelude/cxx => cxx}/link_types.bzl | 0 {prelude/cxx => cxx}/linker.bzl | 0 {prelude/cxx => cxx}/omnibus.bzl | 0 {prelude/cxx => cxx}/platform.bzl | 0 {prelude/cxx => cxx}/prebuilt_cxx_library_group.bzl | 0 {prelude/cxx => cxx}/preprocessor.bzl | 0 {prelude/cxx => cxx}/shared_library_interface.bzl | 0 {prelude/cxx => cxx}/symbols.bzl | 0 {prelude/cxx => cxx}/tools/BUCK.v2 | 0 {prelude/cxx => cxx}/tools/defs.bzl | 0 {prelude/cxx => cxx}/tools/dep_file_processor.py | 0 {prelude/cxx => cxx}/tools/dep_file_utils.py | 0 {prelude/cxx => cxx}/tools/hmap_wrapper.py | 0 {prelude/cxx => cxx}/tools/linker_wrapper.py | 0 {prelude/cxx => cxx}/tools/make_comp_db.py | 0 {prelude/cxx => 
cxx}/tools/makefile_to_dep_file.py | 0 .../cxx => cxx}/tools/show_headers_to_dep_file.py | 0 .../cxx => cxx}/tools/show_includes_to_dep_file.py | 0 .../cxx => cxx}/user/cxx_toolchain_override.bzl | 0 {prelude/cxx => cxx}/user/link_group_map.bzl | 0 {prelude/cxx => cxx}/windows_resource.bzl | 0 {prelude/cxx => cxx}/xcode.bzl | 0 {prelude/debugging => debugging}/common.bzl | 0 {prelude/debugging => debugging}/ensure_dwp.bzl | 0 {prelude/debugging => debugging}/fdb.bxl | 0 .../debugging => debugging}/inspect_dbg_exec.bzl | 0 .../debugging => debugging}/inspect_default.bzl | 0 {prelude/debugging => debugging}/inspect_java.bzl | 0 {prelude/debugging => debugging}/labels.bzl | 0 {prelude/debugging => debugging}/types.bzl | 0 {prelude/decls => decls}/android_common.bzl | 0 {prelude/decls => decls}/android_rules.bzl | 0 {prelude/decls => decls}/common.bzl | 0 {prelude/decls => decls}/core_rules.bzl | 0 {prelude/decls => decls}/cxx_common.bzl | 0 {prelude/decls => decls}/cxx_rules.bzl | 0 {prelude/decls => decls}/d_common.bzl | 0 {prelude/decls => decls}/d_rules.bzl | 0 {prelude/decls => decls}/dotnet_rules.bzl | 0 {prelude/decls => decls}/erlang_rules.bzl | 0 {prelude/decls => decls}/genrule_common.bzl | 0 {prelude/decls => decls}/git_rules.bzl | 0 {prelude/decls => decls}/go_common.bzl | 0 {prelude/decls => decls}/go_rules.bzl | 0 {prelude/decls => decls}/groovy_rules.bzl | 0 {prelude/decls => decls}/halide_rules.bzl | 0 {prelude/decls => decls}/haskell_common.bzl | 0 {prelude/decls => decls}/haskell_rules.bzl | 0 {prelude/decls => decls}/ios_rules.bzl | 0 {prelude/decls => decls}/java_rules.bzl | 0 {prelude/decls => decls}/js_rules.bzl | 0 {prelude/decls => decls}/jvm_common.bzl | 0 {prelude/decls => decls}/kotlin_rules.bzl | 0 {prelude/decls => decls}/lua_common.bzl | 0 {prelude/decls => decls}/lua_rules.bzl | 0 {prelude/decls => decls}/native_common.bzl | 0 {prelude/decls => decls}/ocaml_common.bzl | 0 {prelude/decls => decls}/ocaml_rules.bzl | 0 {prelude/decls => 
decls}/python_common.bzl | 0 {prelude/decls => decls}/python_rules.bzl | 0 {prelude/decls => decls}/re_test_common.bzl | 0 {prelude/decls => decls}/remote_common.bzl | 0 {prelude/decls => decls}/rust_common.bzl | 0 {prelude/decls => decls}/rust_rules.bzl | 0 {prelude/decls => decls}/scala_rules.bzl | 0 {prelude/decls => decls}/shell_rules.bzl | 0 {prelude/decls => decls}/toolchains_common.bzl | 0 {prelude/decls => decls}/uncategorized_rules.bzl | 0 {prelude/dist => dist}/dist_info.bzl | 0 {prelude/docs => docs}/rules.bzl | 0 {prelude/erlang => erlang}/applications/BUCK.v2 | 0 {prelude/erlang => erlang}/common_test/.elp.toml | 0 .../erlang => erlang}/common_test/common/BUCK.v2 | 0 .../common/include/artifact_annotations.hrl | 0 .../common_test/common/include/buck_ct_records.hrl | 0 .../common_test/common/include/tpx_records.hrl | 0 .../common_test/common/src/artifact_annotations.erl | 0 .../common_test/common/src/bounded_buffer.erl | 0 .../common_test/common/src/buck_ct_parser.erl | 0 .../common_test/common/src/buck_ct_provider.erl | 0 .../common_test/common/src/ct_error_printer.erl | 0 .../common_test/common/src/execution_logs.erl | 0 .../common_test/common/src/io_buffer.erl | 0 .../common/src/test_artifact_directory.erl | 0 .../common_test/common/src/test_logger.erl | 0 .../erlang => erlang}/common_test/cth_hooks/BUCK.v2 | 0 .../common_test/cth_hooks/src/cth_tpx.erl | 0 .../common_test/cth_hooks/src/cth_tpx_role.erl | 0 .../common_test/cth_hooks/src/cth_tpx_server.erl | 0 .../common_test/cth_hooks/src/cth_tpx_test_tree.erl | 0 .../common_test/cth_hooks/src/method_ids.hrl | 0 .../common_test/test_binary/BUCK.v2 | 0 .../common_test/test_binary/src/json_interfacer.erl | 0 .../test_binary/src/junit_interfacer.erl | 0 .../common_test/test_binary/src/list_test.erl | 0 .../test_binary/src/listing_interfacer.erl | 0 .../common_test/test_binary/src/test_binary.erl | 0 .../common_test/test_binary/src/test_runner.erl | 0 .../common_test/test_cli_lib/BUCK.v2 | 0 
.../common_test/test_cli_lib/src/test.erl | 0 .../test_cli_lib/test/test_cli_e2e_SUITE.erl | 0 .../test_cli_e2e_SUITE_data/test_list_SUITE.erl | 0 .../erlang => erlang}/common_test/test_exec/BUCK.v2 | 0 .../common_test/test_exec/src/ct_daemon.erl | 0 .../common_test/test_exec/src/ct_daemon_core.erl | 0 .../common_test/test_exec/src/ct_daemon_hooks.erl | 0 .../common_test/test_exec/src/ct_daemon_logger.erl | 0 .../common_test/test_exec/src/ct_daemon_node.erl | 0 .../common_test/test_exec/src/ct_daemon_printer.erl | 0 .../common_test/test_exec/src/ct_daemon_runner.erl | 0 .../common_test/test_exec/src/ct_executor.erl | 0 .../common_test/test_exec/src/ct_runner.erl | 0 .../common_test/test_exec/src/epmd_manager.erl | 0 .../common_test/test_exec/src/test_exec.app.src | 0 .../common_test/test_exec/src/test_exec.erl | 0 .../common_test/test_exec/src/test_exec_sup.erl | 0 .../test_exec/test/ct_executor_SUITE.erl | 0 {prelude/erlang => erlang}/erlang.bzl | 0 {prelude/erlang => erlang}/erlang_application.bzl | 0 .../erlang_application_includes.bzl | 0 {prelude/erlang => erlang}/erlang_build.bzl | 0 {prelude/erlang => erlang}/erlang_dependencies.bzl | 0 {prelude/erlang => erlang}/erlang_escript.bzl | 0 {prelude/erlang => erlang}/erlang_info.bzl | 0 {prelude/erlang => erlang}/erlang_ls.config | 0 .../erlang => erlang}/erlang_otp_application.bzl | 0 {prelude/erlang => erlang}/erlang_release.bzl | 0 {prelude/erlang => erlang}/erlang_shell.bzl | 0 {prelude/erlang => erlang}/erlang_tests.bzl | 0 {prelude/erlang => erlang}/erlang_toolchain.bzl | 0 {prelude/erlang => erlang}/erlang_utils.bzl | 0 {prelude/erlang => erlang}/shell/BUCK.v2 | 0 {prelude/erlang => erlang}/shell/shell.bxl | 0 .../shell/src/shell_buck2_utils.erl | 0 .../erlang => erlang}/shell/src/user_default.erl | 0 {prelude/erlang => erlang}/toolchain/BUCK.v2 | 0 .../toolchain/app_src_builder.escript | 0 .../toolchain/boot_script_builder.escript | 0 .../toolchain/dependency_analyzer.escript | 0 
.../toolchain/dependency_finalizer.escript | 0 .../erlang => erlang}/toolchain/edoc_cli.escript | 0 .../toolchain/edoc_doclet_chunks.erl | 0 .../erlang => erlang}/toolchain/edoc_report.erl | 0 .../erlang => erlang}/toolchain/erlang_ls.config | 0 .../erlang => erlang}/toolchain/erlc_trampoline.sh | 0 .../toolchain/escript_builder.escript | 0 .../toolchain/escript_trampoline.sh | 0 .../toolchain/include_erts.escript | 0 .../toolchain/release_variables_builder.escript | 0 .../toolchain/transform_project_root.erl | 0 prelude/export_exe.bzl => export_exe.bzl | 0 prelude/export_file.bzl => export_file.bzl | 0 prelude/filegroup.bzl => filegroup.bzl | 0 prelude/genrule.bzl => genrule.bzl | 0 ...ule_local_labels.bzl => genrule_local_labels.bzl | 0 .../genrule_toolchain.bzl => genrule_toolchain.bzl | 0 {prelude/git => git}/git_fetch.bzl | 0 {prelude/git => git}/tools/BUCK.v2 | 0 {prelude/git => git}/tools/git_fetch.py | 0 {prelude/go => go}/cgo_builder.bzl | 0 {prelude/go => go}/cgo_library.bzl | 0 {prelude/go => go}/compile.bzl | 0 {prelude/go => go}/constraints/BUCK.v2 | 0 {prelude/go => go}/constraints/defs.bzl | 0 {prelude/go => go}/coverage.bzl | 0 {prelude/go => go}/go_binary.bzl | 0 {prelude/go => go}/go_exported_library.bzl | 0 {prelude/go => go}/go_library.bzl | 0 {prelude/go => go}/go_list.bzl | 0 {prelude/go => go}/go_stdlib.bzl | 0 {prelude/go => go}/go_test.bzl | 0 {prelude/go => go}/link.bzl | 0 {prelude/go => go}/package_builder.bzl | 0 {prelude/go => go}/packages.bzl | 0 {prelude/go => go}/toolchain.bzl | 0 {prelude/go => go}/tools/BUCK.v2 | 0 {prelude/go => go}/tools/cgo_wrapper.py | 0 {prelude/go => go}/tools/concat_files.py | 0 {prelude/go => go}/tools/gen_stdlib_importcfg.py | 0 {prelude/go => go}/tools/go_list_wrapper.py | 0 {prelude/go => go}/tools/go_wrapper.py | 0 {prelude/go => go}/tools/testmaingen.go | 0 {prelude/go => go}/transitions/defs.bzl | 0 {prelude/go => go}/transitions/tags_helper.bzl | 0 {prelude/haskell => haskell}/compile.bzl | 0 
{prelude/haskell => haskell}/haskell.bzl | 0 {prelude/haskell => haskell}/haskell_ghci.bzl | 0 {prelude/haskell => haskell}/haskell_haddock.bzl | 0 {prelude/haskell => haskell}/haskell_ide.bzl | 0 {prelude/haskell => haskell}/ide/README.md | 0 {prelude/haskell => haskell}/ide/hie.yaml | 0 {prelude/haskell => haskell}/ide/ide.bxl | 0 {prelude/haskell => haskell}/library_info.bzl | 0 {prelude/haskell => haskell}/link_info.bzl | 0 {prelude/haskell => haskell}/toolchain.bzl | 0 {prelude/haskell => haskell}/tools/BUCK.v2 | 0 .../tools/generate_target_metadata.py | 0 {prelude/haskell => haskell}/tools/ghc_wrapper.py | 0 .../tools/script_template_processor.py | 0 {prelude/haskell => haskell}/util.bzl | 0 .../http_archive => http_archive}/exec_deps.bzl | 0 .../http_archive => http_archive}/http_archive.bzl | 0 .../http_archive => http_archive}/tools/BUCK.v2 | 0 .../tools/create_exclusion_list.py | 0 prelude/http_file.bzl => http_file.bzl | 0 .../ide_integrations => ide_integrations}/xcode.bzl | 0 prelude/is_buck2.bzl => is_buck2.bzl | 0 .../is_buck2_internal.bzl => is_buck2_internal.bzl | 0 .../is_full_meta_repo.bzl => is_full_meta_repo.bzl | 0 {prelude/java => java}/class_to_srcs.bzl | 0 {prelude/java => java}/dex.bzl | 0 {prelude/java => java}/dex_toolchain.bzl | 0 {prelude/java => java}/gwt_binary.bzl | 0 {prelude/java => java}/jar_genrule.bzl | 0 {prelude/java => java}/java.bzl | 0 {prelude/java => java}/java_binary.bzl | 0 {prelude/java => java}/java_library.bzl | 0 {prelude/java => java}/java_providers.bzl | 0 {prelude/java => java}/java_resources.bzl | 0 {prelude/java => java}/java_test.bzl | 0 {prelude/java => java}/java_toolchain.bzl | 0 {prelude/java => java}/javacd_jar_creator.bzl | 0 {prelude/java => java}/keystore.bzl | 0 .../plugins/java_annotation_processor.bzl | 0 {prelude/java => java}/plugins/java_plugin.bzl | 0 {prelude/java => java}/prebuilt_jar.bzl | 0 {prelude/java => java}/tools/BUCK.v2 | 0 {prelude/java => java}/tools/compile_and_package.py | 0 
{prelude/java => java}/tools/fat_jar.py | 0 .../java => java}/tools/gen_class_to_source_map.py | 0 {prelude/java => java}/tools/list_class_names.py | 0 .../tools/merge_class_to_source_maps.py | 0 {prelude/java => java}/tools/utils.py | 0 {prelude/java => java}/utils/java_more_utils.bzl | 0 {prelude/java => java}/utils/java_utils.bzl | 0 {prelude/js => js}/js.bzl | 0 {prelude/js => js}/js_bundle.bzl | 0 {prelude/js => js}/js_bundle_genrule.bzl | 0 {prelude/js => js}/js_library.bzl | 0 {prelude/js => js}/js_providers.bzl | 0 {prelude/js => js}/js_utils.bzl | 0 {prelude/julia => julia}/julia.bzl | 0 {prelude/julia => julia}/julia_binary.bzl | 0 {prelude/julia => julia}/julia_info.bzl | 0 {prelude/julia => julia}/julia_library.bzl | 0 {prelude/julia => julia}/julia_test.bzl | 0 {prelude/julia => julia}/julia_toolchain.bzl | 0 {prelude/julia => julia}/tools/BUCK.v2 | 0 {prelude/julia => julia}/tools/parse_julia_cmd.py | 0 {prelude/jvm => jvm}/cd_jar_creator_util.bzl | 0 {prelude/jvm => jvm}/nullsafe.bzl | 0 {prelude/kotlin => kotlin}/kotlin.bzl | 0 {prelude/kotlin => kotlin}/kotlin_library.bzl | 0 {prelude/kotlin => kotlin}/kotlin_test.bzl | 0 {prelude/kotlin => kotlin}/kotlin_toolchain.bzl | 0 {prelude/kotlin => kotlin}/kotlin_utils.bzl | 0 {prelude/kotlin => kotlin}/kotlincd_jar_creator.bzl | 0 .../kotlin => kotlin}/tools/compile_kotlin/BUCK.v2 | 0 .../tools/compile_kotlin/compile_kotlin.py | 0 {prelude/kotlin => kotlin}/tools/defs.bzl | 0 .../tools/kapt_base64_encoder/BUCK.v2 | 0 .../com/facebook/kapt/KaptBase64Encoder.java | 0 .../linking => linking}/execution_preference.bzl | 0 {prelude/linking => linking}/link_groups.bzl | 0 {prelude/linking => linking}/link_info.bzl | 0 {prelude/linking => linking}/linkable_graph.bzl | 0 {prelude/linking => linking}/linkables.bzl | 0 {prelude/linking => linking}/lto.bzl | 0 {prelude/linking => linking}/shared_libraries.bzl | 0 {prelude/linking => linking}/strip.bzl | 0 {prelude/linking => linking}/types.bzl | 0 
prelude/local_only.bzl => local_only.bzl | 0 {prelude/lua => lua}/cxx_lua_extension.bzl | 0 {prelude/lua => lua}/lua_binary.bzl | 0 {prelude/lua => lua}/lua_library.bzl | 0 {prelude/matlab => matlab}/matlab.bzl | 0 {prelude/matlab => matlab}/matlab_info.bzl | 0 {prelude/matlab => matlab}/matlab_program.bzl | 0 {prelude/matlab => matlab}/matlab_toolchain.bzl | 0 prelude/native.bzl => native.bzl | 0 {prelude/ocaml => ocaml}/attrs.bzl | 0 {prelude/ocaml => ocaml}/makefile.bzl | 0 {prelude/ocaml => ocaml}/ocaml.bzl | 0 {prelude/ocaml => ocaml}/ocaml_toolchain_types.bzl | 0 {prelude/os => os}/BUCK.v2 | 0 {prelude/os => os}/constraints/BUCK.v2 | 0 {prelude/os_lookup => os_lookup}/defs.bzl | 0 {prelude/os_lookup => os_lookup}/targets/BUCK.v2 | 0 prelude/paths.bzl => paths.bzl | 0 {prelude/platforms => platforms}/BUCK | 0 {prelude/platforms => platforms}/BUCK.v2 | 0 {prelude/platforms => platforms}/apple/BUCK | 0 {prelude/platforms => platforms}/apple/arch.bzl | 0 {prelude/platforms => platforms}/apple/base.bzl | 0 .../platforms => platforms}/apple/build_mode.bzl | 0 .../platforms => platforms}/apple/constants.bzl | 0 .../platforms => platforms}/apple/constraints/BUCK | 0 .../platforms => platforms}/apple/platforms.bzl | 0 .../platforms => platforms}/apple/platforms_map.bzl | 0 {prelude/platforms => platforms}/apple/sdk.bzl | 0 {prelude/platforms => platforms}/defs.bzl | 0 {prelude/playground => playground}/test.bxl | 0 prelude/prelude.bzl => prelude.bzl | 0 ..._request_template.md => pull_request_template.md | 0 {prelude/python => python}/compile.bzl | 0 {prelude/python => python}/cxx_python_extension.bzl | 0 {prelude/python => python}/interface.bzl | 0 {prelude/python => python}/make_py_package.bzl | 0 {prelude/python => python}/manifest.bzl | 0 {prelude/python => python}/native_python_util.bzl | 0 {prelude/python => python}/needed_coverage.bzl | 0 .../python => python}/prebuilt_python_library.bzl | 0 {prelude/python => python}/python.bzl | 0 {prelude/python => 
python}/python_binary.bzl | 0 {prelude/python => python}/python_library.bzl | 0 .../python_needed_coverage_test.bzl | 0 {prelude/python => python}/python_test.bzl | 0 {prelude/python => python}/python_wheel.bzl | 0 {prelude/python => python}/runtime/BUCK.v2 | 0 .../python => python}/runtime/__par__/bootstrap.py | 0 {prelude/python => python}/source_db.bzl | 0 {prelude/python => python}/sourcedb/build.bxl | 0 {prelude/python => python}/sourcedb/classic.bxl | 0 .../python => python}/sourcedb/code_navigation.bxl | 0 {prelude/python => python}/sourcedb/filter.bxl | 0 {prelude/python => python}/sourcedb/merge.bxl | 0 {prelude/python => python}/sourcedb/query.bxl | 0 {prelude/python => python}/toolchain.bzl | 0 {prelude/python => python}/tools/BUCK.v2 | 0 {prelude/python => python}/tools/__test_main__.py | 0 {prelude/python => python}/tools/compile.py | 0 .../tools/create_manifest_for_source_dir.py | 0 {prelude/python => python}/tools/embedded_main.cpp | 0 {prelude/python => python}/tools/extract.py | 0 .../python => python}/tools/fail_with_message.py | 0 .../tools/generate_static_extension_info.py | 0 {prelude/python => python}/tools/make_par/BUCK | 0 .../tools/make_par/__run_lpar_main__.py | 0 .../tools/make_par/_lpar_bootstrap.sh.template | 0 .../tools/make_par/sitecustomize.py | 0 .../tools/make_py_package_inplace.py | 0 .../tools/make_py_package_manifest_module.py | 0 .../tools/make_py_package_modules.py | 0 {prelude/python => python}/tools/make_source_db.py | 0 .../tools/make_source_db_no_deps.py | 0 {prelude/python => python}/tools/parse_imports.py | 0 {prelude/python => python}/tools/py38stdlib.py | 0 {prelude/python => python}/tools/run_inplace.py.in | 0 .../python => python}/tools/run_inplace_lite.py.in | 0 .../python => python}/tools/sourcedb_merger/BUCK.v2 | 0 .../tools/sourcedb_merger/inputs.py | 0 .../tools/sourcedb_merger/legacy_merge.py | 0 .../tools/sourcedb_merger/legacy_outputs.py | 0 .../tools/sourcedb_merger/merge.py | 0 
.../tools/sourcedb_merger/outputs.py | 0 .../tools/sourcedb_merger/tests/__init__.py | 0 .../tools/sourcedb_merger/tests/inputs_test.py | 0 .../sourcedb_merger/tests/legacy_output_test.py | 0 .../tools/sourcedb_merger/tests/main.sh | 0 .../tools/sourcedb_merger/tests/outputs_test.py | 0 .../tools/static_extension_finder.py | 0 .../tools/static_extension_utils.cpp | 0 .../tools/traverse_dep_manifest.py | 0 {prelude/python => python}/tools/wheel.py | 0 {prelude/python => python}/typecheck/batch.bxl | 0 {prelude/python => python}/typing.bzl | 0 .../python_bootstrap.bzl | 0 .../tools/BUCK.v2 | 0 .../tools/win_python_wrapper.bat | 0 prelude/remote_file.bzl => remote_file.bzl | 0 prelude/resources.bzl => resources.bzl | 0 prelude/rules.bzl => rules.bzl | 0 prelude/rules_impl.bzl => rules_impl.bzl | 0 {prelude/rust => rust}/build.bzl | 0 {prelude/rust => rust}/build_params.bzl | 0 {prelude/rust => rust}/cargo_buildscript.bzl | 0 {prelude/rust => rust}/cargo_package.bzl | 0 {prelude/rust => rust}/clippy_configuration.bzl | 0 {prelude/rust => rust}/context.bzl | 0 {prelude/rust => rust}/extern.bzl | 0 {prelude/rust => rust}/failure_filter.bzl | 0 {prelude/rust => rust}/link_info.bzl | 0 {prelude/rust => rust}/outputs.bzl | 0 {prelude/rust => rust}/proc_macro_alias.bzl | 0 {prelude/rust => rust}/resources.bzl | 0 {prelude/rust => rust}/rust-analyzer/check.bxl | 0 .../rust => rust}/rust-analyzer/resolve_deps.bxl | 0 {prelude/rust => rust}/rust_binary.bzl | 0 {prelude/rust => rust}/rust_common.bzl | 0 {prelude/rust => rust}/rust_library.bzl | 0 {prelude/rust => rust}/rust_toolchain.bzl | 0 {prelude/rust => rust}/targets.bzl | 0 {prelude/rust => rust}/tools/BUCK.v2 | 0 {prelude/rust => rust}/tools/attrs.bzl | 0 {prelude/rust => rust}/tools/buildscript_run.py | 0 .../rust => rust}/tools/failure_filter_action.py | 0 {prelude/rust => rust}/tools/rustc_action.py | 0 {prelude/rust => rust}/tools/rustdoc_coverage.py | 0 .../tools/rustdoc_test_with_resources.py | 0 {prelude/rust => 
rust}/tools/tool_rules.bzl | 0 .../tools/transitive_dependency_symlinks.py | 0 {prelude/rust => rust}/with_workspace.bzl | 0 prelude/sh_binary.bzl => sh_binary.bzl | 0 prelude/sh_test.bzl => sh_test.bzl | 0 {prelude/test => test}/inject_test_run_info.bzl | 0 {prelude/test => test}/tools/BUCK.v2 | 0 {prelude/test => test}/tools/inject_test_env.py | 0 prelude/test_suite.bzl => test_suite.bzl | 0 {prelude/tests => tests}/re_utils.bzl | 0 .../remote_test_execution_toolchain.bzl | 0 .../third-party => third-party}/hmaptool/BUCK.v2 | 0 .../hmaptool/METADATA.bzl | 0 .../third-party => third-party}/hmaptool/README.md | 0 .../third-party => third-party}/hmaptool/hmaptool | 0 .../apple/xcode_version_checker/.gitignore | 0 .../apple/xcode_version_checker/BUCK.v2 | 0 .../apple/xcode_version_checker/Makefile | 0 .../apple/xcode_version_checker/README | 0 .../apple/xcode_version_checker/defs.bzl | 0 .../xcode_version_checker/src/xcode_exec_tester.m | 0 .../src/xcode_version_checker.m | 0 .../src/xcode_version_checks.h | 0 .../src/xcode_version_checks.m | 0 .../src/xcode_version_tester.m | 0 .../test/Xcode_14.2.0_14C18_fb_version.plist | 0 .../xcode_version_checker/xcode_version_checker | Bin {prelude/toolchains => toolchains}/conan/BUCK | 0 .../conan/buckler/conanfile.py | 0 .../toolchains => toolchains}/conan/conan_common.py | 0 .../conan/conan_generate.py | 0 .../toolchains => toolchains}/conan/conan_init.py | 0 .../toolchains => toolchains}/conan/conan_lock.py | 0 .../conan/conan_package.py | 0 .../conan/conan_package_extract.py | 0 .../toolchains => toolchains}/conan/conan_update.py | 0 {prelude/toolchains => toolchains}/conan/defs.bzl | 0 .../conan/lock_generate.py | 0 {prelude/toolchains => toolchains}/csharp.bzl | 0 {prelude/toolchains => toolchains}/cxx.bzl | 0 {prelude/toolchains => toolchains}/cxx/zig/BUCK | 0 {prelude/toolchains => toolchains}/cxx/zig/defs.bzl | 0 .../toolchains => toolchains}/cxx/zig/releases.bzl | 0 {prelude/toolchains => toolchains}/demo.bzl | 0 
.../toolchains => toolchains}/execution_host.bzl | 0 {prelude/toolchains => toolchains}/genrule.bzl | 0 {prelude/toolchains => toolchains}/go.bzl | 0 {prelude/toolchains => toolchains}/haskell.bzl | 0 {prelude/toolchains => toolchains}/msvc/BUCK.v2 | 0 .../toolchains => toolchains}/msvc/run_msvc_tool.py | 0 {prelude/toolchains => toolchains}/msvc/tools.bzl | 0 {prelude/toolchains => toolchains}/msvc/vswhere.py | 0 {prelude/toolchains => toolchains}/ocaml.bzl | 0 {prelude/toolchains => toolchains}/python.bzl | 0 .../remote_test_execution.bzl | 0 {prelude/toolchains => toolchains}/rust.bzl | 0 .../tools => tools}/audit_providers_universe.bxl | 0 .../constraint_overrides.bzl | 0 {prelude/user => user}/all.bzl | 0 {prelude/user => user}/cxx_headers_bundle.bzl | 0 {prelude/user => user}/extract_archive.bzl | 0 {prelude/user => user}/rule_spec.bzl | 0 {prelude/user => user}/write_file.bzl | 0 {prelude/utils => utils}/arglike.bzl | 0 {prelude/utils => utils}/buckconfig.bzl | 0 {prelude/utils => utils}/build_target_pattern.bzl | 0 {prelude/utils => utils}/cmd_script.bzl | 0 {prelude/utils => utils}/dicts.bzl | 0 {prelude/utils => utils}/expect.bzl | 0 {prelude/utils => utils}/graph_utils.bzl | 0 {prelude/utils => utils}/host.bzl | 0 {prelude/utils => utils}/lazy.bzl | 0 {prelude/utils => utils}/pick.bzl | 0 {prelude/utils => utils}/platform_flavors_util.bzl | 0 {prelude/utils => utils}/selects.bzl | 0 {prelude/utils => utils}/set.bzl | 0 {prelude/utils => utils}/strings.bzl | 0 {prelude/utils => utils}/type_defs.bzl | 0 {prelude/utils => utils}/utils.bzl | 0 prelude/validation_deps.bzl => validation_deps.bzl | 0 {prelude/windows => windows}/tools/BUCK.v2 | 0 .../tools/msvc_hermetic_exec.bat | 0 prelude/worker_tool.bzl => worker_tool.bzl | 0 {prelude/zip_file => zip_file}/tools/BUCK.v2 | 0 {prelude/zip_file => zip_file}/tools/unzip.py | 0 {prelude/zip_file => zip_file}/zip_file.bzl | 0 .../zip_file => zip_file}/zip_file_toolchain.bzl | 0 766 files changed, 0 insertions(+), 
0 deletions(-) rename prelude/.buckconfig => .buckconfig (100%) rename prelude/.gitignore => .gitignore (100%) rename prelude/BUCK => BUCK (100%) rename prelude/CHANGELOG.md => CHANGELOG.md (100%) rename prelude/CODE_OF_CONDUCT.md => CODE_OF_CONDUCT.md (100%) rename prelude/CONTRIBUTING.md => CONTRIBUTING.md (100%) rename prelude/LICENSE-APACHE => LICENSE-APACHE (100%) rename prelude/LICENSE-MIT => LICENSE-MIT (100%) rename prelude/README.md => README.md (100%) rename {prelude/abi => abi}/BUCK.v2 (100%) rename {prelude/abi => abi}/constraints/BUCK.v2 (100%) rename prelude/alias.bzl => alias.bzl (100%) rename {prelude/android => android}/aapt2_link.bzl (100%) rename {prelude/android => android}/android.bzl (100%) rename {prelude/android => android}/android_aar.bzl (100%) rename {prelude/android => android}/android_apk.bzl (100%) rename {prelude/android => android}/android_binary.bzl (100%) rename {prelude/android => android}/android_binary_native_library_rules.bzl (100%) rename {prelude/android => android}/android_binary_resources_rules.bzl (100%) rename {prelude/android => android}/android_build_config.bzl (100%) rename {prelude/android => android}/android_bundle.bzl (100%) rename {prelude/android => android}/android_instrumentation_apk.bzl (100%) rename {prelude/android => android}/android_instrumentation_test.bzl (100%) rename {prelude/android => android}/android_library.bzl (100%) rename {prelude/android => android}/android_manifest.bzl (100%) rename {prelude/android => android}/android_prebuilt_aar.bzl (100%) rename {prelude/android => android}/android_providers.bzl (100%) rename {prelude/android => android}/android_resource.bzl (100%) rename {prelude/android => android}/android_toolchain.bzl (100%) rename {prelude/android => android}/apk_genrule.bzl (100%) rename {prelude/android => android}/build_only_native_code.bzl (100%) rename {prelude/android => android}/bundletool_util.bzl (100%) rename {prelude/android => android}/configuration.bzl (100%) rename 
{prelude/android => android}/constraints/BUCK.v2 (100%) rename {prelude/android => android}/cpu_filters.bzl (100%) rename {prelude/android => android}/dex_rules.bzl (100%) rename {prelude/android => android}/exopackage.bzl (100%) rename {prelude/android => android}/gen_aidl.bzl (100%) rename {prelude/android => android}/min_sdk_version.bzl (100%) rename {prelude/android => android}/prebuilt_native_library.bzl (100%) rename {prelude/android => android}/preprocess_java_classes.bzl (100%) rename {prelude/android => android}/proguard.bzl (100%) rename {prelude/android => android}/r_dot_java.bzl (100%) rename {prelude/android => android}/robolectric_test.bzl (100%) rename {prelude/android => android}/tools/BUCK.v2 (100%) rename {prelude/android => android}/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java (100%) rename {prelude/android => android}/tools/combine_native_library_dirs.py (100%) rename {prelude/android => android}/tools/filter_dex.py (100%) rename {prelude/android => android}/tools/filter_extra_resources.py (100%) rename {prelude/android => android}/tools/filter_prebuilt_native_library_dir.py (100%) rename {prelude/android => android}/tools/merge_sequence.py (100%) rename {prelude/android => android}/tools/native_libs_as_assets_metadata.py (100%) rename {prelude/android => android}/tools/unpack_aar.py (100%) rename {prelude/android => android}/util.bzl (100%) rename {prelude/android => android}/voltron.bzl (100%) rename {prelude/apple => apple}/apple_asset_catalog.bzl (100%) rename {prelude/apple => apple}/apple_asset_catalog_compilation_options.bzl (100%) rename {prelude/apple => apple}/apple_asset_catalog_types.bzl (100%) rename {prelude/apple => apple}/apple_binary.bzl (100%) rename {prelude/apple => apple}/apple_bundle.bzl (100%) rename {prelude/apple => apple}/apple_bundle_attrs.bzl (100%) rename {prelude/apple => apple}/apple_bundle_config.bzl (100%) rename {prelude/apple => apple}/apple_bundle_destination.bzl (100%) rename {prelude/apple 
=> apple}/apple_bundle_part.bzl (100%) rename {prelude/apple => apple}/apple_bundle_resources.bzl (100%) rename {prelude/apple => apple}/apple_bundle_types.bzl (100%) rename {prelude/apple => apple}/apple_bundle_utility.bzl (100%) rename {prelude/apple => apple}/apple_code_signing_types.bzl (100%) rename {prelude/apple => apple}/apple_common.bzl (100%) rename {prelude/apple => apple}/apple_core_data.bzl (100%) rename {prelude/apple => apple}/apple_core_data_types.bzl (100%) rename {prelude/apple => apple}/apple_dsym.bzl (100%) rename {prelude/apple => apple}/apple_dsym_config.bzl (100%) rename {prelude/apple => apple}/apple_entitlements.bzl (100%) rename {prelude/apple => apple}/apple_framework_versions.bzl (100%) rename {prelude/apple => apple}/apple_frameworks.bzl (100%) rename {prelude/apple => apple}/apple_info_plist.bzl (100%) rename {prelude/apple => apple}/apple_info_plist_substitutions_parsing.bzl (100%) rename {prelude/apple => apple}/apple_library.bzl (100%) rename {prelude/apple => apple}/apple_macro_layer.bzl (100%) rename {prelude/apple => apple}/apple_modular_utility.bzl (100%) rename {prelude/apple => apple}/apple_native.bzl (100%) rename {prelude/apple => apple}/apple_package.bzl (100%) rename {prelude/apple => apple}/apple_package_config.bzl (100%) rename {prelude/apple => apple}/apple_resource.bzl (100%) rename {prelude/apple => apple}/apple_resource_bundle.bzl (100%) rename {prelude/apple => apple}/apple_resource_types.bzl (100%) rename {prelude/apple => apple}/apple_resource_utility.bzl (100%) rename {prelude/apple => apple}/apple_rules_impl.bzl (100%) rename {prelude/apple => apple}/apple_rules_impl_utility.bzl (100%) rename {prelude/apple => apple}/apple_sdk.bzl (100%) rename {prelude/apple => apple}/apple_sdk_metadata.bzl (100%) rename {prelude/apple => apple}/apple_stripping.bzl (100%) rename {prelude/apple => apple}/apple_swift_stdlib.bzl (100%) rename {prelude/apple => apple}/apple_target_sdk_version.bzl (100%) rename {prelude/apple => 
apple}/apple_test.bzl (100%) rename {prelude/apple => apple}/apple_toolchain.bzl (100%) rename {prelude/apple => apple}/apple_toolchain_types.bzl (100%) rename {prelude/apple => apple}/apple_universal_binaries.bzl (100%) rename {prelude/apple => apple}/apple_universal_executable.bzl (100%) rename {prelude/apple => apple}/apple_utility.bzl (100%) rename {prelude/apple => apple}/apple_xcuitest.bzl (100%) rename {prelude/apple => apple}/debug.bzl (100%) rename {prelude/apple => apple}/mockingbird/mockingbird_mock.bzl (100%) rename {prelude/apple => apple}/mockingbird/mockingbird_types.bzl (100%) rename {prelude/apple => apple}/modulemap.bzl (100%) rename {prelude/apple => apple}/prebuilt_apple_framework.bzl (100%) rename {prelude/apple => apple}/resource_groups.bzl (100%) rename {prelude/apple => apple}/scene_kit_assets.bzl (100%) rename {prelude/apple => apple}/scene_kit_assets_types.bzl (100%) rename {prelude/apple => apple}/swift/apple_sdk_clang_module.bzl (100%) rename {prelude/apple => apple}/swift/apple_sdk_modules_utility.bzl (100%) rename {prelude/apple => apple}/swift/apple_sdk_swift_module.bzl (100%) rename {prelude/apple => apple}/swift/swift_compilation.bzl (100%) rename {prelude/apple => apple}/swift/swift_debug_info_utils.bzl (100%) rename {prelude/apple => apple}/swift/swift_incremental_support.bzl (100%) rename {prelude/apple => apple}/swift/swift_module_map.bzl (100%) rename {prelude/apple => apple}/swift/swift_pcm_compilation.bzl (100%) rename {prelude/apple => apple}/swift/swift_pcm_compilation_types.bzl (100%) rename {prelude/apple => apple}/swift/swift_runtime.bzl (100%) rename {prelude/apple => apple}/swift/swift_sdk_pcm_compilation.bzl (100%) rename {prelude/apple => apple}/swift/swift_sdk_swiftinterface_compilation.bzl (100%) rename {prelude/apple => apple}/swift/swift_toolchain.bzl (100%) rename {prelude/apple => apple}/swift/swift_toolchain_macro_layer.bzl (100%) rename {prelude/apple => apple}/swift/swift_toolchain_types.bzl (100%) rename 
{prelude/apple => apple}/swift/swift_types.bzl (100%) rename {prelude/apple => apple}/tools/BUCK.v2 (100%) rename {prelude/apple => apple}/tools/bundling/BUCK.v2 (100%) rename {prelude/apple => apple}/tools/bundling/action_metadata.py (100%) rename {prelude/apple => apple}/tools/bundling/action_metadata_test.py (100%) rename {prelude/apple => apple}/tools/bundling/assemble_bundle.py (100%) rename {prelude/apple => apple}/tools/bundling/assemble_bundle_types.py (100%) rename {prelude/apple => apple}/tools/bundling/incremental_state.py (100%) rename {prelude/apple => apple}/tools/bundling/incremental_state_test.py (100%) rename {prelude/apple => apple}/tools/bundling/incremental_utils.py (100%) rename {prelude/apple => apple}/tools/bundling/incremental_utils_test.py (100%) rename {prelude/apple => apple}/tools/bundling/main.py (100%) rename {prelude/apple => apple}/tools/bundling/swift_support.py (100%) rename {prelude/apple => apple}/tools/bundling/test_resources/newer_version_action_metadata.json (100%) rename {prelude/apple => apple}/tools/bundling/test_resources/newer_version_incremental_state.json (100%) rename {prelude/apple => apple}/tools/bundling/test_resources/the.broken_json (100%) rename {prelude/apple => apple}/tools/bundling/test_resources/valid_action_metadata.json (100%) rename {prelude/apple => apple}/tools/bundling/test_resources/valid_incremental_state.json (100%) rename {prelude/apple => apple}/tools/code_signing/BUCK.v2 (100%) rename {prelude/apple => apple}/tools/code_signing/app_id.py (100%) rename {prelude/apple => apple}/tools/code_signing/app_id_test.py (100%) rename {prelude/apple => apple}/tools/code_signing/apple_platform.py (100%) rename {prelude/apple => apple}/tools/code_signing/codesign_bundle.py (100%) rename {prelude/apple => apple}/tools/code_signing/codesign_command_factory.py (100%) rename {prelude/apple => apple}/tools/code_signing/fast_adhoc.py (100%) rename {prelude/apple => apple}/tools/code_signing/identity.py (100%) rename 
{prelude/apple => apple}/tools/code_signing/identity_test.py (100%) rename {prelude/apple => apple}/tools/code_signing/info_plist_metadata.py (100%) rename {prelude/apple => apple}/tools/code_signing/info_plist_metadata_test.py (100%) rename {prelude/apple => apple}/tools/code_signing/list_codesign_identities.py (100%) rename {prelude/apple => apple}/tools/code_signing/main.py (100%) rename {prelude/apple => apple}/tools/code_signing/prepare_code_signing_entitlements.py (100%) rename {prelude/apple => apple}/tools/code_signing/prepare_code_signing_entitlements_test.py (100%) rename {prelude/apple => apple}/tools/code_signing/prepare_info_plist.py (100%) rename {prelude/apple => apple}/tools/code_signing/prepare_info_plist_test.py (100%) rename {prelude/apple => apple}/tools/code_signing/provisioning_profile_diagnostics.py (100%) rename {prelude/apple => apple}/tools/code_signing/provisioning_profile_metadata.py (100%) rename {prelude/apple => apple}/tools/code_signing/provisioning_profile_metadata_test.py (100%) rename {prelude/apple => apple}/tools/code_signing/provisioning_profile_selection.py (100%) rename {prelude/apple => apple}/tools/code_signing/provisioning_profile_selection_test.py (100%) rename {prelude/apple => apple}/tools/code_signing/read_provisioning_profile_command_factory.py (100%) rename {prelude/apple => apple}/tools/code_signing/test_resources/Entitlements.plist (100%) rename {prelude/apple => apple}/tools/code_signing/test_resources/qualified_sample.mobileprovision (100%) rename {prelude/apple => apple}/tools/code_signing/test_resources/sample.mobileprovision (100%) rename {prelude/apple => apple}/tools/defs.bzl (100%) rename {prelude/apple => apple}/tools/dry_codesign_tool.py (100%) rename {prelude/apple => apple}/tools/info_plist_processor/BUCK.v2 (100%) rename {prelude/apple => apple}/tools/info_plist_processor/main.py (100%) rename {prelude/apple => apple}/tools/info_plist_processor/preprocess.py (100%) rename {prelude/apple => 
apple}/tools/info_plist_processor/preprocess_test.py (100%) rename {prelude/apple => apple}/tools/info_plist_processor/process.py (100%) rename {prelude/apple => apple}/tools/info_plist_processor/process_test.py (100%) rename {prelude/apple => apple}/tools/ipa_package_maker.py (100%) rename {prelude/apple => apple}/tools/linker_wrapper.py (100%) rename {prelude/apple => apple}/tools/make_modulemap.py (100%) rename {prelude/apple => apple}/tools/make_swift_comp_db.py (100%) rename {prelude/apple => apple}/tools/make_swift_interface.py (100%) rename {prelude/apple => apple}/tools/make_vfsoverlay.py (100%) rename {prelude/apple => apple}/tools/plistlib_utils.py (100%) rename {prelude/apple => apple}/tools/re_compatibility_utils/BUCK (100%) rename {prelude/apple => apple}/tools/re_compatibility_utils/writable.py (100%) rename {prelude/apple => apple}/tools/resource_broker/BUCK.v2 (100%) rename {prelude/apple => apple}/tools/resource_broker/idb_companion.py (100%) rename {prelude/apple => apple}/tools/resource_broker/idb_target.py (100%) rename {prelude/apple => apple}/tools/resource_broker/ios.py (100%) rename {prelude/apple => apple}/tools/resource_broker/macos.py (100%) rename {prelude/apple => apple}/tools/resource_broker/main.py (100%) rename {prelude/apple => apple}/tools/resource_broker/simctl_runtime.py (100%) rename {prelude/apple => apple}/tools/resource_broker/timeouts.py (100%) rename {prelude/apple => apple}/tools/resource_broker/utils.py (100%) rename {prelude/apple => apple}/tools/selective_debugging/BUCK.v2 (100%) rename {prelude/apple => apple}/tools/selective_debugging/macho.py (100%) rename {prelude/apple => apple}/tools/selective_debugging/macho_parser.py (100%) rename {prelude/apple => apple}/tools/selective_debugging/main.py (100%) rename {prelude/apple => apple}/tools/selective_debugging/scrubber.py (100%) rename {prelude/apple => apple}/tools/selective_debugging/scrubber_test.py (100%) rename {prelude/apple => 
apple}/tools/selective_debugging/spec.py (100%) rename {prelude/apple => apple}/tools/selective_debugging/spec_test.py (100%) rename {prelude/apple => apple}/tools/selective_debugging/test_resources/HelloWorld (100%) rename {prelude/apple => apple}/tools/selective_debugging/test_resources/focused_spec.json (100%) rename {prelude/apple => apple}/tools/selective_debugging/test_resources/focused_targets.json (100%) rename {prelude/apple => apple}/tools/selective_debugging/test_resources/focused_targets_empty.json (100%) rename {prelude/apple => apple}/tools/selective_debugging/utils.py (100%) rename {prelude/apple => apple}/tools/split_arch_combine_dsym_bundles_tool.py (100%) rename {prelude/apple => apple}/tools/swift_exec.sh (100%) rename {prelude/apple => apple}/tools/swift_objc_header_postprocess.py (100%) rename {prelude/apple => apple}/user/apple_resource_bundle.bzl (100%) rename {prelude/apple => apple}/user/apple_resource_transition.bzl (100%) rename {prelude/apple => apple}/user/apple_selected_debug_path_file.bzl (100%) rename {prelude/apple => apple}/user/apple_selective_debugging.bzl (100%) rename {prelude/apple => apple}/user/apple_simulators.bzl (100%) rename {prelude/apple => apple}/user/apple_toolchain_override.bzl (100%) rename {prelude/apple => apple}/user/apple_tools.bzl (100%) rename {prelude/apple => apple}/user/apple_watchos_bundle.bzl (100%) rename {prelude/apple => apple}/user/cpu_split_transition.bzl (100%) rename {prelude/apple => apple}/user/resource_group_map.bzl (100%) rename {prelude/apple => apple}/user/watch_transition.bzl (100%) rename {prelude/apple => apple}/xcode.bzl (100%) rename {prelude/apple => apple}/xcode_postbuild_script.bzl (100%) rename {prelude/apple => apple}/xcode_prebuild_script.bzl (100%) rename {prelude/apple => apple}/xctest_swift_support.bzl (100%) rename prelude/artifact_tset.bzl => artifact_tset.bzl (100%) rename prelude/artifacts.bzl => artifacts.bzl (100%) rename prelude/asserts.bzl => asserts.bzl (100%) rename 
prelude/attributes.bzl => attributes.bzl (100%) rename prelude/buck2_compatibility.bzl => buck2_compatibility.bzl (100%) rename prelude/build_mode.bzl => build_mode.bzl (100%) rename prelude/cache_mode.bzl => cache_mode.bzl (100%) rename prelude/command_alias.bzl => command_alias.bzl (100%) rename {prelude/configurations => configurations}/rules.bzl (100%) rename {prelude/configurations => configurations}/util.bzl (100%) rename {prelude/cpu => cpu}/BUCK.v2 (100%) rename {prelude/cpu => cpu}/constraints/BUCK.v2 (100%) rename {prelude/csharp => csharp}/csharp.bzl (100%) rename {prelude/csharp => csharp}/csharp_providers.bzl (100%) rename {prelude/csharp => csharp}/toolchain.bzl (100%) rename {prelude/cxx => cxx}/anon_link.bzl (100%) rename {prelude/cxx => cxx}/archive.bzl (100%) rename {prelude/cxx => cxx}/argsfiles.bzl (100%) rename {prelude/cxx => cxx}/attr_selection.bzl (100%) rename {prelude/cxx => cxx}/bitcode.bzl (100%) rename {prelude/cxx => cxx}/comp_db.bzl (100%) rename {prelude/cxx => cxx}/compile.bzl (100%) rename {prelude/cxx => cxx}/compiler.bzl (100%) rename {prelude/cxx => cxx}/cxx.bzl (100%) rename {prelude/cxx => cxx}/cxx_bolt.bzl (100%) rename {prelude/cxx => cxx}/cxx_context.bzl (100%) rename {prelude/cxx => cxx}/cxx_executable.bzl (100%) rename {prelude/cxx => cxx}/cxx_library.bzl (100%) rename {prelude/cxx => cxx}/cxx_library_utility.bzl (100%) rename {prelude/cxx => cxx}/cxx_link_utility.bzl (100%) rename {prelude/cxx => cxx}/cxx_sources.bzl (100%) rename {prelude/cxx => cxx}/cxx_toolchain.bzl (100%) rename {prelude/cxx => cxx}/cxx_toolchain_macro_layer.bzl (100%) rename {prelude/cxx => cxx}/cxx_toolchain_types.bzl (100%) rename {prelude/cxx => cxx}/cxx_types.bzl (100%) rename {prelude/cxx => cxx}/cxx_utility.bzl (100%) rename {prelude/cxx => cxx}/debug.bzl (100%) rename {prelude/cxx => cxx}/dist_lto/README.md (100%) rename {prelude/cxx => cxx}/dist_lto/dist_lto.bzl (100%) rename {prelude/cxx => cxx}/dist_lto/tools.bzl (100%) rename {prelude/cxx 
=> cxx}/dist_lto/tools/BUCK.v2 (100%) rename {prelude/cxx => cxx}/dist_lto/tools/__init__.py (100%) rename {prelude/cxx => cxx}/dist_lto/tools/dist_lto_copy.py (100%) rename {prelude/cxx => cxx}/dist_lto/tools/dist_lto_opt.py (100%) rename {prelude/cxx => cxx}/dist_lto/tools/dist_lto_planner.py (100%) rename {prelude/cxx => cxx}/dist_lto/tools/dist_lto_prepare.py (100%) rename {prelude/cxx => cxx}/dist_lto/tools/tests/test_dist_lto_opt.py (100%) rename {prelude/cxx => cxx}/dwp.bzl (100%) rename {prelude/cxx => cxx}/groups.bzl (100%) rename {prelude/cxx => cxx}/groups_types.bzl (100%) rename {prelude/cxx => cxx}/headers.bzl (100%) rename {prelude/cxx => cxx}/link.bzl (100%) rename {prelude/cxx => cxx}/link_groups.bzl (100%) rename {prelude/cxx => cxx}/link_groups_types.bzl (100%) rename {prelude/cxx => cxx}/link_types.bzl (100%) rename {prelude/cxx => cxx}/linker.bzl (100%) rename {prelude/cxx => cxx}/omnibus.bzl (100%) rename {prelude/cxx => cxx}/platform.bzl (100%) rename {prelude/cxx => cxx}/prebuilt_cxx_library_group.bzl (100%) rename {prelude/cxx => cxx}/preprocessor.bzl (100%) rename {prelude/cxx => cxx}/shared_library_interface.bzl (100%) rename {prelude/cxx => cxx}/symbols.bzl (100%) rename {prelude/cxx => cxx}/tools/BUCK.v2 (100%) rename {prelude/cxx => cxx}/tools/defs.bzl (100%) rename {prelude/cxx => cxx}/tools/dep_file_processor.py (100%) rename {prelude/cxx => cxx}/tools/dep_file_utils.py (100%) rename {prelude/cxx => cxx}/tools/hmap_wrapper.py (100%) rename {prelude/cxx => cxx}/tools/linker_wrapper.py (100%) rename {prelude/cxx => cxx}/tools/make_comp_db.py (100%) rename {prelude/cxx => cxx}/tools/makefile_to_dep_file.py (100%) rename {prelude/cxx => cxx}/tools/show_headers_to_dep_file.py (100%) rename {prelude/cxx => cxx}/tools/show_includes_to_dep_file.py (100%) rename {prelude/cxx => cxx}/user/cxx_toolchain_override.bzl (100%) rename {prelude/cxx => cxx}/user/link_group_map.bzl (100%) rename {prelude/cxx => cxx}/windows_resource.bzl (100%) rename 
{prelude/cxx => cxx}/xcode.bzl (100%) rename {prelude/debugging => debugging}/common.bzl (100%) rename {prelude/debugging => debugging}/ensure_dwp.bzl (100%) rename {prelude/debugging => debugging}/fdb.bxl (100%) rename {prelude/debugging => debugging}/inspect_dbg_exec.bzl (100%) rename {prelude/debugging => debugging}/inspect_default.bzl (100%) rename {prelude/debugging => debugging}/inspect_java.bzl (100%) rename {prelude/debugging => debugging}/labels.bzl (100%) rename {prelude/debugging => debugging}/types.bzl (100%) rename {prelude/decls => decls}/android_common.bzl (100%) rename {prelude/decls => decls}/android_rules.bzl (100%) rename {prelude/decls => decls}/common.bzl (100%) rename {prelude/decls => decls}/core_rules.bzl (100%) rename {prelude/decls => decls}/cxx_common.bzl (100%) rename {prelude/decls => decls}/cxx_rules.bzl (100%) rename {prelude/decls => decls}/d_common.bzl (100%) rename {prelude/decls => decls}/d_rules.bzl (100%) rename {prelude/decls => decls}/dotnet_rules.bzl (100%) rename {prelude/decls => decls}/erlang_rules.bzl (100%) rename {prelude/decls => decls}/genrule_common.bzl (100%) rename {prelude/decls => decls}/git_rules.bzl (100%) rename {prelude/decls => decls}/go_common.bzl (100%) rename {prelude/decls => decls}/go_rules.bzl (100%) rename {prelude/decls => decls}/groovy_rules.bzl (100%) rename {prelude/decls => decls}/halide_rules.bzl (100%) rename {prelude/decls => decls}/haskell_common.bzl (100%) rename {prelude/decls => decls}/haskell_rules.bzl (100%) rename {prelude/decls => decls}/ios_rules.bzl (100%) rename {prelude/decls => decls}/java_rules.bzl (100%) rename {prelude/decls => decls}/js_rules.bzl (100%) rename {prelude/decls => decls}/jvm_common.bzl (100%) rename {prelude/decls => decls}/kotlin_rules.bzl (100%) rename {prelude/decls => decls}/lua_common.bzl (100%) rename {prelude/decls => decls}/lua_rules.bzl (100%) rename {prelude/decls => decls}/native_common.bzl (100%) rename {prelude/decls => decls}/ocaml_common.bzl (100%) 
rename {prelude/decls => decls}/ocaml_rules.bzl (100%) rename {prelude/decls => decls}/python_common.bzl (100%) rename {prelude/decls => decls}/python_rules.bzl (100%) rename {prelude/decls => decls}/re_test_common.bzl (100%) rename {prelude/decls => decls}/remote_common.bzl (100%) rename {prelude/decls => decls}/rust_common.bzl (100%) rename {prelude/decls => decls}/rust_rules.bzl (100%) rename {prelude/decls => decls}/scala_rules.bzl (100%) rename {prelude/decls => decls}/shell_rules.bzl (100%) rename {prelude/decls => decls}/toolchains_common.bzl (100%) rename {prelude/decls => decls}/uncategorized_rules.bzl (100%) rename {prelude/dist => dist}/dist_info.bzl (100%) rename {prelude/docs => docs}/rules.bzl (100%) rename {prelude/erlang => erlang}/applications/BUCK.v2 (100%) rename {prelude/erlang => erlang}/common_test/.elp.toml (100%) rename {prelude/erlang => erlang}/common_test/common/BUCK.v2 (100%) rename {prelude/erlang => erlang}/common_test/common/include/artifact_annotations.hrl (100%) rename {prelude/erlang => erlang}/common_test/common/include/buck_ct_records.hrl (100%) rename {prelude/erlang => erlang}/common_test/common/include/tpx_records.hrl (100%) rename {prelude/erlang => erlang}/common_test/common/src/artifact_annotations.erl (100%) rename {prelude/erlang => erlang}/common_test/common/src/bounded_buffer.erl (100%) rename {prelude/erlang => erlang}/common_test/common/src/buck_ct_parser.erl (100%) rename {prelude/erlang => erlang}/common_test/common/src/buck_ct_provider.erl (100%) rename {prelude/erlang => erlang}/common_test/common/src/ct_error_printer.erl (100%) rename {prelude/erlang => erlang}/common_test/common/src/execution_logs.erl (100%) rename {prelude/erlang => erlang}/common_test/common/src/io_buffer.erl (100%) rename {prelude/erlang => erlang}/common_test/common/src/test_artifact_directory.erl (100%) rename {prelude/erlang => erlang}/common_test/common/src/test_logger.erl (100%) rename {prelude/erlang => 
erlang}/common_test/cth_hooks/BUCK.v2 (100%) rename {prelude/erlang => erlang}/common_test/cth_hooks/src/cth_tpx.erl (100%) rename {prelude/erlang => erlang}/common_test/cth_hooks/src/cth_tpx_role.erl (100%) rename {prelude/erlang => erlang}/common_test/cth_hooks/src/cth_tpx_server.erl (100%) rename {prelude/erlang => erlang}/common_test/cth_hooks/src/cth_tpx_test_tree.erl (100%) rename {prelude/erlang => erlang}/common_test/cth_hooks/src/method_ids.hrl (100%) rename {prelude/erlang => erlang}/common_test/test_binary/BUCK.v2 (100%) rename {prelude/erlang => erlang}/common_test/test_binary/src/json_interfacer.erl (100%) rename {prelude/erlang => erlang}/common_test/test_binary/src/junit_interfacer.erl (100%) rename {prelude/erlang => erlang}/common_test/test_binary/src/list_test.erl (100%) rename {prelude/erlang => erlang}/common_test/test_binary/src/listing_interfacer.erl (100%) rename {prelude/erlang => erlang}/common_test/test_binary/src/test_binary.erl (100%) rename {prelude/erlang => erlang}/common_test/test_binary/src/test_runner.erl (100%) rename {prelude/erlang => erlang}/common_test/test_cli_lib/BUCK.v2 (100%) rename {prelude/erlang => erlang}/common_test/test_cli_lib/src/test.erl (100%) rename {prelude/erlang => erlang}/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl (100%) rename {prelude/erlang => erlang}/common_test/test_cli_lib/test/test_cli_e2e_SUITE_data/test_list_SUITE.erl (100%) rename {prelude/erlang => erlang}/common_test/test_exec/BUCK.v2 (100%) rename {prelude/erlang => erlang}/common_test/test_exec/src/ct_daemon.erl (100%) rename {prelude/erlang => erlang}/common_test/test_exec/src/ct_daemon_core.erl (100%) rename {prelude/erlang => erlang}/common_test/test_exec/src/ct_daemon_hooks.erl (100%) rename {prelude/erlang => erlang}/common_test/test_exec/src/ct_daemon_logger.erl (100%) rename {prelude/erlang => erlang}/common_test/test_exec/src/ct_daemon_node.erl (100%) rename {prelude/erlang => 
erlang}/common_test/test_exec/src/ct_daemon_printer.erl (100%) rename {prelude/erlang => erlang}/common_test/test_exec/src/ct_daemon_runner.erl (100%) rename {prelude/erlang => erlang}/common_test/test_exec/src/ct_executor.erl (100%) rename {prelude/erlang => erlang}/common_test/test_exec/src/ct_runner.erl (100%) rename {prelude/erlang => erlang}/common_test/test_exec/src/epmd_manager.erl (100%) rename {prelude/erlang => erlang}/common_test/test_exec/src/test_exec.app.src (100%) rename {prelude/erlang => erlang}/common_test/test_exec/src/test_exec.erl (100%) rename {prelude/erlang => erlang}/common_test/test_exec/src/test_exec_sup.erl (100%) rename {prelude/erlang => erlang}/common_test/test_exec/test/ct_executor_SUITE.erl (100%) rename {prelude/erlang => erlang}/erlang.bzl (100%) rename {prelude/erlang => erlang}/erlang_application.bzl (100%) rename {prelude/erlang => erlang}/erlang_application_includes.bzl (100%) rename {prelude/erlang => erlang}/erlang_build.bzl (100%) rename {prelude/erlang => erlang}/erlang_dependencies.bzl (100%) rename {prelude/erlang => erlang}/erlang_escript.bzl (100%) rename {prelude/erlang => erlang}/erlang_info.bzl (100%) rename {prelude/erlang => erlang}/erlang_ls.config (100%) rename {prelude/erlang => erlang}/erlang_otp_application.bzl (100%) rename {prelude/erlang => erlang}/erlang_release.bzl (100%) rename {prelude/erlang => erlang}/erlang_shell.bzl (100%) rename {prelude/erlang => erlang}/erlang_tests.bzl (100%) rename {prelude/erlang => erlang}/erlang_toolchain.bzl (100%) rename {prelude/erlang => erlang}/erlang_utils.bzl (100%) rename {prelude/erlang => erlang}/shell/BUCK.v2 (100%) rename {prelude/erlang => erlang}/shell/shell.bxl (100%) rename {prelude/erlang => erlang}/shell/src/shell_buck2_utils.erl (100%) rename {prelude/erlang => erlang}/shell/src/user_default.erl (100%) rename {prelude/erlang => erlang}/toolchain/BUCK.v2 (100%) rename {prelude/erlang => erlang}/toolchain/app_src_builder.escript (100%) rename 
{prelude/erlang => erlang}/toolchain/boot_script_builder.escript (100%) rename {prelude/erlang => erlang}/toolchain/dependency_analyzer.escript (100%) rename {prelude/erlang => erlang}/toolchain/dependency_finalizer.escript (100%) rename {prelude/erlang => erlang}/toolchain/edoc_cli.escript (100%) rename {prelude/erlang => erlang}/toolchain/edoc_doclet_chunks.erl (100%) rename {prelude/erlang => erlang}/toolchain/edoc_report.erl (100%) rename {prelude/erlang => erlang}/toolchain/erlang_ls.config (100%) rename {prelude/erlang => erlang}/toolchain/erlc_trampoline.sh (100%) rename {prelude/erlang => erlang}/toolchain/escript_builder.escript (100%) rename {prelude/erlang => erlang}/toolchain/escript_trampoline.sh (100%) rename {prelude/erlang => erlang}/toolchain/include_erts.escript (100%) rename {prelude/erlang => erlang}/toolchain/release_variables_builder.escript (100%) rename {prelude/erlang => erlang}/toolchain/transform_project_root.erl (100%) rename prelude/export_exe.bzl => export_exe.bzl (100%) rename prelude/export_file.bzl => export_file.bzl (100%) rename prelude/filegroup.bzl => filegroup.bzl (100%) rename prelude/genrule.bzl => genrule.bzl (100%) rename prelude/genrule_local_labels.bzl => genrule_local_labels.bzl (100%) rename prelude/genrule_toolchain.bzl => genrule_toolchain.bzl (100%) rename {prelude/git => git}/git_fetch.bzl (100%) rename {prelude/git => git}/tools/BUCK.v2 (100%) rename {prelude/git => git}/tools/git_fetch.py (100%) rename {prelude/go => go}/cgo_builder.bzl (100%) rename {prelude/go => go}/cgo_library.bzl (100%) rename {prelude/go => go}/compile.bzl (100%) rename {prelude/go => go}/constraints/BUCK.v2 (100%) rename {prelude/go => go}/constraints/defs.bzl (100%) rename {prelude/go => go}/coverage.bzl (100%) rename {prelude/go => go}/go_binary.bzl (100%) rename {prelude/go => go}/go_exported_library.bzl (100%) rename {prelude/go => go}/go_library.bzl (100%) rename {prelude/go => go}/go_list.bzl (100%) rename {prelude/go => 
go}/go_stdlib.bzl (100%) rename {prelude/go => go}/go_test.bzl (100%) rename {prelude/go => go}/link.bzl (100%) rename {prelude/go => go}/package_builder.bzl (100%) rename {prelude/go => go}/packages.bzl (100%) rename {prelude/go => go}/toolchain.bzl (100%) rename {prelude/go => go}/tools/BUCK.v2 (100%) rename {prelude/go => go}/tools/cgo_wrapper.py (100%) rename {prelude/go => go}/tools/concat_files.py (100%) rename {prelude/go => go}/tools/gen_stdlib_importcfg.py (100%) rename {prelude/go => go}/tools/go_list_wrapper.py (100%) rename {prelude/go => go}/tools/go_wrapper.py (100%) rename {prelude/go => go}/tools/testmaingen.go (100%) rename {prelude/go => go}/transitions/defs.bzl (100%) rename {prelude/go => go}/transitions/tags_helper.bzl (100%) rename {prelude/haskell => haskell}/compile.bzl (100%) rename {prelude/haskell => haskell}/haskell.bzl (100%) rename {prelude/haskell => haskell}/haskell_ghci.bzl (100%) rename {prelude/haskell => haskell}/haskell_haddock.bzl (100%) rename {prelude/haskell => haskell}/haskell_ide.bzl (100%) rename {prelude/haskell => haskell}/ide/README.md (100%) rename {prelude/haskell => haskell}/ide/hie.yaml (100%) rename {prelude/haskell => haskell}/ide/ide.bxl (100%) rename {prelude/haskell => haskell}/library_info.bzl (100%) rename {prelude/haskell => haskell}/link_info.bzl (100%) rename {prelude/haskell => haskell}/toolchain.bzl (100%) rename {prelude/haskell => haskell}/tools/BUCK.v2 (100%) rename {prelude/haskell => haskell}/tools/generate_target_metadata.py (100%) rename {prelude/haskell => haskell}/tools/ghc_wrapper.py (100%) rename {prelude/haskell => haskell}/tools/script_template_processor.py (100%) rename {prelude/haskell => haskell}/util.bzl (100%) rename {prelude/http_archive => http_archive}/exec_deps.bzl (100%) rename {prelude/http_archive => http_archive}/http_archive.bzl (100%) rename {prelude/http_archive => http_archive}/tools/BUCK.v2 (100%) rename {prelude/http_archive => http_archive}/tools/create_exclusion_list.py 
(100%) rename prelude/http_file.bzl => http_file.bzl (100%) rename {prelude/ide_integrations => ide_integrations}/xcode.bzl (100%) rename prelude/is_buck2.bzl => is_buck2.bzl (100%) rename prelude/is_buck2_internal.bzl => is_buck2_internal.bzl (100%) rename prelude/is_full_meta_repo.bzl => is_full_meta_repo.bzl (100%) rename {prelude/java => java}/class_to_srcs.bzl (100%) rename {prelude/java => java}/dex.bzl (100%) rename {prelude/java => java}/dex_toolchain.bzl (100%) rename {prelude/java => java}/gwt_binary.bzl (100%) rename {prelude/java => java}/jar_genrule.bzl (100%) rename {prelude/java => java}/java.bzl (100%) rename {prelude/java => java}/java_binary.bzl (100%) rename {prelude/java => java}/java_library.bzl (100%) rename {prelude/java => java}/java_providers.bzl (100%) rename {prelude/java => java}/java_resources.bzl (100%) rename {prelude/java => java}/java_test.bzl (100%) rename {prelude/java => java}/java_toolchain.bzl (100%) rename {prelude/java => java}/javacd_jar_creator.bzl (100%) rename {prelude/java => java}/keystore.bzl (100%) rename {prelude/java => java}/plugins/java_annotation_processor.bzl (100%) rename {prelude/java => java}/plugins/java_plugin.bzl (100%) rename {prelude/java => java}/prebuilt_jar.bzl (100%) rename {prelude/java => java}/tools/BUCK.v2 (100%) rename {prelude/java => java}/tools/compile_and_package.py (100%) rename {prelude/java => java}/tools/fat_jar.py (100%) rename {prelude/java => java}/tools/gen_class_to_source_map.py (100%) rename {prelude/java => java}/tools/list_class_names.py (100%) rename {prelude/java => java}/tools/merge_class_to_source_maps.py (100%) rename {prelude/java => java}/tools/utils.py (100%) rename {prelude/java => java}/utils/java_more_utils.bzl (100%) rename {prelude/java => java}/utils/java_utils.bzl (100%) rename {prelude/js => js}/js.bzl (100%) rename {prelude/js => js}/js_bundle.bzl (100%) rename {prelude/js => js}/js_bundle_genrule.bzl (100%) rename {prelude/js => js}/js_library.bzl (100%) rename 
{prelude/js => js}/js_providers.bzl (100%) rename {prelude/js => js}/js_utils.bzl (100%) rename {prelude/julia => julia}/julia.bzl (100%) rename {prelude/julia => julia}/julia_binary.bzl (100%) rename {prelude/julia => julia}/julia_info.bzl (100%) rename {prelude/julia => julia}/julia_library.bzl (100%) rename {prelude/julia => julia}/julia_test.bzl (100%) rename {prelude/julia => julia}/julia_toolchain.bzl (100%) rename {prelude/julia => julia}/tools/BUCK.v2 (100%) rename {prelude/julia => julia}/tools/parse_julia_cmd.py (100%) rename {prelude/jvm => jvm}/cd_jar_creator_util.bzl (100%) rename {prelude/jvm => jvm}/nullsafe.bzl (100%) rename {prelude/kotlin => kotlin}/kotlin.bzl (100%) rename {prelude/kotlin => kotlin}/kotlin_library.bzl (100%) rename {prelude/kotlin => kotlin}/kotlin_test.bzl (100%) rename {prelude/kotlin => kotlin}/kotlin_toolchain.bzl (100%) rename {prelude/kotlin => kotlin}/kotlin_utils.bzl (100%) rename {prelude/kotlin => kotlin}/kotlincd_jar_creator.bzl (100%) rename {prelude/kotlin => kotlin}/tools/compile_kotlin/BUCK.v2 (100%) rename {prelude/kotlin => kotlin}/tools/compile_kotlin/compile_kotlin.py (100%) rename {prelude/kotlin => kotlin}/tools/defs.bzl (100%) rename {prelude/kotlin => kotlin}/tools/kapt_base64_encoder/BUCK.v2 (100%) rename {prelude/kotlin => kotlin}/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java (100%) rename {prelude/linking => linking}/execution_preference.bzl (100%) rename {prelude/linking => linking}/link_groups.bzl (100%) rename {prelude/linking => linking}/link_info.bzl (100%) rename {prelude/linking => linking}/linkable_graph.bzl (100%) rename {prelude/linking => linking}/linkables.bzl (100%) rename {prelude/linking => linking}/lto.bzl (100%) rename {prelude/linking => linking}/shared_libraries.bzl (100%) rename {prelude/linking => linking}/strip.bzl (100%) rename {prelude/linking => linking}/types.bzl (100%) rename prelude/local_only.bzl => local_only.bzl (100%) rename {prelude/lua => 
lua}/cxx_lua_extension.bzl (100%) rename {prelude/lua => lua}/lua_binary.bzl (100%) rename {prelude/lua => lua}/lua_library.bzl (100%) rename {prelude/matlab => matlab}/matlab.bzl (100%) rename {prelude/matlab => matlab}/matlab_info.bzl (100%) rename {prelude/matlab => matlab}/matlab_program.bzl (100%) rename {prelude/matlab => matlab}/matlab_toolchain.bzl (100%) rename prelude/native.bzl => native.bzl (100%) rename {prelude/ocaml => ocaml}/attrs.bzl (100%) rename {prelude/ocaml => ocaml}/makefile.bzl (100%) rename {prelude/ocaml => ocaml}/ocaml.bzl (100%) rename {prelude/ocaml => ocaml}/ocaml_toolchain_types.bzl (100%) rename {prelude/os => os}/BUCK.v2 (100%) rename {prelude/os => os}/constraints/BUCK.v2 (100%) rename {prelude/os_lookup => os_lookup}/defs.bzl (100%) rename {prelude/os_lookup => os_lookup}/targets/BUCK.v2 (100%) rename prelude/paths.bzl => paths.bzl (100%) rename {prelude/platforms => platforms}/BUCK (100%) rename {prelude/platforms => platforms}/BUCK.v2 (100%) rename {prelude/platforms => platforms}/apple/BUCK (100%) rename {prelude/platforms => platforms}/apple/arch.bzl (100%) rename {prelude/platforms => platforms}/apple/base.bzl (100%) rename {prelude/platforms => platforms}/apple/build_mode.bzl (100%) rename {prelude/platforms => platforms}/apple/constants.bzl (100%) rename {prelude/platforms => platforms}/apple/constraints/BUCK (100%) rename {prelude/platforms => platforms}/apple/platforms.bzl (100%) rename {prelude/platforms => platforms}/apple/platforms_map.bzl (100%) rename {prelude/platforms => platforms}/apple/sdk.bzl (100%) rename {prelude/platforms => platforms}/defs.bzl (100%) rename {prelude/playground => playground}/test.bxl (100%) rename prelude/prelude.bzl => prelude.bzl (100%) rename prelude/pull_request_template.md => pull_request_template.md (100%) rename {prelude/python => python}/compile.bzl (100%) rename {prelude/python => python}/cxx_python_extension.bzl (100%) rename {prelude/python => python}/interface.bzl (100%) rename 
{prelude/python => python}/make_py_package.bzl (100%) rename {prelude/python => python}/manifest.bzl (100%) rename {prelude/python => python}/native_python_util.bzl (100%) rename {prelude/python => python}/needed_coverage.bzl (100%) rename {prelude/python => python}/prebuilt_python_library.bzl (100%) rename {prelude/python => python}/python.bzl (100%) rename {prelude/python => python}/python_binary.bzl (100%) rename {prelude/python => python}/python_library.bzl (100%) rename {prelude/python => python}/python_needed_coverage_test.bzl (100%) rename {prelude/python => python}/python_test.bzl (100%) rename {prelude/python => python}/python_wheel.bzl (100%) rename {prelude/python => python}/runtime/BUCK.v2 (100%) rename {prelude/python => python}/runtime/__par__/bootstrap.py (100%) rename {prelude/python => python}/source_db.bzl (100%) rename {prelude/python => python}/sourcedb/build.bxl (100%) rename {prelude/python => python}/sourcedb/classic.bxl (100%) rename {prelude/python => python}/sourcedb/code_navigation.bxl (100%) rename {prelude/python => python}/sourcedb/filter.bxl (100%) rename {prelude/python => python}/sourcedb/merge.bxl (100%) rename {prelude/python => python}/sourcedb/query.bxl (100%) rename {prelude/python => python}/toolchain.bzl (100%) rename {prelude/python => python}/tools/BUCK.v2 (100%) rename {prelude/python => python}/tools/__test_main__.py (100%) rename {prelude/python => python}/tools/compile.py (100%) rename {prelude/python => python}/tools/create_manifest_for_source_dir.py (100%) rename {prelude/python => python}/tools/embedded_main.cpp (100%) rename {prelude/python => python}/tools/extract.py (100%) rename {prelude/python => python}/tools/fail_with_message.py (100%) rename {prelude/python => python}/tools/generate_static_extension_info.py (100%) rename {prelude/python => python}/tools/make_par/BUCK (100%) rename {prelude/python => python}/tools/make_par/__run_lpar_main__.py (100%) rename {prelude/python => 
python}/tools/make_par/_lpar_bootstrap.sh.template (100%) rename {prelude/python => python}/tools/make_par/sitecustomize.py (100%) rename {prelude/python => python}/tools/make_py_package_inplace.py (100%) rename {prelude/python => python}/tools/make_py_package_manifest_module.py (100%) rename {prelude/python => python}/tools/make_py_package_modules.py (100%) rename {prelude/python => python}/tools/make_source_db.py (100%) rename {prelude/python => python}/tools/make_source_db_no_deps.py (100%) rename {prelude/python => python}/tools/parse_imports.py (100%) rename {prelude/python => python}/tools/py38stdlib.py (100%) rename {prelude/python => python}/tools/run_inplace.py.in (100%) rename {prelude/python => python}/tools/run_inplace_lite.py.in (100%) rename {prelude/python => python}/tools/sourcedb_merger/BUCK.v2 (100%) rename {prelude/python => python}/tools/sourcedb_merger/inputs.py (100%) rename {prelude/python => python}/tools/sourcedb_merger/legacy_merge.py (100%) rename {prelude/python => python}/tools/sourcedb_merger/legacy_outputs.py (100%) rename {prelude/python => python}/tools/sourcedb_merger/merge.py (100%) rename {prelude/python => python}/tools/sourcedb_merger/outputs.py (100%) rename {prelude/python => python}/tools/sourcedb_merger/tests/__init__.py (100%) rename {prelude/python => python}/tools/sourcedb_merger/tests/inputs_test.py (100%) rename {prelude/python => python}/tools/sourcedb_merger/tests/legacy_output_test.py (100%) rename {prelude/python => python}/tools/sourcedb_merger/tests/main.sh (100%) rename {prelude/python => python}/tools/sourcedb_merger/tests/outputs_test.py (100%) rename {prelude/python => python}/tools/static_extension_finder.py (100%) rename {prelude/python => python}/tools/static_extension_utils.cpp (100%) rename {prelude/python => python}/tools/traverse_dep_manifest.py (100%) rename {prelude/python => python}/tools/wheel.py (100%) rename {prelude/python => python}/typecheck/batch.bxl (100%) rename {prelude/python => 
python}/typing.bzl (100%) rename {prelude/python_bootstrap => python_bootstrap}/python_bootstrap.bzl (100%) rename {prelude/python_bootstrap => python_bootstrap}/tools/BUCK.v2 (100%) rename {prelude/python_bootstrap => python_bootstrap}/tools/win_python_wrapper.bat (100%) rename prelude/remote_file.bzl => remote_file.bzl (100%) rename prelude/resources.bzl => resources.bzl (100%) rename prelude/rules.bzl => rules.bzl (100%) rename prelude/rules_impl.bzl => rules_impl.bzl (100%) rename {prelude/rust => rust}/build.bzl (100%) rename {prelude/rust => rust}/build_params.bzl (100%) rename {prelude/rust => rust}/cargo_buildscript.bzl (100%) rename {prelude/rust => rust}/cargo_package.bzl (100%) rename {prelude/rust => rust}/clippy_configuration.bzl (100%) rename {prelude/rust => rust}/context.bzl (100%) rename {prelude/rust => rust}/extern.bzl (100%) rename {prelude/rust => rust}/failure_filter.bzl (100%) rename {prelude/rust => rust}/link_info.bzl (100%) rename {prelude/rust => rust}/outputs.bzl (100%) rename {prelude/rust => rust}/proc_macro_alias.bzl (100%) rename {prelude/rust => rust}/resources.bzl (100%) rename {prelude/rust => rust}/rust-analyzer/check.bxl (100%) rename {prelude/rust => rust}/rust-analyzer/resolve_deps.bxl (100%) rename {prelude/rust => rust}/rust_binary.bzl (100%) rename {prelude/rust => rust}/rust_common.bzl (100%) rename {prelude/rust => rust}/rust_library.bzl (100%) rename {prelude/rust => rust}/rust_toolchain.bzl (100%) rename {prelude/rust => rust}/targets.bzl (100%) rename {prelude/rust => rust}/tools/BUCK.v2 (100%) rename {prelude/rust => rust}/tools/attrs.bzl (100%) rename {prelude/rust => rust}/tools/buildscript_run.py (100%) rename {prelude/rust => rust}/tools/failure_filter_action.py (100%) rename {prelude/rust => rust}/tools/rustc_action.py (100%) rename {prelude/rust => rust}/tools/rustdoc_coverage.py (100%) rename {prelude/rust => rust}/tools/rustdoc_test_with_resources.py (100%) rename {prelude/rust => rust}/tools/tool_rules.bzl 
(100%) rename {prelude/rust => rust}/tools/transitive_dependency_symlinks.py (100%) rename {prelude/rust => rust}/with_workspace.bzl (100%) rename prelude/sh_binary.bzl => sh_binary.bzl (100%) rename prelude/sh_test.bzl => sh_test.bzl (100%) rename {prelude/test => test}/inject_test_run_info.bzl (100%) rename {prelude/test => test}/tools/BUCK.v2 (100%) rename {prelude/test => test}/tools/inject_test_env.py (100%) rename prelude/test_suite.bzl => test_suite.bzl (100%) rename {prelude/tests => tests}/re_utils.bzl (100%) rename {prelude/tests => tests}/remote_test_execution_toolchain.bzl (100%) rename {prelude/third-party => third-party}/hmaptool/BUCK.v2 (100%) rename {prelude/third-party => third-party}/hmaptool/METADATA.bzl (100%) rename {prelude/third-party => third-party}/hmaptool/README.md (100%) rename {prelude/third-party => third-party}/hmaptool/hmaptool (100%) rename {prelude/toolchains => toolchains}/apple/xcode_version_checker/.gitignore (100%) rename {prelude/toolchains => toolchains}/apple/xcode_version_checker/BUCK.v2 (100%) rename {prelude/toolchains => toolchains}/apple/xcode_version_checker/Makefile (100%) rename {prelude/toolchains => toolchains}/apple/xcode_version_checker/README (100%) rename {prelude/toolchains => toolchains}/apple/xcode_version_checker/defs.bzl (100%) rename {prelude/toolchains => toolchains}/apple/xcode_version_checker/src/xcode_exec_tester.m (100%) rename {prelude/toolchains => toolchains}/apple/xcode_version_checker/src/xcode_version_checker.m (100%) rename {prelude/toolchains => toolchains}/apple/xcode_version_checker/src/xcode_version_checks.h (100%) rename {prelude/toolchains => toolchains}/apple/xcode_version_checker/src/xcode_version_checks.m (100%) rename {prelude/toolchains => toolchains}/apple/xcode_version_checker/src/xcode_version_tester.m (100%) rename {prelude/toolchains => toolchains}/apple/xcode_version_checker/test/Xcode_14.2.0_14C18_fb_version.plist (100%) rename {prelude/toolchains => 
toolchains}/apple/xcode_version_checker/xcode_version_checker (100%) rename {prelude/toolchains => toolchains}/conan/BUCK (100%) rename {prelude/toolchains => toolchains}/conan/buckler/conanfile.py (100%) rename {prelude/toolchains => toolchains}/conan/conan_common.py (100%) rename {prelude/toolchains => toolchains}/conan/conan_generate.py (100%) rename {prelude/toolchains => toolchains}/conan/conan_init.py (100%) rename {prelude/toolchains => toolchains}/conan/conan_lock.py (100%) rename {prelude/toolchains => toolchains}/conan/conan_package.py (100%) rename {prelude/toolchains => toolchains}/conan/conan_package_extract.py (100%) rename {prelude/toolchains => toolchains}/conan/conan_update.py (100%) rename {prelude/toolchains => toolchains}/conan/defs.bzl (100%) rename {prelude/toolchains => toolchains}/conan/lock_generate.py (100%) rename {prelude/toolchains => toolchains}/csharp.bzl (100%) rename {prelude/toolchains => toolchains}/cxx.bzl (100%) rename {prelude/toolchains => toolchains}/cxx/zig/BUCK (100%) rename {prelude/toolchains => toolchains}/cxx/zig/defs.bzl (100%) rename {prelude/toolchains => toolchains}/cxx/zig/releases.bzl (100%) rename {prelude/toolchains => toolchains}/demo.bzl (100%) rename {prelude/toolchains => toolchains}/execution_host.bzl (100%) rename {prelude/toolchains => toolchains}/genrule.bzl (100%) rename {prelude/toolchains => toolchains}/go.bzl (100%) rename {prelude/toolchains => toolchains}/haskell.bzl (100%) rename {prelude/toolchains => toolchains}/msvc/BUCK.v2 (100%) rename {prelude/toolchains => toolchains}/msvc/run_msvc_tool.py (100%) rename {prelude/toolchains => toolchains}/msvc/tools.bzl (100%) rename {prelude/toolchains => toolchains}/msvc/vswhere.py (100%) rename {prelude/toolchains => toolchains}/ocaml.bzl (100%) rename {prelude/toolchains => toolchains}/python.bzl (100%) rename {prelude/toolchains => toolchains}/remote_test_execution.bzl (100%) rename {prelude/toolchains => toolchains}/rust.bzl (100%) rename 
{prelude/tools => tools}/audit_providers_universe.bxl (100%) rename {prelude/transitions => transitions}/constraint_overrides.bzl (100%) rename {prelude/user => user}/all.bzl (100%) rename {prelude/user => user}/cxx_headers_bundle.bzl (100%) rename {prelude/user => user}/extract_archive.bzl (100%) rename {prelude/user => user}/rule_spec.bzl (100%) rename {prelude/user => user}/write_file.bzl (100%) rename {prelude/utils => utils}/arglike.bzl (100%) rename {prelude/utils => utils}/buckconfig.bzl (100%) rename {prelude/utils => utils}/build_target_pattern.bzl (100%) rename {prelude/utils => utils}/cmd_script.bzl (100%) rename {prelude/utils => utils}/dicts.bzl (100%) rename {prelude/utils => utils}/expect.bzl (100%) rename {prelude/utils => utils}/graph_utils.bzl (100%) rename {prelude/utils => utils}/host.bzl (100%) rename {prelude/utils => utils}/lazy.bzl (100%) rename {prelude/utils => utils}/pick.bzl (100%) rename {prelude/utils => utils}/platform_flavors_util.bzl (100%) rename {prelude/utils => utils}/selects.bzl (100%) rename {prelude/utils => utils}/set.bzl (100%) rename {prelude/utils => utils}/strings.bzl (100%) rename {prelude/utils => utils}/type_defs.bzl (100%) rename {prelude/utils => utils}/utils.bzl (100%) rename prelude/validation_deps.bzl => validation_deps.bzl (100%) rename {prelude/windows => windows}/tools/BUCK.v2 (100%) rename {prelude/windows => windows}/tools/msvc_hermetic_exec.bat (100%) rename prelude/worker_tool.bzl => worker_tool.bzl (100%) rename {prelude/zip_file => zip_file}/tools/BUCK.v2 (100%) rename {prelude/zip_file => zip_file}/tools/unzip.py (100%) rename {prelude/zip_file => zip_file}/zip_file.bzl (100%) rename {prelude/zip_file => zip_file}/zip_file_toolchain.bzl (100%) diff --git a/prelude/.buckconfig b/.buckconfig similarity index 100% rename from prelude/.buckconfig rename to .buckconfig diff --git a/prelude/.gitignore b/.gitignore similarity index 100% rename from prelude/.gitignore rename to .gitignore diff --git 
a/prelude/BUCK b/BUCK similarity index 100% rename from prelude/BUCK rename to BUCK diff --git a/prelude/CHANGELOG.md b/CHANGELOG.md similarity index 100% rename from prelude/CHANGELOG.md rename to CHANGELOG.md diff --git a/prelude/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md similarity index 100% rename from prelude/CODE_OF_CONDUCT.md rename to CODE_OF_CONDUCT.md diff --git a/prelude/CONTRIBUTING.md b/CONTRIBUTING.md similarity index 100% rename from prelude/CONTRIBUTING.md rename to CONTRIBUTING.md diff --git a/prelude/LICENSE-APACHE b/LICENSE-APACHE similarity index 100% rename from prelude/LICENSE-APACHE rename to LICENSE-APACHE diff --git a/prelude/LICENSE-MIT b/LICENSE-MIT similarity index 100% rename from prelude/LICENSE-MIT rename to LICENSE-MIT diff --git a/prelude/README.md b/README.md similarity index 100% rename from prelude/README.md rename to README.md diff --git a/prelude/abi/BUCK.v2 b/abi/BUCK.v2 similarity index 100% rename from prelude/abi/BUCK.v2 rename to abi/BUCK.v2 diff --git a/prelude/abi/constraints/BUCK.v2 b/abi/constraints/BUCK.v2 similarity index 100% rename from prelude/abi/constraints/BUCK.v2 rename to abi/constraints/BUCK.v2 diff --git a/prelude/alias.bzl b/alias.bzl similarity index 100% rename from prelude/alias.bzl rename to alias.bzl diff --git a/prelude/android/aapt2_link.bzl b/android/aapt2_link.bzl similarity index 100% rename from prelude/android/aapt2_link.bzl rename to android/aapt2_link.bzl diff --git a/prelude/android/android.bzl b/android/android.bzl similarity index 100% rename from prelude/android/android.bzl rename to android/android.bzl diff --git a/prelude/android/android_aar.bzl b/android/android_aar.bzl similarity index 100% rename from prelude/android/android_aar.bzl rename to android/android_aar.bzl diff --git a/prelude/android/android_apk.bzl b/android/android_apk.bzl similarity index 100% rename from prelude/android/android_apk.bzl rename to android/android_apk.bzl diff --git a/prelude/android/android_binary.bzl 
b/android/android_binary.bzl similarity index 100% rename from prelude/android/android_binary.bzl rename to android/android_binary.bzl diff --git a/prelude/android/android_binary_native_library_rules.bzl b/android/android_binary_native_library_rules.bzl similarity index 100% rename from prelude/android/android_binary_native_library_rules.bzl rename to android/android_binary_native_library_rules.bzl diff --git a/prelude/android/android_binary_resources_rules.bzl b/android/android_binary_resources_rules.bzl similarity index 100% rename from prelude/android/android_binary_resources_rules.bzl rename to android/android_binary_resources_rules.bzl diff --git a/prelude/android/android_build_config.bzl b/android/android_build_config.bzl similarity index 100% rename from prelude/android/android_build_config.bzl rename to android/android_build_config.bzl diff --git a/prelude/android/android_bundle.bzl b/android/android_bundle.bzl similarity index 100% rename from prelude/android/android_bundle.bzl rename to android/android_bundle.bzl diff --git a/prelude/android/android_instrumentation_apk.bzl b/android/android_instrumentation_apk.bzl similarity index 100% rename from prelude/android/android_instrumentation_apk.bzl rename to android/android_instrumentation_apk.bzl diff --git a/prelude/android/android_instrumentation_test.bzl b/android/android_instrumentation_test.bzl similarity index 100% rename from prelude/android/android_instrumentation_test.bzl rename to android/android_instrumentation_test.bzl diff --git a/prelude/android/android_library.bzl b/android/android_library.bzl similarity index 100% rename from prelude/android/android_library.bzl rename to android/android_library.bzl diff --git a/prelude/android/android_manifest.bzl b/android/android_manifest.bzl similarity index 100% rename from prelude/android/android_manifest.bzl rename to android/android_manifest.bzl diff --git a/prelude/android/android_prebuilt_aar.bzl b/android/android_prebuilt_aar.bzl similarity index 
100% rename from prelude/android/android_prebuilt_aar.bzl rename to android/android_prebuilt_aar.bzl diff --git a/prelude/android/android_providers.bzl b/android/android_providers.bzl similarity index 100% rename from prelude/android/android_providers.bzl rename to android/android_providers.bzl diff --git a/prelude/android/android_resource.bzl b/android/android_resource.bzl similarity index 100% rename from prelude/android/android_resource.bzl rename to android/android_resource.bzl diff --git a/prelude/android/android_toolchain.bzl b/android/android_toolchain.bzl similarity index 100% rename from prelude/android/android_toolchain.bzl rename to android/android_toolchain.bzl diff --git a/prelude/android/apk_genrule.bzl b/android/apk_genrule.bzl similarity index 100% rename from prelude/android/apk_genrule.bzl rename to android/apk_genrule.bzl diff --git a/prelude/android/build_only_native_code.bzl b/android/build_only_native_code.bzl similarity index 100% rename from prelude/android/build_only_native_code.bzl rename to android/build_only_native_code.bzl diff --git a/prelude/android/bundletool_util.bzl b/android/bundletool_util.bzl similarity index 100% rename from prelude/android/bundletool_util.bzl rename to android/bundletool_util.bzl diff --git a/prelude/android/configuration.bzl b/android/configuration.bzl similarity index 100% rename from prelude/android/configuration.bzl rename to android/configuration.bzl diff --git a/prelude/android/constraints/BUCK.v2 b/android/constraints/BUCK.v2 similarity index 100% rename from prelude/android/constraints/BUCK.v2 rename to android/constraints/BUCK.v2 diff --git a/prelude/android/cpu_filters.bzl b/android/cpu_filters.bzl similarity index 100% rename from prelude/android/cpu_filters.bzl rename to android/cpu_filters.bzl diff --git a/prelude/android/dex_rules.bzl b/android/dex_rules.bzl similarity index 100% rename from prelude/android/dex_rules.bzl rename to android/dex_rules.bzl diff --git a/prelude/android/exopackage.bzl 
b/android/exopackage.bzl similarity index 100% rename from prelude/android/exopackage.bzl rename to android/exopackage.bzl diff --git a/prelude/android/gen_aidl.bzl b/android/gen_aidl.bzl similarity index 100% rename from prelude/android/gen_aidl.bzl rename to android/gen_aidl.bzl diff --git a/prelude/android/min_sdk_version.bzl b/android/min_sdk_version.bzl similarity index 100% rename from prelude/android/min_sdk_version.bzl rename to android/min_sdk_version.bzl diff --git a/prelude/android/prebuilt_native_library.bzl b/android/prebuilt_native_library.bzl similarity index 100% rename from prelude/android/prebuilt_native_library.bzl rename to android/prebuilt_native_library.bzl diff --git a/prelude/android/preprocess_java_classes.bzl b/android/preprocess_java_classes.bzl similarity index 100% rename from prelude/android/preprocess_java_classes.bzl rename to android/preprocess_java_classes.bzl diff --git a/prelude/android/proguard.bzl b/android/proguard.bzl similarity index 100% rename from prelude/android/proguard.bzl rename to android/proguard.bzl diff --git a/prelude/android/r_dot_java.bzl b/android/r_dot_java.bzl similarity index 100% rename from prelude/android/r_dot_java.bzl rename to android/r_dot_java.bzl diff --git a/prelude/android/robolectric_test.bzl b/android/robolectric_test.bzl similarity index 100% rename from prelude/android/robolectric_test.bzl rename to android/robolectric_test.bzl diff --git a/prelude/android/tools/BUCK.v2 b/android/tools/BUCK.v2 similarity index 100% rename from prelude/android/tools/BUCK.v2 rename to android/tools/BUCK.v2 diff --git a/prelude/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java b/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java similarity index 100% rename from prelude/android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java rename to android/tools/com/facebook/buck_generated/AppWithoutResourcesStub.java diff --git 
a/prelude/android/tools/combine_native_library_dirs.py b/android/tools/combine_native_library_dirs.py similarity index 100% rename from prelude/android/tools/combine_native_library_dirs.py rename to android/tools/combine_native_library_dirs.py diff --git a/prelude/android/tools/filter_dex.py b/android/tools/filter_dex.py similarity index 100% rename from prelude/android/tools/filter_dex.py rename to android/tools/filter_dex.py diff --git a/prelude/android/tools/filter_extra_resources.py b/android/tools/filter_extra_resources.py similarity index 100% rename from prelude/android/tools/filter_extra_resources.py rename to android/tools/filter_extra_resources.py diff --git a/prelude/android/tools/filter_prebuilt_native_library_dir.py b/android/tools/filter_prebuilt_native_library_dir.py similarity index 100% rename from prelude/android/tools/filter_prebuilt_native_library_dir.py rename to android/tools/filter_prebuilt_native_library_dir.py diff --git a/prelude/android/tools/merge_sequence.py b/android/tools/merge_sequence.py similarity index 100% rename from prelude/android/tools/merge_sequence.py rename to android/tools/merge_sequence.py diff --git a/prelude/android/tools/native_libs_as_assets_metadata.py b/android/tools/native_libs_as_assets_metadata.py similarity index 100% rename from prelude/android/tools/native_libs_as_assets_metadata.py rename to android/tools/native_libs_as_assets_metadata.py diff --git a/prelude/android/tools/unpack_aar.py b/android/tools/unpack_aar.py similarity index 100% rename from prelude/android/tools/unpack_aar.py rename to android/tools/unpack_aar.py diff --git a/prelude/android/util.bzl b/android/util.bzl similarity index 100% rename from prelude/android/util.bzl rename to android/util.bzl diff --git a/prelude/android/voltron.bzl b/android/voltron.bzl similarity index 100% rename from prelude/android/voltron.bzl rename to android/voltron.bzl diff --git a/prelude/apple/apple_asset_catalog.bzl b/apple/apple_asset_catalog.bzl similarity 
index 100% rename from prelude/apple/apple_asset_catalog.bzl rename to apple/apple_asset_catalog.bzl diff --git a/prelude/apple/apple_asset_catalog_compilation_options.bzl b/apple/apple_asset_catalog_compilation_options.bzl similarity index 100% rename from prelude/apple/apple_asset_catalog_compilation_options.bzl rename to apple/apple_asset_catalog_compilation_options.bzl diff --git a/prelude/apple/apple_asset_catalog_types.bzl b/apple/apple_asset_catalog_types.bzl similarity index 100% rename from prelude/apple/apple_asset_catalog_types.bzl rename to apple/apple_asset_catalog_types.bzl diff --git a/prelude/apple/apple_binary.bzl b/apple/apple_binary.bzl similarity index 100% rename from prelude/apple/apple_binary.bzl rename to apple/apple_binary.bzl diff --git a/prelude/apple/apple_bundle.bzl b/apple/apple_bundle.bzl similarity index 100% rename from prelude/apple/apple_bundle.bzl rename to apple/apple_bundle.bzl diff --git a/prelude/apple/apple_bundle_attrs.bzl b/apple/apple_bundle_attrs.bzl similarity index 100% rename from prelude/apple/apple_bundle_attrs.bzl rename to apple/apple_bundle_attrs.bzl diff --git a/prelude/apple/apple_bundle_config.bzl b/apple/apple_bundle_config.bzl similarity index 100% rename from prelude/apple/apple_bundle_config.bzl rename to apple/apple_bundle_config.bzl diff --git a/prelude/apple/apple_bundle_destination.bzl b/apple/apple_bundle_destination.bzl similarity index 100% rename from prelude/apple/apple_bundle_destination.bzl rename to apple/apple_bundle_destination.bzl diff --git a/prelude/apple/apple_bundle_part.bzl b/apple/apple_bundle_part.bzl similarity index 100% rename from prelude/apple/apple_bundle_part.bzl rename to apple/apple_bundle_part.bzl diff --git a/prelude/apple/apple_bundle_resources.bzl b/apple/apple_bundle_resources.bzl similarity index 100% rename from prelude/apple/apple_bundle_resources.bzl rename to apple/apple_bundle_resources.bzl diff --git a/prelude/apple/apple_bundle_types.bzl 
b/apple/apple_bundle_types.bzl similarity index 100% rename from prelude/apple/apple_bundle_types.bzl rename to apple/apple_bundle_types.bzl diff --git a/prelude/apple/apple_bundle_utility.bzl b/apple/apple_bundle_utility.bzl similarity index 100% rename from prelude/apple/apple_bundle_utility.bzl rename to apple/apple_bundle_utility.bzl diff --git a/prelude/apple/apple_code_signing_types.bzl b/apple/apple_code_signing_types.bzl similarity index 100% rename from prelude/apple/apple_code_signing_types.bzl rename to apple/apple_code_signing_types.bzl diff --git a/prelude/apple/apple_common.bzl b/apple/apple_common.bzl similarity index 100% rename from prelude/apple/apple_common.bzl rename to apple/apple_common.bzl diff --git a/prelude/apple/apple_core_data.bzl b/apple/apple_core_data.bzl similarity index 100% rename from prelude/apple/apple_core_data.bzl rename to apple/apple_core_data.bzl diff --git a/prelude/apple/apple_core_data_types.bzl b/apple/apple_core_data_types.bzl similarity index 100% rename from prelude/apple/apple_core_data_types.bzl rename to apple/apple_core_data_types.bzl diff --git a/prelude/apple/apple_dsym.bzl b/apple/apple_dsym.bzl similarity index 100% rename from prelude/apple/apple_dsym.bzl rename to apple/apple_dsym.bzl diff --git a/prelude/apple/apple_dsym_config.bzl b/apple/apple_dsym_config.bzl similarity index 100% rename from prelude/apple/apple_dsym_config.bzl rename to apple/apple_dsym_config.bzl diff --git a/prelude/apple/apple_entitlements.bzl b/apple/apple_entitlements.bzl similarity index 100% rename from prelude/apple/apple_entitlements.bzl rename to apple/apple_entitlements.bzl diff --git a/prelude/apple/apple_framework_versions.bzl b/apple/apple_framework_versions.bzl similarity index 100% rename from prelude/apple/apple_framework_versions.bzl rename to apple/apple_framework_versions.bzl diff --git a/prelude/apple/apple_frameworks.bzl b/apple/apple_frameworks.bzl similarity index 100% rename from 
prelude/apple/apple_frameworks.bzl rename to apple/apple_frameworks.bzl diff --git a/prelude/apple/apple_info_plist.bzl b/apple/apple_info_plist.bzl similarity index 100% rename from prelude/apple/apple_info_plist.bzl rename to apple/apple_info_plist.bzl diff --git a/prelude/apple/apple_info_plist_substitutions_parsing.bzl b/apple/apple_info_plist_substitutions_parsing.bzl similarity index 100% rename from prelude/apple/apple_info_plist_substitutions_parsing.bzl rename to apple/apple_info_plist_substitutions_parsing.bzl diff --git a/prelude/apple/apple_library.bzl b/apple/apple_library.bzl similarity index 100% rename from prelude/apple/apple_library.bzl rename to apple/apple_library.bzl diff --git a/prelude/apple/apple_macro_layer.bzl b/apple/apple_macro_layer.bzl similarity index 100% rename from prelude/apple/apple_macro_layer.bzl rename to apple/apple_macro_layer.bzl diff --git a/prelude/apple/apple_modular_utility.bzl b/apple/apple_modular_utility.bzl similarity index 100% rename from prelude/apple/apple_modular_utility.bzl rename to apple/apple_modular_utility.bzl diff --git a/prelude/apple/apple_native.bzl b/apple/apple_native.bzl similarity index 100% rename from prelude/apple/apple_native.bzl rename to apple/apple_native.bzl diff --git a/prelude/apple/apple_package.bzl b/apple/apple_package.bzl similarity index 100% rename from prelude/apple/apple_package.bzl rename to apple/apple_package.bzl diff --git a/prelude/apple/apple_package_config.bzl b/apple/apple_package_config.bzl similarity index 100% rename from prelude/apple/apple_package_config.bzl rename to apple/apple_package_config.bzl diff --git a/prelude/apple/apple_resource.bzl b/apple/apple_resource.bzl similarity index 100% rename from prelude/apple/apple_resource.bzl rename to apple/apple_resource.bzl diff --git a/prelude/apple/apple_resource_bundle.bzl b/apple/apple_resource_bundle.bzl similarity index 100% rename from prelude/apple/apple_resource_bundle.bzl rename to 
apple/apple_resource_bundle.bzl diff --git a/prelude/apple/apple_resource_types.bzl b/apple/apple_resource_types.bzl similarity index 100% rename from prelude/apple/apple_resource_types.bzl rename to apple/apple_resource_types.bzl diff --git a/prelude/apple/apple_resource_utility.bzl b/apple/apple_resource_utility.bzl similarity index 100% rename from prelude/apple/apple_resource_utility.bzl rename to apple/apple_resource_utility.bzl diff --git a/prelude/apple/apple_rules_impl.bzl b/apple/apple_rules_impl.bzl similarity index 100% rename from prelude/apple/apple_rules_impl.bzl rename to apple/apple_rules_impl.bzl diff --git a/prelude/apple/apple_rules_impl_utility.bzl b/apple/apple_rules_impl_utility.bzl similarity index 100% rename from prelude/apple/apple_rules_impl_utility.bzl rename to apple/apple_rules_impl_utility.bzl diff --git a/prelude/apple/apple_sdk.bzl b/apple/apple_sdk.bzl similarity index 100% rename from prelude/apple/apple_sdk.bzl rename to apple/apple_sdk.bzl diff --git a/prelude/apple/apple_sdk_metadata.bzl b/apple/apple_sdk_metadata.bzl similarity index 100% rename from prelude/apple/apple_sdk_metadata.bzl rename to apple/apple_sdk_metadata.bzl diff --git a/prelude/apple/apple_stripping.bzl b/apple/apple_stripping.bzl similarity index 100% rename from prelude/apple/apple_stripping.bzl rename to apple/apple_stripping.bzl diff --git a/prelude/apple/apple_swift_stdlib.bzl b/apple/apple_swift_stdlib.bzl similarity index 100% rename from prelude/apple/apple_swift_stdlib.bzl rename to apple/apple_swift_stdlib.bzl diff --git a/prelude/apple/apple_target_sdk_version.bzl b/apple/apple_target_sdk_version.bzl similarity index 100% rename from prelude/apple/apple_target_sdk_version.bzl rename to apple/apple_target_sdk_version.bzl diff --git a/prelude/apple/apple_test.bzl b/apple/apple_test.bzl similarity index 100% rename from prelude/apple/apple_test.bzl rename to apple/apple_test.bzl diff --git a/prelude/apple/apple_toolchain.bzl 
b/apple/apple_toolchain.bzl similarity index 100% rename from prelude/apple/apple_toolchain.bzl rename to apple/apple_toolchain.bzl diff --git a/prelude/apple/apple_toolchain_types.bzl b/apple/apple_toolchain_types.bzl similarity index 100% rename from prelude/apple/apple_toolchain_types.bzl rename to apple/apple_toolchain_types.bzl diff --git a/prelude/apple/apple_universal_binaries.bzl b/apple/apple_universal_binaries.bzl similarity index 100% rename from prelude/apple/apple_universal_binaries.bzl rename to apple/apple_universal_binaries.bzl diff --git a/prelude/apple/apple_universal_executable.bzl b/apple/apple_universal_executable.bzl similarity index 100% rename from prelude/apple/apple_universal_executable.bzl rename to apple/apple_universal_executable.bzl diff --git a/prelude/apple/apple_utility.bzl b/apple/apple_utility.bzl similarity index 100% rename from prelude/apple/apple_utility.bzl rename to apple/apple_utility.bzl diff --git a/prelude/apple/apple_xcuitest.bzl b/apple/apple_xcuitest.bzl similarity index 100% rename from prelude/apple/apple_xcuitest.bzl rename to apple/apple_xcuitest.bzl diff --git a/prelude/apple/debug.bzl b/apple/debug.bzl similarity index 100% rename from prelude/apple/debug.bzl rename to apple/debug.bzl diff --git a/prelude/apple/mockingbird/mockingbird_mock.bzl b/apple/mockingbird/mockingbird_mock.bzl similarity index 100% rename from prelude/apple/mockingbird/mockingbird_mock.bzl rename to apple/mockingbird/mockingbird_mock.bzl diff --git a/prelude/apple/mockingbird/mockingbird_types.bzl b/apple/mockingbird/mockingbird_types.bzl similarity index 100% rename from prelude/apple/mockingbird/mockingbird_types.bzl rename to apple/mockingbird/mockingbird_types.bzl diff --git a/prelude/apple/modulemap.bzl b/apple/modulemap.bzl similarity index 100% rename from prelude/apple/modulemap.bzl rename to apple/modulemap.bzl diff --git a/prelude/apple/prebuilt_apple_framework.bzl b/apple/prebuilt_apple_framework.bzl similarity index 100% 
rename from prelude/apple/prebuilt_apple_framework.bzl rename to apple/prebuilt_apple_framework.bzl diff --git a/prelude/apple/resource_groups.bzl b/apple/resource_groups.bzl similarity index 100% rename from prelude/apple/resource_groups.bzl rename to apple/resource_groups.bzl diff --git a/prelude/apple/scene_kit_assets.bzl b/apple/scene_kit_assets.bzl similarity index 100% rename from prelude/apple/scene_kit_assets.bzl rename to apple/scene_kit_assets.bzl diff --git a/prelude/apple/scene_kit_assets_types.bzl b/apple/scene_kit_assets_types.bzl similarity index 100% rename from prelude/apple/scene_kit_assets_types.bzl rename to apple/scene_kit_assets_types.bzl diff --git a/prelude/apple/swift/apple_sdk_clang_module.bzl b/apple/swift/apple_sdk_clang_module.bzl similarity index 100% rename from prelude/apple/swift/apple_sdk_clang_module.bzl rename to apple/swift/apple_sdk_clang_module.bzl diff --git a/prelude/apple/swift/apple_sdk_modules_utility.bzl b/apple/swift/apple_sdk_modules_utility.bzl similarity index 100% rename from prelude/apple/swift/apple_sdk_modules_utility.bzl rename to apple/swift/apple_sdk_modules_utility.bzl diff --git a/prelude/apple/swift/apple_sdk_swift_module.bzl b/apple/swift/apple_sdk_swift_module.bzl similarity index 100% rename from prelude/apple/swift/apple_sdk_swift_module.bzl rename to apple/swift/apple_sdk_swift_module.bzl diff --git a/prelude/apple/swift/swift_compilation.bzl b/apple/swift/swift_compilation.bzl similarity index 100% rename from prelude/apple/swift/swift_compilation.bzl rename to apple/swift/swift_compilation.bzl diff --git a/prelude/apple/swift/swift_debug_info_utils.bzl b/apple/swift/swift_debug_info_utils.bzl similarity index 100% rename from prelude/apple/swift/swift_debug_info_utils.bzl rename to apple/swift/swift_debug_info_utils.bzl diff --git a/prelude/apple/swift/swift_incremental_support.bzl b/apple/swift/swift_incremental_support.bzl similarity index 100% rename from 
prelude/apple/swift/swift_incremental_support.bzl rename to apple/swift/swift_incremental_support.bzl diff --git a/prelude/apple/swift/swift_module_map.bzl b/apple/swift/swift_module_map.bzl similarity index 100% rename from prelude/apple/swift/swift_module_map.bzl rename to apple/swift/swift_module_map.bzl diff --git a/prelude/apple/swift/swift_pcm_compilation.bzl b/apple/swift/swift_pcm_compilation.bzl similarity index 100% rename from prelude/apple/swift/swift_pcm_compilation.bzl rename to apple/swift/swift_pcm_compilation.bzl diff --git a/prelude/apple/swift/swift_pcm_compilation_types.bzl b/apple/swift/swift_pcm_compilation_types.bzl similarity index 100% rename from prelude/apple/swift/swift_pcm_compilation_types.bzl rename to apple/swift/swift_pcm_compilation_types.bzl diff --git a/prelude/apple/swift/swift_runtime.bzl b/apple/swift/swift_runtime.bzl similarity index 100% rename from prelude/apple/swift/swift_runtime.bzl rename to apple/swift/swift_runtime.bzl diff --git a/prelude/apple/swift/swift_sdk_pcm_compilation.bzl b/apple/swift/swift_sdk_pcm_compilation.bzl similarity index 100% rename from prelude/apple/swift/swift_sdk_pcm_compilation.bzl rename to apple/swift/swift_sdk_pcm_compilation.bzl diff --git a/prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl b/apple/swift/swift_sdk_swiftinterface_compilation.bzl similarity index 100% rename from prelude/apple/swift/swift_sdk_swiftinterface_compilation.bzl rename to apple/swift/swift_sdk_swiftinterface_compilation.bzl diff --git a/prelude/apple/swift/swift_toolchain.bzl b/apple/swift/swift_toolchain.bzl similarity index 100% rename from prelude/apple/swift/swift_toolchain.bzl rename to apple/swift/swift_toolchain.bzl diff --git a/prelude/apple/swift/swift_toolchain_macro_layer.bzl b/apple/swift/swift_toolchain_macro_layer.bzl similarity index 100% rename from prelude/apple/swift/swift_toolchain_macro_layer.bzl rename to apple/swift/swift_toolchain_macro_layer.bzl diff --git 
a/prelude/apple/swift/swift_toolchain_types.bzl b/apple/swift/swift_toolchain_types.bzl similarity index 100% rename from prelude/apple/swift/swift_toolchain_types.bzl rename to apple/swift/swift_toolchain_types.bzl diff --git a/prelude/apple/swift/swift_types.bzl b/apple/swift/swift_types.bzl similarity index 100% rename from prelude/apple/swift/swift_types.bzl rename to apple/swift/swift_types.bzl diff --git a/prelude/apple/tools/BUCK.v2 b/apple/tools/BUCK.v2 similarity index 100% rename from prelude/apple/tools/BUCK.v2 rename to apple/tools/BUCK.v2 diff --git a/prelude/apple/tools/bundling/BUCK.v2 b/apple/tools/bundling/BUCK.v2 similarity index 100% rename from prelude/apple/tools/bundling/BUCK.v2 rename to apple/tools/bundling/BUCK.v2 diff --git a/prelude/apple/tools/bundling/action_metadata.py b/apple/tools/bundling/action_metadata.py similarity index 100% rename from prelude/apple/tools/bundling/action_metadata.py rename to apple/tools/bundling/action_metadata.py diff --git a/prelude/apple/tools/bundling/action_metadata_test.py b/apple/tools/bundling/action_metadata_test.py similarity index 100% rename from prelude/apple/tools/bundling/action_metadata_test.py rename to apple/tools/bundling/action_metadata_test.py diff --git a/prelude/apple/tools/bundling/assemble_bundle.py b/apple/tools/bundling/assemble_bundle.py similarity index 100% rename from prelude/apple/tools/bundling/assemble_bundle.py rename to apple/tools/bundling/assemble_bundle.py diff --git a/prelude/apple/tools/bundling/assemble_bundle_types.py b/apple/tools/bundling/assemble_bundle_types.py similarity index 100% rename from prelude/apple/tools/bundling/assemble_bundle_types.py rename to apple/tools/bundling/assemble_bundle_types.py diff --git a/prelude/apple/tools/bundling/incremental_state.py b/apple/tools/bundling/incremental_state.py similarity index 100% rename from prelude/apple/tools/bundling/incremental_state.py rename to apple/tools/bundling/incremental_state.py diff --git 
a/prelude/apple/tools/bundling/incremental_state_test.py b/apple/tools/bundling/incremental_state_test.py similarity index 100% rename from prelude/apple/tools/bundling/incremental_state_test.py rename to apple/tools/bundling/incremental_state_test.py diff --git a/prelude/apple/tools/bundling/incremental_utils.py b/apple/tools/bundling/incremental_utils.py similarity index 100% rename from prelude/apple/tools/bundling/incremental_utils.py rename to apple/tools/bundling/incremental_utils.py diff --git a/prelude/apple/tools/bundling/incremental_utils_test.py b/apple/tools/bundling/incremental_utils_test.py similarity index 100% rename from prelude/apple/tools/bundling/incremental_utils_test.py rename to apple/tools/bundling/incremental_utils_test.py diff --git a/prelude/apple/tools/bundling/main.py b/apple/tools/bundling/main.py similarity index 100% rename from prelude/apple/tools/bundling/main.py rename to apple/tools/bundling/main.py diff --git a/prelude/apple/tools/bundling/swift_support.py b/apple/tools/bundling/swift_support.py similarity index 100% rename from prelude/apple/tools/bundling/swift_support.py rename to apple/tools/bundling/swift_support.py diff --git a/prelude/apple/tools/bundling/test_resources/newer_version_action_metadata.json b/apple/tools/bundling/test_resources/newer_version_action_metadata.json similarity index 100% rename from prelude/apple/tools/bundling/test_resources/newer_version_action_metadata.json rename to apple/tools/bundling/test_resources/newer_version_action_metadata.json diff --git a/prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json b/apple/tools/bundling/test_resources/newer_version_incremental_state.json similarity index 100% rename from prelude/apple/tools/bundling/test_resources/newer_version_incremental_state.json rename to apple/tools/bundling/test_resources/newer_version_incremental_state.json diff --git a/prelude/apple/tools/bundling/test_resources/the.broken_json 
b/apple/tools/bundling/test_resources/the.broken_json similarity index 100% rename from prelude/apple/tools/bundling/test_resources/the.broken_json rename to apple/tools/bundling/test_resources/the.broken_json diff --git a/prelude/apple/tools/bundling/test_resources/valid_action_metadata.json b/apple/tools/bundling/test_resources/valid_action_metadata.json similarity index 100% rename from prelude/apple/tools/bundling/test_resources/valid_action_metadata.json rename to apple/tools/bundling/test_resources/valid_action_metadata.json diff --git a/prelude/apple/tools/bundling/test_resources/valid_incremental_state.json b/apple/tools/bundling/test_resources/valid_incremental_state.json similarity index 100% rename from prelude/apple/tools/bundling/test_resources/valid_incremental_state.json rename to apple/tools/bundling/test_resources/valid_incremental_state.json diff --git a/prelude/apple/tools/code_signing/BUCK.v2 b/apple/tools/code_signing/BUCK.v2 similarity index 100% rename from prelude/apple/tools/code_signing/BUCK.v2 rename to apple/tools/code_signing/BUCK.v2 diff --git a/prelude/apple/tools/code_signing/app_id.py b/apple/tools/code_signing/app_id.py similarity index 100% rename from prelude/apple/tools/code_signing/app_id.py rename to apple/tools/code_signing/app_id.py diff --git a/prelude/apple/tools/code_signing/app_id_test.py b/apple/tools/code_signing/app_id_test.py similarity index 100% rename from prelude/apple/tools/code_signing/app_id_test.py rename to apple/tools/code_signing/app_id_test.py diff --git a/prelude/apple/tools/code_signing/apple_platform.py b/apple/tools/code_signing/apple_platform.py similarity index 100% rename from prelude/apple/tools/code_signing/apple_platform.py rename to apple/tools/code_signing/apple_platform.py diff --git a/prelude/apple/tools/code_signing/codesign_bundle.py b/apple/tools/code_signing/codesign_bundle.py similarity index 100% rename from prelude/apple/tools/code_signing/codesign_bundle.py rename to 
apple/tools/code_signing/codesign_bundle.py diff --git a/prelude/apple/tools/code_signing/codesign_command_factory.py b/apple/tools/code_signing/codesign_command_factory.py similarity index 100% rename from prelude/apple/tools/code_signing/codesign_command_factory.py rename to apple/tools/code_signing/codesign_command_factory.py diff --git a/prelude/apple/tools/code_signing/fast_adhoc.py b/apple/tools/code_signing/fast_adhoc.py similarity index 100% rename from prelude/apple/tools/code_signing/fast_adhoc.py rename to apple/tools/code_signing/fast_adhoc.py diff --git a/prelude/apple/tools/code_signing/identity.py b/apple/tools/code_signing/identity.py similarity index 100% rename from prelude/apple/tools/code_signing/identity.py rename to apple/tools/code_signing/identity.py diff --git a/prelude/apple/tools/code_signing/identity_test.py b/apple/tools/code_signing/identity_test.py similarity index 100% rename from prelude/apple/tools/code_signing/identity_test.py rename to apple/tools/code_signing/identity_test.py diff --git a/prelude/apple/tools/code_signing/info_plist_metadata.py b/apple/tools/code_signing/info_plist_metadata.py similarity index 100% rename from prelude/apple/tools/code_signing/info_plist_metadata.py rename to apple/tools/code_signing/info_plist_metadata.py diff --git a/prelude/apple/tools/code_signing/info_plist_metadata_test.py b/apple/tools/code_signing/info_plist_metadata_test.py similarity index 100% rename from prelude/apple/tools/code_signing/info_plist_metadata_test.py rename to apple/tools/code_signing/info_plist_metadata_test.py diff --git a/prelude/apple/tools/code_signing/list_codesign_identities.py b/apple/tools/code_signing/list_codesign_identities.py similarity index 100% rename from prelude/apple/tools/code_signing/list_codesign_identities.py rename to apple/tools/code_signing/list_codesign_identities.py diff --git a/prelude/apple/tools/code_signing/main.py b/apple/tools/code_signing/main.py similarity index 100% rename from 
prelude/apple/tools/code_signing/main.py rename to apple/tools/code_signing/main.py diff --git a/prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py b/apple/tools/code_signing/prepare_code_signing_entitlements.py similarity index 100% rename from prelude/apple/tools/code_signing/prepare_code_signing_entitlements.py rename to apple/tools/code_signing/prepare_code_signing_entitlements.py diff --git a/prelude/apple/tools/code_signing/prepare_code_signing_entitlements_test.py b/apple/tools/code_signing/prepare_code_signing_entitlements_test.py similarity index 100% rename from prelude/apple/tools/code_signing/prepare_code_signing_entitlements_test.py rename to apple/tools/code_signing/prepare_code_signing_entitlements_test.py diff --git a/prelude/apple/tools/code_signing/prepare_info_plist.py b/apple/tools/code_signing/prepare_info_plist.py similarity index 100% rename from prelude/apple/tools/code_signing/prepare_info_plist.py rename to apple/tools/code_signing/prepare_info_plist.py diff --git a/prelude/apple/tools/code_signing/prepare_info_plist_test.py b/apple/tools/code_signing/prepare_info_plist_test.py similarity index 100% rename from prelude/apple/tools/code_signing/prepare_info_plist_test.py rename to apple/tools/code_signing/prepare_info_plist_test.py diff --git a/prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py b/apple/tools/code_signing/provisioning_profile_diagnostics.py similarity index 100% rename from prelude/apple/tools/code_signing/provisioning_profile_diagnostics.py rename to apple/tools/code_signing/provisioning_profile_diagnostics.py diff --git a/prelude/apple/tools/code_signing/provisioning_profile_metadata.py b/apple/tools/code_signing/provisioning_profile_metadata.py similarity index 100% rename from prelude/apple/tools/code_signing/provisioning_profile_metadata.py rename to apple/tools/code_signing/provisioning_profile_metadata.py diff --git 
a/prelude/apple/tools/code_signing/provisioning_profile_metadata_test.py b/apple/tools/code_signing/provisioning_profile_metadata_test.py similarity index 100% rename from prelude/apple/tools/code_signing/provisioning_profile_metadata_test.py rename to apple/tools/code_signing/provisioning_profile_metadata_test.py diff --git a/prelude/apple/tools/code_signing/provisioning_profile_selection.py b/apple/tools/code_signing/provisioning_profile_selection.py similarity index 100% rename from prelude/apple/tools/code_signing/provisioning_profile_selection.py rename to apple/tools/code_signing/provisioning_profile_selection.py diff --git a/prelude/apple/tools/code_signing/provisioning_profile_selection_test.py b/apple/tools/code_signing/provisioning_profile_selection_test.py similarity index 100% rename from prelude/apple/tools/code_signing/provisioning_profile_selection_test.py rename to apple/tools/code_signing/provisioning_profile_selection_test.py diff --git a/prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py b/apple/tools/code_signing/read_provisioning_profile_command_factory.py similarity index 100% rename from prelude/apple/tools/code_signing/read_provisioning_profile_command_factory.py rename to apple/tools/code_signing/read_provisioning_profile_command_factory.py diff --git a/prelude/apple/tools/code_signing/test_resources/Entitlements.plist b/apple/tools/code_signing/test_resources/Entitlements.plist similarity index 100% rename from prelude/apple/tools/code_signing/test_resources/Entitlements.plist rename to apple/tools/code_signing/test_resources/Entitlements.plist diff --git a/prelude/apple/tools/code_signing/test_resources/qualified_sample.mobileprovision b/apple/tools/code_signing/test_resources/qualified_sample.mobileprovision similarity index 100% rename from prelude/apple/tools/code_signing/test_resources/qualified_sample.mobileprovision rename to apple/tools/code_signing/test_resources/qualified_sample.mobileprovision diff 
--git a/prelude/apple/tools/code_signing/test_resources/sample.mobileprovision b/apple/tools/code_signing/test_resources/sample.mobileprovision similarity index 100% rename from prelude/apple/tools/code_signing/test_resources/sample.mobileprovision rename to apple/tools/code_signing/test_resources/sample.mobileprovision diff --git a/prelude/apple/tools/defs.bzl b/apple/tools/defs.bzl similarity index 100% rename from prelude/apple/tools/defs.bzl rename to apple/tools/defs.bzl diff --git a/prelude/apple/tools/dry_codesign_tool.py b/apple/tools/dry_codesign_tool.py similarity index 100% rename from prelude/apple/tools/dry_codesign_tool.py rename to apple/tools/dry_codesign_tool.py diff --git a/prelude/apple/tools/info_plist_processor/BUCK.v2 b/apple/tools/info_plist_processor/BUCK.v2 similarity index 100% rename from prelude/apple/tools/info_plist_processor/BUCK.v2 rename to apple/tools/info_plist_processor/BUCK.v2 diff --git a/prelude/apple/tools/info_plist_processor/main.py b/apple/tools/info_plist_processor/main.py similarity index 100% rename from prelude/apple/tools/info_plist_processor/main.py rename to apple/tools/info_plist_processor/main.py diff --git a/prelude/apple/tools/info_plist_processor/preprocess.py b/apple/tools/info_plist_processor/preprocess.py similarity index 100% rename from prelude/apple/tools/info_plist_processor/preprocess.py rename to apple/tools/info_plist_processor/preprocess.py diff --git a/prelude/apple/tools/info_plist_processor/preprocess_test.py b/apple/tools/info_plist_processor/preprocess_test.py similarity index 100% rename from prelude/apple/tools/info_plist_processor/preprocess_test.py rename to apple/tools/info_plist_processor/preprocess_test.py diff --git a/prelude/apple/tools/info_plist_processor/process.py b/apple/tools/info_plist_processor/process.py similarity index 100% rename from prelude/apple/tools/info_plist_processor/process.py rename to apple/tools/info_plist_processor/process.py diff --git 
a/prelude/apple/tools/info_plist_processor/process_test.py b/apple/tools/info_plist_processor/process_test.py similarity index 100% rename from prelude/apple/tools/info_plist_processor/process_test.py rename to apple/tools/info_plist_processor/process_test.py diff --git a/prelude/apple/tools/ipa_package_maker.py b/apple/tools/ipa_package_maker.py similarity index 100% rename from prelude/apple/tools/ipa_package_maker.py rename to apple/tools/ipa_package_maker.py diff --git a/prelude/apple/tools/linker_wrapper.py b/apple/tools/linker_wrapper.py similarity index 100% rename from prelude/apple/tools/linker_wrapper.py rename to apple/tools/linker_wrapper.py diff --git a/prelude/apple/tools/make_modulemap.py b/apple/tools/make_modulemap.py similarity index 100% rename from prelude/apple/tools/make_modulemap.py rename to apple/tools/make_modulemap.py diff --git a/prelude/apple/tools/make_swift_comp_db.py b/apple/tools/make_swift_comp_db.py similarity index 100% rename from prelude/apple/tools/make_swift_comp_db.py rename to apple/tools/make_swift_comp_db.py diff --git a/prelude/apple/tools/make_swift_interface.py b/apple/tools/make_swift_interface.py similarity index 100% rename from prelude/apple/tools/make_swift_interface.py rename to apple/tools/make_swift_interface.py diff --git a/prelude/apple/tools/make_vfsoverlay.py b/apple/tools/make_vfsoverlay.py similarity index 100% rename from prelude/apple/tools/make_vfsoverlay.py rename to apple/tools/make_vfsoverlay.py diff --git a/prelude/apple/tools/plistlib_utils.py b/apple/tools/plistlib_utils.py similarity index 100% rename from prelude/apple/tools/plistlib_utils.py rename to apple/tools/plistlib_utils.py diff --git a/prelude/apple/tools/re_compatibility_utils/BUCK b/apple/tools/re_compatibility_utils/BUCK similarity index 100% rename from prelude/apple/tools/re_compatibility_utils/BUCK rename to apple/tools/re_compatibility_utils/BUCK diff --git a/prelude/apple/tools/re_compatibility_utils/writable.py 
b/apple/tools/re_compatibility_utils/writable.py similarity index 100% rename from prelude/apple/tools/re_compatibility_utils/writable.py rename to apple/tools/re_compatibility_utils/writable.py diff --git a/prelude/apple/tools/resource_broker/BUCK.v2 b/apple/tools/resource_broker/BUCK.v2 similarity index 100% rename from prelude/apple/tools/resource_broker/BUCK.v2 rename to apple/tools/resource_broker/BUCK.v2 diff --git a/prelude/apple/tools/resource_broker/idb_companion.py b/apple/tools/resource_broker/idb_companion.py similarity index 100% rename from prelude/apple/tools/resource_broker/idb_companion.py rename to apple/tools/resource_broker/idb_companion.py diff --git a/prelude/apple/tools/resource_broker/idb_target.py b/apple/tools/resource_broker/idb_target.py similarity index 100% rename from prelude/apple/tools/resource_broker/idb_target.py rename to apple/tools/resource_broker/idb_target.py diff --git a/prelude/apple/tools/resource_broker/ios.py b/apple/tools/resource_broker/ios.py similarity index 100% rename from prelude/apple/tools/resource_broker/ios.py rename to apple/tools/resource_broker/ios.py diff --git a/prelude/apple/tools/resource_broker/macos.py b/apple/tools/resource_broker/macos.py similarity index 100% rename from prelude/apple/tools/resource_broker/macos.py rename to apple/tools/resource_broker/macos.py diff --git a/prelude/apple/tools/resource_broker/main.py b/apple/tools/resource_broker/main.py similarity index 100% rename from prelude/apple/tools/resource_broker/main.py rename to apple/tools/resource_broker/main.py diff --git a/prelude/apple/tools/resource_broker/simctl_runtime.py b/apple/tools/resource_broker/simctl_runtime.py similarity index 100% rename from prelude/apple/tools/resource_broker/simctl_runtime.py rename to apple/tools/resource_broker/simctl_runtime.py diff --git a/prelude/apple/tools/resource_broker/timeouts.py b/apple/tools/resource_broker/timeouts.py similarity index 100% rename from 
prelude/apple/tools/resource_broker/timeouts.py rename to apple/tools/resource_broker/timeouts.py diff --git a/prelude/apple/tools/resource_broker/utils.py b/apple/tools/resource_broker/utils.py similarity index 100% rename from prelude/apple/tools/resource_broker/utils.py rename to apple/tools/resource_broker/utils.py diff --git a/prelude/apple/tools/selective_debugging/BUCK.v2 b/apple/tools/selective_debugging/BUCK.v2 similarity index 100% rename from prelude/apple/tools/selective_debugging/BUCK.v2 rename to apple/tools/selective_debugging/BUCK.v2 diff --git a/prelude/apple/tools/selective_debugging/macho.py b/apple/tools/selective_debugging/macho.py similarity index 100% rename from prelude/apple/tools/selective_debugging/macho.py rename to apple/tools/selective_debugging/macho.py diff --git a/prelude/apple/tools/selective_debugging/macho_parser.py b/apple/tools/selective_debugging/macho_parser.py similarity index 100% rename from prelude/apple/tools/selective_debugging/macho_parser.py rename to apple/tools/selective_debugging/macho_parser.py diff --git a/prelude/apple/tools/selective_debugging/main.py b/apple/tools/selective_debugging/main.py similarity index 100% rename from prelude/apple/tools/selective_debugging/main.py rename to apple/tools/selective_debugging/main.py diff --git a/prelude/apple/tools/selective_debugging/scrubber.py b/apple/tools/selective_debugging/scrubber.py similarity index 100% rename from prelude/apple/tools/selective_debugging/scrubber.py rename to apple/tools/selective_debugging/scrubber.py diff --git a/prelude/apple/tools/selective_debugging/scrubber_test.py b/apple/tools/selective_debugging/scrubber_test.py similarity index 100% rename from prelude/apple/tools/selective_debugging/scrubber_test.py rename to apple/tools/selective_debugging/scrubber_test.py diff --git a/prelude/apple/tools/selective_debugging/spec.py b/apple/tools/selective_debugging/spec.py similarity index 100% rename from 
prelude/apple/tools/selective_debugging/spec.py rename to apple/tools/selective_debugging/spec.py diff --git a/prelude/apple/tools/selective_debugging/spec_test.py b/apple/tools/selective_debugging/spec_test.py similarity index 100% rename from prelude/apple/tools/selective_debugging/spec_test.py rename to apple/tools/selective_debugging/spec_test.py diff --git a/prelude/apple/tools/selective_debugging/test_resources/HelloWorld b/apple/tools/selective_debugging/test_resources/HelloWorld similarity index 100% rename from prelude/apple/tools/selective_debugging/test_resources/HelloWorld rename to apple/tools/selective_debugging/test_resources/HelloWorld diff --git a/prelude/apple/tools/selective_debugging/test_resources/focused_spec.json b/apple/tools/selective_debugging/test_resources/focused_spec.json similarity index 100% rename from prelude/apple/tools/selective_debugging/test_resources/focused_spec.json rename to apple/tools/selective_debugging/test_resources/focused_spec.json diff --git a/prelude/apple/tools/selective_debugging/test_resources/focused_targets.json b/apple/tools/selective_debugging/test_resources/focused_targets.json similarity index 100% rename from prelude/apple/tools/selective_debugging/test_resources/focused_targets.json rename to apple/tools/selective_debugging/test_resources/focused_targets.json diff --git a/prelude/apple/tools/selective_debugging/test_resources/focused_targets_empty.json b/apple/tools/selective_debugging/test_resources/focused_targets_empty.json similarity index 100% rename from prelude/apple/tools/selective_debugging/test_resources/focused_targets_empty.json rename to apple/tools/selective_debugging/test_resources/focused_targets_empty.json diff --git a/prelude/apple/tools/selective_debugging/utils.py b/apple/tools/selective_debugging/utils.py similarity index 100% rename from prelude/apple/tools/selective_debugging/utils.py rename to apple/tools/selective_debugging/utils.py diff --git 
a/prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py b/apple/tools/split_arch_combine_dsym_bundles_tool.py similarity index 100% rename from prelude/apple/tools/split_arch_combine_dsym_bundles_tool.py rename to apple/tools/split_arch_combine_dsym_bundles_tool.py diff --git a/prelude/apple/tools/swift_exec.sh b/apple/tools/swift_exec.sh similarity index 100% rename from prelude/apple/tools/swift_exec.sh rename to apple/tools/swift_exec.sh diff --git a/prelude/apple/tools/swift_objc_header_postprocess.py b/apple/tools/swift_objc_header_postprocess.py similarity index 100% rename from prelude/apple/tools/swift_objc_header_postprocess.py rename to apple/tools/swift_objc_header_postprocess.py diff --git a/prelude/apple/user/apple_resource_bundle.bzl b/apple/user/apple_resource_bundle.bzl similarity index 100% rename from prelude/apple/user/apple_resource_bundle.bzl rename to apple/user/apple_resource_bundle.bzl diff --git a/prelude/apple/user/apple_resource_transition.bzl b/apple/user/apple_resource_transition.bzl similarity index 100% rename from prelude/apple/user/apple_resource_transition.bzl rename to apple/user/apple_resource_transition.bzl diff --git a/prelude/apple/user/apple_selected_debug_path_file.bzl b/apple/user/apple_selected_debug_path_file.bzl similarity index 100% rename from prelude/apple/user/apple_selected_debug_path_file.bzl rename to apple/user/apple_selected_debug_path_file.bzl diff --git a/prelude/apple/user/apple_selective_debugging.bzl b/apple/user/apple_selective_debugging.bzl similarity index 100% rename from prelude/apple/user/apple_selective_debugging.bzl rename to apple/user/apple_selective_debugging.bzl diff --git a/prelude/apple/user/apple_simulators.bzl b/apple/user/apple_simulators.bzl similarity index 100% rename from prelude/apple/user/apple_simulators.bzl rename to apple/user/apple_simulators.bzl diff --git a/prelude/apple/user/apple_toolchain_override.bzl b/apple/user/apple_toolchain_override.bzl similarity index 100% 
rename from prelude/apple/user/apple_toolchain_override.bzl rename to apple/user/apple_toolchain_override.bzl diff --git a/prelude/apple/user/apple_tools.bzl b/apple/user/apple_tools.bzl similarity index 100% rename from prelude/apple/user/apple_tools.bzl rename to apple/user/apple_tools.bzl diff --git a/prelude/apple/user/apple_watchos_bundle.bzl b/apple/user/apple_watchos_bundle.bzl similarity index 100% rename from prelude/apple/user/apple_watchos_bundle.bzl rename to apple/user/apple_watchos_bundle.bzl diff --git a/prelude/apple/user/cpu_split_transition.bzl b/apple/user/cpu_split_transition.bzl similarity index 100% rename from prelude/apple/user/cpu_split_transition.bzl rename to apple/user/cpu_split_transition.bzl diff --git a/prelude/apple/user/resource_group_map.bzl b/apple/user/resource_group_map.bzl similarity index 100% rename from prelude/apple/user/resource_group_map.bzl rename to apple/user/resource_group_map.bzl diff --git a/prelude/apple/user/watch_transition.bzl b/apple/user/watch_transition.bzl similarity index 100% rename from prelude/apple/user/watch_transition.bzl rename to apple/user/watch_transition.bzl diff --git a/prelude/apple/xcode.bzl b/apple/xcode.bzl similarity index 100% rename from prelude/apple/xcode.bzl rename to apple/xcode.bzl diff --git a/prelude/apple/xcode_postbuild_script.bzl b/apple/xcode_postbuild_script.bzl similarity index 100% rename from prelude/apple/xcode_postbuild_script.bzl rename to apple/xcode_postbuild_script.bzl diff --git a/prelude/apple/xcode_prebuild_script.bzl b/apple/xcode_prebuild_script.bzl similarity index 100% rename from prelude/apple/xcode_prebuild_script.bzl rename to apple/xcode_prebuild_script.bzl diff --git a/prelude/apple/xctest_swift_support.bzl b/apple/xctest_swift_support.bzl similarity index 100% rename from prelude/apple/xctest_swift_support.bzl rename to apple/xctest_swift_support.bzl diff --git a/prelude/artifact_tset.bzl b/artifact_tset.bzl similarity index 100% rename from 
prelude/artifact_tset.bzl rename to artifact_tset.bzl diff --git a/prelude/artifacts.bzl b/artifacts.bzl similarity index 100% rename from prelude/artifacts.bzl rename to artifacts.bzl diff --git a/prelude/asserts.bzl b/asserts.bzl similarity index 100% rename from prelude/asserts.bzl rename to asserts.bzl diff --git a/prelude/attributes.bzl b/attributes.bzl similarity index 100% rename from prelude/attributes.bzl rename to attributes.bzl diff --git a/prelude/buck2_compatibility.bzl b/buck2_compatibility.bzl similarity index 100% rename from prelude/buck2_compatibility.bzl rename to buck2_compatibility.bzl diff --git a/prelude/build_mode.bzl b/build_mode.bzl similarity index 100% rename from prelude/build_mode.bzl rename to build_mode.bzl diff --git a/prelude/cache_mode.bzl b/cache_mode.bzl similarity index 100% rename from prelude/cache_mode.bzl rename to cache_mode.bzl diff --git a/prelude/command_alias.bzl b/command_alias.bzl similarity index 100% rename from prelude/command_alias.bzl rename to command_alias.bzl diff --git a/prelude/configurations/rules.bzl b/configurations/rules.bzl similarity index 100% rename from prelude/configurations/rules.bzl rename to configurations/rules.bzl diff --git a/prelude/configurations/util.bzl b/configurations/util.bzl similarity index 100% rename from prelude/configurations/util.bzl rename to configurations/util.bzl diff --git a/prelude/cpu/BUCK.v2 b/cpu/BUCK.v2 similarity index 100% rename from prelude/cpu/BUCK.v2 rename to cpu/BUCK.v2 diff --git a/prelude/cpu/constraints/BUCK.v2 b/cpu/constraints/BUCK.v2 similarity index 100% rename from prelude/cpu/constraints/BUCK.v2 rename to cpu/constraints/BUCK.v2 diff --git a/prelude/csharp/csharp.bzl b/csharp/csharp.bzl similarity index 100% rename from prelude/csharp/csharp.bzl rename to csharp/csharp.bzl diff --git a/prelude/csharp/csharp_providers.bzl b/csharp/csharp_providers.bzl similarity index 100% rename from prelude/csharp/csharp_providers.bzl rename to 
csharp/csharp_providers.bzl diff --git a/prelude/csharp/toolchain.bzl b/csharp/toolchain.bzl similarity index 100% rename from prelude/csharp/toolchain.bzl rename to csharp/toolchain.bzl diff --git a/prelude/cxx/anon_link.bzl b/cxx/anon_link.bzl similarity index 100% rename from prelude/cxx/anon_link.bzl rename to cxx/anon_link.bzl diff --git a/prelude/cxx/archive.bzl b/cxx/archive.bzl similarity index 100% rename from prelude/cxx/archive.bzl rename to cxx/archive.bzl diff --git a/prelude/cxx/argsfiles.bzl b/cxx/argsfiles.bzl similarity index 100% rename from prelude/cxx/argsfiles.bzl rename to cxx/argsfiles.bzl diff --git a/prelude/cxx/attr_selection.bzl b/cxx/attr_selection.bzl similarity index 100% rename from prelude/cxx/attr_selection.bzl rename to cxx/attr_selection.bzl diff --git a/prelude/cxx/bitcode.bzl b/cxx/bitcode.bzl similarity index 100% rename from prelude/cxx/bitcode.bzl rename to cxx/bitcode.bzl diff --git a/prelude/cxx/comp_db.bzl b/cxx/comp_db.bzl similarity index 100% rename from prelude/cxx/comp_db.bzl rename to cxx/comp_db.bzl diff --git a/prelude/cxx/compile.bzl b/cxx/compile.bzl similarity index 100% rename from prelude/cxx/compile.bzl rename to cxx/compile.bzl diff --git a/prelude/cxx/compiler.bzl b/cxx/compiler.bzl similarity index 100% rename from prelude/cxx/compiler.bzl rename to cxx/compiler.bzl diff --git a/prelude/cxx/cxx.bzl b/cxx/cxx.bzl similarity index 100% rename from prelude/cxx/cxx.bzl rename to cxx/cxx.bzl diff --git a/prelude/cxx/cxx_bolt.bzl b/cxx/cxx_bolt.bzl similarity index 100% rename from prelude/cxx/cxx_bolt.bzl rename to cxx/cxx_bolt.bzl diff --git a/prelude/cxx/cxx_context.bzl b/cxx/cxx_context.bzl similarity index 100% rename from prelude/cxx/cxx_context.bzl rename to cxx/cxx_context.bzl diff --git a/prelude/cxx/cxx_executable.bzl b/cxx/cxx_executable.bzl similarity index 100% rename from prelude/cxx/cxx_executable.bzl rename to cxx/cxx_executable.bzl diff --git a/prelude/cxx/cxx_library.bzl b/cxx/cxx_library.bzl 
similarity index 100% rename from prelude/cxx/cxx_library.bzl rename to cxx/cxx_library.bzl diff --git a/prelude/cxx/cxx_library_utility.bzl b/cxx/cxx_library_utility.bzl similarity index 100% rename from prelude/cxx/cxx_library_utility.bzl rename to cxx/cxx_library_utility.bzl diff --git a/prelude/cxx/cxx_link_utility.bzl b/cxx/cxx_link_utility.bzl similarity index 100% rename from prelude/cxx/cxx_link_utility.bzl rename to cxx/cxx_link_utility.bzl diff --git a/prelude/cxx/cxx_sources.bzl b/cxx/cxx_sources.bzl similarity index 100% rename from prelude/cxx/cxx_sources.bzl rename to cxx/cxx_sources.bzl diff --git a/prelude/cxx/cxx_toolchain.bzl b/cxx/cxx_toolchain.bzl similarity index 100% rename from prelude/cxx/cxx_toolchain.bzl rename to cxx/cxx_toolchain.bzl diff --git a/prelude/cxx/cxx_toolchain_macro_layer.bzl b/cxx/cxx_toolchain_macro_layer.bzl similarity index 100% rename from prelude/cxx/cxx_toolchain_macro_layer.bzl rename to cxx/cxx_toolchain_macro_layer.bzl diff --git a/prelude/cxx/cxx_toolchain_types.bzl b/cxx/cxx_toolchain_types.bzl similarity index 100% rename from prelude/cxx/cxx_toolchain_types.bzl rename to cxx/cxx_toolchain_types.bzl diff --git a/prelude/cxx/cxx_types.bzl b/cxx/cxx_types.bzl similarity index 100% rename from prelude/cxx/cxx_types.bzl rename to cxx/cxx_types.bzl diff --git a/prelude/cxx/cxx_utility.bzl b/cxx/cxx_utility.bzl similarity index 100% rename from prelude/cxx/cxx_utility.bzl rename to cxx/cxx_utility.bzl diff --git a/prelude/cxx/debug.bzl b/cxx/debug.bzl similarity index 100% rename from prelude/cxx/debug.bzl rename to cxx/debug.bzl diff --git a/prelude/cxx/dist_lto/README.md b/cxx/dist_lto/README.md similarity index 100% rename from prelude/cxx/dist_lto/README.md rename to cxx/dist_lto/README.md diff --git a/prelude/cxx/dist_lto/dist_lto.bzl b/cxx/dist_lto/dist_lto.bzl similarity index 100% rename from prelude/cxx/dist_lto/dist_lto.bzl rename to cxx/dist_lto/dist_lto.bzl diff --git a/prelude/cxx/dist_lto/tools.bzl 
b/cxx/dist_lto/tools.bzl similarity index 100% rename from prelude/cxx/dist_lto/tools.bzl rename to cxx/dist_lto/tools.bzl diff --git a/prelude/cxx/dist_lto/tools/BUCK.v2 b/cxx/dist_lto/tools/BUCK.v2 similarity index 100% rename from prelude/cxx/dist_lto/tools/BUCK.v2 rename to cxx/dist_lto/tools/BUCK.v2 diff --git a/prelude/cxx/dist_lto/tools/__init__.py b/cxx/dist_lto/tools/__init__.py similarity index 100% rename from prelude/cxx/dist_lto/tools/__init__.py rename to cxx/dist_lto/tools/__init__.py diff --git a/prelude/cxx/dist_lto/tools/dist_lto_copy.py b/cxx/dist_lto/tools/dist_lto_copy.py similarity index 100% rename from prelude/cxx/dist_lto/tools/dist_lto_copy.py rename to cxx/dist_lto/tools/dist_lto_copy.py diff --git a/prelude/cxx/dist_lto/tools/dist_lto_opt.py b/cxx/dist_lto/tools/dist_lto_opt.py similarity index 100% rename from prelude/cxx/dist_lto/tools/dist_lto_opt.py rename to cxx/dist_lto/tools/dist_lto_opt.py diff --git a/prelude/cxx/dist_lto/tools/dist_lto_planner.py b/cxx/dist_lto/tools/dist_lto_planner.py similarity index 100% rename from prelude/cxx/dist_lto/tools/dist_lto_planner.py rename to cxx/dist_lto/tools/dist_lto_planner.py diff --git a/prelude/cxx/dist_lto/tools/dist_lto_prepare.py b/cxx/dist_lto/tools/dist_lto_prepare.py similarity index 100% rename from prelude/cxx/dist_lto/tools/dist_lto_prepare.py rename to cxx/dist_lto/tools/dist_lto_prepare.py diff --git a/prelude/cxx/dist_lto/tools/tests/test_dist_lto_opt.py b/cxx/dist_lto/tools/tests/test_dist_lto_opt.py similarity index 100% rename from prelude/cxx/dist_lto/tools/tests/test_dist_lto_opt.py rename to cxx/dist_lto/tools/tests/test_dist_lto_opt.py diff --git a/prelude/cxx/dwp.bzl b/cxx/dwp.bzl similarity index 100% rename from prelude/cxx/dwp.bzl rename to cxx/dwp.bzl diff --git a/prelude/cxx/groups.bzl b/cxx/groups.bzl similarity index 100% rename from prelude/cxx/groups.bzl rename to cxx/groups.bzl diff --git a/prelude/cxx/groups_types.bzl b/cxx/groups_types.bzl similarity index 
100% rename from prelude/cxx/groups_types.bzl rename to cxx/groups_types.bzl diff --git a/prelude/cxx/headers.bzl b/cxx/headers.bzl similarity index 100% rename from prelude/cxx/headers.bzl rename to cxx/headers.bzl diff --git a/prelude/cxx/link.bzl b/cxx/link.bzl similarity index 100% rename from prelude/cxx/link.bzl rename to cxx/link.bzl diff --git a/prelude/cxx/link_groups.bzl b/cxx/link_groups.bzl similarity index 100% rename from prelude/cxx/link_groups.bzl rename to cxx/link_groups.bzl diff --git a/prelude/cxx/link_groups_types.bzl b/cxx/link_groups_types.bzl similarity index 100% rename from prelude/cxx/link_groups_types.bzl rename to cxx/link_groups_types.bzl diff --git a/prelude/cxx/link_types.bzl b/cxx/link_types.bzl similarity index 100% rename from prelude/cxx/link_types.bzl rename to cxx/link_types.bzl diff --git a/prelude/cxx/linker.bzl b/cxx/linker.bzl similarity index 100% rename from prelude/cxx/linker.bzl rename to cxx/linker.bzl diff --git a/prelude/cxx/omnibus.bzl b/cxx/omnibus.bzl similarity index 100% rename from prelude/cxx/omnibus.bzl rename to cxx/omnibus.bzl diff --git a/prelude/cxx/platform.bzl b/cxx/platform.bzl similarity index 100% rename from prelude/cxx/platform.bzl rename to cxx/platform.bzl diff --git a/prelude/cxx/prebuilt_cxx_library_group.bzl b/cxx/prebuilt_cxx_library_group.bzl similarity index 100% rename from prelude/cxx/prebuilt_cxx_library_group.bzl rename to cxx/prebuilt_cxx_library_group.bzl diff --git a/prelude/cxx/preprocessor.bzl b/cxx/preprocessor.bzl similarity index 100% rename from prelude/cxx/preprocessor.bzl rename to cxx/preprocessor.bzl diff --git a/prelude/cxx/shared_library_interface.bzl b/cxx/shared_library_interface.bzl similarity index 100% rename from prelude/cxx/shared_library_interface.bzl rename to cxx/shared_library_interface.bzl diff --git a/prelude/cxx/symbols.bzl b/cxx/symbols.bzl similarity index 100% rename from prelude/cxx/symbols.bzl rename to cxx/symbols.bzl diff --git 
a/prelude/cxx/tools/BUCK.v2 b/cxx/tools/BUCK.v2 similarity index 100% rename from prelude/cxx/tools/BUCK.v2 rename to cxx/tools/BUCK.v2 diff --git a/prelude/cxx/tools/defs.bzl b/cxx/tools/defs.bzl similarity index 100% rename from prelude/cxx/tools/defs.bzl rename to cxx/tools/defs.bzl diff --git a/prelude/cxx/tools/dep_file_processor.py b/cxx/tools/dep_file_processor.py similarity index 100% rename from prelude/cxx/tools/dep_file_processor.py rename to cxx/tools/dep_file_processor.py diff --git a/prelude/cxx/tools/dep_file_utils.py b/cxx/tools/dep_file_utils.py similarity index 100% rename from prelude/cxx/tools/dep_file_utils.py rename to cxx/tools/dep_file_utils.py diff --git a/prelude/cxx/tools/hmap_wrapper.py b/cxx/tools/hmap_wrapper.py similarity index 100% rename from prelude/cxx/tools/hmap_wrapper.py rename to cxx/tools/hmap_wrapper.py diff --git a/prelude/cxx/tools/linker_wrapper.py b/cxx/tools/linker_wrapper.py similarity index 100% rename from prelude/cxx/tools/linker_wrapper.py rename to cxx/tools/linker_wrapper.py diff --git a/prelude/cxx/tools/make_comp_db.py b/cxx/tools/make_comp_db.py similarity index 100% rename from prelude/cxx/tools/make_comp_db.py rename to cxx/tools/make_comp_db.py diff --git a/prelude/cxx/tools/makefile_to_dep_file.py b/cxx/tools/makefile_to_dep_file.py similarity index 100% rename from prelude/cxx/tools/makefile_to_dep_file.py rename to cxx/tools/makefile_to_dep_file.py diff --git a/prelude/cxx/tools/show_headers_to_dep_file.py b/cxx/tools/show_headers_to_dep_file.py similarity index 100% rename from prelude/cxx/tools/show_headers_to_dep_file.py rename to cxx/tools/show_headers_to_dep_file.py diff --git a/prelude/cxx/tools/show_includes_to_dep_file.py b/cxx/tools/show_includes_to_dep_file.py similarity index 100% rename from prelude/cxx/tools/show_includes_to_dep_file.py rename to cxx/tools/show_includes_to_dep_file.py diff --git a/prelude/cxx/user/cxx_toolchain_override.bzl b/cxx/user/cxx_toolchain_override.bzl similarity 
index 100% rename from prelude/cxx/user/cxx_toolchain_override.bzl rename to cxx/user/cxx_toolchain_override.bzl diff --git a/prelude/cxx/user/link_group_map.bzl b/cxx/user/link_group_map.bzl similarity index 100% rename from prelude/cxx/user/link_group_map.bzl rename to cxx/user/link_group_map.bzl diff --git a/prelude/cxx/windows_resource.bzl b/cxx/windows_resource.bzl similarity index 100% rename from prelude/cxx/windows_resource.bzl rename to cxx/windows_resource.bzl diff --git a/prelude/cxx/xcode.bzl b/cxx/xcode.bzl similarity index 100% rename from prelude/cxx/xcode.bzl rename to cxx/xcode.bzl diff --git a/prelude/debugging/common.bzl b/debugging/common.bzl similarity index 100% rename from prelude/debugging/common.bzl rename to debugging/common.bzl diff --git a/prelude/debugging/ensure_dwp.bzl b/debugging/ensure_dwp.bzl similarity index 100% rename from prelude/debugging/ensure_dwp.bzl rename to debugging/ensure_dwp.bzl diff --git a/prelude/debugging/fdb.bxl b/debugging/fdb.bxl similarity index 100% rename from prelude/debugging/fdb.bxl rename to debugging/fdb.bxl diff --git a/prelude/debugging/inspect_dbg_exec.bzl b/debugging/inspect_dbg_exec.bzl similarity index 100% rename from prelude/debugging/inspect_dbg_exec.bzl rename to debugging/inspect_dbg_exec.bzl diff --git a/prelude/debugging/inspect_default.bzl b/debugging/inspect_default.bzl similarity index 100% rename from prelude/debugging/inspect_default.bzl rename to debugging/inspect_default.bzl diff --git a/prelude/debugging/inspect_java.bzl b/debugging/inspect_java.bzl similarity index 100% rename from prelude/debugging/inspect_java.bzl rename to debugging/inspect_java.bzl diff --git a/prelude/debugging/labels.bzl b/debugging/labels.bzl similarity index 100% rename from prelude/debugging/labels.bzl rename to debugging/labels.bzl diff --git a/prelude/debugging/types.bzl b/debugging/types.bzl similarity index 100% rename from prelude/debugging/types.bzl rename to debugging/types.bzl diff --git 
a/prelude/decls/android_common.bzl b/decls/android_common.bzl similarity index 100% rename from prelude/decls/android_common.bzl rename to decls/android_common.bzl diff --git a/prelude/decls/android_rules.bzl b/decls/android_rules.bzl similarity index 100% rename from prelude/decls/android_rules.bzl rename to decls/android_rules.bzl diff --git a/prelude/decls/common.bzl b/decls/common.bzl similarity index 100% rename from prelude/decls/common.bzl rename to decls/common.bzl diff --git a/prelude/decls/core_rules.bzl b/decls/core_rules.bzl similarity index 100% rename from prelude/decls/core_rules.bzl rename to decls/core_rules.bzl diff --git a/prelude/decls/cxx_common.bzl b/decls/cxx_common.bzl similarity index 100% rename from prelude/decls/cxx_common.bzl rename to decls/cxx_common.bzl diff --git a/prelude/decls/cxx_rules.bzl b/decls/cxx_rules.bzl similarity index 100% rename from prelude/decls/cxx_rules.bzl rename to decls/cxx_rules.bzl diff --git a/prelude/decls/d_common.bzl b/decls/d_common.bzl similarity index 100% rename from prelude/decls/d_common.bzl rename to decls/d_common.bzl diff --git a/prelude/decls/d_rules.bzl b/decls/d_rules.bzl similarity index 100% rename from prelude/decls/d_rules.bzl rename to decls/d_rules.bzl diff --git a/prelude/decls/dotnet_rules.bzl b/decls/dotnet_rules.bzl similarity index 100% rename from prelude/decls/dotnet_rules.bzl rename to decls/dotnet_rules.bzl diff --git a/prelude/decls/erlang_rules.bzl b/decls/erlang_rules.bzl similarity index 100% rename from prelude/decls/erlang_rules.bzl rename to decls/erlang_rules.bzl diff --git a/prelude/decls/genrule_common.bzl b/decls/genrule_common.bzl similarity index 100% rename from prelude/decls/genrule_common.bzl rename to decls/genrule_common.bzl diff --git a/prelude/decls/git_rules.bzl b/decls/git_rules.bzl similarity index 100% rename from prelude/decls/git_rules.bzl rename to decls/git_rules.bzl diff --git a/prelude/decls/go_common.bzl b/decls/go_common.bzl similarity index 100% 
rename from prelude/decls/go_common.bzl rename to decls/go_common.bzl diff --git a/prelude/decls/go_rules.bzl b/decls/go_rules.bzl similarity index 100% rename from prelude/decls/go_rules.bzl rename to decls/go_rules.bzl diff --git a/prelude/decls/groovy_rules.bzl b/decls/groovy_rules.bzl similarity index 100% rename from prelude/decls/groovy_rules.bzl rename to decls/groovy_rules.bzl diff --git a/prelude/decls/halide_rules.bzl b/decls/halide_rules.bzl similarity index 100% rename from prelude/decls/halide_rules.bzl rename to decls/halide_rules.bzl diff --git a/prelude/decls/haskell_common.bzl b/decls/haskell_common.bzl similarity index 100% rename from prelude/decls/haskell_common.bzl rename to decls/haskell_common.bzl diff --git a/prelude/decls/haskell_rules.bzl b/decls/haskell_rules.bzl similarity index 100% rename from prelude/decls/haskell_rules.bzl rename to decls/haskell_rules.bzl diff --git a/prelude/decls/ios_rules.bzl b/decls/ios_rules.bzl similarity index 100% rename from prelude/decls/ios_rules.bzl rename to decls/ios_rules.bzl diff --git a/prelude/decls/java_rules.bzl b/decls/java_rules.bzl similarity index 100% rename from prelude/decls/java_rules.bzl rename to decls/java_rules.bzl diff --git a/prelude/decls/js_rules.bzl b/decls/js_rules.bzl similarity index 100% rename from prelude/decls/js_rules.bzl rename to decls/js_rules.bzl diff --git a/prelude/decls/jvm_common.bzl b/decls/jvm_common.bzl similarity index 100% rename from prelude/decls/jvm_common.bzl rename to decls/jvm_common.bzl diff --git a/prelude/decls/kotlin_rules.bzl b/decls/kotlin_rules.bzl similarity index 100% rename from prelude/decls/kotlin_rules.bzl rename to decls/kotlin_rules.bzl diff --git a/prelude/decls/lua_common.bzl b/decls/lua_common.bzl similarity index 100% rename from prelude/decls/lua_common.bzl rename to decls/lua_common.bzl diff --git a/prelude/decls/lua_rules.bzl b/decls/lua_rules.bzl similarity index 100% rename from prelude/decls/lua_rules.bzl rename to 
decls/lua_rules.bzl diff --git a/prelude/decls/native_common.bzl b/decls/native_common.bzl similarity index 100% rename from prelude/decls/native_common.bzl rename to decls/native_common.bzl diff --git a/prelude/decls/ocaml_common.bzl b/decls/ocaml_common.bzl similarity index 100% rename from prelude/decls/ocaml_common.bzl rename to decls/ocaml_common.bzl diff --git a/prelude/decls/ocaml_rules.bzl b/decls/ocaml_rules.bzl similarity index 100% rename from prelude/decls/ocaml_rules.bzl rename to decls/ocaml_rules.bzl diff --git a/prelude/decls/python_common.bzl b/decls/python_common.bzl similarity index 100% rename from prelude/decls/python_common.bzl rename to decls/python_common.bzl diff --git a/prelude/decls/python_rules.bzl b/decls/python_rules.bzl similarity index 100% rename from prelude/decls/python_rules.bzl rename to decls/python_rules.bzl diff --git a/prelude/decls/re_test_common.bzl b/decls/re_test_common.bzl similarity index 100% rename from prelude/decls/re_test_common.bzl rename to decls/re_test_common.bzl diff --git a/prelude/decls/remote_common.bzl b/decls/remote_common.bzl similarity index 100% rename from prelude/decls/remote_common.bzl rename to decls/remote_common.bzl diff --git a/prelude/decls/rust_common.bzl b/decls/rust_common.bzl similarity index 100% rename from prelude/decls/rust_common.bzl rename to decls/rust_common.bzl diff --git a/prelude/decls/rust_rules.bzl b/decls/rust_rules.bzl similarity index 100% rename from prelude/decls/rust_rules.bzl rename to decls/rust_rules.bzl diff --git a/prelude/decls/scala_rules.bzl b/decls/scala_rules.bzl similarity index 100% rename from prelude/decls/scala_rules.bzl rename to decls/scala_rules.bzl diff --git a/prelude/decls/shell_rules.bzl b/decls/shell_rules.bzl similarity index 100% rename from prelude/decls/shell_rules.bzl rename to decls/shell_rules.bzl diff --git a/prelude/decls/toolchains_common.bzl b/decls/toolchains_common.bzl similarity index 100% rename from 
prelude/decls/toolchains_common.bzl rename to decls/toolchains_common.bzl diff --git a/prelude/decls/uncategorized_rules.bzl b/decls/uncategorized_rules.bzl similarity index 100% rename from prelude/decls/uncategorized_rules.bzl rename to decls/uncategorized_rules.bzl diff --git a/prelude/dist/dist_info.bzl b/dist/dist_info.bzl similarity index 100% rename from prelude/dist/dist_info.bzl rename to dist/dist_info.bzl diff --git a/prelude/docs/rules.bzl b/docs/rules.bzl similarity index 100% rename from prelude/docs/rules.bzl rename to docs/rules.bzl diff --git a/prelude/erlang/applications/BUCK.v2 b/erlang/applications/BUCK.v2 similarity index 100% rename from prelude/erlang/applications/BUCK.v2 rename to erlang/applications/BUCK.v2 diff --git a/prelude/erlang/common_test/.elp.toml b/erlang/common_test/.elp.toml similarity index 100% rename from prelude/erlang/common_test/.elp.toml rename to erlang/common_test/.elp.toml diff --git a/prelude/erlang/common_test/common/BUCK.v2 b/erlang/common_test/common/BUCK.v2 similarity index 100% rename from prelude/erlang/common_test/common/BUCK.v2 rename to erlang/common_test/common/BUCK.v2 diff --git a/prelude/erlang/common_test/common/include/artifact_annotations.hrl b/erlang/common_test/common/include/artifact_annotations.hrl similarity index 100% rename from prelude/erlang/common_test/common/include/artifact_annotations.hrl rename to erlang/common_test/common/include/artifact_annotations.hrl diff --git a/prelude/erlang/common_test/common/include/buck_ct_records.hrl b/erlang/common_test/common/include/buck_ct_records.hrl similarity index 100% rename from prelude/erlang/common_test/common/include/buck_ct_records.hrl rename to erlang/common_test/common/include/buck_ct_records.hrl diff --git a/prelude/erlang/common_test/common/include/tpx_records.hrl b/erlang/common_test/common/include/tpx_records.hrl similarity index 100% rename from prelude/erlang/common_test/common/include/tpx_records.hrl rename to 
erlang/common_test/common/include/tpx_records.hrl diff --git a/prelude/erlang/common_test/common/src/artifact_annotations.erl b/erlang/common_test/common/src/artifact_annotations.erl similarity index 100% rename from prelude/erlang/common_test/common/src/artifact_annotations.erl rename to erlang/common_test/common/src/artifact_annotations.erl diff --git a/prelude/erlang/common_test/common/src/bounded_buffer.erl b/erlang/common_test/common/src/bounded_buffer.erl similarity index 100% rename from prelude/erlang/common_test/common/src/bounded_buffer.erl rename to erlang/common_test/common/src/bounded_buffer.erl diff --git a/prelude/erlang/common_test/common/src/buck_ct_parser.erl b/erlang/common_test/common/src/buck_ct_parser.erl similarity index 100% rename from prelude/erlang/common_test/common/src/buck_ct_parser.erl rename to erlang/common_test/common/src/buck_ct_parser.erl diff --git a/prelude/erlang/common_test/common/src/buck_ct_provider.erl b/erlang/common_test/common/src/buck_ct_provider.erl similarity index 100% rename from prelude/erlang/common_test/common/src/buck_ct_provider.erl rename to erlang/common_test/common/src/buck_ct_provider.erl diff --git a/prelude/erlang/common_test/common/src/ct_error_printer.erl b/erlang/common_test/common/src/ct_error_printer.erl similarity index 100% rename from prelude/erlang/common_test/common/src/ct_error_printer.erl rename to erlang/common_test/common/src/ct_error_printer.erl diff --git a/prelude/erlang/common_test/common/src/execution_logs.erl b/erlang/common_test/common/src/execution_logs.erl similarity index 100% rename from prelude/erlang/common_test/common/src/execution_logs.erl rename to erlang/common_test/common/src/execution_logs.erl diff --git a/prelude/erlang/common_test/common/src/io_buffer.erl b/erlang/common_test/common/src/io_buffer.erl similarity index 100% rename from prelude/erlang/common_test/common/src/io_buffer.erl rename to erlang/common_test/common/src/io_buffer.erl diff --git 
a/prelude/erlang/common_test/common/src/test_artifact_directory.erl b/erlang/common_test/common/src/test_artifact_directory.erl similarity index 100% rename from prelude/erlang/common_test/common/src/test_artifact_directory.erl rename to erlang/common_test/common/src/test_artifact_directory.erl diff --git a/prelude/erlang/common_test/common/src/test_logger.erl b/erlang/common_test/common/src/test_logger.erl similarity index 100% rename from prelude/erlang/common_test/common/src/test_logger.erl rename to erlang/common_test/common/src/test_logger.erl diff --git a/prelude/erlang/common_test/cth_hooks/BUCK.v2 b/erlang/common_test/cth_hooks/BUCK.v2 similarity index 100% rename from prelude/erlang/common_test/cth_hooks/BUCK.v2 rename to erlang/common_test/cth_hooks/BUCK.v2 diff --git a/prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl b/erlang/common_test/cth_hooks/src/cth_tpx.erl similarity index 100% rename from prelude/erlang/common_test/cth_hooks/src/cth_tpx.erl rename to erlang/common_test/cth_hooks/src/cth_tpx.erl diff --git a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_role.erl b/erlang/common_test/cth_hooks/src/cth_tpx_role.erl similarity index 100% rename from prelude/erlang/common_test/cth_hooks/src/cth_tpx_role.erl rename to erlang/common_test/cth_hooks/src/cth_tpx_role.erl diff --git a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl b/erlang/common_test/cth_hooks/src/cth_tpx_server.erl similarity index 100% rename from prelude/erlang/common_test/cth_hooks/src/cth_tpx_server.erl rename to erlang/common_test/cth_hooks/src/cth_tpx_server.erl diff --git a/prelude/erlang/common_test/cth_hooks/src/cth_tpx_test_tree.erl b/erlang/common_test/cth_hooks/src/cth_tpx_test_tree.erl similarity index 100% rename from prelude/erlang/common_test/cth_hooks/src/cth_tpx_test_tree.erl rename to erlang/common_test/cth_hooks/src/cth_tpx_test_tree.erl diff --git a/prelude/erlang/common_test/cth_hooks/src/method_ids.hrl 
b/erlang/common_test/cth_hooks/src/method_ids.hrl similarity index 100% rename from prelude/erlang/common_test/cth_hooks/src/method_ids.hrl rename to erlang/common_test/cth_hooks/src/method_ids.hrl diff --git a/prelude/erlang/common_test/test_binary/BUCK.v2 b/erlang/common_test/test_binary/BUCK.v2 similarity index 100% rename from prelude/erlang/common_test/test_binary/BUCK.v2 rename to erlang/common_test/test_binary/BUCK.v2 diff --git a/prelude/erlang/common_test/test_binary/src/json_interfacer.erl b/erlang/common_test/test_binary/src/json_interfacer.erl similarity index 100% rename from prelude/erlang/common_test/test_binary/src/json_interfacer.erl rename to erlang/common_test/test_binary/src/json_interfacer.erl diff --git a/prelude/erlang/common_test/test_binary/src/junit_interfacer.erl b/erlang/common_test/test_binary/src/junit_interfacer.erl similarity index 100% rename from prelude/erlang/common_test/test_binary/src/junit_interfacer.erl rename to erlang/common_test/test_binary/src/junit_interfacer.erl diff --git a/prelude/erlang/common_test/test_binary/src/list_test.erl b/erlang/common_test/test_binary/src/list_test.erl similarity index 100% rename from prelude/erlang/common_test/test_binary/src/list_test.erl rename to erlang/common_test/test_binary/src/list_test.erl diff --git a/prelude/erlang/common_test/test_binary/src/listing_interfacer.erl b/erlang/common_test/test_binary/src/listing_interfacer.erl similarity index 100% rename from prelude/erlang/common_test/test_binary/src/listing_interfacer.erl rename to erlang/common_test/test_binary/src/listing_interfacer.erl diff --git a/prelude/erlang/common_test/test_binary/src/test_binary.erl b/erlang/common_test/test_binary/src/test_binary.erl similarity index 100% rename from prelude/erlang/common_test/test_binary/src/test_binary.erl rename to erlang/common_test/test_binary/src/test_binary.erl diff --git a/prelude/erlang/common_test/test_binary/src/test_runner.erl 
b/erlang/common_test/test_binary/src/test_runner.erl similarity index 100% rename from prelude/erlang/common_test/test_binary/src/test_runner.erl rename to erlang/common_test/test_binary/src/test_runner.erl diff --git a/prelude/erlang/common_test/test_cli_lib/BUCK.v2 b/erlang/common_test/test_cli_lib/BUCK.v2 similarity index 100% rename from prelude/erlang/common_test/test_cli_lib/BUCK.v2 rename to erlang/common_test/test_cli_lib/BUCK.v2 diff --git a/prelude/erlang/common_test/test_cli_lib/src/test.erl b/erlang/common_test/test_cli_lib/src/test.erl similarity index 100% rename from prelude/erlang/common_test/test_cli_lib/src/test.erl rename to erlang/common_test/test_cli_lib/src/test.erl diff --git a/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl b/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl similarity index 100% rename from prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl rename to erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE.erl diff --git a/prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE_data/test_list_SUITE.erl b/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE_data/test_list_SUITE.erl similarity index 100% rename from prelude/erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE_data/test_list_SUITE.erl rename to erlang/common_test/test_cli_lib/test/test_cli_e2e_SUITE_data/test_list_SUITE.erl diff --git a/prelude/erlang/common_test/test_exec/BUCK.v2 b/erlang/common_test/test_exec/BUCK.v2 similarity index 100% rename from prelude/erlang/common_test/test_exec/BUCK.v2 rename to erlang/common_test/test_exec/BUCK.v2 diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon.erl b/erlang/common_test/test_exec/src/ct_daemon.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/src/ct_daemon.erl rename to erlang/common_test/test_exec/src/ct_daemon.erl diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl 
b/erlang/common_test/test_exec/src/ct_daemon_core.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/src/ct_daemon_core.erl rename to erlang/common_test/test_exec/src/ct_daemon_core.erl diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl b/erlang/common_test/test_exec/src/ct_daemon_hooks.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/src/ct_daemon_hooks.erl rename to erlang/common_test/test_exec/src/ct_daemon_hooks.erl diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_logger.erl b/erlang/common_test/test_exec/src/ct_daemon_logger.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/src/ct_daemon_logger.erl rename to erlang/common_test/test_exec/src/ct_daemon_logger.erl diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl b/erlang/common_test/test_exec/src/ct_daemon_node.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/src/ct_daemon_node.erl rename to erlang/common_test/test_exec/src/ct_daemon_node.erl diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl b/erlang/common_test/test_exec/src/ct_daemon_printer.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/src/ct_daemon_printer.erl rename to erlang/common_test/test_exec/src/ct_daemon_printer.erl diff --git a/prelude/erlang/common_test/test_exec/src/ct_daemon_runner.erl b/erlang/common_test/test_exec/src/ct_daemon_runner.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/src/ct_daemon_runner.erl rename to erlang/common_test/test_exec/src/ct_daemon_runner.erl diff --git a/prelude/erlang/common_test/test_exec/src/ct_executor.erl b/erlang/common_test/test_exec/src/ct_executor.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/src/ct_executor.erl rename to erlang/common_test/test_exec/src/ct_executor.erl diff --git a/prelude/erlang/common_test/test_exec/src/ct_runner.erl 
b/erlang/common_test/test_exec/src/ct_runner.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/src/ct_runner.erl rename to erlang/common_test/test_exec/src/ct_runner.erl diff --git a/prelude/erlang/common_test/test_exec/src/epmd_manager.erl b/erlang/common_test/test_exec/src/epmd_manager.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/src/epmd_manager.erl rename to erlang/common_test/test_exec/src/epmd_manager.erl diff --git a/prelude/erlang/common_test/test_exec/src/test_exec.app.src b/erlang/common_test/test_exec/src/test_exec.app.src similarity index 100% rename from prelude/erlang/common_test/test_exec/src/test_exec.app.src rename to erlang/common_test/test_exec/src/test_exec.app.src diff --git a/prelude/erlang/common_test/test_exec/src/test_exec.erl b/erlang/common_test/test_exec/src/test_exec.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/src/test_exec.erl rename to erlang/common_test/test_exec/src/test_exec.erl diff --git a/prelude/erlang/common_test/test_exec/src/test_exec_sup.erl b/erlang/common_test/test_exec/src/test_exec_sup.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/src/test_exec_sup.erl rename to erlang/common_test/test_exec/src/test_exec_sup.erl diff --git a/prelude/erlang/common_test/test_exec/test/ct_executor_SUITE.erl b/erlang/common_test/test_exec/test/ct_executor_SUITE.erl similarity index 100% rename from prelude/erlang/common_test/test_exec/test/ct_executor_SUITE.erl rename to erlang/common_test/test_exec/test/ct_executor_SUITE.erl diff --git a/prelude/erlang/erlang.bzl b/erlang/erlang.bzl similarity index 100% rename from prelude/erlang/erlang.bzl rename to erlang/erlang.bzl diff --git a/prelude/erlang/erlang_application.bzl b/erlang/erlang_application.bzl similarity index 100% rename from prelude/erlang/erlang_application.bzl rename to erlang/erlang_application.bzl diff --git a/prelude/erlang/erlang_application_includes.bzl 
b/erlang/erlang_application_includes.bzl similarity index 100% rename from prelude/erlang/erlang_application_includes.bzl rename to erlang/erlang_application_includes.bzl diff --git a/prelude/erlang/erlang_build.bzl b/erlang/erlang_build.bzl similarity index 100% rename from prelude/erlang/erlang_build.bzl rename to erlang/erlang_build.bzl diff --git a/prelude/erlang/erlang_dependencies.bzl b/erlang/erlang_dependencies.bzl similarity index 100% rename from prelude/erlang/erlang_dependencies.bzl rename to erlang/erlang_dependencies.bzl diff --git a/prelude/erlang/erlang_escript.bzl b/erlang/erlang_escript.bzl similarity index 100% rename from prelude/erlang/erlang_escript.bzl rename to erlang/erlang_escript.bzl diff --git a/prelude/erlang/erlang_info.bzl b/erlang/erlang_info.bzl similarity index 100% rename from prelude/erlang/erlang_info.bzl rename to erlang/erlang_info.bzl diff --git a/prelude/erlang/erlang_ls.config b/erlang/erlang_ls.config similarity index 100% rename from prelude/erlang/erlang_ls.config rename to erlang/erlang_ls.config diff --git a/prelude/erlang/erlang_otp_application.bzl b/erlang/erlang_otp_application.bzl similarity index 100% rename from prelude/erlang/erlang_otp_application.bzl rename to erlang/erlang_otp_application.bzl diff --git a/prelude/erlang/erlang_release.bzl b/erlang/erlang_release.bzl similarity index 100% rename from prelude/erlang/erlang_release.bzl rename to erlang/erlang_release.bzl diff --git a/prelude/erlang/erlang_shell.bzl b/erlang/erlang_shell.bzl similarity index 100% rename from prelude/erlang/erlang_shell.bzl rename to erlang/erlang_shell.bzl diff --git a/prelude/erlang/erlang_tests.bzl b/erlang/erlang_tests.bzl similarity index 100% rename from prelude/erlang/erlang_tests.bzl rename to erlang/erlang_tests.bzl diff --git a/prelude/erlang/erlang_toolchain.bzl b/erlang/erlang_toolchain.bzl similarity index 100% rename from prelude/erlang/erlang_toolchain.bzl rename to erlang/erlang_toolchain.bzl diff --git 
a/prelude/erlang/erlang_utils.bzl b/erlang/erlang_utils.bzl similarity index 100% rename from prelude/erlang/erlang_utils.bzl rename to erlang/erlang_utils.bzl diff --git a/prelude/erlang/shell/BUCK.v2 b/erlang/shell/BUCK.v2 similarity index 100% rename from prelude/erlang/shell/BUCK.v2 rename to erlang/shell/BUCK.v2 diff --git a/prelude/erlang/shell/shell.bxl b/erlang/shell/shell.bxl similarity index 100% rename from prelude/erlang/shell/shell.bxl rename to erlang/shell/shell.bxl diff --git a/prelude/erlang/shell/src/shell_buck2_utils.erl b/erlang/shell/src/shell_buck2_utils.erl similarity index 100% rename from prelude/erlang/shell/src/shell_buck2_utils.erl rename to erlang/shell/src/shell_buck2_utils.erl diff --git a/prelude/erlang/shell/src/user_default.erl b/erlang/shell/src/user_default.erl similarity index 100% rename from prelude/erlang/shell/src/user_default.erl rename to erlang/shell/src/user_default.erl diff --git a/prelude/erlang/toolchain/BUCK.v2 b/erlang/toolchain/BUCK.v2 similarity index 100% rename from prelude/erlang/toolchain/BUCK.v2 rename to erlang/toolchain/BUCK.v2 diff --git a/prelude/erlang/toolchain/app_src_builder.escript b/erlang/toolchain/app_src_builder.escript similarity index 100% rename from prelude/erlang/toolchain/app_src_builder.escript rename to erlang/toolchain/app_src_builder.escript diff --git a/prelude/erlang/toolchain/boot_script_builder.escript b/erlang/toolchain/boot_script_builder.escript similarity index 100% rename from prelude/erlang/toolchain/boot_script_builder.escript rename to erlang/toolchain/boot_script_builder.escript diff --git a/prelude/erlang/toolchain/dependency_analyzer.escript b/erlang/toolchain/dependency_analyzer.escript similarity index 100% rename from prelude/erlang/toolchain/dependency_analyzer.escript rename to erlang/toolchain/dependency_analyzer.escript diff --git a/prelude/erlang/toolchain/dependency_finalizer.escript b/erlang/toolchain/dependency_finalizer.escript similarity index 100% rename 
from prelude/erlang/toolchain/dependency_finalizer.escript rename to erlang/toolchain/dependency_finalizer.escript diff --git a/prelude/erlang/toolchain/edoc_cli.escript b/erlang/toolchain/edoc_cli.escript similarity index 100% rename from prelude/erlang/toolchain/edoc_cli.escript rename to erlang/toolchain/edoc_cli.escript diff --git a/prelude/erlang/toolchain/edoc_doclet_chunks.erl b/erlang/toolchain/edoc_doclet_chunks.erl similarity index 100% rename from prelude/erlang/toolchain/edoc_doclet_chunks.erl rename to erlang/toolchain/edoc_doclet_chunks.erl diff --git a/prelude/erlang/toolchain/edoc_report.erl b/erlang/toolchain/edoc_report.erl similarity index 100% rename from prelude/erlang/toolchain/edoc_report.erl rename to erlang/toolchain/edoc_report.erl diff --git a/prelude/erlang/toolchain/erlang_ls.config b/erlang/toolchain/erlang_ls.config similarity index 100% rename from prelude/erlang/toolchain/erlang_ls.config rename to erlang/toolchain/erlang_ls.config diff --git a/prelude/erlang/toolchain/erlc_trampoline.sh b/erlang/toolchain/erlc_trampoline.sh similarity index 100% rename from prelude/erlang/toolchain/erlc_trampoline.sh rename to erlang/toolchain/erlc_trampoline.sh diff --git a/prelude/erlang/toolchain/escript_builder.escript b/erlang/toolchain/escript_builder.escript similarity index 100% rename from prelude/erlang/toolchain/escript_builder.escript rename to erlang/toolchain/escript_builder.escript diff --git a/prelude/erlang/toolchain/escript_trampoline.sh b/erlang/toolchain/escript_trampoline.sh similarity index 100% rename from prelude/erlang/toolchain/escript_trampoline.sh rename to erlang/toolchain/escript_trampoline.sh diff --git a/prelude/erlang/toolchain/include_erts.escript b/erlang/toolchain/include_erts.escript similarity index 100% rename from prelude/erlang/toolchain/include_erts.escript rename to erlang/toolchain/include_erts.escript diff --git a/prelude/erlang/toolchain/release_variables_builder.escript 
b/erlang/toolchain/release_variables_builder.escript similarity index 100% rename from prelude/erlang/toolchain/release_variables_builder.escript rename to erlang/toolchain/release_variables_builder.escript diff --git a/prelude/erlang/toolchain/transform_project_root.erl b/erlang/toolchain/transform_project_root.erl similarity index 100% rename from prelude/erlang/toolchain/transform_project_root.erl rename to erlang/toolchain/transform_project_root.erl diff --git a/prelude/export_exe.bzl b/export_exe.bzl similarity index 100% rename from prelude/export_exe.bzl rename to export_exe.bzl diff --git a/prelude/export_file.bzl b/export_file.bzl similarity index 100% rename from prelude/export_file.bzl rename to export_file.bzl diff --git a/prelude/filegroup.bzl b/filegroup.bzl similarity index 100% rename from prelude/filegroup.bzl rename to filegroup.bzl diff --git a/prelude/genrule.bzl b/genrule.bzl similarity index 100% rename from prelude/genrule.bzl rename to genrule.bzl diff --git a/prelude/genrule_local_labels.bzl b/genrule_local_labels.bzl similarity index 100% rename from prelude/genrule_local_labels.bzl rename to genrule_local_labels.bzl diff --git a/prelude/genrule_toolchain.bzl b/genrule_toolchain.bzl similarity index 100% rename from prelude/genrule_toolchain.bzl rename to genrule_toolchain.bzl diff --git a/prelude/git/git_fetch.bzl b/git/git_fetch.bzl similarity index 100% rename from prelude/git/git_fetch.bzl rename to git/git_fetch.bzl diff --git a/prelude/git/tools/BUCK.v2 b/git/tools/BUCK.v2 similarity index 100% rename from prelude/git/tools/BUCK.v2 rename to git/tools/BUCK.v2 diff --git a/prelude/git/tools/git_fetch.py b/git/tools/git_fetch.py similarity index 100% rename from prelude/git/tools/git_fetch.py rename to git/tools/git_fetch.py diff --git a/prelude/go/cgo_builder.bzl b/go/cgo_builder.bzl similarity index 100% rename from prelude/go/cgo_builder.bzl rename to go/cgo_builder.bzl diff --git a/prelude/go/cgo_library.bzl b/go/cgo_library.bzl 
similarity index 100% rename from prelude/go/cgo_library.bzl rename to go/cgo_library.bzl diff --git a/prelude/go/compile.bzl b/go/compile.bzl similarity index 100% rename from prelude/go/compile.bzl rename to go/compile.bzl diff --git a/prelude/go/constraints/BUCK.v2 b/go/constraints/BUCK.v2 similarity index 100% rename from prelude/go/constraints/BUCK.v2 rename to go/constraints/BUCK.v2 diff --git a/prelude/go/constraints/defs.bzl b/go/constraints/defs.bzl similarity index 100% rename from prelude/go/constraints/defs.bzl rename to go/constraints/defs.bzl diff --git a/prelude/go/coverage.bzl b/go/coverage.bzl similarity index 100% rename from prelude/go/coverage.bzl rename to go/coverage.bzl diff --git a/prelude/go/go_binary.bzl b/go/go_binary.bzl similarity index 100% rename from prelude/go/go_binary.bzl rename to go/go_binary.bzl diff --git a/prelude/go/go_exported_library.bzl b/go/go_exported_library.bzl similarity index 100% rename from prelude/go/go_exported_library.bzl rename to go/go_exported_library.bzl diff --git a/prelude/go/go_library.bzl b/go/go_library.bzl similarity index 100% rename from prelude/go/go_library.bzl rename to go/go_library.bzl diff --git a/prelude/go/go_list.bzl b/go/go_list.bzl similarity index 100% rename from prelude/go/go_list.bzl rename to go/go_list.bzl diff --git a/prelude/go/go_stdlib.bzl b/go/go_stdlib.bzl similarity index 100% rename from prelude/go/go_stdlib.bzl rename to go/go_stdlib.bzl diff --git a/prelude/go/go_test.bzl b/go/go_test.bzl similarity index 100% rename from prelude/go/go_test.bzl rename to go/go_test.bzl diff --git a/prelude/go/link.bzl b/go/link.bzl similarity index 100% rename from prelude/go/link.bzl rename to go/link.bzl diff --git a/prelude/go/package_builder.bzl b/go/package_builder.bzl similarity index 100% rename from prelude/go/package_builder.bzl rename to go/package_builder.bzl diff --git a/prelude/go/packages.bzl b/go/packages.bzl similarity index 100% rename from prelude/go/packages.bzl rename 
to go/packages.bzl diff --git a/prelude/go/toolchain.bzl b/go/toolchain.bzl similarity index 100% rename from prelude/go/toolchain.bzl rename to go/toolchain.bzl diff --git a/prelude/go/tools/BUCK.v2 b/go/tools/BUCK.v2 similarity index 100% rename from prelude/go/tools/BUCK.v2 rename to go/tools/BUCK.v2 diff --git a/prelude/go/tools/cgo_wrapper.py b/go/tools/cgo_wrapper.py similarity index 100% rename from prelude/go/tools/cgo_wrapper.py rename to go/tools/cgo_wrapper.py diff --git a/prelude/go/tools/concat_files.py b/go/tools/concat_files.py similarity index 100% rename from prelude/go/tools/concat_files.py rename to go/tools/concat_files.py diff --git a/prelude/go/tools/gen_stdlib_importcfg.py b/go/tools/gen_stdlib_importcfg.py similarity index 100% rename from prelude/go/tools/gen_stdlib_importcfg.py rename to go/tools/gen_stdlib_importcfg.py diff --git a/prelude/go/tools/go_list_wrapper.py b/go/tools/go_list_wrapper.py similarity index 100% rename from prelude/go/tools/go_list_wrapper.py rename to go/tools/go_list_wrapper.py diff --git a/prelude/go/tools/go_wrapper.py b/go/tools/go_wrapper.py similarity index 100% rename from prelude/go/tools/go_wrapper.py rename to go/tools/go_wrapper.py diff --git a/prelude/go/tools/testmaingen.go b/go/tools/testmaingen.go similarity index 100% rename from prelude/go/tools/testmaingen.go rename to go/tools/testmaingen.go diff --git a/prelude/go/transitions/defs.bzl b/go/transitions/defs.bzl similarity index 100% rename from prelude/go/transitions/defs.bzl rename to go/transitions/defs.bzl diff --git a/prelude/go/transitions/tags_helper.bzl b/go/transitions/tags_helper.bzl similarity index 100% rename from prelude/go/transitions/tags_helper.bzl rename to go/transitions/tags_helper.bzl diff --git a/prelude/haskell/compile.bzl b/haskell/compile.bzl similarity index 100% rename from prelude/haskell/compile.bzl rename to haskell/compile.bzl diff --git a/prelude/haskell/haskell.bzl b/haskell/haskell.bzl similarity index 100% rename 
from prelude/haskell/haskell.bzl rename to haskell/haskell.bzl diff --git a/prelude/haskell/haskell_ghci.bzl b/haskell/haskell_ghci.bzl similarity index 100% rename from prelude/haskell/haskell_ghci.bzl rename to haskell/haskell_ghci.bzl diff --git a/prelude/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl similarity index 100% rename from prelude/haskell/haskell_haddock.bzl rename to haskell/haskell_haddock.bzl diff --git a/prelude/haskell/haskell_ide.bzl b/haskell/haskell_ide.bzl similarity index 100% rename from prelude/haskell/haskell_ide.bzl rename to haskell/haskell_ide.bzl diff --git a/prelude/haskell/ide/README.md b/haskell/ide/README.md similarity index 100% rename from prelude/haskell/ide/README.md rename to haskell/ide/README.md diff --git a/prelude/haskell/ide/hie.yaml b/haskell/ide/hie.yaml similarity index 100% rename from prelude/haskell/ide/hie.yaml rename to haskell/ide/hie.yaml diff --git a/prelude/haskell/ide/ide.bxl b/haskell/ide/ide.bxl similarity index 100% rename from prelude/haskell/ide/ide.bxl rename to haskell/ide/ide.bxl diff --git a/prelude/haskell/library_info.bzl b/haskell/library_info.bzl similarity index 100% rename from prelude/haskell/library_info.bzl rename to haskell/library_info.bzl diff --git a/prelude/haskell/link_info.bzl b/haskell/link_info.bzl similarity index 100% rename from prelude/haskell/link_info.bzl rename to haskell/link_info.bzl diff --git a/prelude/haskell/toolchain.bzl b/haskell/toolchain.bzl similarity index 100% rename from prelude/haskell/toolchain.bzl rename to haskell/toolchain.bzl diff --git a/prelude/haskell/tools/BUCK.v2 b/haskell/tools/BUCK.v2 similarity index 100% rename from prelude/haskell/tools/BUCK.v2 rename to haskell/tools/BUCK.v2 diff --git a/prelude/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py similarity index 100% rename from prelude/haskell/tools/generate_target_metadata.py rename to haskell/tools/generate_target_metadata.py diff --git 
a/prelude/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py similarity index 100% rename from prelude/haskell/tools/ghc_wrapper.py rename to haskell/tools/ghc_wrapper.py diff --git a/prelude/haskell/tools/script_template_processor.py b/haskell/tools/script_template_processor.py similarity index 100% rename from prelude/haskell/tools/script_template_processor.py rename to haskell/tools/script_template_processor.py diff --git a/prelude/haskell/util.bzl b/haskell/util.bzl similarity index 100% rename from prelude/haskell/util.bzl rename to haskell/util.bzl diff --git a/prelude/http_archive/exec_deps.bzl b/http_archive/exec_deps.bzl similarity index 100% rename from prelude/http_archive/exec_deps.bzl rename to http_archive/exec_deps.bzl diff --git a/prelude/http_archive/http_archive.bzl b/http_archive/http_archive.bzl similarity index 100% rename from prelude/http_archive/http_archive.bzl rename to http_archive/http_archive.bzl diff --git a/prelude/http_archive/tools/BUCK.v2 b/http_archive/tools/BUCK.v2 similarity index 100% rename from prelude/http_archive/tools/BUCK.v2 rename to http_archive/tools/BUCK.v2 diff --git a/prelude/http_archive/tools/create_exclusion_list.py b/http_archive/tools/create_exclusion_list.py similarity index 100% rename from prelude/http_archive/tools/create_exclusion_list.py rename to http_archive/tools/create_exclusion_list.py diff --git a/prelude/http_file.bzl b/http_file.bzl similarity index 100% rename from prelude/http_file.bzl rename to http_file.bzl diff --git a/prelude/ide_integrations/xcode.bzl b/ide_integrations/xcode.bzl similarity index 100% rename from prelude/ide_integrations/xcode.bzl rename to ide_integrations/xcode.bzl diff --git a/prelude/is_buck2.bzl b/is_buck2.bzl similarity index 100% rename from prelude/is_buck2.bzl rename to is_buck2.bzl diff --git a/prelude/is_buck2_internal.bzl b/is_buck2_internal.bzl similarity index 100% rename from prelude/is_buck2_internal.bzl rename to is_buck2_internal.bzl diff --git 
a/prelude/is_full_meta_repo.bzl b/is_full_meta_repo.bzl similarity index 100% rename from prelude/is_full_meta_repo.bzl rename to is_full_meta_repo.bzl diff --git a/prelude/java/class_to_srcs.bzl b/java/class_to_srcs.bzl similarity index 100% rename from prelude/java/class_to_srcs.bzl rename to java/class_to_srcs.bzl diff --git a/prelude/java/dex.bzl b/java/dex.bzl similarity index 100% rename from prelude/java/dex.bzl rename to java/dex.bzl diff --git a/prelude/java/dex_toolchain.bzl b/java/dex_toolchain.bzl similarity index 100% rename from prelude/java/dex_toolchain.bzl rename to java/dex_toolchain.bzl diff --git a/prelude/java/gwt_binary.bzl b/java/gwt_binary.bzl similarity index 100% rename from prelude/java/gwt_binary.bzl rename to java/gwt_binary.bzl diff --git a/prelude/java/jar_genrule.bzl b/java/jar_genrule.bzl similarity index 100% rename from prelude/java/jar_genrule.bzl rename to java/jar_genrule.bzl diff --git a/prelude/java/java.bzl b/java/java.bzl similarity index 100% rename from prelude/java/java.bzl rename to java/java.bzl diff --git a/prelude/java/java_binary.bzl b/java/java_binary.bzl similarity index 100% rename from prelude/java/java_binary.bzl rename to java/java_binary.bzl diff --git a/prelude/java/java_library.bzl b/java/java_library.bzl similarity index 100% rename from prelude/java/java_library.bzl rename to java/java_library.bzl diff --git a/prelude/java/java_providers.bzl b/java/java_providers.bzl similarity index 100% rename from prelude/java/java_providers.bzl rename to java/java_providers.bzl diff --git a/prelude/java/java_resources.bzl b/java/java_resources.bzl similarity index 100% rename from prelude/java/java_resources.bzl rename to java/java_resources.bzl diff --git a/prelude/java/java_test.bzl b/java/java_test.bzl similarity index 100% rename from prelude/java/java_test.bzl rename to java/java_test.bzl diff --git a/prelude/java/java_toolchain.bzl b/java/java_toolchain.bzl similarity index 100% rename from 
prelude/java/java_toolchain.bzl rename to java/java_toolchain.bzl diff --git a/prelude/java/javacd_jar_creator.bzl b/java/javacd_jar_creator.bzl similarity index 100% rename from prelude/java/javacd_jar_creator.bzl rename to java/javacd_jar_creator.bzl diff --git a/prelude/java/keystore.bzl b/java/keystore.bzl similarity index 100% rename from prelude/java/keystore.bzl rename to java/keystore.bzl diff --git a/prelude/java/plugins/java_annotation_processor.bzl b/java/plugins/java_annotation_processor.bzl similarity index 100% rename from prelude/java/plugins/java_annotation_processor.bzl rename to java/plugins/java_annotation_processor.bzl diff --git a/prelude/java/plugins/java_plugin.bzl b/java/plugins/java_plugin.bzl similarity index 100% rename from prelude/java/plugins/java_plugin.bzl rename to java/plugins/java_plugin.bzl diff --git a/prelude/java/prebuilt_jar.bzl b/java/prebuilt_jar.bzl similarity index 100% rename from prelude/java/prebuilt_jar.bzl rename to java/prebuilt_jar.bzl diff --git a/prelude/java/tools/BUCK.v2 b/java/tools/BUCK.v2 similarity index 100% rename from prelude/java/tools/BUCK.v2 rename to java/tools/BUCK.v2 diff --git a/prelude/java/tools/compile_and_package.py b/java/tools/compile_and_package.py similarity index 100% rename from prelude/java/tools/compile_and_package.py rename to java/tools/compile_and_package.py diff --git a/prelude/java/tools/fat_jar.py b/java/tools/fat_jar.py similarity index 100% rename from prelude/java/tools/fat_jar.py rename to java/tools/fat_jar.py diff --git a/prelude/java/tools/gen_class_to_source_map.py b/java/tools/gen_class_to_source_map.py similarity index 100% rename from prelude/java/tools/gen_class_to_source_map.py rename to java/tools/gen_class_to_source_map.py diff --git a/prelude/java/tools/list_class_names.py b/java/tools/list_class_names.py similarity index 100% rename from prelude/java/tools/list_class_names.py rename to java/tools/list_class_names.py diff --git 
a/prelude/java/tools/merge_class_to_source_maps.py b/java/tools/merge_class_to_source_maps.py similarity index 100% rename from prelude/java/tools/merge_class_to_source_maps.py rename to java/tools/merge_class_to_source_maps.py diff --git a/prelude/java/tools/utils.py b/java/tools/utils.py similarity index 100% rename from prelude/java/tools/utils.py rename to java/tools/utils.py diff --git a/prelude/java/utils/java_more_utils.bzl b/java/utils/java_more_utils.bzl similarity index 100% rename from prelude/java/utils/java_more_utils.bzl rename to java/utils/java_more_utils.bzl diff --git a/prelude/java/utils/java_utils.bzl b/java/utils/java_utils.bzl similarity index 100% rename from prelude/java/utils/java_utils.bzl rename to java/utils/java_utils.bzl diff --git a/prelude/js/js.bzl b/js/js.bzl similarity index 100% rename from prelude/js/js.bzl rename to js/js.bzl diff --git a/prelude/js/js_bundle.bzl b/js/js_bundle.bzl similarity index 100% rename from prelude/js/js_bundle.bzl rename to js/js_bundle.bzl diff --git a/prelude/js/js_bundle_genrule.bzl b/js/js_bundle_genrule.bzl similarity index 100% rename from prelude/js/js_bundle_genrule.bzl rename to js/js_bundle_genrule.bzl diff --git a/prelude/js/js_library.bzl b/js/js_library.bzl similarity index 100% rename from prelude/js/js_library.bzl rename to js/js_library.bzl diff --git a/prelude/js/js_providers.bzl b/js/js_providers.bzl similarity index 100% rename from prelude/js/js_providers.bzl rename to js/js_providers.bzl diff --git a/prelude/js/js_utils.bzl b/js/js_utils.bzl similarity index 100% rename from prelude/js/js_utils.bzl rename to js/js_utils.bzl diff --git a/prelude/julia/julia.bzl b/julia/julia.bzl similarity index 100% rename from prelude/julia/julia.bzl rename to julia/julia.bzl diff --git a/prelude/julia/julia_binary.bzl b/julia/julia_binary.bzl similarity index 100% rename from prelude/julia/julia_binary.bzl rename to julia/julia_binary.bzl diff --git a/prelude/julia/julia_info.bzl 
b/julia/julia_info.bzl similarity index 100% rename from prelude/julia/julia_info.bzl rename to julia/julia_info.bzl diff --git a/prelude/julia/julia_library.bzl b/julia/julia_library.bzl similarity index 100% rename from prelude/julia/julia_library.bzl rename to julia/julia_library.bzl diff --git a/prelude/julia/julia_test.bzl b/julia/julia_test.bzl similarity index 100% rename from prelude/julia/julia_test.bzl rename to julia/julia_test.bzl diff --git a/prelude/julia/julia_toolchain.bzl b/julia/julia_toolchain.bzl similarity index 100% rename from prelude/julia/julia_toolchain.bzl rename to julia/julia_toolchain.bzl diff --git a/prelude/julia/tools/BUCK.v2 b/julia/tools/BUCK.v2 similarity index 100% rename from prelude/julia/tools/BUCK.v2 rename to julia/tools/BUCK.v2 diff --git a/prelude/julia/tools/parse_julia_cmd.py b/julia/tools/parse_julia_cmd.py similarity index 100% rename from prelude/julia/tools/parse_julia_cmd.py rename to julia/tools/parse_julia_cmd.py diff --git a/prelude/jvm/cd_jar_creator_util.bzl b/jvm/cd_jar_creator_util.bzl similarity index 100% rename from prelude/jvm/cd_jar_creator_util.bzl rename to jvm/cd_jar_creator_util.bzl diff --git a/prelude/jvm/nullsafe.bzl b/jvm/nullsafe.bzl similarity index 100% rename from prelude/jvm/nullsafe.bzl rename to jvm/nullsafe.bzl diff --git a/prelude/kotlin/kotlin.bzl b/kotlin/kotlin.bzl similarity index 100% rename from prelude/kotlin/kotlin.bzl rename to kotlin/kotlin.bzl diff --git a/prelude/kotlin/kotlin_library.bzl b/kotlin/kotlin_library.bzl similarity index 100% rename from prelude/kotlin/kotlin_library.bzl rename to kotlin/kotlin_library.bzl diff --git a/prelude/kotlin/kotlin_test.bzl b/kotlin/kotlin_test.bzl similarity index 100% rename from prelude/kotlin/kotlin_test.bzl rename to kotlin/kotlin_test.bzl diff --git a/prelude/kotlin/kotlin_toolchain.bzl b/kotlin/kotlin_toolchain.bzl similarity index 100% rename from prelude/kotlin/kotlin_toolchain.bzl rename to kotlin/kotlin_toolchain.bzl diff 
--git a/prelude/kotlin/kotlin_utils.bzl b/kotlin/kotlin_utils.bzl similarity index 100% rename from prelude/kotlin/kotlin_utils.bzl rename to kotlin/kotlin_utils.bzl diff --git a/prelude/kotlin/kotlincd_jar_creator.bzl b/kotlin/kotlincd_jar_creator.bzl similarity index 100% rename from prelude/kotlin/kotlincd_jar_creator.bzl rename to kotlin/kotlincd_jar_creator.bzl diff --git a/prelude/kotlin/tools/compile_kotlin/BUCK.v2 b/kotlin/tools/compile_kotlin/BUCK.v2 similarity index 100% rename from prelude/kotlin/tools/compile_kotlin/BUCK.v2 rename to kotlin/tools/compile_kotlin/BUCK.v2 diff --git a/prelude/kotlin/tools/compile_kotlin/compile_kotlin.py b/kotlin/tools/compile_kotlin/compile_kotlin.py similarity index 100% rename from prelude/kotlin/tools/compile_kotlin/compile_kotlin.py rename to kotlin/tools/compile_kotlin/compile_kotlin.py diff --git a/prelude/kotlin/tools/defs.bzl b/kotlin/tools/defs.bzl similarity index 100% rename from prelude/kotlin/tools/defs.bzl rename to kotlin/tools/defs.bzl diff --git a/prelude/kotlin/tools/kapt_base64_encoder/BUCK.v2 b/kotlin/tools/kapt_base64_encoder/BUCK.v2 similarity index 100% rename from prelude/kotlin/tools/kapt_base64_encoder/BUCK.v2 rename to kotlin/tools/kapt_base64_encoder/BUCK.v2 diff --git a/prelude/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java b/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java similarity index 100% rename from prelude/kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java rename to kotlin/tools/kapt_base64_encoder/com/facebook/kapt/KaptBase64Encoder.java diff --git a/prelude/linking/execution_preference.bzl b/linking/execution_preference.bzl similarity index 100% rename from prelude/linking/execution_preference.bzl rename to linking/execution_preference.bzl diff --git a/prelude/linking/link_groups.bzl b/linking/link_groups.bzl similarity index 100% rename from prelude/linking/link_groups.bzl rename to linking/link_groups.bzl 
diff --git a/prelude/linking/link_info.bzl b/linking/link_info.bzl similarity index 100% rename from prelude/linking/link_info.bzl rename to linking/link_info.bzl diff --git a/prelude/linking/linkable_graph.bzl b/linking/linkable_graph.bzl similarity index 100% rename from prelude/linking/linkable_graph.bzl rename to linking/linkable_graph.bzl diff --git a/prelude/linking/linkables.bzl b/linking/linkables.bzl similarity index 100% rename from prelude/linking/linkables.bzl rename to linking/linkables.bzl diff --git a/prelude/linking/lto.bzl b/linking/lto.bzl similarity index 100% rename from prelude/linking/lto.bzl rename to linking/lto.bzl diff --git a/prelude/linking/shared_libraries.bzl b/linking/shared_libraries.bzl similarity index 100% rename from prelude/linking/shared_libraries.bzl rename to linking/shared_libraries.bzl diff --git a/prelude/linking/strip.bzl b/linking/strip.bzl similarity index 100% rename from prelude/linking/strip.bzl rename to linking/strip.bzl diff --git a/prelude/linking/types.bzl b/linking/types.bzl similarity index 100% rename from prelude/linking/types.bzl rename to linking/types.bzl diff --git a/prelude/local_only.bzl b/local_only.bzl similarity index 100% rename from prelude/local_only.bzl rename to local_only.bzl diff --git a/prelude/lua/cxx_lua_extension.bzl b/lua/cxx_lua_extension.bzl similarity index 100% rename from prelude/lua/cxx_lua_extension.bzl rename to lua/cxx_lua_extension.bzl diff --git a/prelude/lua/lua_binary.bzl b/lua/lua_binary.bzl similarity index 100% rename from prelude/lua/lua_binary.bzl rename to lua/lua_binary.bzl diff --git a/prelude/lua/lua_library.bzl b/lua/lua_library.bzl similarity index 100% rename from prelude/lua/lua_library.bzl rename to lua/lua_library.bzl diff --git a/prelude/matlab/matlab.bzl b/matlab/matlab.bzl similarity index 100% rename from prelude/matlab/matlab.bzl rename to matlab/matlab.bzl diff --git a/prelude/matlab/matlab_info.bzl b/matlab/matlab_info.bzl similarity index 100% rename 
from prelude/matlab/matlab_info.bzl rename to matlab/matlab_info.bzl diff --git a/prelude/matlab/matlab_program.bzl b/matlab/matlab_program.bzl similarity index 100% rename from prelude/matlab/matlab_program.bzl rename to matlab/matlab_program.bzl diff --git a/prelude/matlab/matlab_toolchain.bzl b/matlab/matlab_toolchain.bzl similarity index 100% rename from prelude/matlab/matlab_toolchain.bzl rename to matlab/matlab_toolchain.bzl diff --git a/prelude/native.bzl b/native.bzl similarity index 100% rename from prelude/native.bzl rename to native.bzl diff --git a/prelude/ocaml/attrs.bzl b/ocaml/attrs.bzl similarity index 100% rename from prelude/ocaml/attrs.bzl rename to ocaml/attrs.bzl diff --git a/prelude/ocaml/makefile.bzl b/ocaml/makefile.bzl similarity index 100% rename from prelude/ocaml/makefile.bzl rename to ocaml/makefile.bzl diff --git a/prelude/ocaml/ocaml.bzl b/ocaml/ocaml.bzl similarity index 100% rename from prelude/ocaml/ocaml.bzl rename to ocaml/ocaml.bzl diff --git a/prelude/ocaml/ocaml_toolchain_types.bzl b/ocaml/ocaml_toolchain_types.bzl similarity index 100% rename from prelude/ocaml/ocaml_toolchain_types.bzl rename to ocaml/ocaml_toolchain_types.bzl diff --git a/prelude/os/BUCK.v2 b/os/BUCK.v2 similarity index 100% rename from prelude/os/BUCK.v2 rename to os/BUCK.v2 diff --git a/prelude/os/constraints/BUCK.v2 b/os/constraints/BUCK.v2 similarity index 100% rename from prelude/os/constraints/BUCK.v2 rename to os/constraints/BUCK.v2 diff --git a/prelude/os_lookup/defs.bzl b/os_lookup/defs.bzl similarity index 100% rename from prelude/os_lookup/defs.bzl rename to os_lookup/defs.bzl diff --git a/prelude/os_lookup/targets/BUCK.v2 b/os_lookup/targets/BUCK.v2 similarity index 100% rename from prelude/os_lookup/targets/BUCK.v2 rename to os_lookup/targets/BUCK.v2 diff --git a/prelude/paths.bzl b/paths.bzl similarity index 100% rename from prelude/paths.bzl rename to paths.bzl diff --git a/prelude/platforms/BUCK b/platforms/BUCK similarity index 100% rename 
from prelude/platforms/BUCK rename to platforms/BUCK diff --git a/prelude/platforms/BUCK.v2 b/platforms/BUCK.v2 similarity index 100% rename from prelude/platforms/BUCK.v2 rename to platforms/BUCK.v2 diff --git a/prelude/platforms/apple/BUCK b/platforms/apple/BUCK similarity index 100% rename from prelude/platforms/apple/BUCK rename to platforms/apple/BUCK diff --git a/prelude/platforms/apple/arch.bzl b/platforms/apple/arch.bzl similarity index 100% rename from prelude/platforms/apple/arch.bzl rename to platforms/apple/arch.bzl diff --git a/prelude/platforms/apple/base.bzl b/platforms/apple/base.bzl similarity index 100% rename from prelude/platforms/apple/base.bzl rename to platforms/apple/base.bzl diff --git a/prelude/platforms/apple/build_mode.bzl b/platforms/apple/build_mode.bzl similarity index 100% rename from prelude/platforms/apple/build_mode.bzl rename to platforms/apple/build_mode.bzl diff --git a/prelude/platforms/apple/constants.bzl b/platforms/apple/constants.bzl similarity index 100% rename from prelude/platforms/apple/constants.bzl rename to platforms/apple/constants.bzl diff --git a/prelude/platforms/apple/constraints/BUCK b/platforms/apple/constraints/BUCK similarity index 100% rename from prelude/platforms/apple/constraints/BUCK rename to platforms/apple/constraints/BUCK diff --git a/prelude/platforms/apple/platforms.bzl b/platforms/apple/platforms.bzl similarity index 100% rename from prelude/platforms/apple/platforms.bzl rename to platforms/apple/platforms.bzl diff --git a/prelude/platforms/apple/platforms_map.bzl b/platforms/apple/platforms_map.bzl similarity index 100% rename from prelude/platforms/apple/platforms_map.bzl rename to platforms/apple/platforms_map.bzl diff --git a/prelude/platforms/apple/sdk.bzl b/platforms/apple/sdk.bzl similarity index 100% rename from prelude/platforms/apple/sdk.bzl rename to platforms/apple/sdk.bzl diff --git a/prelude/platforms/defs.bzl b/platforms/defs.bzl similarity index 100% rename from 
prelude/platforms/defs.bzl rename to platforms/defs.bzl diff --git a/prelude/playground/test.bxl b/playground/test.bxl similarity index 100% rename from prelude/playground/test.bxl rename to playground/test.bxl diff --git a/prelude/prelude.bzl b/prelude.bzl similarity index 100% rename from prelude/prelude.bzl rename to prelude.bzl diff --git a/prelude/pull_request_template.md b/pull_request_template.md similarity index 100% rename from prelude/pull_request_template.md rename to pull_request_template.md diff --git a/prelude/python/compile.bzl b/python/compile.bzl similarity index 100% rename from prelude/python/compile.bzl rename to python/compile.bzl diff --git a/prelude/python/cxx_python_extension.bzl b/python/cxx_python_extension.bzl similarity index 100% rename from prelude/python/cxx_python_extension.bzl rename to python/cxx_python_extension.bzl diff --git a/prelude/python/interface.bzl b/python/interface.bzl similarity index 100% rename from prelude/python/interface.bzl rename to python/interface.bzl diff --git a/prelude/python/make_py_package.bzl b/python/make_py_package.bzl similarity index 100% rename from prelude/python/make_py_package.bzl rename to python/make_py_package.bzl diff --git a/prelude/python/manifest.bzl b/python/manifest.bzl similarity index 100% rename from prelude/python/manifest.bzl rename to python/manifest.bzl diff --git a/prelude/python/native_python_util.bzl b/python/native_python_util.bzl similarity index 100% rename from prelude/python/native_python_util.bzl rename to python/native_python_util.bzl diff --git a/prelude/python/needed_coverage.bzl b/python/needed_coverage.bzl similarity index 100% rename from prelude/python/needed_coverage.bzl rename to python/needed_coverage.bzl diff --git a/prelude/python/prebuilt_python_library.bzl b/python/prebuilt_python_library.bzl similarity index 100% rename from prelude/python/prebuilt_python_library.bzl rename to python/prebuilt_python_library.bzl diff --git a/prelude/python/python.bzl 
b/python/python.bzl similarity index 100% rename from prelude/python/python.bzl rename to python/python.bzl diff --git a/prelude/python/python_binary.bzl b/python/python_binary.bzl similarity index 100% rename from prelude/python/python_binary.bzl rename to python/python_binary.bzl diff --git a/prelude/python/python_library.bzl b/python/python_library.bzl similarity index 100% rename from prelude/python/python_library.bzl rename to python/python_library.bzl diff --git a/prelude/python/python_needed_coverage_test.bzl b/python/python_needed_coverage_test.bzl similarity index 100% rename from prelude/python/python_needed_coverage_test.bzl rename to python/python_needed_coverage_test.bzl diff --git a/prelude/python/python_test.bzl b/python/python_test.bzl similarity index 100% rename from prelude/python/python_test.bzl rename to python/python_test.bzl diff --git a/prelude/python/python_wheel.bzl b/python/python_wheel.bzl similarity index 100% rename from prelude/python/python_wheel.bzl rename to python/python_wheel.bzl diff --git a/prelude/python/runtime/BUCK.v2 b/python/runtime/BUCK.v2 similarity index 100% rename from prelude/python/runtime/BUCK.v2 rename to python/runtime/BUCK.v2 diff --git a/prelude/python/runtime/__par__/bootstrap.py b/python/runtime/__par__/bootstrap.py similarity index 100% rename from prelude/python/runtime/__par__/bootstrap.py rename to python/runtime/__par__/bootstrap.py diff --git a/prelude/python/source_db.bzl b/python/source_db.bzl similarity index 100% rename from prelude/python/source_db.bzl rename to python/source_db.bzl diff --git a/prelude/python/sourcedb/build.bxl b/python/sourcedb/build.bxl similarity index 100% rename from prelude/python/sourcedb/build.bxl rename to python/sourcedb/build.bxl diff --git a/prelude/python/sourcedb/classic.bxl b/python/sourcedb/classic.bxl similarity index 100% rename from prelude/python/sourcedb/classic.bxl rename to python/sourcedb/classic.bxl diff --git a/prelude/python/sourcedb/code_navigation.bxl 
b/python/sourcedb/code_navigation.bxl similarity index 100% rename from prelude/python/sourcedb/code_navigation.bxl rename to python/sourcedb/code_navigation.bxl diff --git a/prelude/python/sourcedb/filter.bxl b/python/sourcedb/filter.bxl similarity index 100% rename from prelude/python/sourcedb/filter.bxl rename to python/sourcedb/filter.bxl diff --git a/prelude/python/sourcedb/merge.bxl b/python/sourcedb/merge.bxl similarity index 100% rename from prelude/python/sourcedb/merge.bxl rename to python/sourcedb/merge.bxl diff --git a/prelude/python/sourcedb/query.bxl b/python/sourcedb/query.bxl similarity index 100% rename from prelude/python/sourcedb/query.bxl rename to python/sourcedb/query.bxl diff --git a/prelude/python/toolchain.bzl b/python/toolchain.bzl similarity index 100% rename from prelude/python/toolchain.bzl rename to python/toolchain.bzl diff --git a/prelude/python/tools/BUCK.v2 b/python/tools/BUCK.v2 similarity index 100% rename from prelude/python/tools/BUCK.v2 rename to python/tools/BUCK.v2 diff --git a/prelude/python/tools/__test_main__.py b/python/tools/__test_main__.py similarity index 100% rename from prelude/python/tools/__test_main__.py rename to python/tools/__test_main__.py diff --git a/prelude/python/tools/compile.py b/python/tools/compile.py similarity index 100% rename from prelude/python/tools/compile.py rename to python/tools/compile.py diff --git a/prelude/python/tools/create_manifest_for_source_dir.py b/python/tools/create_manifest_for_source_dir.py similarity index 100% rename from prelude/python/tools/create_manifest_for_source_dir.py rename to python/tools/create_manifest_for_source_dir.py diff --git a/prelude/python/tools/embedded_main.cpp b/python/tools/embedded_main.cpp similarity index 100% rename from prelude/python/tools/embedded_main.cpp rename to python/tools/embedded_main.cpp diff --git a/prelude/python/tools/extract.py b/python/tools/extract.py similarity index 100% rename from prelude/python/tools/extract.py rename to 
python/tools/extract.py diff --git a/prelude/python/tools/fail_with_message.py b/python/tools/fail_with_message.py similarity index 100% rename from prelude/python/tools/fail_with_message.py rename to python/tools/fail_with_message.py diff --git a/prelude/python/tools/generate_static_extension_info.py b/python/tools/generate_static_extension_info.py similarity index 100% rename from prelude/python/tools/generate_static_extension_info.py rename to python/tools/generate_static_extension_info.py diff --git a/prelude/python/tools/make_par/BUCK b/python/tools/make_par/BUCK similarity index 100% rename from prelude/python/tools/make_par/BUCK rename to python/tools/make_par/BUCK diff --git a/prelude/python/tools/make_par/__run_lpar_main__.py b/python/tools/make_par/__run_lpar_main__.py similarity index 100% rename from prelude/python/tools/make_par/__run_lpar_main__.py rename to python/tools/make_par/__run_lpar_main__.py diff --git a/prelude/python/tools/make_par/_lpar_bootstrap.sh.template b/python/tools/make_par/_lpar_bootstrap.sh.template similarity index 100% rename from prelude/python/tools/make_par/_lpar_bootstrap.sh.template rename to python/tools/make_par/_lpar_bootstrap.sh.template diff --git a/prelude/python/tools/make_par/sitecustomize.py b/python/tools/make_par/sitecustomize.py similarity index 100% rename from prelude/python/tools/make_par/sitecustomize.py rename to python/tools/make_par/sitecustomize.py diff --git a/prelude/python/tools/make_py_package_inplace.py b/python/tools/make_py_package_inplace.py similarity index 100% rename from prelude/python/tools/make_py_package_inplace.py rename to python/tools/make_py_package_inplace.py diff --git a/prelude/python/tools/make_py_package_manifest_module.py b/python/tools/make_py_package_manifest_module.py similarity index 100% rename from prelude/python/tools/make_py_package_manifest_module.py rename to python/tools/make_py_package_manifest_module.py diff --git a/prelude/python/tools/make_py_package_modules.py 
b/python/tools/make_py_package_modules.py similarity index 100% rename from prelude/python/tools/make_py_package_modules.py rename to python/tools/make_py_package_modules.py diff --git a/prelude/python/tools/make_source_db.py b/python/tools/make_source_db.py similarity index 100% rename from prelude/python/tools/make_source_db.py rename to python/tools/make_source_db.py diff --git a/prelude/python/tools/make_source_db_no_deps.py b/python/tools/make_source_db_no_deps.py similarity index 100% rename from prelude/python/tools/make_source_db_no_deps.py rename to python/tools/make_source_db_no_deps.py diff --git a/prelude/python/tools/parse_imports.py b/python/tools/parse_imports.py similarity index 100% rename from prelude/python/tools/parse_imports.py rename to python/tools/parse_imports.py diff --git a/prelude/python/tools/py38stdlib.py b/python/tools/py38stdlib.py similarity index 100% rename from prelude/python/tools/py38stdlib.py rename to python/tools/py38stdlib.py diff --git a/prelude/python/tools/run_inplace.py.in b/python/tools/run_inplace.py.in similarity index 100% rename from prelude/python/tools/run_inplace.py.in rename to python/tools/run_inplace.py.in diff --git a/prelude/python/tools/run_inplace_lite.py.in b/python/tools/run_inplace_lite.py.in similarity index 100% rename from prelude/python/tools/run_inplace_lite.py.in rename to python/tools/run_inplace_lite.py.in diff --git a/prelude/python/tools/sourcedb_merger/BUCK.v2 b/python/tools/sourcedb_merger/BUCK.v2 similarity index 100% rename from prelude/python/tools/sourcedb_merger/BUCK.v2 rename to python/tools/sourcedb_merger/BUCK.v2 diff --git a/prelude/python/tools/sourcedb_merger/inputs.py b/python/tools/sourcedb_merger/inputs.py similarity index 100% rename from prelude/python/tools/sourcedb_merger/inputs.py rename to python/tools/sourcedb_merger/inputs.py diff --git a/prelude/python/tools/sourcedb_merger/legacy_merge.py b/python/tools/sourcedb_merger/legacy_merge.py similarity index 100% rename 
from prelude/python/tools/sourcedb_merger/legacy_merge.py rename to python/tools/sourcedb_merger/legacy_merge.py diff --git a/prelude/python/tools/sourcedb_merger/legacy_outputs.py b/python/tools/sourcedb_merger/legacy_outputs.py similarity index 100% rename from prelude/python/tools/sourcedb_merger/legacy_outputs.py rename to python/tools/sourcedb_merger/legacy_outputs.py diff --git a/prelude/python/tools/sourcedb_merger/merge.py b/python/tools/sourcedb_merger/merge.py similarity index 100% rename from prelude/python/tools/sourcedb_merger/merge.py rename to python/tools/sourcedb_merger/merge.py diff --git a/prelude/python/tools/sourcedb_merger/outputs.py b/python/tools/sourcedb_merger/outputs.py similarity index 100% rename from prelude/python/tools/sourcedb_merger/outputs.py rename to python/tools/sourcedb_merger/outputs.py diff --git a/prelude/python/tools/sourcedb_merger/tests/__init__.py b/python/tools/sourcedb_merger/tests/__init__.py similarity index 100% rename from prelude/python/tools/sourcedb_merger/tests/__init__.py rename to python/tools/sourcedb_merger/tests/__init__.py diff --git a/prelude/python/tools/sourcedb_merger/tests/inputs_test.py b/python/tools/sourcedb_merger/tests/inputs_test.py similarity index 100% rename from prelude/python/tools/sourcedb_merger/tests/inputs_test.py rename to python/tools/sourcedb_merger/tests/inputs_test.py diff --git a/prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py b/python/tools/sourcedb_merger/tests/legacy_output_test.py similarity index 100% rename from prelude/python/tools/sourcedb_merger/tests/legacy_output_test.py rename to python/tools/sourcedb_merger/tests/legacy_output_test.py diff --git a/prelude/python/tools/sourcedb_merger/tests/main.sh b/python/tools/sourcedb_merger/tests/main.sh similarity index 100% rename from prelude/python/tools/sourcedb_merger/tests/main.sh rename to python/tools/sourcedb_merger/tests/main.sh diff --git a/prelude/python/tools/sourcedb_merger/tests/outputs_test.py 
b/python/tools/sourcedb_merger/tests/outputs_test.py similarity index 100% rename from prelude/python/tools/sourcedb_merger/tests/outputs_test.py rename to python/tools/sourcedb_merger/tests/outputs_test.py diff --git a/prelude/python/tools/static_extension_finder.py b/python/tools/static_extension_finder.py similarity index 100% rename from prelude/python/tools/static_extension_finder.py rename to python/tools/static_extension_finder.py diff --git a/prelude/python/tools/static_extension_utils.cpp b/python/tools/static_extension_utils.cpp similarity index 100% rename from prelude/python/tools/static_extension_utils.cpp rename to python/tools/static_extension_utils.cpp diff --git a/prelude/python/tools/traverse_dep_manifest.py b/python/tools/traverse_dep_manifest.py similarity index 100% rename from prelude/python/tools/traverse_dep_manifest.py rename to python/tools/traverse_dep_manifest.py diff --git a/prelude/python/tools/wheel.py b/python/tools/wheel.py similarity index 100% rename from prelude/python/tools/wheel.py rename to python/tools/wheel.py diff --git a/prelude/python/typecheck/batch.bxl b/python/typecheck/batch.bxl similarity index 100% rename from prelude/python/typecheck/batch.bxl rename to python/typecheck/batch.bxl diff --git a/prelude/python/typing.bzl b/python/typing.bzl similarity index 100% rename from prelude/python/typing.bzl rename to python/typing.bzl diff --git a/prelude/python_bootstrap/python_bootstrap.bzl b/python_bootstrap/python_bootstrap.bzl similarity index 100% rename from prelude/python_bootstrap/python_bootstrap.bzl rename to python_bootstrap/python_bootstrap.bzl diff --git a/prelude/python_bootstrap/tools/BUCK.v2 b/python_bootstrap/tools/BUCK.v2 similarity index 100% rename from prelude/python_bootstrap/tools/BUCK.v2 rename to python_bootstrap/tools/BUCK.v2 diff --git a/prelude/python_bootstrap/tools/win_python_wrapper.bat b/python_bootstrap/tools/win_python_wrapper.bat similarity index 100% rename from 
prelude/python_bootstrap/tools/win_python_wrapper.bat rename to python_bootstrap/tools/win_python_wrapper.bat diff --git a/prelude/remote_file.bzl b/remote_file.bzl similarity index 100% rename from prelude/remote_file.bzl rename to remote_file.bzl diff --git a/prelude/resources.bzl b/resources.bzl similarity index 100% rename from prelude/resources.bzl rename to resources.bzl diff --git a/prelude/rules.bzl b/rules.bzl similarity index 100% rename from prelude/rules.bzl rename to rules.bzl diff --git a/prelude/rules_impl.bzl b/rules_impl.bzl similarity index 100% rename from prelude/rules_impl.bzl rename to rules_impl.bzl diff --git a/prelude/rust/build.bzl b/rust/build.bzl similarity index 100% rename from prelude/rust/build.bzl rename to rust/build.bzl diff --git a/prelude/rust/build_params.bzl b/rust/build_params.bzl similarity index 100% rename from prelude/rust/build_params.bzl rename to rust/build_params.bzl diff --git a/prelude/rust/cargo_buildscript.bzl b/rust/cargo_buildscript.bzl similarity index 100% rename from prelude/rust/cargo_buildscript.bzl rename to rust/cargo_buildscript.bzl diff --git a/prelude/rust/cargo_package.bzl b/rust/cargo_package.bzl similarity index 100% rename from prelude/rust/cargo_package.bzl rename to rust/cargo_package.bzl diff --git a/prelude/rust/clippy_configuration.bzl b/rust/clippy_configuration.bzl similarity index 100% rename from prelude/rust/clippy_configuration.bzl rename to rust/clippy_configuration.bzl diff --git a/prelude/rust/context.bzl b/rust/context.bzl similarity index 100% rename from prelude/rust/context.bzl rename to rust/context.bzl diff --git a/prelude/rust/extern.bzl b/rust/extern.bzl similarity index 100% rename from prelude/rust/extern.bzl rename to rust/extern.bzl diff --git a/prelude/rust/failure_filter.bzl b/rust/failure_filter.bzl similarity index 100% rename from prelude/rust/failure_filter.bzl rename to rust/failure_filter.bzl diff --git a/prelude/rust/link_info.bzl b/rust/link_info.bzl similarity 
index 100% rename from prelude/rust/link_info.bzl rename to rust/link_info.bzl diff --git a/prelude/rust/outputs.bzl b/rust/outputs.bzl similarity index 100% rename from prelude/rust/outputs.bzl rename to rust/outputs.bzl diff --git a/prelude/rust/proc_macro_alias.bzl b/rust/proc_macro_alias.bzl similarity index 100% rename from prelude/rust/proc_macro_alias.bzl rename to rust/proc_macro_alias.bzl diff --git a/prelude/rust/resources.bzl b/rust/resources.bzl similarity index 100% rename from prelude/rust/resources.bzl rename to rust/resources.bzl diff --git a/prelude/rust/rust-analyzer/check.bxl b/rust/rust-analyzer/check.bxl similarity index 100% rename from prelude/rust/rust-analyzer/check.bxl rename to rust/rust-analyzer/check.bxl diff --git a/prelude/rust/rust-analyzer/resolve_deps.bxl b/rust/rust-analyzer/resolve_deps.bxl similarity index 100% rename from prelude/rust/rust-analyzer/resolve_deps.bxl rename to rust/rust-analyzer/resolve_deps.bxl diff --git a/prelude/rust/rust_binary.bzl b/rust/rust_binary.bzl similarity index 100% rename from prelude/rust/rust_binary.bzl rename to rust/rust_binary.bzl diff --git a/prelude/rust/rust_common.bzl b/rust/rust_common.bzl similarity index 100% rename from prelude/rust/rust_common.bzl rename to rust/rust_common.bzl diff --git a/prelude/rust/rust_library.bzl b/rust/rust_library.bzl similarity index 100% rename from prelude/rust/rust_library.bzl rename to rust/rust_library.bzl diff --git a/prelude/rust/rust_toolchain.bzl b/rust/rust_toolchain.bzl similarity index 100% rename from prelude/rust/rust_toolchain.bzl rename to rust/rust_toolchain.bzl diff --git a/prelude/rust/targets.bzl b/rust/targets.bzl similarity index 100% rename from prelude/rust/targets.bzl rename to rust/targets.bzl diff --git a/prelude/rust/tools/BUCK.v2 b/rust/tools/BUCK.v2 similarity index 100% rename from prelude/rust/tools/BUCK.v2 rename to rust/tools/BUCK.v2 diff --git a/prelude/rust/tools/attrs.bzl b/rust/tools/attrs.bzl similarity index 100% 
rename from prelude/rust/tools/attrs.bzl rename to rust/tools/attrs.bzl diff --git a/prelude/rust/tools/buildscript_run.py b/rust/tools/buildscript_run.py similarity index 100% rename from prelude/rust/tools/buildscript_run.py rename to rust/tools/buildscript_run.py diff --git a/prelude/rust/tools/failure_filter_action.py b/rust/tools/failure_filter_action.py similarity index 100% rename from prelude/rust/tools/failure_filter_action.py rename to rust/tools/failure_filter_action.py diff --git a/prelude/rust/tools/rustc_action.py b/rust/tools/rustc_action.py similarity index 100% rename from prelude/rust/tools/rustc_action.py rename to rust/tools/rustc_action.py diff --git a/prelude/rust/tools/rustdoc_coverage.py b/rust/tools/rustdoc_coverage.py similarity index 100% rename from prelude/rust/tools/rustdoc_coverage.py rename to rust/tools/rustdoc_coverage.py diff --git a/prelude/rust/tools/rustdoc_test_with_resources.py b/rust/tools/rustdoc_test_with_resources.py similarity index 100% rename from prelude/rust/tools/rustdoc_test_with_resources.py rename to rust/tools/rustdoc_test_with_resources.py diff --git a/prelude/rust/tools/tool_rules.bzl b/rust/tools/tool_rules.bzl similarity index 100% rename from prelude/rust/tools/tool_rules.bzl rename to rust/tools/tool_rules.bzl diff --git a/prelude/rust/tools/transitive_dependency_symlinks.py b/rust/tools/transitive_dependency_symlinks.py similarity index 100% rename from prelude/rust/tools/transitive_dependency_symlinks.py rename to rust/tools/transitive_dependency_symlinks.py diff --git a/prelude/rust/with_workspace.bzl b/rust/with_workspace.bzl similarity index 100% rename from prelude/rust/with_workspace.bzl rename to rust/with_workspace.bzl diff --git a/prelude/sh_binary.bzl b/sh_binary.bzl similarity index 100% rename from prelude/sh_binary.bzl rename to sh_binary.bzl diff --git a/prelude/sh_test.bzl b/sh_test.bzl similarity index 100% rename from prelude/sh_test.bzl rename to sh_test.bzl diff --git 
a/prelude/test/inject_test_run_info.bzl b/test/inject_test_run_info.bzl similarity index 100% rename from prelude/test/inject_test_run_info.bzl rename to test/inject_test_run_info.bzl diff --git a/prelude/test/tools/BUCK.v2 b/test/tools/BUCK.v2 similarity index 100% rename from prelude/test/tools/BUCK.v2 rename to test/tools/BUCK.v2 diff --git a/prelude/test/tools/inject_test_env.py b/test/tools/inject_test_env.py similarity index 100% rename from prelude/test/tools/inject_test_env.py rename to test/tools/inject_test_env.py diff --git a/prelude/test_suite.bzl b/test_suite.bzl similarity index 100% rename from prelude/test_suite.bzl rename to test_suite.bzl diff --git a/prelude/tests/re_utils.bzl b/tests/re_utils.bzl similarity index 100% rename from prelude/tests/re_utils.bzl rename to tests/re_utils.bzl diff --git a/prelude/tests/remote_test_execution_toolchain.bzl b/tests/remote_test_execution_toolchain.bzl similarity index 100% rename from prelude/tests/remote_test_execution_toolchain.bzl rename to tests/remote_test_execution_toolchain.bzl diff --git a/prelude/third-party/hmaptool/BUCK.v2 b/third-party/hmaptool/BUCK.v2 similarity index 100% rename from prelude/third-party/hmaptool/BUCK.v2 rename to third-party/hmaptool/BUCK.v2 diff --git a/prelude/third-party/hmaptool/METADATA.bzl b/third-party/hmaptool/METADATA.bzl similarity index 100% rename from prelude/third-party/hmaptool/METADATA.bzl rename to third-party/hmaptool/METADATA.bzl diff --git a/prelude/third-party/hmaptool/README.md b/third-party/hmaptool/README.md similarity index 100% rename from prelude/third-party/hmaptool/README.md rename to third-party/hmaptool/README.md diff --git a/prelude/third-party/hmaptool/hmaptool b/third-party/hmaptool/hmaptool similarity index 100% rename from prelude/third-party/hmaptool/hmaptool rename to third-party/hmaptool/hmaptool diff --git a/prelude/toolchains/apple/xcode_version_checker/.gitignore b/toolchains/apple/xcode_version_checker/.gitignore similarity index 100% 
rename from prelude/toolchains/apple/xcode_version_checker/.gitignore rename to toolchains/apple/xcode_version_checker/.gitignore diff --git a/prelude/toolchains/apple/xcode_version_checker/BUCK.v2 b/toolchains/apple/xcode_version_checker/BUCK.v2 similarity index 100% rename from prelude/toolchains/apple/xcode_version_checker/BUCK.v2 rename to toolchains/apple/xcode_version_checker/BUCK.v2 diff --git a/prelude/toolchains/apple/xcode_version_checker/Makefile b/toolchains/apple/xcode_version_checker/Makefile similarity index 100% rename from prelude/toolchains/apple/xcode_version_checker/Makefile rename to toolchains/apple/xcode_version_checker/Makefile diff --git a/prelude/toolchains/apple/xcode_version_checker/README b/toolchains/apple/xcode_version_checker/README similarity index 100% rename from prelude/toolchains/apple/xcode_version_checker/README rename to toolchains/apple/xcode_version_checker/README diff --git a/prelude/toolchains/apple/xcode_version_checker/defs.bzl b/toolchains/apple/xcode_version_checker/defs.bzl similarity index 100% rename from prelude/toolchains/apple/xcode_version_checker/defs.bzl rename to toolchains/apple/xcode_version_checker/defs.bzl diff --git a/prelude/toolchains/apple/xcode_version_checker/src/xcode_exec_tester.m b/toolchains/apple/xcode_version_checker/src/xcode_exec_tester.m similarity index 100% rename from prelude/toolchains/apple/xcode_version_checker/src/xcode_exec_tester.m rename to toolchains/apple/xcode_version_checker/src/xcode_exec_tester.m diff --git a/prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checker.m b/toolchains/apple/xcode_version_checker/src/xcode_version_checker.m similarity index 100% rename from prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checker.m rename to toolchains/apple/xcode_version_checker/src/xcode_version_checker.m diff --git a/prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checks.h 
b/toolchains/apple/xcode_version_checker/src/xcode_version_checks.h similarity index 100% rename from prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checks.h rename to toolchains/apple/xcode_version_checker/src/xcode_version_checks.h diff --git a/prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checks.m b/toolchains/apple/xcode_version_checker/src/xcode_version_checks.m similarity index 100% rename from prelude/toolchains/apple/xcode_version_checker/src/xcode_version_checks.m rename to toolchains/apple/xcode_version_checker/src/xcode_version_checks.m diff --git a/prelude/toolchains/apple/xcode_version_checker/src/xcode_version_tester.m b/toolchains/apple/xcode_version_checker/src/xcode_version_tester.m similarity index 100% rename from prelude/toolchains/apple/xcode_version_checker/src/xcode_version_tester.m rename to toolchains/apple/xcode_version_checker/src/xcode_version_tester.m diff --git a/prelude/toolchains/apple/xcode_version_checker/test/Xcode_14.2.0_14C18_fb_version.plist b/toolchains/apple/xcode_version_checker/test/Xcode_14.2.0_14C18_fb_version.plist similarity index 100% rename from prelude/toolchains/apple/xcode_version_checker/test/Xcode_14.2.0_14C18_fb_version.plist rename to toolchains/apple/xcode_version_checker/test/Xcode_14.2.0_14C18_fb_version.plist diff --git a/prelude/toolchains/apple/xcode_version_checker/xcode_version_checker b/toolchains/apple/xcode_version_checker/xcode_version_checker similarity index 100% rename from prelude/toolchains/apple/xcode_version_checker/xcode_version_checker rename to toolchains/apple/xcode_version_checker/xcode_version_checker diff --git a/prelude/toolchains/conan/BUCK b/toolchains/conan/BUCK similarity index 100% rename from prelude/toolchains/conan/BUCK rename to toolchains/conan/BUCK diff --git a/prelude/toolchains/conan/buckler/conanfile.py b/toolchains/conan/buckler/conanfile.py similarity index 100% rename from prelude/toolchains/conan/buckler/conanfile.py rename to 
toolchains/conan/buckler/conanfile.py diff --git a/prelude/toolchains/conan/conan_common.py b/toolchains/conan/conan_common.py similarity index 100% rename from prelude/toolchains/conan/conan_common.py rename to toolchains/conan/conan_common.py diff --git a/prelude/toolchains/conan/conan_generate.py b/toolchains/conan/conan_generate.py similarity index 100% rename from prelude/toolchains/conan/conan_generate.py rename to toolchains/conan/conan_generate.py diff --git a/prelude/toolchains/conan/conan_init.py b/toolchains/conan/conan_init.py similarity index 100% rename from prelude/toolchains/conan/conan_init.py rename to toolchains/conan/conan_init.py diff --git a/prelude/toolchains/conan/conan_lock.py b/toolchains/conan/conan_lock.py similarity index 100% rename from prelude/toolchains/conan/conan_lock.py rename to toolchains/conan/conan_lock.py diff --git a/prelude/toolchains/conan/conan_package.py b/toolchains/conan/conan_package.py similarity index 100% rename from prelude/toolchains/conan/conan_package.py rename to toolchains/conan/conan_package.py diff --git a/prelude/toolchains/conan/conan_package_extract.py b/toolchains/conan/conan_package_extract.py similarity index 100% rename from prelude/toolchains/conan/conan_package_extract.py rename to toolchains/conan/conan_package_extract.py diff --git a/prelude/toolchains/conan/conan_update.py b/toolchains/conan/conan_update.py similarity index 100% rename from prelude/toolchains/conan/conan_update.py rename to toolchains/conan/conan_update.py diff --git a/prelude/toolchains/conan/defs.bzl b/toolchains/conan/defs.bzl similarity index 100% rename from prelude/toolchains/conan/defs.bzl rename to toolchains/conan/defs.bzl diff --git a/prelude/toolchains/conan/lock_generate.py b/toolchains/conan/lock_generate.py similarity index 100% rename from prelude/toolchains/conan/lock_generate.py rename to toolchains/conan/lock_generate.py diff --git a/prelude/toolchains/csharp.bzl b/toolchains/csharp.bzl similarity index 100% 
rename from prelude/toolchains/csharp.bzl rename to toolchains/csharp.bzl diff --git a/prelude/toolchains/cxx.bzl b/toolchains/cxx.bzl similarity index 100% rename from prelude/toolchains/cxx.bzl rename to toolchains/cxx.bzl diff --git a/prelude/toolchains/cxx/zig/BUCK b/toolchains/cxx/zig/BUCK similarity index 100% rename from prelude/toolchains/cxx/zig/BUCK rename to toolchains/cxx/zig/BUCK diff --git a/prelude/toolchains/cxx/zig/defs.bzl b/toolchains/cxx/zig/defs.bzl similarity index 100% rename from prelude/toolchains/cxx/zig/defs.bzl rename to toolchains/cxx/zig/defs.bzl diff --git a/prelude/toolchains/cxx/zig/releases.bzl b/toolchains/cxx/zig/releases.bzl similarity index 100% rename from prelude/toolchains/cxx/zig/releases.bzl rename to toolchains/cxx/zig/releases.bzl diff --git a/prelude/toolchains/demo.bzl b/toolchains/demo.bzl similarity index 100% rename from prelude/toolchains/demo.bzl rename to toolchains/demo.bzl diff --git a/prelude/toolchains/execution_host.bzl b/toolchains/execution_host.bzl similarity index 100% rename from prelude/toolchains/execution_host.bzl rename to toolchains/execution_host.bzl diff --git a/prelude/toolchains/genrule.bzl b/toolchains/genrule.bzl similarity index 100% rename from prelude/toolchains/genrule.bzl rename to toolchains/genrule.bzl diff --git a/prelude/toolchains/go.bzl b/toolchains/go.bzl similarity index 100% rename from prelude/toolchains/go.bzl rename to toolchains/go.bzl diff --git a/prelude/toolchains/haskell.bzl b/toolchains/haskell.bzl similarity index 100% rename from prelude/toolchains/haskell.bzl rename to toolchains/haskell.bzl diff --git a/prelude/toolchains/msvc/BUCK.v2 b/toolchains/msvc/BUCK.v2 similarity index 100% rename from prelude/toolchains/msvc/BUCK.v2 rename to toolchains/msvc/BUCK.v2 diff --git a/prelude/toolchains/msvc/run_msvc_tool.py b/toolchains/msvc/run_msvc_tool.py similarity index 100% rename from prelude/toolchains/msvc/run_msvc_tool.py rename to toolchains/msvc/run_msvc_tool.py diff 
--git a/prelude/toolchains/msvc/tools.bzl b/toolchains/msvc/tools.bzl similarity index 100% rename from prelude/toolchains/msvc/tools.bzl rename to toolchains/msvc/tools.bzl diff --git a/prelude/toolchains/msvc/vswhere.py b/toolchains/msvc/vswhere.py similarity index 100% rename from prelude/toolchains/msvc/vswhere.py rename to toolchains/msvc/vswhere.py diff --git a/prelude/toolchains/ocaml.bzl b/toolchains/ocaml.bzl similarity index 100% rename from prelude/toolchains/ocaml.bzl rename to toolchains/ocaml.bzl diff --git a/prelude/toolchains/python.bzl b/toolchains/python.bzl similarity index 100% rename from prelude/toolchains/python.bzl rename to toolchains/python.bzl diff --git a/prelude/toolchains/remote_test_execution.bzl b/toolchains/remote_test_execution.bzl similarity index 100% rename from prelude/toolchains/remote_test_execution.bzl rename to toolchains/remote_test_execution.bzl diff --git a/prelude/toolchains/rust.bzl b/toolchains/rust.bzl similarity index 100% rename from prelude/toolchains/rust.bzl rename to toolchains/rust.bzl diff --git a/prelude/tools/audit_providers_universe.bxl b/tools/audit_providers_universe.bxl similarity index 100% rename from prelude/tools/audit_providers_universe.bxl rename to tools/audit_providers_universe.bxl diff --git a/prelude/transitions/constraint_overrides.bzl b/transitions/constraint_overrides.bzl similarity index 100% rename from prelude/transitions/constraint_overrides.bzl rename to transitions/constraint_overrides.bzl diff --git a/prelude/user/all.bzl b/user/all.bzl similarity index 100% rename from prelude/user/all.bzl rename to user/all.bzl diff --git a/prelude/user/cxx_headers_bundle.bzl b/user/cxx_headers_bundle.bzl similarity index 100% rename from prelude/user/cxx_headers_bundle.bzl rename to user/cxx_headers_bundle.bzl diff --git a/prelude/user/extract_archive.bzl b/user/extract_archive.bzl similarity index 100% rename from prelude/user/extract_archive.bzl rename to user/extract_archive.bzl diff --git 
a/prelude/user/rule_spec.bzl b/user/rule_spec.bzl similarity index 100% rename from prelude/user/rule_spec.bzl rename to user/rule_spec.bzl diff --git a/prelude/user/write_file.bzl b/user/write_file.bzl similarity index 100% rename from prelude/user/write_file.bzl rename to user/write_file.bzl diff --git a/prelude/utils/arglike.bzl b/utils/arglike.bzl similarity index 100% rename from prelude/utils/arglike.bzl rename to utils/arglike.bzl diff --git a/prelude/utils/buckconfig.bzl b/utils/buckconfig.bzl similarity index 100% rename from prelude/utils/buckconfig.bzl rename to utils/buckconfig.bzl diff --git a/prelude/utils/build_target_pattern.bzl b/utils/build_target_pattern.bzl similarity index 100% rename from prelude/utils/build_target_pattern.bzl rename to utils/build_target_pattern.bzl diff --git a/prelude/utils/cmd_script.bzl b/utils/cmd_script.bzl similarity index 100% rename from prelude/utils/cmd_script.bzl rename to utils/cmd_script.bzl diff --git a/prelude/utils/dicts.bzl b/utils/dicts.bzl similarity index 100% rename from prelude/utils/dicts.bzl rename to utils/dicts.bzl diff --git a/prelude/utils/expect.bzl b/utils/expect.bzl similarity index 100% rename from prelude/utils/expect.bzl rename to utils/expect.bzl diff --git a/prelude/utils/graph_utils.bzl b/utils/graph_utils.bzl similarity index 100% rename from prelude/utils/graph_utils.bzl rename to utils/graph_utils.bzl diff --git a/prelude/utils/host.bzl b/utils/host.bzl similarity index 100% rename from prelude/utils/host.bzl rename to utils/host.bzl diff --git a/prelude/utils/lazy.bzl b/utils/lazy.bzl similarity index 100% rename from prelude/utils/lazy.bzl rename to utils/lazy.bzl diff --git a/prelude/utils/pick.bzl b/utils/pick.bzl similarity index 100% rename from prelude/utils/pick.bzl rename to utils/pick.bzl diff --git a/prelude/utils/platform_flavors_util.bzl b/utils/platform_flavors_util.bzl similarity index 100% rename from prelude/utils/platform_flavors_util.bzl rename to 
utils/platform_flavors_util.bzl diff --git a/prelude/utils/selects.bzl b/utils/selects.bzl similarity index 100% rename from prelude/utils/selects.bzl rename to utils/selects.bzl diff --git a/prelude/utils/set.bzl b/utils/set.bzl similarity index 100% rename from prelude/utils/set.bzl rename to utils/set.bzl diff --git a/prelude/utils/strings.bzl b/utils/strings.bzl similarity index 100% rename from prelude/utils/strings.bzl rename to utils/strings.bzl diff --git a/prelude/utils/type_defs.bzl b/utils/type_defs.bzl similarity index 100% rename from prelude/utils/type_defs.bzl rename to utils/type_defs.bzl diff --git a/prelude/utils/utils.bzl b/utils/utils.bzl similarity index 100% rename from prelude/utils/utils.bzl rename to utils/utils.bzl diff --git a/prelude/validation_deps.bzl b/validation_deps.bzl similarity index 100% rename from prelude/validation_deps.bzl rename to validation_deps.bzl diff --git a/prelude/windows/tools/BUCK.v2 b/windows/tools/BUCK.v2 similarity index 100% rename from prelude/windows/tools/BUCK.v2 rename to windows/tools/BUCK.v2 diff --git a/prelude/windows/tools/msvc_hermetic_exec.bat b/windows/tools/msvc_hermetic_exec.bat similarity index 100% rename from prelude/windows/tools/msvc_hermetic_exec.bat rename to windows/tools/msvc_hermetic_exec.bat diff --git a/prelude/worker_tool.bzl b/worker_tool.bzl similarity index 100% rename from prelude/worker_tool.bzl rename to worker_tool.bzl diff --git a/prelude/zip_file/tools/BUCK.v2 b/zip_file/tools/BUCK.v2 similarity index 100% rename from prelude/zip_file/tools/BUCK.v2 rename to zip_file/tools/BUCK.v2 diff --git a/prelude/zip_file/tools/unzip.py b/zip_file/tools/unzip.py similarity index 100% rename from prelude/zip_file/tools/unzip.py rename to zip_file/tools/unzip.py diff --git a/prelude/zip_file/zip_file.bzl b/zip_file/zip_file.bzl similarity index 100% rename from prelude/zip_file/zip_file.bzl rename to zip_file/zip_file.bzl diff --git a/prelude/zip_file/zip_file_toolchain.bzl 
b/zip_file/zip_file_toolchain.bzl similarity index 100% rename from prelude/zip_file/zip_file_toolchain.bzl rename to zip_file/zip_file_toolchain.bzl From bee8454ad55a3a0a7337f2a9f87bd28d57163049 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 14:31:50 +0200 Subject: [PATCH 0837/1133] TMP HS MD include raw ghc_depends --- haskell/tools/generate_target_metadata.py | 1 + 1 file changed, 1 insertion(+) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 4e3eff04d..ef2c633b2 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -78,6 +78,7 @@ def obtain_target_metadata(args): "module_mapping": module_mapping, "module_graph": module_graph, "package_deps": package_deps, + "raw": ghc_depends, } From 979b9b73a31e8b2644d6032f7ac26810c4ced3f1 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 15:18:47 +0200 Subject: [PATCH 0838/1133] Python script to parse ghc-pkg dump Extracts a mapping from interface file import paths to package names. --- .../generate_toolchain_library_catalog.py | 91 +++++++++++++++++++ 1 file changed, 91 insertions(+) create mode 100755 haskell/tools/generate_toolchain_library_catalog.py diff --git a/haskell/tools/generate_toolchain_library_catalog.py b/haskell/tools/generate_toolchain_library_catalog.py new file mode 100755 index 000000000..3a3aa28a3 --- /dev/null +++ b/haskell/tools/generate_toolchain_library_catalog.py @@ -0,0 +1,91 @@ +#!/usr/bin/env python3 + +"""Helper script to generate a mapping from interface paths to toolchain library names. + +The result is a JSON object with the following fields: +* `by-import-dirs`: A trie mapping import directory prefixes to package names. Encoded as nested dictionaries with leafs denoted by the special key `//pkgname`. 
+""" + +import argparse +import json +from pathlib import Path +import subprocess + + +def main(): + parser = argparse.ArgumentParser( + description=__doc__, + fromfile_prefix_chars="@") + parser.add_argument( + "--output", + required=True, + type=argparse.FileType("w"), + help="Write package mapping to this file in JSON format.") + parser.add_argument( + "--ghc-pkg", + required=True, + type=str, + help="Path to the Haskell compiler's ghc-pkg utilty.") + args = parser.parse_args() + + with subprocess.Popen(_ghc_pkg_command(args.ghc_pkg), stdout=subprocess.PIPE, text=True) as proc: + packages = list(_parse_ghc_pkg_dump(proc.stdout)) + result = _construct_import_path_trie(packages) + + json.dump(result, args.output) + + +def _ghc_pkg_command(ghc_pkg): + return [ + ghc_pkg, + "dump", + "--global", + "--no-user-package-db", + "--simple-output", + "--expand-pkgroot", + ] + + +def _parse_ghc_pkg_dump(lines): + current_package = {} + current_key = None + + for line in lines: + if "---" == line.strip(): + if current_package: + yield(current_package) + + current_package = {} + elif ":" in line: + key, value = map(str.strip, line.split(":", 1)) + + if key == "name": + current_key = "name" + current_package["name"] = value + elif key == "import-dirs": + current_key = "import-dirs" + if value: + current_package.setdefault("import-dirs", []).append(value) + else: + current_key = None + elif current_key == "import-dirs" and line.strip(): + current_package.setdefault("import-dirs", []).append(line.strip()) + + +def _construct_import_path_trie(packages): + result = {} + + for package in packages: + for import_dir in package.get("import-dirs", []): + layer = result + + for part in Path(import_dir).parts: + layer = layer.setdefault(part, {}) + + layer["//pkgname"] = package["name"] + + return result + + +if __name__ == "__main__": + main() From fe10f26a63d01ad76a4884d302ef0f5996cabdc9 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 15:22:43 +0200 Subject: 
[PATCH 0839/1133] Depend on toolchain library catalog generator --- decls/haskell_common.bzl | 4 ++++ haskell/tools/BUCK.v2 | 6 ++++++ 2 files changed, 10 insertions(+) diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index f2b8ea43a..d55f32b1a 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ -46,6 +46,10 @@ def _scripts_arg(): providers = [RunInfo], default = "prelude//haskell/tools:generate_target_metadata", ), + "_generate_toolchain_library_catalog": attrs.dep( + providers = [RunInfo], + default = "prelude//haskell/tools:generate_target_metadata", + ), "_ghc_wrapper": attrs.dep( providers = [RunInfo], default = "prelude//haskell/tools:ghc_wrapper", diff --git a/haskell/tools/BUCK.v2 b/haskell/tools/BUCK.v2 index 610e419c8..6c7b58c3f 100644 --- a/haskell/tools/BUCK.v2 +++ b/haskell/tools/BUCK.v2 @@ -12,6 +12,12 @@ prelude.python_bootstrap_binary( visibility = ["PUBLIC"], ) +prelude.python_bootstrap_binary( + name = "generate_toolchain_library_catalog", + main = "generate_toolchain_library_catalog.py", + visibility = ["PUBLIC"], +) + prelude.python_bootstrap_binary( name = "ghc_wrapper", main = "ghc_wrapper.py", From c3ee38c0844b0f3d9db5ed1f6de40f7b5c30789f Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 15:39:05 +0200 Subject: [PATCH 0840/1133] Generate the toolchain library catalog --- haskell/compile.bzl | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index e6ea50e2e..a66c70c9e 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -152,6 +152,31 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl return modules +def _toolchain_library_catalog_impl(ctx: AnalysisContext) -> list[Provider]: + ghc_pkg = ctx.attrs.toolchain[HaskellToolchainInfo].packager + catalog_gen = ctx.attrs.generate_toolchain_library_catalog[RunInfo] + catalog = 
ctx.actions.declare_output("haskell_toolchain_libraries.json") + ctx.actions.run( + cmd_args(catalog_gen, "--ghc-pkg", ghc_pkg, "--output", catalog.as_output()), + category = "haskell_toolchain_library_catalog", + ) + return [DefaultInfo(default_output = catalog)] + +_toolchain_library_catalog = anon_rule( + impl = _toolchain_library_catalog_impl, + attrs = { + "toolchain": attrs.dep( + providers = [HaskellToolchainInfo], + ), + "generate_toolchain_library_catalog": attrs.dep( + providers = [RunInfo], + ), + }, + artifact_promise_mappings = { + "catalog": lambda x: x[DefaultInfo].default_outputs[0], + } +) + def target_metadata( ctx: AnalysisContext, *, @@ -168,6 +193,11 @@ def target_metadata( if HaskellToolchainLibrary in dep ] + toolchain_libs_catalog = ctx.actions.anon_target(_toolchain_library_catalog, { + "toolchain": ctx.attrs._haskell_toolchain, + "generate_toolchain_library_catalog": ctx.attrs._generate_toolchain_library_catalog, + }) + # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. 
packages_info = get_packages_info( From fa3c6d9c374ee53c971bc07ae6fe415f09a30a15 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 15:44:02 +0200 Subject: [PATCH 0841/1133] fix _generate_toolchain_library_catalog attr --- decls/haskell_common.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index d55f32b1a..8bcf122be 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ -48,7 +48,7 @@ def _scripts_arg(): ), "_generate_toolchain_library_catalog": attrs.dep( providers = [RunInfo], - default = "prelude//haskell/tools:generate_target_metadata", + default = "prelude//haskell/tools:generate_toolchain_library_catalog", ), "_ghc_wrapper": attrs.dep( providers = [RunInfo], From 4ae083438cf639880fa08429531ddf197aebd39b Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 15:44:38 +0200 Subject: [PATCH 0842/1133] pass toolchain libs file to md gen --- haskell/compile.bzl | 1 + haskell/tools/generate_target_metadata.py | 5 +++++ 2 files changed, 6 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index a66c70c9e..4e52df6d8 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -227,6 +227,7 @@ def target_metadata( md_args = cmd_args(md_gen) md_args.add("--output", md_file.as_output()) + md_args.add("--toolchain-libs", toolchain_libs_catalog.artifact("catalog")) md_args.add("--ghc", haskell_toolchain.compiler) md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) md_args.add( diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index ef2c633b2..87af237ed 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -31,6 +31,11 @@ def main(): required=True, type=argparse.FileType("w"), help="Write package metadata to this file in JSON format.") + parser.add_argument( + "--toolchain-libs", + required=True, + type=str, + help="Path to the 
toolchain libraries catalog file.") parser.add_argument( "--ghc", required=True, From 1fe5186e662b91986c3af632b9485846d17c328a Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 16:00:20 +0200 Subject: [PATCH 0843/1133] Track toolchain library deps in md.json --- haskell/tools/generate_target_metadata.py | 40 ++++++++++++++++++----- 1 file changed, 32 insertions(+), 8 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 87af237ed..e15510a0e 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -12,6 +12,7 @@ * `module_mapping`: Mapping from source inferred module name to actual module name, if different. * `module_graph`: Intra-package module dependencies, `dict[modname, list[modname]]`. * `package_deps`": Cross-package module dependencies, `dict[modname, dict[pkgname, list[modname]]`. +* `toolchain_deps`": Toolchain library dependencies, `dict[modname, pkgname]`. 
""" import argparse @@ -75,18 +76,25 @@ def main(): def obtain_target_metadata(args): ghc_depends, ghc_options = run_ghc_depends(args.ghc, args.ghc_arg, args.source) th_modules = determine_th_modules(ghc_options, args.source_prefix) + toolchain_packages = load_toolchain_packages(args.toolchain_libs) package_prefixes = calc_package_prefixes(args.package) - module_mapping, module_graph, package_deps = interpret_ghc_depends( - ghc_depends, args.source_prefix, package_prefixes) + module_mapping, module_graph, package_deps, toolchain_deps = interpret_ghc_depends( + ghc_depends, args.source_prefix, package_prefixes, toolchain_packages) return { "th_modules": th_modules, "module_mapping": module_mapping, "module_graph": module_graph, "package_deps": package_deps, + "toolchain_deps": toolchain_deps, "raw": ghc_depends, } +def load_toolchain_packages(filepath): + with open(filepath, "r") as f: + return json.load(f) + + def determine_th_modules(ghc_options, source_prefix): return [ src_to_module_name(strip_prefix_(source_prefix, fname).lstrip("/")) @@ -138,6 +146,17 @@ def calc_package_prefixes(package_specs): return result +def lookup_toolchain_dep(module_dep, toolchain_packages): + module_path = Path(module_dep) + layer = toolchain_packages + for part in module_path.parts: + if (layer := layer.get(part)) is None: + return None + + if (pkgname := layer.get("//pkgname")) is not None: + return pkgname + + def lookup_package_dep(module_dep, package_prefixes): """Look up a cross-packge module dependency. 
@@ -155,18 +174,21 @@ def lookup_package_dep(module_dep, package_prefixes): return pkgname, modname -def interpret_ghc_depends(ghc_depends, source_prefix, package_prefixes): +def interpret_ghc_depends(ghc_depends, source_prefix, package_prefixes, toolchain_packages): mapping = {} graph = {} extgraph = {} + toolchaingraph = {} for k, vs in ghc_depends.items(): module_name = src_to_module_name(k) - intdeps, extdeps = parse_module_deps(vs, package_prefixes) + intdeps, extdeps, toolchaindeps = parse_module_deps(vs, package_prefixes, toolchain_packages) graph.setdefault(module_name, []).extend(intdeps) for pkg, mods in extdeps.items(): extgraph.setdefault(module_name, {}).setdefault(pkg, []).extend(mods) + for pkg in toolchaindeps: + toolchaingraph.setdefault(module_name, []).append(pkg) ext = os.path.splitext(k)[1] @@ -188,18 +210,20 @@ def interpret_ghc_depends(ghc_depends, source_prefix, package_prefixes): if hs_module_name != module_name: mapping[hs_module_name] = module_name - return mapping, graph, extgraph + return mapping, graph, extgraph, toolchaingraph -def parse_module_deps(module_deps, package_prefixes): +def parse_module_deps(module_deps, package_prefixes, toolchain_packages): internal_deps = [] external_deps = {} + toolchain_deps = set() for module_dep in module_deps: if is_haskell_src(module_dep): continue - if os.path.isabs(module_dep): + if (tooldep := lookup_toolchain_dep(module_dep, toolchain_packages)) is not None: + toolchain_deps.add(tooldep) continue if (pkgdep := lookup_package_dep(module_dep, package_prefixes)) is not None: @@ -209,7 +233,7 @@ def parse_module_deps(module_deps, package_prefixes): internal_deps.append(src_to_module_name(module_dep)) - return internal_deps, external_deps + return internal_deps, external_deps, toolchain_deps def src_to_module_name(x): From 484c5a8490d5ac35c8e1dd6a86bf0acd9f4b3f7d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 16:08:02 +0200 Subject: [PATCH 0844/1133] Sort toolchain library 
deps --- haskell/tools/generate_target_metadata.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index e15510a0e..08c199736 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -70,7 +70,13 @@ def main(): result = obtain_target_metadata(args) - json.dump(result, args.output, indent=4) + json.dump(result, args.output, indent=4, default=json_default_handler) + + +def json_default_handler(o): + if isinstance(o, set): + return sorted(o) + raise TypeError(f'Object of type {o.__class__.__name__} is not JSON serializable') def obtain_target_metadata(args): @@ -188,7 +194,7 @@ def interpret_ghc_depends(ghc_depends, source_prefix, package_prefixes, toolchai for pkg, mods in extdeps.items(): extgraph.setdefault(module_name, {}).setdefault(pkg, []).extend(mods) for pkg in toolchaindeps: - toolchaingraph.setdefault(module_name, []).append(pkg) + toolchaingraph.setdefault(module_name, set()).add(pkg) ext = os.path.splitext(k)[1] From bcbabb23fef042bbe8c01667de5cd7da9a5f21ea Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 17:59:11 +0200 Subject: [PATCH 0845/1133] Track module package and toolchain dependencies --- haskell/compile.bzl | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 4e52df6d8..430ea2e0c 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -44,6 +44,7 @@ CompiledModuleInfo = provider(fields = { "interfaces": provider_field(list[Artifact]), "objects": provider_field(list[Artifact]), "dyn_object_dot_o": provider_field(Artifact), + "package_deps": provider_field(list[str]), }) def _compiled_module_project_as_abi(mod: CompiledModuleInfo) -> cmd_args: @@ -58,12 +59,16 @@ def _compiled_module_project_as_objects(mod: CompiledModuleInfo) -> cmd_args: def _compiled_module_project_as_dyn_objects_dot_o(mod: 
CompiledModuleInfo) -> cmd_args: return cmd_args(mod.dyn_object_dot_o) +def _compiled_module_project_as_package_deps(mod: CompiledModuleInfo) -> cmd_args: + return cmd_args(mod.package_deps) + CompiledModuleTSet = transitive_set( args_projections = { "abi": _compiled_module_project_as_abi, "interfaces": _compiled_module_project_as_interfaces, "objects": _compiled_module_project_as_objects, "dyn_objects_dot_o": _compiled_module_project_as_dyn_objects_dot_o, + "package_deps": _compiled_module_project_as_package_deps, }, ) @@ -547,6 +552,7 @@ def _compile_module( md_file: Artifact, graph: dict[str, list[str]], package_deps: dict[str, list[str]], + toolchain_deps: list[str], outputs: dict[Artifact, Artifact], resolved: dict[DynamicValue, ResolvedDynamicValue], artifact_suffix: str, @@ -637,6 +643,7 @@ def _compile_module( interfaces = module.interfaces, objects = module.objects, dyn_object_dot_o = dyn_object_dot_o, + package_deps = package_deps.keys() + toolchain_deps, ), children = [cross_package_modules] + this_package_modules, ) @@ -661,6 +668,7 @@ def compile( module_map = md["module_mapping"] graph = md["module_graph"] package_deps = md["package_deps"] + toolchain_deps = md["toolchain_deps"] mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } module_tsets = {} @@ -676,6 +684,7 @@ def compile( module_tsets = module_tsets, graph = graph, package_deps = package_deps.get(module_name, {}), + toolchain_deps = toolchain_deps.get(module_name, []), outputs = outputs, resolved = resolved, md_file=md_file, From fa39bea857381382b6120bb4e8ff1fb378b7bbc5 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 18:02:06 +0200 Subject: [PATCH 0846/1133] todo note --- haskell/compile.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 430ea2e0c..b1b61bc74 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -60,6 +60,7 @@ def _compiled_module_project_as_dyn_objects_dot_o(mod: 
CompiledModuleInfo) -> cm return cmd_args(mod.dyn_object_dot_o) def _compiled_module_project_as_package_deps(mod: CompiledModuleInfo) -> cmd_args: + # TODO[AH] avoid duplicate package flags return cmd_args(mod.package_deps) CompiledModuleTSet = transitive_set( From c4e00a9c05716d5b5fb7dd6ddb4bfeec8faaa810 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 18:08:51 +0200 Subject: [PATCH 0847/1133] Add module specific -package flags --- haskell/compile.bzl | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index b1b61bc74..bf19ca614 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -363,7 +363,8 @@ def _common_compile_args( compile_args = cmd_args() compile_args.add("-no-link", "-i") compile_args.add("-hide-all-packages") - compile_args.add(cmd_args(toolchain_libs, prepend="-package")) + if not modname: + compile_args.add(cmd_args(toolchain_libs, prepend="-package")) if enable_profiling: compile_args.add("-prof") @@ -389,8 +390,8 @@ def _common_compile_args( pkgname = pkgname, ) - compile_args.add(packages_info.exposed_package_args) if not modname: + compile_args.add(packages_info.exposed_package_args) compile_args.hidden(packages_info.exposed_package_imports) compile_args.add(packages_info.packagedb_args) if enable_th: @@ -607,6 +608,9 @@ def _compile_module( children = [cross_package_modules] + this_package_modules, ) + module_packages = package_deps.keys() + toolchain_deps + compile_cmd.add(cmd_args(module_packages, prepend = "-package")) + abi_tag = ctx.actions.artifact_tag() compile_cmd.hidden( @@ -614,6 +618,7 @@ def _compile_module( if enable_th: compile_cmd.hidden(dependency_modules.project_as_args("objects")) compile_cmd.add(dependency_modules.project_as_args("dyn_objects_dot_o")) + compile_cmd.add(cmd_args(dependency_modules.project_as_args("package_deps"), prepend = "-package")) dep_file = ctx.actions.declare_output("dep-{}_{}".format(module_name, 
artifact_suffix)).as_output() @@ -644,7 +649,7 @@ def _compile_module( interfaces = module.interfaces, objects = module.objects, dyn_object_dot_o = dyn_object_dot_o, - package_deps = package_deps.keys() + toolchain_deps, + package_deps = module_packages, ), children = [cross_package_modules] + this_package_modules, ) From 0c77ee7b9ad337ed8bb9115f06d12de78f207f76 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 18:26:16 +0200 Subject: [PATCH 0848/1133] Avoid duplicate arguments --- haskell/compile.bzl | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index bf19ca614..13603b0aa 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -63,6 +63,14 @@ def _compiled_module_project_as_package_deps(mod: CompiledModuleInfo) -> cmd_arg # TODO[AH] avoid duplicate package flags return cmd_args(mod.package_deps) +def _compiled_module_reduce_as_package_deps(children: list[dict[str, None]], mod: CompiledModuleInfo | None) -> dict[str, None]: + # TODO[AH] is there a better way to avoid duplicate -package flags? + # Using a project instead would produce duplicates. 
+ result = {pkg: None for pkg in mod.package_deps} if mod else {} + for child in children: + result.update(child) + return result + CompiledModuleTSet = transitive_set( args_projections = { "abi": _compiled_module_project_as_abi, @@ -71,6 +79,9 @@ CompiledModuleTSet = transitive_set( "dyn_objects_dot_o": _compiled_module_project_as_dyn_objects_dot_o, "package_deps": _compiled_module_project_as_package_deps, }, + reductions = { + "package_deps": _compiled_module_reduce_as_package_deps, + }, ) DynamicCompileResultInfo = provider(fields = { @@ -618,7 +629,7 @@ def _compile_module( if enable_th: compile_cmd.hidden(dependency_modules.project_as_args("objects")) compile_cmd.add(dependency_modules.project_as_args("dyn_objects_dot_o")) - compile_cmd.add(cmd_args(dependency_modules.project_as_args("package_deps"), prepend = "-package")) + compile_cmd.add(cmd_args(dependency_modules.reduce("package_deps").keys(), prepend = "-package")) dep_file = ctx.actions.declare_output("dep-{}_{}".format(module_name, artifact_suffix)).as_output() From adf899d29cb0927a02fdc0901011b936c61ae440 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 18:26:29 +0200 Subject: [PATCH 0849/1133] remove unused projection --- haskell/compile.bzl | 5 ----- 1 file changed, 5 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 13603b0aa..ab67316c9 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -59,10 +59,6 @@ def _compiled_module_project_as_objects(mod: CompiledModuleInfo) -> cmd_args: def _compiled_module_project_as_dyn_objects_dot_o(mod: CompiledModuleInfo) -> cmd_args: return cmd_args(mod.dyn_object_dot_o) -def _compiled_module_project_as_package_deps(mod: CompiledModuleInfo) -> cmd_args: - # TODO[AH] avoid duplicate package flags - return cmd_args(mod.package_deps) - def _compiled_module_reduce_as_package_deps(children: list[dict[str, None]], mod: CompiledModuleInfo | None) -> dict[str, None]: # TODO[AH] is there a better way to avoid duplicate 
-package flags? # Using a project instead would produce duplicates. @@ -77,7 +73,6 @@ CompiledModuleTSet = transitive_set( "interfaces": _compiled_module_project_as_interfaces, "objects": _compiled_module_project_as_objects, "dyn_objects_dot_o": _compiled_module_project_as_dyn_objects_dot_o, - "package_deps": _compiled_module_project_as_package_deps, }, reductions = { "package_deps": _compiled_module_reduce_as_package_deps, From 6afd67ce6cb82373e80d54a8d55bcbe194c7f6f4 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 3 May 2024 18:27:22 +0200 Subject: [PATCH 0850/1133] Revert "TMP HS MD include raw ghc_depends" This reverts commit fa75540479236731e970dcc8a3e4f5e61d127fd8. --- haskell/tools/generate_target_metadata.py | 1 - 1 file changed, 1 deletion(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 08c199736..6afd9c531 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -92,7 +92,6 @@ def obtain_target_metadata(args): "module_graph": module_graph, "package_deps": package_deps, "toolchain_deps": toolchain_deps, - "raw": ghc_depends, } From e1a6da88a5e2b3e61f18afc29cd85062cde6ff1b Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 10 Apr 2024 14:00:40 +0200 Subject: [PATCH 0851/1133] Create a haddock interface per module --- haskell/compile.bzl | 46 +++++++++---------- haskell/haskell.bzl | 6 +-- haskell/haskell_haddock.bzl | 91 +++++++++++++++++++++++++++---------- 3 files changed, 93 insertions(+), 50 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index ab67316c9..a57626a06 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -87,7 +87,7 @@ DynamicCompileResultInfo = provider(fields = { CompileResultInfo = record( objects = field(list[Artifact]), hi = field(list[Artifact]), - stubs = field(Artifact), + #stubs = field(Artifact), hashes = field(list[Artifact]), producing_indices = field(bool), module_tsets = 
field(None | list[CompiledModuleTSet] | DynamicValue), @@ -445,23 +445,23 @@ def compile_args( # TODO[AH] These are only used for haddock and conflict with tracking # per-module outputs individually. Rework the Haddock part to support this. - objects = ctx.actions.declare_output( - "objects-" + artifact_suffix, - dir = True, - ) - hi = ctx.actions.declare_output("hi-" + artifact_suffix, dir = True) - stubs = ctx.actions.declare_output("stubs-" + artifact_suffix, dir = True) - - compile_args.add( - "-odir", - objects.as_output(), - "-hidir", - hi.as_output(), - "-hiedir", - hi.as_output(), - "-stubdir", - stubs.as_output(), - ) + # objects = ctx.actions.declare_output( + # "objects-" + artifact_suffix, + # dir = True, + # ) + # hi = ctx.actions.declare_output("hi-" + artifact_suffix, dir = True) + # stubs = ctx.actions.declare_output("stubs-" + artifact_suffix, dir = True) + + # compile_args.add( + # "-odir", + # objects.as_output(), + # "-hidir", + # hi.as_output(), + # "-hiedir", + # hi.as_output(), + # "-stubdir", + # stubs.as_output(), + # ) srcs = cmd_args() for (path, src) in srcs_to_pairs(ctx.attrs.srcs): @@ -476,10 +476,10 @@ def compile_args( return CompileArgsInfo( result = CompileResultInfo( - objects = [objects], - hi = [hi], + objects = [], + hi = [], hashes = [], - stubs = stubs, + #stubs = stubs, producing_indices = producing_indices, module_tsets = None, ), @@ -538,7 +538,7 @@ def _compile_module_args( objects = objects, hi = his, hashes = [module.hash], - stubs = stubs, + #stubs = stubs, producing_indices = producing_indices, module_tsets = module_tsets, ), @@ -750,7 +750,7 @@ def compile( objects = objects, hi = interfaces, hashes = abi_hashes, - stubs = stubs_dir, + #stubs = stubs_dir, producing_indices = False, module_tsets = dyn_module_tsets, ) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 90beea5e4..4e7487876 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -617,7 +617,7 @@ def _build_haskell_lib( False: 
non_profiling_hlib.compiled.objects, } all_libs = libs + non_profiling_hlib.libs - stub_dirs = [compiled.stubs] + [non_profiling_hlib.compiled.stubs] + stub_dirs = [] #[compiled.stubs] + [non_profiling_hlib.compiled.stubs] else: dynamic = { False: compiled.module_tsets, @@ -629,7 +629,7 @@ def _build_haskell_lib( False: compiled.objects, } all_libs = libs - stub_dirs = [compiled.stubs] + stub_dirs = [] #compiled.stubs] db = _make_package( ctx, @@ -858,7 +858,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: shared_libs, shared_library_infos, ), - haskell_haddock_lib(ctx, pkgname), + haskell_haddock_lib(ctx, pkgname, [src for src in ctx.attrs.srcs if is_haskell_src(src.short_path)]), ] if indexing_tsets: diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index f1a7fda42..d62d8bb13 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -14,21 +14,22 @@ load( load( "@prelude//haskell:util.bzl", "attr_deps", + "src_to_module_name", ) HaskellHaddockInfo = provider( fields = { "html": provider_field(typing.Any, default = None), - "interface": provider_field(typing.Any, default = None), + "interfaces": provider_field(list[typing.Any], default = []), }, ) -def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: +def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, sources: list[Artifact]) -> Provider: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - iface = ctx.actions.declare_output("haddock-interface") odir = ctx.actions.declare_output("haddock-html", dir = True) + link_style = cxx_toolchain_link_style(ctx) args = compile_args( ctx, @@ -39,21 +40,22 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: pkgname = pkgname, ) + touch = ctx.actions.declare_output("haddock-stamp") + ctx.actions.write(touch, "") cmd = cmd_args(haskell_toolchain.haddock) cmd.add(cmd_args(args.args_for_cmd, format = "--optghc={}")) + cmd.add( - "--use-index", - 
"doc-index.html", - "--use-contents", - "index.html", - "--html", - "--hoogle", + #"--use-index", + #"doc-index.html", + #"--use-contents", + #"index.html", + #"--html", + #"--hoogle", "--no-tmp-comp-dir", "--no-warnings", - "--dump-interface", - iface.as_output(), - "--odir", - odir.as_output(), + #"--odir", + #odir.as_output(), "--package-name", pkgname, ) @@ -61,7 +63,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: for lib in attr_deps(ctx): hi = lib.get(HaskellHaddockInfo) if hi != None: - cmd.add("--read-interface", hi.interface) + cmd.add(cmd_args(hi.interfaces, format="--read-interface={}")) cmd.add(ctx.attrs.haddock_flags) @@ -82,17 +84,56 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: else: cmd.add(cmd_args(args.args_for_file, format = "--optghc={}")) - cmd.add(args.srcs) + cmd.add( + cmd_args( + cmd_args(touch, format = "--optghc=-i{}").parent(), + "mod-shared", + delimiter="/" + ), + "--optghc=-hidir", + cmd_args(cmd_args(touch).parent(), "mod-shared", delimiter="/"), + ) + + ifaces = [] + + for src in sources: + module = src_to_module_name(src.short_path) + iface = ctx.actions.declare_output("haddock-interface/{}".format(module)) + + ifaces.append(iface) + pprint(iface) + + ctx.actions.run( + cmd.copy().add("--dump-interface", iface.as_output(), src), + category = "haskell_haddock", + identifier = module, + no_outputs_cleanup = True, + ) + + cmd.add( + "--use-index", + "doc-index.html", + "--use-contents", + "index.html", + "--html", + "--hoogle", + "--no-tmp-comp-dir", + "--no-warnings", + "--odir", + odir.as_output(), + cmd_args(ifaces, format="--read-interface={}"), + ) + # Buck2 requires that the output artifacts are always produced, but Haddock only # creates them if it needs to, so we need a wrapper script to mkdir the outputs. 
- script = ctx.actions.declare_output("haddock-script") + script = ctx.actions.declare_output("haddock-script-{}".format(module)) script_args = cmd_args([ - "mkdir", - "-p", - args.result.objects[0].as_output(), - args.result.hi[0].as_output(), - args.result.stubs.as_output(), - "&&", + #"mkdir", + #"-p", + #args.result.objects[0].as_output(), + #args.result.hi[0].as_output(), + #args.result.stubs.as_output(), + #"&& set -x &&", cmd_args(cmd, quote = "shell"), ], delimiter = " ") ctx.actions.write( @@ -105,10 +146,12 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str) -> Provider: ctx.actions.run( cmd_args(script).hidden(cmd), category = "haskell_haddock", + identifier = "html", no_outputs_cleanup = True, ) - return HaskellHaddockInfo(interface = iface, html = odir) + + return HaskellHaddockInfo(interfaces = ifaces, html = odir) def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -128,7 +171,7 @@ def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: for lib in attr_deps(ctx): hi = lib.get(HaskellHaddockInfo) if hi != None: - cmd.add("--read-interface", hi.interface) + cmd.add(cmd_args(hi.interfaces, format="--read-interface={}")) dep_htmls.append(hi.html) cmd.add(ctx.attrs.haddock_flags) From bc52e2fdd023cf03a1973fc7c1ff503448c225af Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 30 Apr 2024 17:22:57 +0200 Subject: [PATCH 0852/1133] [buck2] first working with haddock one-shot mode! 
--- haskell/compile.bzl | 7 +- haskell/haskell.bzl | 33 +-- haskell/haskell_haddock.bzl | 398 +++++++++++++++++++++++++++--------- 3 files changed, 330 insertions(+), 108 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index a57626a06..a53043e1c 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -194,8 +194,9 @@ def target_metadata( *, pkgname: str, sources: list[Artifact], + suffix: str = "", ) -> Artifact: - md_file = ctx.actions.declare_output(ctx.attrs.name + ".md.json") + md_file = ctx.actions.declare_output(ctx.attrs.name + suffix + ".md.json") md_gen = ctx.attrs._generate_target_metadata[RunInfo] haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -252,7 +253,7 @@ def target_metadata( _attr_deps_haskell_lib_package_name_and_prefix(ctx), ) - ctx.actions.run(md_args, category = "haskell_metadata") + ctx.actions.run(md_args, category = "haskell_metadata", identifier = suffix if suffix else None) return md_file @@ -436,7 +437,7 @@ def compile_args( compile_cmd.add(ctx.attrs.compiler_flags) # TODO[CB] use the empty lib once using hi haddock - _, compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, use_empty_lib = False) + _, compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, use_empty_lib = True) if getattr(ctx.attrs, "main", None) != None: compile_args.add(["-main-is", ctx.attrs.main]) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 4e7487876..fc1128b41 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -64,8 +64,8 @@ load( load( "@prelude//haskell:library_info.bzl", "HaskellLibraryInfo", - "HaskellLibraryProvider", "HaskellLibraryInfoTSet", + "HaskellLibraryProvider", ) load( "@prelude//haskell:link_info.bzl", @@ -280,11 +280,13 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: hlibinfos[link_style] = hlibinfo hlinkinfos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value 
= hlibinfo, children = [ - lib.info[link_style] for lib in haskell_infos + lib.info[link_style] + for lib in haskell_infos ]) prof_hlibinfos[link_style] = prof_hlibinfo prof_hlinkinfos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = prof_hlibinfo, children = [ - lib.prof_info[link_style] for lib in haskell_infos + lib.prof_info[link_style] + for lib in haskell_infos ]) link_infos[link_style] = LinkInfos( default = LinkInfo( @@ -425,7 +427,7 @@ def _make_package( art_suff = get_artifact_suffix(link_style, profiled) return "\"${pkgroot}/" + dir_prefix + "-" + art_suff + "\"" - import_dirs = [ mk_artifact_dir("mod", profiled) for profiled in profiling ] + import_dirs = [mk_artifact_dir("mod", profiled) for profiled in profiling] conf = [ "name: " + pkgname, @@ -450,7 +452,7 @@ def _make_package( # following this logic (https://fburl.com/code/3gmobm5x) and will fail. libname += "_p" - library_dirs = [ mk_artifact_dir("lib", profiled) for profiled in profiling ] + library_dirs = [mk_artifact_dir("lib", profiled) for profiled in profiling] conf.append("library-dirs:" + ", ".join(library_dirs)) conf.append("extra-libraries: " + libname) @@ -552,7 +554,7 @@ def _build_haskell_lib( link.add(haskell_toolchain.linker_flags) link.add(ctx.attrs.linker_flags) link.add("-hide-all-packages") - link.add(cmd_args(toolchain_libs, prepend="-package")) + link.add(cmd_args(toolchain_libs, prepend = "-package")) link.add("-o", lib.as_output()) link.add( get_shared_library_flags(linker_info.type), @@ -617,7 +619,7 @@ def _build_haskell_lib( False: non_profiling_hlib.compiled.objects, } all_libs = libs + non_profiling_hlib.libs - stub_dirs = [] #[compiled.stubs] + [non_profiling_hlib.compiled.stubs] + stub_dirs = [] #[compiled.stubs] + [non_profiling_hlib.compiled.stubs] else: dynamic = { False: compiled.module_tsets, @@ -629,7 +631,7 @@ def _build_haskell_lib( False: compiled.objects, } all_libs = libs - stub_dirs = [] #compiled.stubs] + stub_dirs = [] #compiled.stubs] db = 
_make_package( ctx, @@ -657,7 +659,7 @@ def _build_haskell_lib( db = db, empty_db = empty_db, id = pkgname, - dynamic = dynamic, # TODO(ah) refine with dynamic projections + dynamic = dynamic, # TODO(ah) refine with dynamic projections import_dirs = import_artifacts, objects = object_artifacts, stub_dirs = stub_dirs, @@ -858,7 +860,13 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: shared_libs, shared_library_infos, ), - haskell_haddock_lib(ctx, pkgname, [src for src in ctx.attrs.srcs if is_haskell_src(src.short_path)]), + haskell_haddock_lib( + ctx, + pkgname, + [src for src in ctx.attrs.srcs if is_haskell_src(src.short_path)], + non_profiling_hlib[LinkStyle("shared")].compiled, + md_file, + ), ] if indexing_tsets: @@ -1002,7 +1010,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: output = ctx.actions.declare_output(ctx.attrs.name) link = cmd_args(haskell_toolchain.compiler) link.add("-hide-all-packages") - link.add(cmd_args(toolchain_libs, prepend="-package")) + link.add(cmd_args(toolchain_libs, prepend = "-package")) link.add(cmd_args(packages_info.exposed_package_args)) link.add(packages_info.packagedb_args) link.add("-o", output.as_output()) @@ -1012,6 +1020,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link.hidden(packages_info.exposed_package_libs) objects = {} + # only add the first object per module # TODO[CB] restructure this to use a record / dict for compiled.objects for obj in compiled.objects: @@ -1183,7 +1192,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: linkable_artifacts, ) - link.add(cmd_args(db, prepend="-package-db")) + link.add(cmd_args(db, prepend = "-package-db")) link.add("-package", pkgname) link.hidden(linkable_artifacts) else: diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index d62d8bb13..19bdd4556 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -5,7 +5,8 @@ # License, Version 2.0 found in the 
LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//haskell:compile.bzl", "compile_args") +load("@prelude//haskell:compile.bzl", "compile_args", "CompileResultInfo", "CompiledModuleTSet", "DynamicCompileResultInfo") +load("@prelude//haskell:library_info.bzl", "HaskellLibraryInfoTSet") load("@prelude//haskell:link_info.bzl", "cxx_toolchain_link_style") load( "@prelude//haskell:toolchain.bzl", @@ -14,75 +15,170 @@ load( load( "@prelude//haskell:util.bzl", "attr_deps", + "attr_deps_haskell_link_infos", + "get_artifact_suffix", + "is_haskell_src", "src_to_module_name", ) +load( + "@prelude//paths.bzl", "paths" +) +load("@prelude//utils:graph_utils.bzl", "post_order_traversal", "breadth_first_traversal") +load("@prelude//utils:arglike.bzl", "ArgLike") HaskellHaddockInfo = provider( fields = { - "html": provider_field(typing.Any, default = None), + "html": provider_field(list[typing.Any], default = []), "interfaces": provider_field(list[typing.Any], default = []), }, ) -def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, sources: list[Artifact]) -> Provider: - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - odir = ctx.actions.declare_output("haddock-html", dir = True) +_HaddockInterface = record( + src = Artifact, + output = Artifact, + html = Artifact, +) + +_HaddockInfo = record( + interface = ArgLike, # FIXME should be Artifact + dump = Artifact, + html = Artifact, +) +def _haskell_interfaces_args(info: _HaddockInfo): + return cmd_args(info.interface, format="--one-shot-dep-hi={}") - link_style = cxx_toolchain_link_style(ctx) - args = compile_args( - ctx, - link_style, - enable_profiling = False, - enable_th = True, - suffix = "-haddock", - pkgname = pkgname, +_HaddockInfoTSet = transitive_set( + args_projections = { + "interfaces": _haskell_interfaces_args + } +) + +def _dump_haddock_interface( + ctx: AnalysisContext, + cmd: cmd_args, + module_name: str, + module_tsets: dict[str, _HaddockInfoTSet], + 
haddock_interfaces: dict[str, _HaddockInterface], + module_deps: list[CompiledModuleTSet], + graph: dict[str, list[str]], + outputs: dict[Artifact, Artifact]) -> _HaddockInfoTSet: + + haddock_interface = haddock_interfaces[module_name] + + #pprint(cmd) + + #print(transitive_deps.keys()) + #deps = [ dep for dep in transitive_deps[module_name] ] + + # Transitive module dependencies from other packages. + cross_package_modules = ctx.actions.tset( + CompiledModuleTSet, + children = module_deps, ) + cross_interfaces = cross_package_modules.project_as_args("interfaces") + + # Transitive module dependencies from the same package. + this_package_modules = [ + module_tsets[dep_name] + for dep_name in graph[module_name] + ] + #pprint(this_package_modules) + + haskell_interface = cmd_args( + cmd_args(outputs[haddock_interface.output].as_output(), parent = 2), + "mod-shared", + paths.replace_extension(haddock_interface.src.short_path, ".dyn_hi"), delimiter='/') + ctx.actions.run( + cmd.copy().add( + "--html", + "--hoogle", + "--odir", cmd_args(outputs[haddock_interface.html].as_output(), parent = 1), + "--dump-interface", outputs[haddock_interface.output].as_output(), + # TODO add specific reference to hi artifact + cmd_args( + haskell_interface, + format="--one-shot-hi={}"), + cmd_args( + [haddock_info.project_as_args("interfaces") for haddock_info in this_package_modules], + ), + cmd_args( + cross_interfaces, format="--one-shot-dep-hi={}" + ) + ), + category = "haskell_haddock_x", + identifier = module_name, + no_outputs_cleanup = True, + ) + + #print(module_name, ":", this_package_modules) + + return ctx.actions.tset( + _HaddockInfoTSet, + value = _HaddockInfo(interface = haskell_interface, dump = outputs[haddock_interface.output], html = outputs[haddock_interface.html]), + children = this_package_modules, + ) + + #haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + +def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, sources: list[Artifact], 
compiled: CompileResultInfo, md_file: Artifact) -> Provider: + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + hifaces = [] # : list[Artifact] + #iface = ctx.actions.declare_output("haddock-interface") + #odir = ctx.actions.declare_output("haddock-html", dir = True) + + link_style = cxx_toolchain_link_style(ctx) + # args = compile_args( + # ctx, + # link_style, + # enable_profiling = False, + # enable_th = True, + # suffix = "-haddock", + # pkgname = pkgname, + # ) touch = ctx.actions.declare_output("haddock-stamp") ctx.actions.write(touch, "") cmd = cmd_args(haskell_toolchain.haddock) - cmd.add(cmd_args(args.args_for_cmd, format = "--optghc={}")) + #cmd.add(cmd_args(args.args_for_cmd, format = "--optghc={}")) cmd.add( - #"--use-index", - #"doc-index.html", - #"--use-contents", - #"index.html", + "--use-index", + "doc-index.html", + "--use-contents", + "index.html", #"--html", #"--hoogle", "--no-tmp-comp-dir", "--no-warnings", + #"--dump-interface", + #iface.as_output(), + #"--trace-args", #"--odir", #odir.as_output(), "--package-name", pkgname, ) - for lib in attr_deps(ctx): - hi = lib.get(HaskellHaddockInfo) - if hi != None: - cmd.add(cmd_args(hi.interfaces, format="--read-interface={}")) - cmd.add(ctx.attrs.haddock_flags) source_entity = read_root_config("haskell", "haddock_source_entity", None) if source_entity: cmd.add("--source-entity", source_entity) - if args.args_for_file: - if haskell_toolchain.use_argsfile: - argsfile = ctx.actions.declare_output( - "haskell_haddock.argsfile", - ) - ghcargs = cmd_args(args.args_for_file, format = "--optghc={}") - fileargs = cmd_args(ghcargs).add(args.srcs) - ctx.actions.write(argsfile.as_output(), fileargs, allow_args = True) - cmd.add(cmd_args(argsfile, format = "@{}")) - cmd.hidden(fileargs) - else: - cmd.add(cmd_args(args.args_for_file, format = "--optghc={}")) + # if args.args_for_file: + # if haskell_toolchain.use_argsfile: + # argsfile = ctx.actions.declare_output( + # 
"haskell_haddock.argsfile", + # ) + # ghcargs = cmd_args(args.args_for_file, format = "--optghc={}") + # fileargs = cmd_args(ghcargs).add(args.srcs) + # ctx.actions.write(argsfile.as_output(), fileargs, allow_args = True) + # cmd.add(cmd_args(argsfile, format = "@{}")) + # cmd.hidden(fileargs) + # else: + # cmd.add(cmd_args(args.args_for_file, format = "--optghc={}")) cmd.add( cmd_args( @@ -90,69 +186,185 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, sources: list[Artifa "mod-shared", delimiter="/" ), - "--optghc=-hidir", - cmd_args(cmd_args(touch).parent(), "mod-shared", delimiter="/"), + # cmd_args( + # "-hidir", + # cmd_args(cmd_args(touch).parent(), "mod-shared", delimiter="/"), + # format="--optghc={}" + # ) ) - ifaces = [] + artifact_suffix = get_artifact_suffix(link_style, enable_profiling = False) - for src in sources: - module = src_to_module_name(src.short_path) - iface = ctx.actions.declare_output("haddock-interface/{}".format(module)) + #modules = modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = False, suffix = artifact_suffix) - ifaces.append(iface) - pprint(iface) - - ctx.actions.run( - cmd.copy().add("--dump-interface", iface.as_output(), src), - category = "haskell_haddock", - identifier = module, - no_outputs_cleanup = True, + haddock_interfaces = { + src_to_module_name(src.short_path): _HaddockInterface( + src = src, + output = ctx.actions.declare_output("haddock-interface/{}.haddock".format(src_to_module_name(src.short_path))), + html = ctx.actions.declare_output("haddock-html/{}.html".format(src_to_module_name(src.short_path).replace(".", "-"))), ) + for src in sources if is_haskell_src(src.short_path) + } + + # for haddock in haddock_interfaces.values(): + # ctx.actions.run( + # cmd_args("touch", haddock.output.as_output()), + # category = "touch_haddock", + # identifier = haddock.output.short_path, + # ) + cmd.hidden(hifaces) # TODO remove once no longer needed + + 
direct_deps_link_info = attr_deps_haskell_link_infos(ctx) + + def dump_haddock_interfaces(ctx, artifacts, resolved, outputs, md_file=md_file, dyn_cmd=cmd.copy(), haddock_interfaces=haddock_interfaces): + md = artifacts[md_file].read_json() + th_modules = md["th_modules"] + module_map = md["module_mapping"] + graph = md["module_graph"] + package_deps = md["package_deps"] + + print(ctx.label.name, package_deps) + # libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = [ + # lib.info[link_style] + # for lib in direct_deps_link_info + # ]) + + + dynamic_info_lib = {} + + for lib in direct_deps_link_info: + info = lib.info[link_style] + direct = info.value + dynamic = direct.dynamic[False] + dynamic_info = resolved[dynamic][DynamicCompileResultInfo] + + dynamic_info_lib[direct.name] = dynamic_info + + mapped_modules = { module_map.get(k, k): v for k, v in haddock_interfaces.items() } + module_tsets = {} + + for module_name in post_order_traversal(graph): + module_deps = [ + info.modules[mod] + for lib, info in dynamic_info_lib.items() + for mod in package_deps.get(module_name, {}).get(lib, []) + ] + + # for lib, info in dynamic_info_lib.items(): + # for mod in package_deps.get(module_name, {}).get(lib, []): + # module_deps.append(info.modules[mod]) + + module_tsets[module_name] = _dump_haddock_interface( + ctx, + dyn_cmd.copy(), + module_name = module_name, + module_tsets = module_tsets, + haddock_interfaces = mapped_modules, + module_deps = module_deps, + graph = graph, + outputs = outputs + ) - cmd.add( - "--use-index", - "doc-index.html", - "--use-contents", - "index.html", - "--html", - "--hoogle", - "--no-tmp-comp-dir", - "--no-warnings", - "--odir", - odir.as_output(), - cmd_args(ifaces, format="--read-interface={}"), - ) - - # Buck2 requires that the output artifacts are always produced, but Haddock only - # creates them if it needs to, so we need a wrapper script to mkdir the outputs. 
- script = ctx.actions.declare_output("haddock-script-{}".format(module)) - script_args = cmd_args([ - #"mkdir", - #"-p", - #args.result.objects[0].as_output(), - #args.result.hi[0].as_output(), - #args.result.stubs.as_output(), - #"&& set -x &&", - cmd_args(cmd, quote = "shell"), - ], delimiter = " ") - ctx.actions.write( - script, - cmd_args("#!/bin/sh", script_args), - is_executable = True, - allow_args = True, + #print(haddock_interfaces) + ctx.actions.dynamic_output( + dynamic = [md_file], + promises = [ + info.value.dynamic[False] + for lib in direct_deps_link_info + for info in [ + #lib.prof_info[link_style] + #if enable_profiling else + lib.info[link_style] + ] + ], + inputs = hifaces, + outputs = [output.as_output() for haddock in haddock_interfaces.values() for output in [haddock.output, haddock.html]], + f = dump_haddock_interfaces ) - ctx.actions.run( - cmd_args(script).hidden(cmd), - category = "haskell_haddock", - identifier = "html", - no_outputs_cleanup = True, + # for haddock in haddock_interfaces.values(): + # ctx.actions.run( + # cmd.copy().add( + # #"--odir", mod_odir, + # "--dump-interface", haddock.output.as_output(), + # # TODO add specific reference to hi artifact + # cmd_args( + # cmd_args( + # cmd_args(haddock.output.as_output(), parent = 2), + # "mod-shared", + # paths.replace_extension(haddock.src.short_path, ".dyn_hi"), delimiter='/'), + # format="--one-shot-hi={}"), + # ), + # category = "haskell_haddock_x", + # identifier = src_to_module_name(haddock.src.short_path), + # no_outputs_cleanup = True, + # ) + + #mod_odir = ctx.actions.declare_output("haddock-out_{}".format(module), dir=True) + + #pprint(iface) + + # ctx.actions.run( + # cmd_args( + # "mkdir", + # mod_odir.as_output(), + # ), + # category = "haddock_odir", + # identifier = module, + # ) + + # cmd.add( + # "--use-index", + # "doc-index.html", + # "--use-contents", + # "index.html", + # "--html", + # "--hoogle", + # "--no-tmp-comp-dir", + # "--no-warnings", + # "--odir", + # 
odir.as_output(), + # #cmd_args(ifaces, format="--read-interface={}"), + # ) + + # #cmd.add(args.srcs) + # #print([h.short_path for h in hifaces]) + # #pprint(cmd) + + # # Buck2 requires that the output artifacts are always produced, but Haddock only + # # creates them if it needs to, so we need a wrapper script to mkdir the outputs. + # script = ctx.actions.declare_output("haddock-script-{}".format(ctx.label.name)) + # script_args = cmd_args(["/nix/store/mb488rr560vq1xnl10hinnyfflcrd51n-coreutils-9.4/bin/ls "] + hifaces + [ + # #"mkdir", + # #"-p", + # #args.result.objects[0].as_output(), + # #args.result.hi[0].as_output(), + # #args.result.stubs.as_output(), + # #"&& set -x &&", + # "&& /nix/store/mb488rr560vq1xnl10hinnyfflcrd51n-coreutils-9.4/bin/ls -lh &&", + # cmd_args(cmd, quote = "shell"), + # " >&2", + # ], delimiter = " \\\n ") + # ctx.actions.write( + # script, + # cmd_args("#!/bin/sh", script_args), + # is_executable = True, + # allow_args = True, + # ) + + # ctx.actions.run( + # cmd_args(script).hidden(cmd), + # category = "haskell_haddock", + # identifier = "html", + # no_outputs_cleanup = True, + # ) + + + return HaskellHaddockInfo( + interfaces = [i.output for i in haddock_interfaces.values()], + html = [i.html for i in haddock_interfaces.values()] ) - - return HaskellHaddockInfo(interfaces = ifaces, html = odir) - def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -172,7 +384,8 @@ def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: hi = lib.get(HaskellHaddockInfo) if hi != None: cmd.add(cmd_args(hi.interfaces, format="--read-interface={}")) - dep_htmls.append(hi.html) + if hi.html: + dep_htmls.extend(hi.html) cmd.add(ctx.attrs.haddock_flags) @@ -183,12 +396,11 @@ def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: cmd_args(cmd, delimiter = " ", quote = "shell"), [ cmd_args( - # NOTE could use --reflink=auto if cp command supports it - 
["cp", "-Rf", cmd_args(dir, format = "{}/*"), out.as_output()], + ["cp", "-f", "--reflink=auto", html, out.as_output()], delimiter = " ", - ) for dir in dep_htmls + ) for html in dep_htmls ], - delimiter = " && " + delimiter = " && \\\n " ) ]) From a351d88b335dd198b8f67d1f5497f946ff418272 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 23 Apr 2024 10:41:18 +0200 Subject: [PATCH 0853/1133] Explicitely pass hi files to haddock --- haskell/haskell.bzl | 1 - haskell/haskell_haddock.bzl | 32 ++++++++++++-------------------- 2 files changed, 12 insertions(+), 21 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index fc1128b41..3ef6c45df 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -863,7 +863,6 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: haskell_haddock_lib( ctx, pkgname, - [src for src in ctx.attrs.srcs if is_haskell_src(src.short_path)], non_profiling_hlib[LinkStyle("shared")].compiled, md_file, ), diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index 19bdd4556..255dab17f 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -35,7 +35,7 @@ HaskellHaddockInfo = provider( _HaddockInterface = record( - src = Artifact, + hi = Artifact, output = Artifact, html = Artifact, ) @@ -86,19 +86,14 @@ def _dump_haddock_interface( ] #pprint(this_package_modules) - haskell_interface = cmd_args( - cmd_args(outputs[haddock_interface.output].as_output(), parent = 2), - "mod-shared", - paths.replace_extension(haddock_interface.src.short_path, ".dyn_hi"), delimiter='/') ctx.actions.run( cmd.copy().add( "--html", "--hoogle", "--odir", cmd_args(outputs[haddock_interface.html].as_output(), parent = 1), "--dump-interface", outputs[haddock_interface.output].as_output(), - # TODO add specific reference to hi artifact cmd_args( - haskell_interface, + haddock_interface.hi, format="--one-shot-hi={}"), cmd_args( [haddock_info.project_as_args("interfaces") for haddock_info in 
this_package_modules], @@ -107,7 +102,7 @@ def _dump_haddock_interface( cross_interfaces, format="--one-shot-dep-hi={}" ) ), - category = "haskell_haddock_x", + category = "haskell_haddock", identifier = module_name, no_outputs_cleanup = True, ) @@ -116,17 +111,14 @@ def _dump_haddock_interface( return ctx.actions.tset( _HaddockInfoTSet, - value = _HaddockInfo(interface = haskell_interface, dump = outputs[haddock_interface.output], html = outputs[haddock_interface.html]), + value = _HaddockInfo(interface = haddock_interface.hi, dump = outputs[haddock_interface.output], html = outputs[haddock_interface.html]), children = this_package_modules, ) - #haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] -def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, sources: list[Artifact], compiled: CompileResultInfo, md_file: Artifact) -> Provider: +#def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, sources: list[Artifact], compiled: CompileResultInfo, md_file: Artifact) -> Provider: +def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileResultInfo, md_file: Artifact) -> Provider: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - hifaces = [] # : list[Artifact] - #iface = ctx.actions.declare_output("haddock-interface") - #odir = ctx.actions.declare_output("haddock-html", dir = True) link_style = cxx_toolchain_link_style(ctx) # args = compile_args( @@ -198,12 +190,12 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, sources: list[Artifa #modules = modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = False, suffix = artifact_suffix) haddock_interfaces = { - src_to_module_name(src.short_path): _HaddockInterface( - src = src, - output = ctx.actions.declare_output("haddock-interface/{}.haddock".format(src_to_module_name(src.short_path))), - html = ctx.actions.declare_output("haddock-html/{}.html".format(src_to_module_name(src.short_path).replace(".", 
"-"))), + src_to_module_name(hi.short_path): _HaddockInterface( + hi = hi, + output = ctx.actions.declare_output("haddock-interface/{}.haddock".format(src_to_module_name(hi.short_path))), + html = ctx.actions.declare_output("haddock-html/{}.html".format(src_to_module_name(hi.short_path).replace(".", "-"))), ) - for src in sources if is_haskell_src(src.short_path) + for hi in compiled.hi } # for haddock in haddock_interfaces.values(): @@ -277,7 +269,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, sources: list[Artifa lib.info[link_style] ] ], - inputs = hifaces, + inputs = compiled.hi, outputs = [output.as_output() for haddock in haddock_interfaces.values() for output in [haddock.output, haddock.html]], f = dump_haddock_interfaces ) From 2e85edcb28fc4807e9d365b87cfb8548afb72b19 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 23 Apr 2024 13:53:43 +0200 Subject: [PATCH 0854/1133] Refactor and cleanup `_HaddockInterface` and `_HaddockInfo` are the same now. --- haskell/compile.bzl | 7 +- haskell/haskell.bzl | 3 +- haskell/haskell_haddock.bzl | 219 ++++-------------------------------- 3 files changed, 27 insertions(+), 202 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index a53043e1c..5c81261ae 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -36,7 +36,7 @@ load( "LinkStyle", ) load("@prelude//:paths.bzl", "paths") -load("@prelude//utils:graph_utils.bzl", "post_order_traversal", "breadth_first_traversal") +load("@prelude//utils:graph_utils.bzl", "post_order_traversal") load("@prelude//utils:strings.bzl", "strip_prefix") CompiledModuleInfo = provider(fields = { @@ -192,7 +192,6 @@ _toolchain_library_catalog = anon_rule( def target_metadata( ctx: AnalysisContext, *, - pkgname: str, sources: list[Artifact], suffix: str = "", ) -> Artifact: @@ -287,8 +286,7 @@ def get_packages_info( enable_profiling: bool, use_empty_lib: bool, resolved: None | dict[DynamicValue, ResolvedDynamicValue] = None, - package_deps: None | 
dict[str, list[str]] = None, - pkgname: str | None = None) -> PackagesInfo: + package_deps: None | dict[str, list[str]] = None) -> PackagesInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] # Collect library dependencies. Note that these don't need to be in a @@ -394,7 +392,6 @@ def _common_compile_args( use_empty_lib = use_empty_lib, resolved = resolved, package_deps = package_deps, - pkgname = pkgname, ) if not modname: diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 3ef6c45df..33fcd63f0 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -703,7 +703,6 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: md_file = target_metadata( ctx, - pkgname = pkgname, sources = ctx.attrs.srcs, ) @@ -983,7 +982,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: if enable_profiling and link_style == LinkStyle("shared"): link_style = LinkStyle("static") - md_file = target_metadata(ctx, pkgname = "", sources = ctx.attrs.srcs) + md_file = target_metadata(ctx, sources = ctx.attrs.srcs) compiled = compile( ctx, diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index 255dab17f..c20aa1c66 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -6,7 +6,6 @@ # of this source tree. 
load("@prelude//haskell:compile.bzl", "compile_args", "CompileResultInfo", "CompiledModuleTSet", "DynamicCompileResultInfo") -load("@prelude//haskell:library_info.bzl", "HaskellLibraryInfoTSet") load("@prelude//haskell:link_info.bzl", "cxx_toolchain_link_style") load( "@prelude//haskell:toolchain.bzl", @@ -16,15 +15,9 @@ load( "@prelude//haskell:util.bzl", "attr_deps", "attr_deps_haskell_link_infos", - "get_artifact_suffix", - "is_haskell_src", "src_to_module_name", ) -load( - "@prelude//paths.bzl", "paths" -) -load("@prelude//utils:graph_utils.bzl", "post_order_traversal", "breadth_first_traversal") -load("@prelude//utils:arglike.bzl", "ArgLike") +load("@prelude//utils:graph_utils.bzl", "post_order_traversal") HaskellHaddockInfo = provider( fields = { @@ -34,15 +27,9 @@ HaskellHaddockInfo = provider( ) -_HaddockInterface = record( - hi = Artifact, - output = Artifact, - html = Artifact, -) - _HaddockInfo = record( - interface = ArgLike, # FIXME should be Artifact - dump = Artifact, + interface = Artifact, + haddock = Artifact, html = Artifact, ) @@ -55,23 +42,16 @@ _HaddockInfoTSet = transitive_set( } ) -def _dump_haddock_interface( +def _haddock_dump_interface( ctx: AnalysisContext, cmd: cmd_args, module_name: str, module_tsets: dict[str, _HaddockInfoTSet], - haddock_interfaces: dict[str, _HaddockInterface], + haddock_info: _HaddockInfo, module_deps: list[CompiledModuleTSet], graph: dict[str, list[str]], outputs: dict[Artifact, Artifact]) -> _HaddockInfoTSet: - haddock_interface = haddock_interfaces[module_name] - - #pprint(cmd) - - #print(transitive_deps.keys()) - #deps = [ dep for dep in transitive_deps[module_name] ] - # Transitive module dependencies from other packages. 
cross_package_modules = ctx.actions.tset( CompiledModuleTSet, @@ -84,16 +64,15 @@ def _dump_haddock_interface( module_tsets[dep_name] for dep_name in graph[module_name] ] - #pprint(this_package_modules) ctx.actions.run( cmd.copy().add( + "--odir", cmd_args(outputs[haddock_info.html].as_output(), parent = 1), + "--dump-interface", outputs[haddock_info.haddock].as_output(), "--html", "--hoogle", - "--odir", cmd_args(outputs[haddock_interface.html].as_output(), parent = 1), - "--dump-interface", outputs[haddock_interface.output].as_output(), cmd_args( - haddock_interface.hi, + haddock_info.interface, format="--one-shot-hi={}"), cmd_args( [haddock_info.project_as_args("interfaces") for haddock_info in this_package_modules], @@ -107,48 +86,27 @@ def _dump_haddock_interface( no_outputs_cleanup = True, ) - #print(module_name, ":", this_package_modules) - return ctx.actions.tset( _HaddockInfoTSet, - value = _HaddockInfo(interface = haddock_interface.hi, dump = outputs[haddock_interface.output], html = outputs[haddock_interface.html]), + value = _HaddockInfo(interface = haddock_info.interface, haddock = outputs[haddock_info.haddock], html = outputs[haddock_info.html]), children = this_package_modules, ) -#def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, sources: list[Artifact], compiled: CompileResultInfo, md_file: Artifact) -> Provider: def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileResultInfo, md_file: Artifact) -> Provider: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] link_style = cxx_toolchain_link_style(ctx) - # args = compile_args( - # ctx, - # link_style, - # enable_profiling = False, - # enable_th = True, - # suffix = "-haddock", - # pkgname = pkgname, - # ) - - touch = ctx.actions.declare_output("haddock-stamp") - ctx.actions.write(touch, "") + cmd = cmd_args(haskell_toolchain.haddock) - #cmd.add(cmd_args(args.args_for_cmd, format = "--optghc={}")) cmd.add( "--use-index", "doc-index.html", 
"--use-contents", "index.html", - #"--html", - #"--hoogle", "--no-tmp-comp-dir", "--no-warnings", - #"--dump-interface", - #iface.as_output(), - #"--trace-args", - #"--odir", - #odir.as_output(), "--package-name", pkgname, ) @@ -159,69 +117,23 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes if source_entity: cmd.add("--source-entity", source_entity) - # if args.args_for_file: - # if haskell_toolchain.use_argsfile: - # argsfile = ctx.actions.declare_output( - # "haskell_haddock.argsfile", - # ) - # ghcargs = cmd_args(args.args_for_file, format = "--optghc={}") - # fileargs = cmd_args(ghcargs).add(args.srcs) - # ctx.actions.write(argsfile.as_output(), fileargs, allow_args = True) - # cmd.add(cmd_args(argsfile, format = "@{}")) - # cmd.hidden(fileargs) - # else: - # cmd.add(cmd_args(args.args_for_file, format = "--optghc={}")) - - cmd.add( - cmd_args( - cmd_args(touch, format = "--optghc=-i{}").parent(), - "mod-shared", - delimiter="/" - ), - # cmd_args( - # "-hidir", - # cmd_args(cmd_args(touch).parent(), "mod-shared", delimiter="/"), - # format="--optghc={}" - # ) - ) - - artifact_suffix = get_artifact_suffix(link_style, enable_profiling = False) - - #modules = modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = False, suffix = artifact_suffix) - - haddock_interfaces = { - src_to_module_name(hi.short_path): _HaddockInterface( - hi = hi, - output = ctx.actions.declare_output("haddock-interface/{}.haddock".format(src_to_module_name(hi.short_path))), + haddock_infos = { + src_to_module_name(hi.short_path): _HaddockInfo( + interface = hi, + haddock = ctx.actions.declare_output("haddock-interface/{}.haddock".format(src_to_module_name(hi.short_path))), html = ctx.actions.declare_output("haddock-html/{}.html".format(src_to_module_name(hi.short_path).replace(".", "-"))), ) for hi in compiled.hi } - # for haddock in haddock_interfaces.values(): - # ctx.actions.run( - # cmd_args("touch", 
haddock.output.as_output()), - # category = "touch_haddock", - # identifier = haddock.output.short_path, - # ) - cmd.hidden(hifaces) # TODO remove once no longer needed - direct_deps_link_info = attr_deps_haskell_link_infos(ctx) - def dump_haddock_interfaces(ctx, artifacts, resolved, outputs, md_file=md_file, dyn_cmd=cmd.copy(), haddock_interfaces=haddock_interfaces): + def haddock_dump_interfaces(ctx, artifacts, resolved, outputs, md_file=md_file, dyn_cmd=cmd.copy(), haddock_infos=haddock_infos): md = artifacts[md_file].read_json() - th_modules = md["th_modules"] module_map = md["module_mapping"] graph = md["module_graph"] package_deps = md["package_deps"] - print(ctx.label.name, package_deps) - # libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = [ - # lib.info[link_style] - # for lib in direct_deps_link_info - # ]) - - dynamic_info_lib = {} for lib in direct_deps_link_info: @@ -232,7 +144,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes dynamic_info_lib[direct.name] = dynamic_info - mapped_modules = { module_map.get(k, k): v for k, v in haddock_interfaces.items() } + haddock_infos = { module_map.get(k, k): v for k, v in haddock_infos.items() } module_tsets = {} for module_name in post_order_traversal(graph): @@ -242,22 +154,17 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes for mod in package_deps.get(module_name, {}).get(lib, []) ] - # for lib, info in dynamic_info_lib.items(): - # for mod in package_deps.get(module_name, {}).get(lib, []): - # module_deps.append(info.modules[mod]) - - module_tsets[module_name] = _dump_haddock_interface( + module_tsets[module_name] = _haddock_dump_interface( ctx, dyn_cmd.copy(), module_name = module_name, module_tsets = module_tsets, - haddock_interfaces = mapped_modules, + haddock_info = haddock_infos[module_name], module_deps = module_deps, graph = graph, - outputs = outputs + outputs = outputs, ) - #print(haddock_interfaces) 
ctx.actions.dynamic_output( dynamic = [md_file], promises = [ @@ -266,95 +173,17 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes for info in [ #lib.prof_info[link_style] #if enable_profiling else - lib.info[link_style] + lib.info[link_style], ] ], inputs = compiled.hi, - outputs = [output.as_output() for haddock in haddock_interfaces.values() for output in [haddock.output, haddock.html]], - f = dump_haddock_interfaces + outputs = [output.as_output() for info in haddock_infos.values() for output in [info.haddock, info.html]], + f = haddock_dump_interfaces, ) - # for haddock in haddock_interfaces.values(): - # ctx.actions.run( - # cmd.copy().add( - # #"--odir", mod_odir, - # "--dump-interface", haddock.output.as_output(), - # # TODO add specific reference to hi artifact - # cmd_args( - # cmd_args( - # cmd_args(haddock.output.as_output(), parent = 2), - # "mod-shared", - # paths.replace_extension(haddock.src.short_path, ".dyn_hi"), delimiter='/'), - # format="--one-shot-hi={}"), - # ), - # category = "haskell_haddock_x", - # identifier = src_to_module_name(haddock.src.short_path), - # no_outputs_cleanup = True, - # ) - - #mod_odir = ctx.actions.declare_output("haddock-out_{}".format(module), dir=True) - - #pprint(iface) - - # ctx.actions.run( - # cmd_args( - # "mkdir", - # mod_odir.as_output(), - # ), - # category = "haddock_odir", - # identifier = module, - # ) - - # cmd.add( - # "--use-index", - # "doc-index.html", - # "--use-contents", - # "index.html", - # "--html", - # "--hoogle", - # "--no-tmp-comp-dir", - # "--no-warnings", - # "--odir", - # odir.as_output(), - # #cmd_args(ifaces, format="--read-interface={}"), - # ) - - # #cmd.add(args.srcs) - # #print([h.short_path for h in hifaces]) - # #pprint(cmd) - - # # Buck2 requires that the output artifacts are always produced, but Haddock only - # # creates them if it needs to, so we need a wrapper script to mkdir the outputs. 
- # script = ctx.actions.declare_output("haddock-script-{}".format(ctx.label.name)) - # script_args = cmd_args(["/nix/store/mb488rr560vq1xnl10hinnyfflcrd51n-coreutils-9.4/bin/ls "] + hifaces + [ - # #"mkdir", - # #"-p", - # #args.result.objects[0].as_output(), - # #args.result.hi[0].as_output(), - # #args.result.stubs.as_output(), - # #"&& set -x &&", - # "&& /nix/store/mb488rr560vq1xnl10hinnyfflcrd51n-coreutils-9.4/bin/ls -lh &&", - # cmd_args(cmd, quote = "shell"), - # " >&2", - # ], delimiter = " \\\n ") - # ctx.actions.write( - # script, - # cmd_args("#!/bin/sh", script_args), - # is_executable = True, - # allow_args = True, - # ) - - # ctx.actions.run( - # cmd_args(script).hidden(cmd), - # category = "haskell_haddock", - # identifier = "html", - # no_outputs_cleanup = True, - # ) - - return HaskellHaddockInfo( - interfaces = [i.output for i in haddock_interfaces.values()], - html = [i.html for i in haddock_interfaces.values()] + interfaces = [i.haddock for i in haddock_infos.values()], + html = [i.html for i in haddock_infos.values()], ) def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: From fe5deb6a08b6077f8bc9f3b8a79d2290adfc2b51 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 30 Apr 2024 17:22:57 +0200 Subject: [PATCH 0855/1133] Remove unused function --- haskell/compile.bzl | 69 ------------------------------------- haskell/haskell_haddock.bzl | 2 +- 2 files changed, 1 insertion(+), 70 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 5c81261ae..3a5e6c122 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -416,75 +416,6 @@ def _common_compile_args( return module_tsets, compile_args -# NOTE this function is currently only used by `haskell_haddock_lib` -def compile_args( - ctx: AnalysisContext, - link_style: LinkStyle, - enable_profiling: bool, - enable_th: bool, - pkgname = None, - suffix: str = "") -> CompileArgsInfo: - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - - 
compile_cmd = cmd_args() - compile_cmd.add(haskell_toolchain.compiler_flags) - - # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't - # be parsed when inside an argsfile. - compile_cmd.add(ctx.attrs.compiler_flags) - - # TODO[CB] use the empty lib once using hi haddock - _, compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, use_empty_lib = True) - - if getattr(ctx.attrs, "main", None) != None: - compile_args.add(["-main-is", ctx.attrs.main]) - - artifact_suffix = get_artifact_suffix(link_style, enable_profiling, suffix) - - # TODO[AH] These are only used for haddock and conflict with tracking - # per-module outputs individually. Rework the Haddock part to support this. - # objects = ctx.actions.declare_output( - # "objects-" + artifact_suffix, - # dir = True, - # ) - # hi = ctx.actions.declare_output("hi-" + artifact_suffix, dir = True) - # stubs = ctx.actions.declare_output("stubs-" + artifact_suffix, dir = True) - - # compile_args.add( - # "-odir", - # objects.as_output(), - # "-hidir", - # hi.as_output(), - # "-hiedir", - # hi.as_output(), - # "-stubdir", - # stubs.as_output(), - # ) - - srcs = cmd_args() - for (path, src) in srcs_to_pairs(ctx.attrs.srcs): - # hs-boot files aren't expected to be an argument to compiler but does need - # to be included in the directory of the associated src file - if is_haskell_src(path): - srcs.add(src) - else: - srcs.hidden(src) - - producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags - - return CompileArgsInfo( - result = CompileResultInfo( - objects = [], - hi = [], - hashes = [], - #stubs = stubs, - producing_indices = producing_indices, - module_tsets = None, - ), - srcs = srcs, - args_for_cmd = compile_cmd, - args_for_file = compile_args, - ) def _compile_module_args( ctx: AnalysisContext, diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index c20aa1c66..89105ae9c 100644 --- a/haskell/haskell_haddock.bzl +++ 
b/haskell/haskell_haddock.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//haskell:compile.bzl", "compile_args", "CompileResultInfo", "CompiledModuleTSet", "DynamicCompileResultInfo") +load("@prelude//haskell:compile.bzl", "CompileResultInfo", "CompiledModuleTSet", "DynamicCompileResultInfo") load("@prelude//haskell:link_info.bzl", "cxx_toolchain_link_style") load( "@prelude//haskell:toolchain.bzl", From 09c18d14f97225345365d2f04685cb403b7532bf Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 15 May 2024 16:22:39 +0200 Subject: [PATCH 0856/1133] Add per-module sub-targets for .o and .hi --- haskell/haskell.bzl | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 90beea5e4..19ef71543 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -760,6 +760,10 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: default_outputs = libs, )] + sub_targets.update(_haskell_module_sub_targets( + compiled = compiled, + )) + pic_behavior = ctx.attrs._cxx_toolchain[CxxToolchainInfo].pic_behavior link_style = cxx_toolchain_link_style(ctx) output_style = get_lib_output_style( @@ -1205,8 +1209,16 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: ) run.hidden(symlink_dir) + sub_targets = {} + sub_targets.update(_haskell_module_sub_targets( + compiled = compiled, + )) + providers = [ - DefaultInfo(default_output = output), + DefaultInfo( + default_output = output, + sub_targets = sub_targets, + ), RunInfo(args = run), ] @@ -1214,3 +1226,15 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: providers.append(HaskellIndexInfo(info = indexing_tsets)) return providers + +def _haskell_module_sub_targets(*, compiled): + return { + "interfaces": [DefaultInfo(sub_targets = { + src_to_module_name(hi.short_path): [DefaultInfo(default_output = hi)] + for hi 
in compiled.hi + })], + "objects": [DefaultInfo(sub_targets = { + src_to_module_name(o.short_path): [DefaultInfo(default_output = o)] + for o in compiled.objects + })], + } From 9da8f15b06e0970a9c0ca5c6ea370b963a02ee89 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 15 May 2024 16:32:42 +0200 Subject: [PATCH 0857/1133] Capture matchink link-style and profiling-mode --- haskell/haskell.bzl | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 19ef71543..3dbc493b4 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -88,6 +88,7 @@ load( "attr_deps_shared_library_infos", "get_artifact_suffix", "is_haskell_src", + "output_extensions", "src_to_module_name", "srcs_to_pairs", ) @@ -758,12 +759,13 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: sub_targets[link_style.value.replace("_", "-")] = [DefaultInfo( default_outputs = libs, + sub_targets = _haskell_module_sub_targets( + compiled = compiled, + link_style = link_style, + enable_profiling = enable_profiling, + ), )] - sub_targets.update(_haskell_module_sub_targets( - compiled = compiled, - )) - pic_behavior = ctx.attrs._cxx_toolchain[CxxToolchainInfo].pic_behavior link_style = cxx_toolchain_link_style(ctx) output_style = get_lib_output_style( @@ -1212,6 +1214,8 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: sub_targets = {} sub_targets.update(_haskell_module_sub_targets( compiled = compiled, + link_style = link_style, + enable_profiling = enable_profiling, )) providers = [ @@ -1227,14 +1231,17 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: return providers -def _haskell_module_sub_targets(*, compiled): +def _haskell_module_sub_targets(*, compiled, link_style, enable_profiling): + (osuf, hisuf) = output_extensions(link_style, enable_profiling) return { "interfaces": [DefaultInfo(sub_targets = { src_to_module_name(hi.short_path): [DefaultInfo(default_output = 
hi)] for hi in compiled.hi + if hi.extension[1:] == hisuf })], "objects": [DefaultInfo(sub_targets = { src_to_module_name(o.short_path): [DefaultInfo(default_output = o)] for o in compiled.objects + if o.extension[1:] == osuf })], } From 240a8f75bac4fa1e274839a819285cf3ca9299e7 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 7 May 2024 10:17:29 +0200 Subject: [PATCH 0858/1133] Provide sub targets for haddock html output For example: ``` buck2 build backend/src:backend_infra[haddock][haddock-html/App.html] --show-output ``` --- haskell/haskell.bzl | 24 ++++++++++++++++++------ haskell/haskell_haddock.bzl | 2 +- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 33fcd63f0..4016d44a1 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -835,6 +835,23 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: # )] pp = [] + haddock, = haskell_haddock_lib( + ctx, + pkgname, + non_profiling_hlib[LinkStyle("shared")].compiled, + md_file, + ), + + sub_targets.update({ + "haddock": [DefaultInfo( + default_outputs = haddock.html, + sub_targets = { + html.short_path: [DefaultInfo(default_output = html)] + for html in haddock.html + } + )] + }) + providers = [ DefaultInfo( default_outputs = default_output, @@ -859,12 +876,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: shared_libs, shared_library_infos, ), - haskell_haddock_lib( - ctx, - pkgname, - non_profiling_hlib[LinkStyle("shared")].compiled, - md_file, - ), + haddock, ] if indexing_tsets: diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index 89105ae9c..f7300a387 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -93,7 +93,7 @@ def _haddock_dump_interface( ) -def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileResultInfo, md_file: Artifact) -> Provider: +def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileResultInfo, 
md_file: Artifact) -> HaskellHaddockInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] link_style = cxx_toolchain_link_style(ctx) From ac8dd09554bf682ee1f8621e290127403a2cfa37 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 8 May 2024 13:19:41 +0200 Subject: [PATCH 0859/1133] Add style files to the html sub-targets as other_outputs --- haskell/haskell.bzl | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 4016d44a1..cd06d9555 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -842,11 +842,26 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: md_file, ), + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + + styles = [ + ctx.actions.declare_output("haddock-html", file) + for file in "synopsis.png linuwial.css quick-jump.css haddock-bundle.min.js".split() + ] + ctx.actions.run( + cmd_args( + haskell_toolchain.haddock, + "--gen-index", + "-o", cmd_args(styles[0].as_output(), parent=1), + hidden=[file.as_output() for file in styles] + ), + category = "haddock_styles", + ) sub_targets.update({ "haddock": [DefaultInfo( default_outputs = haddock.html, sub_targets = { - html.short_path: [DefaultInfo(default_output = html)] + html.short_path: [DefaultInfo(default_output = html, other_outputs=styles)] for html in haddock.html } )] From 098001c5e7c7ffd4330a029f7a95eb020a947ba2 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 15 May 2024 14:40:43 +0200 Subject: [PATCH 0860/1133] Re-introduce stubs --- haskell/compile.bzl | 6 +++--- haskell/haskell.bzl | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 3a5e6c122..53dfb17dc 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -87,7 +87,7 @@ DynamicCompileResultInfo = provider(fields = { CompileResultInfo = record( objects = field(list[Artifact]), hi = field(list[Artifact]), - #stubs = 
field(Artifact), + stubs = field(Artifact), hashes = field(list[Artifact]), producing_indices = field(bool), module_tsets = field(None | list[CompiledModuleTSet] | DynamicValue), @@ -467,7 +467,7 @@ def _compile_module_args( objects = objects, hi = his, hashes = [module.hash], - #stubs = stubs, + stubs = stubs, producing_indices = producing_indices, module_tsets = module_tsets, ), @@ -679,7 +679,7 @@ def compile( objects = objects, hi = interfaces, hashes = abi_hashes, - #stubs = stubs_dir, + stubs = stubs_dir, producing_indices = False, module_tsets = dyn_module_tsets, ) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index cd06d9555..2857fbf22 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -619,7 +619,7 @@ def _build_haskell_lib( False: non_profiling_hlib.compiled.objects, } all_libs = libs + non_profiling_hlib.libs - stub_dirs = [] #[compiled.stubs] + [non_profiling_hlib.compiled.stubs] + stub_dirs = [compiled.stubs] + [non_profiling_hlib.compiled.stubs] else: dynamic = { False: compiled.module_tsets, @@ -631,7 +631,7 @@ def _build_haskell_lib( False: compiled.objects, } all_libs = libs - stub_dirs = [] #compiled.stubs] + stub_dirs = [compiled.stubs] db = _make_package( ctx, From 681bec2ac4cbd76cdb196f26ce39b56a4e0eeab4 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 16 May 2024 07:10:36 +0200 Subject: [PATCH 0861/1133] Use module name as haddock sub-target key --- haskell/haskell.bzl | 6 +++--- haskell/haskell_haddock.bzl | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 2857fbf22..99eb892d7 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -859,10 +859,10 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: ) sub_targets.update({ "haddock": [DefaultInfo( - default_outputs = haddock.html, + default_outputs = haddock.html.values(), sub_targets = { - html.short_path: [DefaultInfo(default_output = html, other_outputs=styles)] - for html 
in haddock.html + module: [DefaultInfo(default_output = html, other_outputs=styles)] + for module, html in haddock.html.items() } )] }) diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index f7300a387..fe7ca6039 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -21,7 +21,7 @@ load("@prelude//utils:graph_utils.bzl", "post_order_traversal") HaskellHaddockInfo = provider( fields = { - "html": provider_field(list[typing.Any], default = []), + "html": provider_field(dict[str, typing.Any], default = {}), "interfaces": provider_field(list[typing.Any], default = []), }, ) @@ -183,7 +183,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes return HaskellHaddockInfo( interfaces = [i.haddock for i in haddock_infos.values()], - html = [i.html for i in haddock_infos.values()], + html = {module: i.html for module, i in haddock_infos.items()}, ) def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: @@ -206,7 +206,7 @@ def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: if hi != None: cmd.add(cmd_args(hi.interfaces, format="--read-interface={}")) if hi.html: - dep_htmls.extend(hi.html) + dep_htmls.extend(hi.html.values()) cmd.add(ctx.attrs.haddock_flags) From e69f886f4cb8e4c52c39e37318bb9a4d7fccc2ac Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 16 May 2024 07:41:51 +0200 Subject: [PATCH 0862/1133] Also check in toolchain flags if `write-ide-info` is enabled --- haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 53dfb17dc..82b6280e8 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -460,7 +460,7 @@ def _compile_module_args( if not is_haskell_src(path): srcs.hidden(src) - producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags + producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags + haskell_toolchain.compiler_flags return CompileArgsInfo( result = 
CompileResultInfo( From 2193b40842dc191c52ba5513a43a75eb5471f156 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 16 May 2024 08:17:40 +0200 Subject: [PATCH 0863/1133] Add `-haddock` flag to first non-profiling haskell library --- haskell/compile.bzl | 20 +++++++++++++++++++- haskell/haskell.bzl | 5 +++++ 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 82b6280e8..26fb4cc6e 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -422,6 +422,7 @@ def _compile_module_args( module: _Module, link_style: LinkStyle, enable_profiling: bool, + enable_haddock: bool, enable_th: bool, outputs: dict[Artifact, Artifact], resolved: dict[DynamicValue, ResolvedDynamicValue], @@ -437,6 +438,9 @@ def _compile_module_args( compile_cmd.add(ctx.attrs.compiler_flags) compile_cmd.add("-c") + if enable_haddock: + compile_cmd.add("-haddock") + module_tsets, compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), resolved = resolved, package_deps = package_deps) objects = [outputs[obj] for obj in module.objects] @@ -482,6 +486,7 @@ def _compile_module( *, link_style: LinkStyle, enable_profiling: bool, + enable_haddock: bool, enable_th: bool, module_name: str, modules: dict[str, _Module], @@ -501,7 +506,18 @@ def _compile_module( compile_cmd = cmd_args(ctx.attrs._ghc_wrapper[RunInfo]) compile_cmd.add("--ghc", haskell_toolchain.compiler) - args = _compile_module_args(ctx, module, link_style, enable_profiling, enable_th, outputs, resolved, pkgname, package_deps = package_deps) + args = _compile_module_args( + ctx, + module, + link_style, + enable_profiling, + enable_haddock, + enable_th, + outputs, + resolved, + pkgname, + package_deps = package_deps + ) if args.args_for_file: if haskell_toolchain.use_argsfile: @@ -597,6 +613,7 @@ def compile( ctx: AnalysisContext, link_style: LinkStyle, enable_profiling: bool, + enable_haddock: 
bool, md_file: Artifact, pkgname: str | None = None) -> CompileResultInfo: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) @@ -619,6 +636,7 @@ def compile( ctx, link_style = link_style, enable_profiling = enable_profiling, + enable_haddock = enable_haddock, enable_th = module_name in th_modules, module_name = module_name, modules = mapped_modules, diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 99eb892d7..517685862 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -513,6 +513,7 @@ def _build_haskell_lib( nlis: list[MergedLinkInfo], # native link infos from all deps link_style: LinkStyle, enable_profiling: bool, + enable_haddock: bool, md_file: Artifact, # The non-profiling artifacts are also needed to build the package for # profiling, so it should be passed when `enable_profiling` is True. @@ -527,6 +528,7 @@ def _build_haskell_lib( ctx, link_style, enable_profiling = enable_profiling, + enable_haddock = enable_haddock, md_file = md_file, pkgname = pkgname, ) @@ -724,6 +726,8 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: nlis = nlis, link_style = link_style, enable_profiling = enable_profiling, + # enable haddock only for the first non-profiling hlib + enable_haddock = not enable_profiling and not non_profiling_hlib, md_file = md_file, non_profiling_hlib = non_profiling_hlib.get(link_style), ) @@ -1015,6 +1019,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: ctx, link_style, enable_profiling = enable_profiling, + enable_haddock = False, md_file = md_file, ) From 9fb46c3f925510f086ea8650e32c83876fbe733d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 17 May 2024 11:20:14 +0200 Subject: [PATCH 0864/1133] Fix missing package in library catalog The package `async` was missing in the toolchain library catalog in the context of https://github.com/MercuryTechnologies/the-culture-repo/issues/147. 
The reason seems to be that the package is listed last, and `ghc-pkg` does not always close the list of packages with a trailing `---`. The corresponding code is adapted to take a last package into account even if it is not followed by `---`. --- haskell/tools/generate_toolchain_library_catalog.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/haskell/tools/generate_toolchain_library_catalog.py b/haskell/tools/generate_toolchain_library_catalog.py index 3a3aa28a3..b66f2b0d2 100755 --- a/haskell/tools/generate_toolchain_library_catalog.py +++ b/haskell/tools/generate_toolchain_library_catalog.py @@ -71,6 +71,9 @@ def _parse_ghc_pkg_dump(lines): elif current_key == "import-dirs" and line.strip(): current_package.setdefault("import-dirs", []).append(line.strip()) + if current_package: + yield current_package + def _construct_import_path_trie(packages): result = {} From 1cd5b259afa7407882aad5571b7ef5d395842488 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 17 May 2024 11:30:29 +0200 Subject: [PATCH 0865/1133] Improve error message --- haskell/tools/generate_target_metadata.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 6afd9c531..37f753f57 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -18,6 +18,7 @@ import argparse import json import os +from os.path import isabs from pathlib import Path import subprocess import tempfile @@ -231,6 +232,9 @@ def parse_module_deps(module_deps, package_prefixes, toolchain_packages): toolchain_deps.add(tooldep) continue + if os.path.isabs(module_dep): + raise RuntimeError(f"Unexpected module dependency `{module_dep}`. 
Perhaps a missing `haskell_toolchain_library`?") + if (pkgdep := lookup_package_dep(module_dep, package_prefixes)) is not None: pkgname, modname = pkgdep external_deps.setdefault(pkgname, []).append(modname) From 715756ff619aa7bf7ef14d24b9c2f31badee8d4f Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 17 May 2024 13:59:43 +0200 Subject: [PATCH 0866/1133] remove redundant import Was generated by IDE --- haskell/tools/generate_target_metadata.py | 1 - 1 file changed, 1 deletion(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 37f753f57..b06880396 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -18,7 +18,6 @@ import argparse import json import os -from os.path import isabs from pathlib import Path import subprocess import tempfile From cb18358df11a6ae69872ee056e96472a945ce3a1 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 17 May 2024 17:27:24 +0200 Subject: [PATCH 0867/1133] Parse package-id field Note, both `name` and `id` fields can be line wrapped for long values. 
--- .../tools/generate_toolchain_library_catalog.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/haskell/tools/generate_toolchain_library_catalog.py b/haskell/tools/generate_toolchain_library_catalog.py index b66f2b0d2..46dcabdd2 100755 --- a/haskell/tools/generate_toolchain_library_catalog.py +++ b/haskell/tools/generate_toolchain_library_catalog.py @@ -61,15 +61,23 @@ def _parse_ghc_pkg_dump(lines): if key == "name": current_key = "name" - current_package["name"] = value + if value: + current_package["name"] = value + elif key == "id": + current_key = "id" + if value: + current_package["id"] = value elif key == "import-dirs": current_key = "import-dirs" if value: current_package.setdefault("import-dirs", []).append(value) else: current_key = None - elif current_key == "import-dirs" and line.strip(): - current_package.setdefault("import-dirs", []).append(line.strip()) + elif line.strip(): + if current_key in ["name", "id"]: + current_package[current_key] = line.strip() + elif current_key == "import-dirs": + current_package.setdefault("import-dirs", []).append(line.strip()) if current_package: yield current_package From b8b0311a602190c970146451eed7e7ca7e3921c6 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 17 May 2024 17:33:53 +0200 Subject: [PATCH 0868/1133] Track package-id for toolchain libraries --- haskell/tools/generate_target_metadata.py | 4 ++-- haskell/tools/generate_toolchain_library_catalog.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index b06880396..e1afa9cee 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -158,8 +158,8 @@ def lookup_toolchain_dep(module_dep, toolchain_packages): if (layer := layer.get(part)) is None: return None - if (pkgname := layer.get("//pkgname")) is not None: - return pkgname + if (pkgid := 
layer.get("//pkgid")) is not None: + return pkgid def lookup_package_dep(module_dep, package_prefixes): diff --git a/haskell/tools/generate_toolchain_library_catalog.py b/haskell/tools/generate_toolchain_library_catalog.py index 46dcabdd2..c27f67bc9 100755 --- a/haskell/tools/generate_toolchain_library_catalog.py +++ b/haskell/tools/generate_toolchain_library_catalog.py @@ -3,7 +3,7 @@ """Helper script to generate a mapping from interface paths to toolchain library names. The result is a JSON object with the following fields: -* `by-import-dirs`: A trie mapping import directory prefixes to package names. Encoded as nested dictionaries with leafs denoted by the special key `//pkgname`. +* `by-import-dirs`: A trie mapping import directory prefixes to package names. Encoded as nested dictionaries with leafs denoted by the special key `//pkgid`. """ import argparse @@ -93,7 +93,7 @@ def _construct_import_path_trie(packages): for part in Path(import_dir).parts: layer = layer.setdefault(part, {}) - layer["//pkgname"] = package["name"] + layer["//pkgid"] = package["id"] return result From b89ad5a30967ffdda49dff61f5ab420087ce97c2 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 17 May 2024 17:42:11 +0200 Subject: [PATCH 0869/1133] store mapping from package name to package id --- haskell/tools/generate_target_metadata.py | 2 +- haskell/tools/generate_toolchain_library_catalog.py | 13 +++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index e1afa9cee..35102d577 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -153,7 +153,7 @@ def calc_package_prefixes(package_specs): def lookup_toolchain_dep(module_dep, toolchain_packages): module_path = Path(module_dep) - layer = toolchain_packages + layer = toolchain_packages["by-import-dirs"] for part in module_path.parts: if (layer := layer.get(part)) is None: 
return None diff --git a/haskell/tools/generate_toolchain_library_catalog.py b/haskell/tools/generate_toolchain_library_catalog.py index c27f67bc9..2c2eeaa2a 100755 --- a/haskell/tools/generate_toolchain_library_catalog.py +++ b/haskell/tools/generate_toolchain_library_catalog.py @@ -4,6 +4,7 @@ The result is a JSON object with the following fields: * `by-import-dirs`: A trie mapping import directory prefixes to package names. Encoded as nested dictionaries with leafs denoted by the special key `//pkgid`. +* `by-package-name`: A mapping from package name to package id. """ import argparse @@ -30,7 +31,7 @@ def main(): with subprocess.Popen(_ghc_pkg_command(args.ghc_pkg), stdout=subprocess.PIPE, text=True) as proc: packages = list(_parse_ghc_pkg_dump(proc.stdout)) - result = _construct_import_path_trie(packages) + result = _construct_package_mappings(packages) json.dump(result, args.output) @@ -83,12 +84,16 @@ def _parse_ghc_pkg_dump(lines): yield current_package -def _construct_import_path_trie(packages): - result = {} +def _construct_package_mappings(packages): + result = { + "by-import-dirs": {}, + "by-package-name": {}, + } for package in packages: + result["by-package-name"][package["name"]] = package["id"] for import_dir in package.get("import-dirs", []): - layer = result + layer = result["by-import-dirs"] for part in Path(import_dir).parts: layer = layer.setdefault(part, {}) From df31bf1599f61334986cb560baeb596908634fab Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 17 May 2024 17:55:59 +0200 Subject: [PATCH 0870/1133] [workaround] MD: Replace -package with -package-id Replace -package flags by -package-id flags in the ghc -M invocation for metadata calculation. The -package flags are constructed within Starlark where we don't have easy access to the package-ids. Find a better way to construct these flags in the future. 
--- haskell/tools/generate_target_metadata.py | 39 +++++++++++++++++++++-- 1 file changed, 37 insertions(+), 2 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 35102d577..7de325fe9 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -80,9 +80,10 @@ def json_default_handler(o): def obtain_target_metadata(args): - ghc_depends, ghc_options = run_ghc_depends(args.ghc, args.ghc_arg, args.source) - th_modules = determine_th_modules(ghc_options, args.source_prefix) toolchain_packages = load_toolchain_packages(args.toolchain_libs) + ghc_args = fix_ghc_args(args.ghc_arg, toolchain_packages) + ghc_depends, ghc_options = run_ghc_depends(args.ghc, ghc_args, args.source) + th_modules = determine_th_modules(ghc_options, args.source_prefix) package_prefixes = calc_package_prefixes(args.package) module_mapping, module_graph, package_deps, toolchain_deps = interpret_ghc_depends( ghc_depends, args.source_prefix, package_prefixes, toolchain_packages) @@ -116,6 +117,40 @@ def uses_th(opts): return any([f"-X{ext}" in opts for ext in __TH_EXTENSIONS]) +def fix_ghc_args(ghc_args, toolchain_packages): + """Replaces -package flags by -package-id where applicable. + + Packages that have hidden internal packages cause failures of the form: + + Could not load module ‘Data.Attoparsec.Text’. + It is a member of the hidden package ‘attoparsec-0.14.4’. + + This can be avoided by specifying the corresponding packages by package-id + rather than package name. + + The toolchain libraries catalog tracks a mapping from package name to + package id. We apply it here to any toolchain library dependencies. 
+ """ + result = [] + mapping = toolchain_packages["by-package-name"] + + args_iter = iter(ghc_args) + for arg in args_iter: + if arg == "-package": + package_name = next(args_iter) + if package_name is None: + raise RuntimeError("Missing package name argument for -package flag") + + if (package_id := mapping.get(package_name, None)) is not None: + result.extend(["-package-id", package_id]) + else: + result.extend(["-package", package_name]) + else: + result.append(arg) + + return result + + def run_ghc_depends(ghc, ghc_args, sources): with tempfile.TemporaryDirectory() as dname: json_fname = os.path.join(dname, "depends.json") From 9fabb38863d5d5a3283c7b1541160e1dc440be3a Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 17 May 2024 18:06:43 +0200 Subject: [PATCH 0871/1133] Specify toolchain_deps using -package-id --- haskell/compile.bzl | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index ab67316c9..7b7c42a9d 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -45,6 +45,7 @@ CompiledModuleInfo = provider(fields = { "objects": provider_field(list[Artifact]), "dyn_object_dot_o": provider_field(Artifact), "package_deps": provider_field(list[str]), + "toolchain_deps": provider_field(list[str]), }) def _compiled_module_project_as_abi(mod: CompiledModuleInfo) -> cmd_args: @@ -67,6 +68,14 @@ def _compiled_module_reduce_as_package_deps(children: list[dict[str, None]], mod result.update(child) return result +def _compiled_module_reduce_as_toolchain_deps(children: list[dict[str, None]], mod: CompiledModuleInfo | None) -> dict[str, None]: + # TODO[AH] is there a better way to avoid duplicate -package-id flags? + # Using a project instead would produce duplicates. 
+ result = {pkg: None for pkg in mod.toolchain_deps} if mod else {} + for child in children: + result.update(child) + return result + CompiledModuleTSet = transitive_set( args_projections = { "abi": _compiled_module_project_as_abi, @@ -76,6 +85,7 @@ CompiledModuleTSet = transitive_set( }, reductions = { "package_deps": _compiled_module_reduce_as_package_deps, + "toolchain_deps": _compiled_module_reduce_as_toolchain_deps, }, ) @@ -614,8 +624,8 @@ def _compile_module( children = [cross_package_modules] + this_package_modules, ) - module_packages = package_deps.keys() + toolchain_deps - compile_cmd.add(cmd_args(module_packages, prepend = "-package")) + compile_cmd.add(cmd_args(toolchain_deps, prepend = "-package-id")) + compile_cmd.add(cmd_args(package_deps.keys(), prepend = "-package")) abi_tag = ctx.actions.artifact_tag() @@ -625,6 +635,7 @@ def _compile_module( compile_cmd.hidden(dependency_modules.project_as_args("objects")) compile_cmd.add(dependency_modules.project_as_args("dyn_objects_dot_o")) compile_cmd.add(cmd_args(dependency_modules.reduce("package_deps").keys(), prepend = "-package")) + compile_cmd.add(cmd_args(dependency_modules.reduce("toolchain_deps").keys(), prepend = "-package-id")) dep_file = ctx.actions.declare_output("dep-{}_{}".format(module_name, artifact_suffix)).as_output() @@ -655,7 +666,8 @@ def _compile_module( interfaces = module.interfaces, objects = module.objects, dyn_object_dot_o = dyn_object_dot_o, - package_deps = module_packages, + package_deps = package_deps.keys(), + toolchain_deps = toolchain_deps, ), children = [cross_package_modules] + this_package_modules, ) From 24f69a064c8116155a76302635d5dae93c1646b8 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 30 May 2024 17:56:22 +0200 Subject: [PATCH 0872/1133] Do not set `GHC_PACKAGE_PATH` when it is empty Otherwise this led to a spurious error like this: ``` Local command returned non-zero exit code 1 Reproduce locally: `env -- 
'BUCK_SCRATCH_PATH=buck-out/v2/tmp/root/904931f735703749/backend/src/__backend_infra__/haskel ...... nfra__/db-static buck-out/v2/gen/root/904931f735703749/backend/src/__backend_infra__/pkg-static.conf (run `buck2 log what-failed` to get the full command)` stdout: stderr: WARNING: cache does not exist: ./package.cache ghc will fail to read this package db. Use 'ghc-pkg recache' to fix. ghc-pkg-9.8.2: "the input" (line 14, column 1): unexpected operator "#" expecting field or section name ``` --- haskell/haskell.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 68eb31f13..2fdb541b9 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -482,7 +482,7 @@ def _make_package( ]), category = "haskell_package_" + artifact_suffix.replace("-", "_"), identifier = "empty" if use_empty_lib else "final", - env = {"GHC_PACKAGE_PATH": ghc_package_path}, + env = {"GHC_PACKAGE_PATH": ghc_package_path} if db_deps else {}, ) return db From 81b856105d75ad39f2185ee9502a139d06783c87 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 5 Jun 2024 08:03:54 +0200 Subject: [PATCH 0873/1133] Add hidden inputs to final ghci script This is needed when the script is referring to another output that depends on other artifacts at runtime. 
--- haskell/haskell_ghci.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/haskell_ghci.bzl b/haskell/haskell_ghci.bzl index 70042d8a8..4bb8ada36 100644 --- a/haskell/haskell_ghci.bzl +++ b/haskell/haskell_ghci.bzl @@ -726,7 +726,7 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: "__{}__".format(ctx.label.name), output_artifacts, ) - run = cmd_args(final_ghci_script).hidden(outputs) + run = cmd_args(final_ghci_script, hidden=ctx.attrs.ghci_bin_dep.get(RunInfo) or []).hidden(outputs) return [ DefaultInfo(default_outputs = [root_output_dir]), From 3e338facde3fd218eed8a3ee2a9c66e610e4bd81 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 10:39:45 +0200 Subject: [PATCH 0874/1133] move variable assignments together --- haskell/compile.bzl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 40cebd624..f4682cb22 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -315,8 +315,6 @@ def get_packages_info( exposed_package_libs = cmd_args() exposed_package_args = cmd_args([package_flag, "base"]) - packagedb_args = cmd_args() - if resolved != None and package_deps != None: exposed_package_modules = [] @@ -336,6 +334,7 @@ def get_packages_info( # we're using Template Haskell: exposed_package_libs.hidden(lib.libs) + packagedb_args = cmd_args() packagedb_args.add(libs.project_as_args("empty_package_db" if use_empty_lib else "package_db")) haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( From 5808cde5f6affa80fa864dfc74d9274d70ff8b53 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 10:51:10 +0200 Subject: [PATCH 0875/1133] No implicity -package-db prefix --- haskell/compile.bzl | 6 ++++-- haskell/library_info.bzl | 4 ++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index f4682cb22..c77a78c19 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ 
-334,8 +334,10 @@ def get_packages_info( # we're using Template Haskell: exposed_package_libs.hidden(lib.libs) - packagedb_args = cmd_args() - packagedb_args.add(libs.project_as_args("empty_package_db" if use_empty_lib else "package_db")) + packagedb_args = cmd_args( + libs.project_as_args("empty_package_db" if use_empty_lib else "package_db"), + prepend = "-package-db" + ) haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( ctx, diff --git a/haskell/library_info.bzl b/haskell/library_info.bzl index d8cff43e2..3a359b6ff 100644 --- a/haskell/library_info.bzl +++ b/haskell/library_info.bzl @@ -49,10 +49,10 @@ HaskellLibraryInfo = record( ) def _project_as_package_db(lib: HaskellLibraryInfo): - return cmd_args("-package-db", lib.db) + return cmd_args(lib.db) def _project_as_empty_package_db(lib: HaskellLibraryInfo): - return cmd_args("-package-db", lib.empty_db) + return cmd_args(lib.empty_db) HaskellLibraryInfoTSet = transitive_set( args_projections = { From 0f8596abdad9898a34a50e3a552babb76894029a Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 11:09:45 +0200 Subject: [PATCH 0876/1133] Push the -package-db prefix up --- haskell/compile.bzl | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index c77a78c19..d8bfec6f4 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -244,7 +244,7 @@ def target_metadata( ghc_args.add(package_flag, "base") ghc_args.add(cmd_args(toolchain_libs, prepend=package_flag)) ghc_args.add(cmd_args(packages_info.exposed_package_args)) - ghc_args.add(packages_info.packagedb_args) + ghc_args.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) ghc_args.add(ctx.attrs.compiler_flags) md_args = cmd_args(md_gen) @@ -334,10 +334,9 @@ def get_packages_info( # we're using Template Haskell: exposed_package_libs.hidden(lib.libs) - packagedb_args = cmd_args( - libs.project_as_args("empty_package_db" if use_empty_lib else 
"package_db"), - prepend = "-package-db" - ) + packagedb_args = cmd_args(libs.project_as_args( + "empty_package_db" if use_empty_lib else "package_db", + )) haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( ctx, @@ -408,7 +407,7 @@ def _common_compile_args( if not modname: compile_args.add(packages_info.exposed_package_args) compile_args.hidden(packages_info.exposed_package_imports) - compile_args.add(packages_info.packagedb_args) + compile_args.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) if enable_th: compile_args.add(packages_info.exposed_package_libs) if not modname: From b3413e2668679963acc6ea83ea7e0f3fc4269630 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 11:24:00 +0200 Subject: [PATCH 0877/1133] define a package-env file for the package-db flags See https://ghc.gitlab.haskell.org/ghc/doc/users_guide/packages.html --- haskell/compile.bzl | 29 ++++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index d8bfec6f4..0e277bed0 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -407,7 +407,34 @@ def _common_compile_args( if not modname: compile_args.add(packages_info.exposed_package_args) compile_args.hidden(packages_info.exposed_package_imports) - compile_args.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) + + package_env_file = ctx.actions.declare_output(".".join([ + ctx.label.name, + modname or "pkg", + "package-db", + output_extensions(link_style, enable_profiling)[1], + "env", + ])) + package_env = cmd_args( + "clear-package-db", + "global-package-db", + delimiter = "\n", + ) + package_env.add(cmd_args( + packages_info.packagedb_args, + format = "package-db {}", + ).relative_to(package_env_file, parent = 1)) + _, package_env_inputs = ctx.actions.write( + package_env_file, + package_env, + allow_args = True, + ) + compile_args.add(cmd_args( + package_env_file, + prepend = "-package-env", + 
hidden = package_env_inputs, + )) + if enable_th: compile_args.add(packages_info.exposed_package_libs) if not modname: From b0e65d05b8422904bef0d68cfd79b35252f1ebec Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 11:29:48 +0200 Subject: [PATCH 0878/1133] todo note --- haskell/compile.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 0e277bed0..5f2b16a6c 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -408,6 +408,7 @@ def _common_compile_args( compile_args.add(packages_info.exposed_package_args) compile_args.hidden(packages_info.exposed_package_imports) + # TODO[AH] Avoid duplicates and share identical env files. package_env_file = ctx.actions.declare_output(".".join([ ctx.label.name, modname or "pkg", From 8c75c3f1be409d7c6278564c75f36a55a30f8946 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 11:41:52 +0200 Subject: [PATCH 0879/1133] Fix missing package_db inputs --- haskell/compile.bzl | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 5f2b16a6c..d1ccd2293 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -425,15 +425,14 @@ def _common_compile_args( packages_info.packagedb_args, format = "package-db {}", ).relative_to(package_env_file, parent = 1)) - _, package_env_inputs = ctx.actions.write( + ctx.actions.write( package_env_file, package_env, - allow_args = True, ) compile_args.add(cmd_args( package_env_file, prepend = "-package-env", - hidden = package_env_inputs, + hidden = packages_info.packagedb_args, )) if enable_th: From b8d177435d124557b90514bc776e2a706a917d4b Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 12:01:41 +0200 Subject: [PATCH 0880/1133] Add dep file for package-db flags --- haskell/compile.bzl | 23 +++++++++++++++++++---- haskell/tools/ghc_wrapper.py | 15 +++++++++++++++ 2 files changed, 34 insertions(+), 4 deletions(-) diff --git 
a/haskell/compile.bzl b/haskell/compile.bzl index d1ccd2293..b5cce836a 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -108,6 +108,7 @@ CompileArgsInfo = record( srcs = field(cmd_args), args_for_cmd = field(cmd_args), args_for_file = field(cmd_args), + packagedb_tag = field(ArtifactTag), ) PackagesInfo = record( @@ -372,7 +373,7 @@ def _common_compile_args( modname: str | None = None, resolved: None | dict[DynamicValue, ResolvedDynamicValue] = None, package_deps: None | dict[str, list[str]] = None, - use_empty_lib = True) -> (None | list[CompiledModuleTSet], cmd_args): + use_empty_lib = True) -> (None | list[CompiledModuleTSet], cmd_args, ArtifactTag): toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] compile_args = cmd_args() @@ -408,6 +409,8 @@ def _common_compile_args( compile_args.add(packages_info.exposed_package_args) compile_args.hidden(packages_info.exposed_package_imports) + packagedb_tag = ctx.actions.artifact_tag() + # TODO[AH] Avoid duplicates and share identical env files. 
package_env_file = ctx.actions.declare_output(".".join([ ctx.label.name, @@ -430,11 +433,21 @@ def _common_compile_args( package_env, ) compile_args.add(cmd_args( - package_env_file, + packagedb_tag.tag_artifacts(package_env_file), prepend = "-package-env", hidden = packages_info.packagedb_args, )) + dep_file = ctx.actions.declare_output(".".join([ + ctx.label.name, + modname or "pkg", + "package-db", + output_extensions(link_style, enable_profiling)[1], + "dep", + ])).as_output() + tagged_dep_file = packagedb_tag.tag_artifacts(dep_file) + compile_args.add("--buck2-packagedb-dep", tagged_dep_file) + if enable_th: compile_args.add(packages_info.exposed_package_libs) if not modname: @@ -451,7 +464,7 @@ def _common_compile_args( module_tsets = packages_info.exposed_package_modules - return module_tsets, compile_args + return module_tsets, compile_args, packagedb_tag def _compile_module_args( @@ -478,7 +491,7 @@ def _compile_module_args( if enable_haddock: compile_cmd.add("-haddock") - module_tsets, compile_args = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), resolved = resolved, package_deps = package_deps) + module_tsets, compile_args, packagedb_tag = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), resolved = resolved, package_deps = package_deps) objects = [outputs[obj] for obj in module.objects] his = [outputs[hi] for hi in module.interfaces] @@ -515,6 +528,7 @@ def _compile_module_args( srcs = srcs, args_for_cmd = compile_cmd, args_for_file = compile_args, + packagedb_tag = packagedb_tag, ) @@ -621,6 +635,7 @@ def _compile_module( compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, dep_files = { "abi": abi_tag, + "packagedb": args.packagedb_tag, } ) diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 
fcfb8ad36..977079c4b 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -25,6 +25,11 @@ def main(): required=True, help="Path to the dep file.", ) + parser.add_argument( + "--buck2-packagedb-dep", + required=True, + help="Path to the dep file.", + ) parser.add_argument( "--ghc", required=True, type=str, help="Path to the Haskell compiler GHC." ) @@ -53,6 +58,16 @@ def main(): os.remove(args.buck2_dep) raise e + # write an empty dep file, to signal that all tagged files are unused + try: + with open(args.buck2_packagedb_dep, "w") as f: + f.write("\n") + + except Exception as e: + # remove incomplete dep file + os.remove(args.buck2_packagedb_dep) + raise e + def recompute_abi_hash(ghc, abi_out): """Call ghc on the hi file and write the ABI hash to abi_out.""" From 1036781d4604a57c8125ff93cf6a094d4976799c Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 14:02:16 +0200 Subject: [PATCH 0881/1133] Tag all package-db inputs (over-eager) --- haskell/compile.bzl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index b5cce836a..f84a57955 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -424,8 +424,9 @@ def _common_compile_args( "global-package-db", delimiter = "\n", ) + packagedb_args = packagedb_tag.tag_artifacts(packages_info.packagedb_args) package_env.add(cmd_args( - packages_info.packagedb_args, + packagedb_args, format = "package-db {}", ).relative_to(package_env_file, parent = 1)) ctx.actions.write( @@ -435,7 +436,7 @@ def _common_compile_args( compile_args.add(cmd_args( packagedb_tag.tag_artifacts(package_env_file), prepend = "-package-env", - hidden = packages_info.packagedb_args, + hidden = packagedb_args, )) dep_file = ctx.actions.declare_output(".".join([ From a7f6bd26915ab3bf1ac4e1d51c45b0fe6eef86a5 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 14:48:53 +0200 Subject: [PATCH 0882/1133] track module package-db 
dependencies --- haskell/compile.bzl | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index f84a57955..9cf82ad98 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -44,6 +44,8 @@ CompiledModuleInfo = provider(fields = { "interfaces": provider_field(list[Artifact]), "objects": provider_field(list[Artifact]), "dyn_object_dot_o": provider_field(Artifact), + # TODO[AH] track this module's package-name/id & package-db instead. + "db_deps": provider_field(list[Artifact]), "package_deps": provider_field(list[str]), "toolchain_deps": provider_field(list[str]), }) @@ -109,6 +111,7 @@ CompileArgsInfo = record( args_for_cmd = field(cmd_args), args_for_file = field(cmd_args), packagedb_tag = field(ArtifactTag), + packagedbs = field(list[Artifact]), ) PackagesInfo = record( @@ -117,6 +120,7 @@ PackagesInfo = record( exposed_package_objects = field(list[Artifact]), exposed_package_libs = cmd_args, exposed_package_args = cmd_args, + exposed_package_dbs = field(list[Artifact]), packagedb_args = cmd_args, transitive_deps = field(HaskellLibraryInfoTSet), ) @@ -315,6 +319,7 @@ def get_packages_info( exposed_package_objects = [] exposed_package_libs = cmd_args() exposed_package_args = cmd_args([package_flag, "base"]) + exposed_package_dbs = [] if resolved != None and package_deps != None: exposed_package_modules = [] @@ -327,6 +332,10 @@ def get_packages_info( for mod in package_deps.get(direct.name, []): exposed_package_modules.append(dynamic_info.modules[mod]) + + if direct.name in package_deps: + db = direct.empty_db if use_empty_lib else direct.db + exposed_package_dbs.append(db) else: for lib in libs.traverse(): exposed_package_imports.extend(lib.import_dirs[enable_profiling]) @@ -359,6 +368,7 @@ def get_packages_info( exposed_package_objects = exposed_package_objects, exposed_package_libs = exposed_package_libs, exposed_package_args = exposed_package_args, + exposed_package_dbs = 
exposed_package_dbs, packagedb_args = packagedb_args, transitive_deps = libs, ) @@ -373,7 +383,7 @@ def _common_compile_args( modname: str | None = None, resolved: None | dict[DynamicValue, ResolvedDynamicValue] = None, package_deps: None | dict[str, list[str]] = None, - use_empty_lib = True) -> (None | list[CompiledModuleTSet], cmd_args, ArtifactTag): + use_empty_lib = True) -> (None | list[CompiledModuleTSet], cmd_args, ArtifactTag, list[Artifact]): toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] compile_args = cmd_args() @@ -465,7 +475,7 @@ def _common_compile_args( module_tsets = packages_info.exposed_package_modules - return module_tsets, compile_args, packagedb_tag + return module_tsets, compile_args, packagedb_tag, packages_info.exposed_package_dbs def _compile_module_args( @@ -492,7 +502,7 @@ def _compile_module_args( if enable_haddock: compile_cmd.add("-haddock") - module_tsets, compile_args, packagedb_tag = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), resolved = resolved, package_deps = package_deps) + module_tsets, compile_args, packagedb_tag, exposed_package_dbs = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), resolved = resolved, package_deps = package_deps) objects = [outputs[obj] for obj in module.objects] his = [outputs[hi] for hi in module.interfaces] @@ -530,6 +540,7 @@ def _compile_module_args( args_for_cmd = compile_cmd, args_for_file = compile_args, packagedb_tag = packagedb_tag, + packagedbs = exposed_package_dbs, ) @@ -656,6 +667,7 @@ def _compile_module( dyn_object_dot_o = dyn_object_dot_o, package_deps = package_deps.keys(), toolchain_deps = toolchain_deps, + db_deps = args.packagedbs, ), children = [cross_package_modules] + this_package_modules, ) From 32a91df95e432a2117178eeb6d433e9d23e2fe84 Mon Sep 17 
00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 14:51:30 +0200 Subject: [PATCH 0883/1133] projection to access module package db deps --- haskell/compile.bzl | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 9cf82ad98..2d398894e 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -64,15 +64,23 @@ def _compiled_module_project_as_dyn_objects_dot_o(mod: CompiledModuleInfo) -> cm def _compiled_module_reduce_as_package_deps(children: list[dict[str, None]], mod: CompiledModuleInfo | None) -> dict[str, None]: # TODO[AH] is there a better way to avoid duplicate -package flags? - # Using a project instead would produce duplicates. + # Using a projection instead would produce duplicates. result = {pkg: None for pkg in mod.package_deps} if mod else {} for child in children: result.update(child) return result +def _compiled_module_reduce_as_packagedb_deps(children: list[dict[Artifact, None]], mod: CompiledModuleInfo | None) -> dict[Artifact, None]: + # TODO[AH] is there a better way to avoid duplicate package-dbs? + # Using a projection instead would produce duplicates. + result = {db: None for db in mod.db_deps} if mod else {} + for child in children: + result.update(child) + return result + def _compiled_module_reduce_as_toolchain_deps(children: list[dict[str, None]], mod: CompiledModuleInfo | None) -> dict[str, None]: # TODO[AH] is there a better way to avoid duplicate -package-id flags? - # Using a project instead would produce duplicates. + # Using a projection instead would produce duplicates. 
result = {pkg: None for pkg in mod.toolchain_deps} if mod else {} for child in children: result.update(child) @@ -87,6 +95,7 @@ CompiledModuleTSet = transitive_set( }, reductions = { "package_deps": _compiled_module_reduce_as_package_deps, + "packagedb_deps": _compiled_module_reduce_as_packagedb_deps, "toolchain_deps": _compiled_module_reduce_as_toolchain_deps, }, ) From 3a03c53ce349b42a892fb65dbdb5128ec411d966 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 15:02:29 +0200 Subject: [PATCH 0884/1133] Track used package-dbs in the dep file --- haskell/compile.bzl | 2 ++ haskell/tools/ghc_wrapper.py | 12 +++++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 2d398894e..ad090d8d4 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -644,6 +644,8 @@ def _compile_module( compile_cmd.add(cmd_args(dependency_modules.reduce("package_deps").keys(), prepend = "-package")) compile_cmd.add(cmd_args(dependency_modules.reduce("toolchain_deps").keys(), prepend = "-package-id")) + compile_cmd.add(cmd_args(dependency_modules.reduce("packagedb_deps").keys(), prepend = "--buck2-package-db")) + dep_file = ctx.actions.declare_output("dep-{}_{}".format(module_name, artifact_suffix)).as_output() tagged_dep_file = abi_tag.tag_artifacts(dep_file) diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 977079c4b..d7f84b744 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -30,6 +30,13 @@ def main(): required=True, help="Path to the dep file.", ) + parser.add_argument( + "--buck2-package-db", + required=False, + nargs="*", + default=[], + help="Path to a package db that is used during the module compilation", + ) parser.add_argument( "--ghc", required=True, type=str, help="Path to the Haskell compiler GHC." 
) @@ -61,7 +68,10 @@ def main(): # write an empty dep file, to signal that all tagged files are unused try: with open(args.buck2_packagedb_dep, "w") as f: - f.write("\n") + for db in args.buck2_package_db: + f.write(db + "\n") + if not args.buck2_package_db: + f.write("\n") except Exception as e: # remove incomplete dep file From 4cc2446cf092f7fcc68b15d5f700efe9940f0cb8 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 15:21:16 +0200 Subject: [PATCH 0885/1133] FAILS deduplicate .env via anon-target Fails because a `cmd_args` object (or derivates) cannot be passed to an anon rule as an attribute. --- haskell/compile.bzl | 58 ++++++++++++++++++++++++++------------------- 1 file changed, 33 insertions(+), 25 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index ad090d8d4..fd78ed68c 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -213,6 +213,33 @@ _toolchain_library_catalog = anon_rule( } ) +def _package_env_file_impl(ctx: AnalysisContext) -> list[Provider]: + package_env_file = ctx.actions.declare_output("package-db.env") + package_env = cmd_args( + "clear-package-db", + "global-package-db", + delimiter = "\n", + ) + package_env.add(cmd_args( + ctx.attrs.package_dbs, + format = "package-db {}", + ).relative_to(package_env_file, parent = 1)) + ctx.actions.write( + package_env_file, + package_env, + ) + return [DefaultInfo(default_output = package_env_file)] + +_package_env_file = anon_rule( + impl = _package_env_file_impl, + attrs = { + "package_dbs": attrs.list(attrs.source()), + }, + artifact_promise_mappings = { + "env_file": lambda x: x[DefaultInfo].default_outputs[0], + }, +) + def target_metadata( ctx: AnalysisContext, *, @@ -428,34 +455,15 @@ def _common_compile_args( compile_args.add(packages_info.exposed_package_args) compile_args.hidden(packages_info.exposed_package_imports) - packagedb_tag = ctx.actions.artifact_tag() + packagedb_env_file = ctx.actions.anon_target(_package_env_file, { + "package_dbs": 
list(packages_info.packagedb_args.inputs), + }).artifact("env_file") - # TODO[AH] Avoid duplicates and share identical env files. - package_env_file = ctx.actions.declare_output(".".join([ - ctx.label.name, - modname or "pkg", - "package-db", - output_extensions(link_style, enable_profiling)[1], - "env", - ])) - package_env = cmd_args( - "clear-package-db", - "global-package-db", - delimiter = "\n", - ) - packagedb_args = packagedb_tag.tag_artifacts(packages_info.packagedb_args) - package_env.add(cmd_args( - packagedb_args, - format = "package-db {}", - ).relative_to(package_env_file, parent = 1)) - ctx.actions.write( - package_env_file, - package_env, - ) + packagedb_tag = ctx.actions.artifact_tag() compile_args.add(cmd_args( - packagedb_tag.tag_artifacts(package_env_file), + packagedb_tag.tag_artifacts(packagedb_env_file), prepend = "-package-env", - hidden = packagedb_args, + hidden = packagedb_tag.tag_artifacts(packages_info.packagedb_args), )) dep_file = ctx.actions.declare_output(".".join([ From 51df434f5f4507fbfedcd47b1076345dee5ae551 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 15:25:31 +0200 Subject: [PATCH 0886/1133] Revert "FAILS deduplicate .env via anon-target" This reverts commit 4cc2446cf092f7fcc68b15d5f700efe9940f0cb8. 
--- haskell/compile.bzl | 58 +++++++++++++++++++-------------------------- 1 file changed, 25 insertions(+), 33 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index fd78ed68c..ad090d8d4 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -213,33 +213,6 @@ _toolchain_library_catalog = anon_rule( } ) -def _package_env_file_impl(ctx: AnalysisContext) -> list[Provider]: - package_env_file = ctx.actions.declare_output("package-db.env") - package_env = cmd_args( - "clear-package-db", - "global-package-db", - delimiter = "\n", - ) - package_env.add(cmd_args( - ctx.attrs.package_dbs, - format = "package-db {}", - ).relative_to(package_env_file, parent = 1)) - ctx.actions.write( - package_env_file, - package_env, - ) - return [DefaultInfo(default_output = package_env_file)] - -_package_env_file = anon_rule( - impl = _package_env_file_impl, - attrs = { - "package_dbs": attrs.list(attrs.source()), - }, - artifact_promise_mappings = { - "env_file": lambda x: x[DefaultInfo].default_outputs[0], - }, -) - def target_metadata( ctx: AnalysisContext, *, @@ -455,15 +428,34 @@ def _common_compile_args( compile_args.add(packages_info.exposed_package_args) compile_args.hidden(packages_info.exposed_package_imports) - packagedb_env_file = ctx.actions.anon_target(_package_env_file, { - "package_dbs": list(packages_info.packagedb_args.inputs), - }).artifact("env_file") - packagedb_tag = ctx.actions.artifact_tag() + + # TODO[AH] Avoid duplicates and share identical env files. 
+ package_env_file = ctx.actions.declare_output(".".join([ + ctx.label.name, + modname or "pkg", + "package-db", + output_extensions(link_style, enable_profiling)[1], + "env", + ])) + package_env = cmd_args( + "clear-package-db", + "global-package-db", + delimiter = "\n", + ) + packagedb_args = packagedb_tag.tag_artifacts(packages_info.packagedb_args) + package_env.add(cmd_args( + packagedb_args, + format = "package-db {}", + ).relative_to(package_env_file, parent = 1)) + ctx.actions.write( + package_env_file, + package_env, + ) compile_args.add(cmd_args( - packagedb_tag.tag_artifacts(packagedb_env_file), + packagedb_tag.tag_artifacts(package_env_file), prepend = "-package-env", - hidden = packagedb_tag.tag_artifacts(packages_info.packagedb_args), + hidden = packagedb_args, )) dep_file = ctx.actions.declare_output(".".join([ From e565a86f667f7ae5a345de38665bb1ae3fd6bed5 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 15:26:44 +0200 Subject: [PATCH 0887/1133] Expand todo note --- haskell/compile.bzl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index ad090d8d4..ed0addaa8 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -431,6 +431,9 @@ def _common_compile_args( packagedb_tag = ctx.actions.artifact_tag() # TODO[AH] Avoid duplicates and share identical env files. + # The set of package-dbs can be known at the package level, not just the + # module level. So, we could generate this file outside of the + # dynamic_output action. 
package_env_file = ctx.actions.declare_output(".".join([ ctx.label.name, modname or "pkg", From 0e85e9aaf4ace3015b20cf86c7a1e75d5265e79a Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 16:00:47 +0200 Subject: [PATCH 0888/1133] Only tag package flags for module compilation --- haskell/compile.bzl | 90 +++++++++++++++++++++++---------------------- 1 file changed, 47 insertions(+), 43 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index ed0addaa8..dbb8b28c0 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -392,7 +392,7 @@ def _common_compile_args( modname: str | None = None, resolved: None | dict[DynamicValue, ResolvedDynamicValue] = None, package_deps: None | dict[str, list[str]] = None, - use_empty_lib = True) -> (None | list[CompiledModuleTSet], cmd_args, ArtifactTag, list[Artifact]): + use_empty_lib = True) -> (None | list[CompiledModuleTSet], cmd_args, None | ArtifactTag, list[Artifact]): toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] compile_args = cmd_args() @@ -427,49 +427,53 @@ def _common_compile_args( if not modname: compile_args.add(packages_info.exposed_package_args) compile_args.hidden(packages_info.exposed_package_imports) + compile_args.add(packages_info.packagedb_args) + + if modname: + packagedb_tag = ctx.actions.artifact_tag() + + # TODO[AH] Avoid duplicates and share identical env files. + # The set of package-dbs can be known at the package level, not just the + # module level. So, we could generate this file outside of the + # dynamic_output action. 
+ package_env_file = ctx.actions.declare_output(".".join([ + ctx.label.name, + modname or "pkg", + "package-db", + output_extensions(link_style, enable_profiling)[1], + "env", + ])) + package_env = cmd_args( + "clear-package-db", + "global-package-db", + delimiter = "\n", + ) + packagedb_args = packagedb_tag.tag_artifacts(packages_info.packagedb_args) + package_env.add(cmd_args( + packagedb_args, + format = "package-db {}", + ).relative_to(package_env_file, parent = 1)) + ctx.actions.write( + package_env_file, + package_env, + ) + compile_args.add(cmd_args( + packagedb_tag.tag_artifacts(package_env_file), + prepend = "-package-env", + hidden = packagedb_args, + )) - packagedb_tag = ctx.actions.artifact_tag() - - # TODO[AH] Avoid duplicates and share identical env files. - # The set of package-dbs can be known at the package level, not just the - # module level. So, we could generate this file outside of the - # dynamic_output action. - package_env_file = ctx.actions.declare_output(".".join([ - ctx.label.name, - modname or "pkg", - "package-db", - output_extensions(link_style, enable_profiling)[1], - "env", - ])) - package_env = cmd_args( - "clear-package-db", - "global-package-db", - delimiter = "\n", - ) - packagedb_args = packagedb_tag.tag_artifacts(packages_info.packagedb_args) - package_env.add(cmd_args( - packagedb_args, - format = "package-db {}", - ).relative_to(package_env_file, parent = 1)) - ctx.actions.write( - package_env_file, - package_env, - ) - compile_args.add(cmd_args( - packagedb_tag.tag_artifacts(package_env_file), - prepend = "-package-env", - hidden = packagedb_args, - )) - - dep_file = ctx.actions.declare_output(".".join([ - ctx.label.name, - modname or "pkg", - "package-db", - output_extensions(link_style, enable_profiling)[1], - "dep", - ])).as_output() - tagged_dep_file = packagedb_tag.tag_artifacts(dep_file) - compile_args.add("--buck2-packagedb-dep", tagged_dep_file) + dep_file = ctx.actions.declare_output(".".join([ + ctx.label.name, + 
modname or "pkg", + "package-db", + output_extensions(link_style, enable_profiling)[1], + "dep", + ])).as_output() + tagged_dep_file = packagedb_tag.tag_artifacts(dep_file) + compile_args.add("--buck2-packagedb-dep", tagged_dep_file) + else: + packagedb_tag = None if enable_th: compile_args.add(packages_info.exposed_package_libs) From e4acef45df18c5967d71c5e894450e8a6b8e40d3 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 16:13:00 +0200 Subject: [PATCH 0889/1133] Fix -package-db flags for linking --- haskell/compile.bzl | 2 +- haskell/haskell.bzl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index dbb8b28c0..e201ed497 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -427,7 +427,7 @@ def _common_compile_args( if not modname: compile_args.add(packages_info.exposed_package_args) compile_args.hidden(packages_info.exposed_package_imports) - compile_args.add(packages_info.packagedb_args) + compile_args.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) if modname: packagedb_tag = ctx.actions.artifact_tag() diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 2fdb541b9..b432aa002 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -1048,7 +1048,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link.add("-hide-all-packages") link.add(cmd_args(toolchain_libs, prepend = "-package")) link.add(cmd_args(packages_info.exposed_package_args)) - link.add(packages_info.packagedb_args) + link.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) link.add("-o", output.as_output()) link.add(haskell_toolchain.linker_flags) link.add(ctx.attrs.linker_flags) From e213bd4facf7e4e3a8807013cf4ca75f55f66ae7 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 17:08:48 +0200 Subject: [PATCH 0890/1133] Remove clear-package-db from package-env file That bit was taken from the GHC docs but interferes with the 
changes introduced in https://github.com/MercuryTechnologies/the-culture-repo/pull/157. --- haskell/compile.bzl | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index e201ed497..88279667a 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -443,11 +443,7 @@ def _common_compile_args( output_extensions(link_style, enable_profiling)[1], "env", ])) - package_env = cmd_args( - "clear-package-db", - "global-package-db", - delimiter = "\n", - ) + package_env = cmd_args(delimiter = "\n") packagedb_args = packagedb_tag.tag_artifacts(packages_info.packagedb_args) package_env.add(cmd_args( packagedb_args, From e9fab28aa085dbc1ddba177e5d1c479d4a6c2d88 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 6 Jun 2024 12:24:19 +0200 Subject: [PATCH 0891/1133] Cleanup _common_compile_args `modname` is never `None`. --- haskell/compile.bzl | 90 ++++++++++++++++++++------------------------- 1 file changed, 39 insertions(+), 51 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 88279667a..127a31888 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -389,17 +389,13 @@ def _common_compile_args( enable_profiling: bool, enable_th: bool, pkgname: str | None, - modname: str | None = None, + modname: str, resolved: None | dict[DynamicValue, ResolvedDynamicValue] = None, package_deps: None | dict[str, list[str]] = None, - use_empty_lib = True) -> (None | list[CompiledModuleTSet], cmd_args, None | ArtifactTag, list[Artifact]): - toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] - + use_empty_lib = True) -> (None | list[CompiledModuleTSet], cmd_args, ArtifactTag, list[Artifact]): compile_args = cmd_args() compile_args.add("-no-link", "-i") compile_args.add("-hide-all-packages") - if not modname: - compile_args.add(cmd_args(toolchain_libs, prepend="-package")) if enable_profiling: compile_args.add("-prof") @@ -424,52 +420,44 
@@ def _common_compile_args( package_deps = package_deps, ) - if not modname: - compile_args.add(packages_info.exposed_package_args) - compile_args.hidden(packages_info.exposed_package_imports) - compile_args.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) - - if modname: - packagedb_tag = ctx.actions.artifact_tag() - - # TODO[AH] Avoid duplicates and share identical env files. - # The set of package-dbs can be known at the package level, not just the - # module level. So, we could generate this file outside of the - # dynamic_output action. - package_env_file = ctx.actions.declare_output(".".join([ - ctx.label.name, - modname or "pkg", - "package-db", - output_extensions(link_style, enable_profiling)[1], - "env", - ])) - package_env = cmd_args(delimiter = "\n") - packagedb_args = packagedb_tag.tag_artifacts(packages_info.packagedb_args) - package_env.add(cmd_args( - packagedb_args, - format = "package-db {}", - ).relative_to(package_env_file, parent = 1)) - ctx.actions.write( - package_env_file, - package_env, - ) - compile_args.add(cmd_args( - packagedb_tag.tag_artifacts(package_env_file), - prepend = "-package-env", - hidden = packagedb_args, - )) + packagedb_tag = ctx.actions.artifact_tag() + + # TODO[AH] Avoid duplicates and share identical env files. + # The set of package-dbs can be known at the package level, not just the + # module level. So, we could generate this file outside of the + # dynamic_output action. 
+ package_env_file = ctx.actions.declare_output(".".join([ + ctx.label.name, + modname or "pkg", + "package-db", + output_extensions(link_style, enable_profiling)[1], + "env", + ])) + package_env = cmd_args(delimiter = "\n") + packagedb_args = packagedb_tag.tag_artifacts(packages_info.packagedb_args) + package_env.add(cmd_args( + packagedb_args, + format = "package-db {}", + ).relative_to(package_env_file, parent = 1)) + ctx.actions.write( + package_env_file, + package_env, + ) + compile_args.add(cmd_args( + packagedb_tag.tag_artifacts(package_env_file), + prepend = "-package-env", + hidden = packagedb_args, + )) - dep_file = ctx.actions.declare_output(".".join([ - ctx.label.name, - modname or "pkg", - "package-db", - output_extensions(link_style, enable_profiling)[1], - "dep", - ])).as_output() - tagged_dep_file = packagedb_tag.tag_artifacts(dep_file) - compile_args.add("--buck2-packagedb-dep", tagged_dep_file) - else: - packagedb_tag = None + dep_file = ctx.actions.declare_output(".".join([ + ctx.label.name, + modname or "pkg", + "package-db", + output_extensions(link_style, enable_profiling)[1], + "dep", + ])).as_output() + tagged_dep_file = packagedb_tag.tag_artifacts(dep_file) + compile_args.add("--buck2-packagedb-dep", tagged_dep_file) if enable_th: compile_args.add(packages_info.exposed_package_libs) From 18bc19ae58921f6500f7fcd4a5935bd64aaba0b4 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 7 Jun 2024 07:39:38 +0200 Subject: [PATCH 0892/1133] Add `packages` field to `HaskellToolchainInfo` --- haskell/toolchain.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/haskell/toolchain.bzl b/haskell/toolchain.bzl index fbd140ff7..fc54b9e69 100644 --- a/haskell/toolchain.bzl +++ b/haskell/toolchain.bzl @@ -37,6 +37,7 @@ HaskellToolchainInfo = provider( "ghci_packager": provider_field(typing.Any, default = None), "cache_links": provider_field(typing.Any, default = None), "script_template_processor": provider_field(typing.Any, default = None), + "packages": 
provider_field(typing.Any, default = None), }, ) From b07ea6043bf3955b2a0228c93ea6bc0bec7c47d6 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 7 Jun 2024 07:54:21 +0200 Subject: [PATCH 0893/1133] Add `HaskellPackagesInfo` record --- haskell/toolchain.bzl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/haskell/toolchain.bzl b/haskell/toolchain.bzl index fc54b9e69..a552b8933 100644 --- a/haskell/toolchain.bzl +++ b/haskell/toolchain.bzl @@ -46,3 +46,7 @@ HaskellToolchainLibrary = provider( "name": provider_field(str), }, ) + +HaskellPackagesInfo = record( + package_db = Artifact, +) From 75c8e85f310ab3d50c8a6259c8d8247bd2f206c5 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 7 Jun 2024 07:46:40 +0200 Subject: [PATCH 0894/1133] Use package db from toolchain packages info --- haskell/compile.bzl | 21 ++++++++++++++----- haskell/haskell.bzl | 4 ++++ .../generate_toolchain_library_catalog.py | 11 +++++++--- 3 files changed, 28 insertions(+), 8 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 127a31888..6881580d6 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -189,13 +189,20 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl return modules def _toolchain_library_catalog_impl(ctx: AnalysisContext) -> list[Provider]: - ghc_pkg = ctx.attrs.toolchain[HaskellToolchainInfo].packager + haskell_toolchain = ctx.attrs.toolchain[HaskellToolchainInfo] + + ghc_pkg = haskell_toolchain.packager + catalog_gen = ctx.attrs.generate_toolchain_library_catalog[RunInfo] catalog = ctx.actions.declare_output("haskell_toolchain_libraries.json") - ctx.actions.run( - cmd_args(catalog_gen, "--ghc-pkg", ghc_pkg, "--output", catalog.as_output()), - category = "haskell_toolchain_library_catalog", - ) + + cmd = cmd_args(catalog_gen, "--ghc-pkg", ghc_pkg, "--output", catalog.as_output()) + + if haskell_toolchain.packages: + cmd.add("--package-db", haskell_toolchain.packages.package_db) + + ctx.actions.run(cmd, 
category = "haskell_toolchain_library_catalog") + return [DefaultInfo(default_output = catalog)] _toolchain_library_catalog = anon_rule( @@ -363,6 +370,10 @@ def get_packages_info( enable_profiling, ) + # TODO[CB] use individual package db for each package + if haskell_toolchain.packages: + packagedb_args.add("-package-db", haskell_toolchain.packages.package_db) + # Expose only the packages we depend on directly for lib in haskell_direct_deps_lib_infos: pkg_name = lib.name diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index b432aa002..24ce373c9 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -568,6 +568,10 @@ def _build_haskell_lib( ), ) + # TODO[CB] use individual package db for each package + if haskell_toolchain.packages: + link.add("-package-db", haskell_toolchain.packages.package_db) + link.add(compiled.objects) infos = get_link_args_for_strategy( diff --git a/haskell/tools/generate_toolchain_library_catalog.py b/haskell/tools/generate_toolchain_library_catalog.py index 2c2eeaa2a..33649573f 100755 --- a/haskell/tools/generate_toolchain_library_catalog.py +++ b/haskell/tools/generate_toolchain_library_catalog.py @@ -27,16 +27,21 @@ def main(): required=True, type=str, help="Path to the Haskell compiler's ghc-pkg utilty.") + parser.add_argument( + "--package-db", + required=False, + type=str, + help="Path to the package db including all haskell libraries.") args = parser.parse_args() - with subprocess.Popen(_ghc_pkg_command(args.ghc_pkg), stdout=subprocess.PIPE, text=True) as proc: + with subprocess.Popen(_ghc_pkg_command(args.ghc_pkg, args.package_db), stdout=subprocess.PIPE, text=True) as proc: packages = list(_parse_ghc_pkg_dump(proc.stdout)) result = _construct_package_mappings(packages) json.dump(result, args.output) -def _ghc_pkg_command(ghc_pkg): +def _ghc_pkg_command(ghc_pkg, package_db): return [ ghc_pkg, "dump", @@ -44,7 +49,7 @@ def _ghc_pkg_command(ghc_pkg): "--no-user-package-db", "--simple-output", "--expand-pkgroot", - ] + ] 
+ (["--package-db", package_db] if package_db else []) def _parse_ghc_pkg_dump(lines): From 3b06681ff861a1c65a632695ede4ac6b2fff5285 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 7 Jun 2024 09:02:53 +0200 Subject: [PATCH 0895/1133] Add `HaskellPackagesInfoTSet` and a dynamic provider --- haskell/toolchain.bzl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/haskell/toolchain.bzl b/haskell/toolchain.bzl index a552b8933..ec11f651f 100644 --- a/haskell/toolchain.bzl +++ b/haskell/toolchain.bzl @@ -50,3 +50,9 @@ HaskellToolchainLibrary = provider( HaskellPackagesInfo = record( package_db = Artifact, ) + +HaskellPackageDbTSet = transitive_set() + +DynamicHaskellPackageDbInfo = provider(fields = { + "packages": dict[str, HaskellPackageDbTSet], +}) From fe963fb2eafabf1222a97bc9e157022799eb9404 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 7 Jun 2024 09:19:16 +0200 Subject: [PATCH 0896/1133] Add `dynamic` field to `HaskellPackagesInfo` --- haskell/toolchain.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/haskell/toolchain.bzl b/haskell/toolchain.bzl index ec11f651f..227ff4e22 100644 --- a/haskell/toolchain.bzl +++ b/haskell/toolchain.bzl @@ -49,6 +49,7 @@ HaskellToolchainLibrary = provider( HaskellPackagesInfo = record( package_db = Artifact, + dynamic = DynamicValue, ) HaskellPackageDbTSet = transitive_set() From e5a914af84c5eb992ee6aa491a79c440ff408681 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 7 Jun 2024 10:01:50 +0200 Subject: [PATCH 0897/1133] Add package dependencies to HaskellLibraryInfo --- haskell/haskell.bzl | 3 +++ haskell/library_info.bzl | 2 ++ 2 files changed, 5 insertions(+) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 24ce373c9..9fc6ecef4 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -521,6 +521,8 @@ def _build_haskell_lib( non_profiling_hlib: [HaskellLibBuildOutput, None] = None) -> HaskellLibBuildOutput: linker_info = ctx.attrs._cxx_toolchain[CxxToolchainInfo].linker_info + 
toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] + # Link the objects into a library haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -674,6 +676,7 @@ def _build_haskell_lib( version = "1.0.0", is_prebuilt = False, profiling_enabled = enable_profiling, + dependencies = toolchain_libs, ) return HaskellLibBuildOutput( diff --git a/haskell/library_info.bzl b/haskell/library_info.bzl index 3a359b6ff..2a3b16bf3 100644 --- a/haskell/library_info.bzl +++ b/haskell/library_info.bzl @@ -46,6 +46,8 @@ HaskellLibraryInfo = record( version = str, is_prebuilt = bool, profiling_enabled = bool, + # Package dependencies + dependencies = list[str], ) def _project_as_package_db(lib: HaskellLibraryInfo): From 47f0c155c670c88f2aa8a8e5a1721ab71e5ba95f Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 7 Jun 2024 10:04:37 +0200 Subject: [PATCH 0898/1133] Add `packages` reduction for HaskellLibraryInfoTSet --- haskell/library_info.bzl | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/haskell/library_info.bzl b/haskell/library_info.bzl index 2a3b16bf3..b6672a5dc 100644 --- a/haskell/library_info.bzl +++ b/haskell/library_info.bzl @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//utils:utils.bzl", "flatten", "dedupe_by_value") + # If the target is a haskell library, the HaskellLibraryProvider # contains its HaskellLibraryInfo. 
(in contrast to a HaskellLinkInfo, # which contains the HaskellLibraryInfo for all the transitive @@ -56,9 +58,18 @@ def _project_as_package_db(lib: HaskellLibraryInfo): def _project_as_empty_package_db(lib: HaskellLibraryInfo): return cmd_args(lib.empty_db) +def _get_package_deps(children: list[list[str]], lib: HaskellLibraryInfo | None): + flatted = flatten(children) + if lib: + flatted.extend(lib.dependencies) + return dedupe_by_value(flatted) + HaskellLibraryInfoTSet = transitive_set( args_projections = { "package_db": _project_as_package_db, "empty_package_db": _project_as_empty_package_db, }, + reductions = { + "packages": _get_package_deps, + }, ) From 966a574eb8dcf972b6c9787c03bc954077ea6a49 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 7 Jun 2024 10:37:41 +0200 Subject: [PATCH 0899/1133] Add `package_db` projection to HaskellPackageDbTSet` --- haskell/toolchain.bzl | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/haskell/toolchain.bzl b/haskell/toolchain.bzl index 227ff4e22..8961e9838 100644 --- a/haskell/toolchain.bzl +++ b/haskell/toolchain.bzl @@ -52,7 +52,14 @@ HaskellPackagesInfo = record( dynamic = DynamicValue, ) -HaskellPackageDbTSet = transitive_set() +def _haskell_package_info_as_package_db(p: Artifact): + return cmd_args(p) + +HaskellPackageDbTSet = transitive_set( + args_projections = { + "package_db": _haskell_package_info_as_package_db, + } +) DynamicHaskellPackageDbInfo = provider(fields = { "packages": dict[str, HaskellPackageDbTSet], From acdbc47f9132f8f5693e41733dbcbf564ad44cd5 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 7 Jun 2024 10:31:21 +0200 Subject: [PATCH 0900/1133] Use individual package dbs from resolved package db info --- haskell/compile.bzl | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 6881580d6..985bdbca7 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -19,6 +19,8 @@ load( 
"@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo", "HaskellToolchainLibrary", + "DynamicHaskellPackageDbInfo", + "HaskellPackageDbTSet", ) load( "@prelude//haskell:util.bzl", @@ -370,8 +372,24 @@ def get_packages_info( enable_profiling, ) - # TODO[CB] use individual package db for each package - if haskell_toolchain.packages: + if haskell_toolchain.packages and resolved != None: + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + pkg_deps = resolved[haskell_toolchain.packages.dynamic] + package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages + + toolchain_libs = [ + dep[HaskellToolchainLibrary].name + for dep in ctx.attrs.deps + if HaskellToolchainLibrary in dep + ] + libs.reduce("packages") + + package_db_tset = ctx.actions.tset( + HaskellPackageDbTSet, + children = [package_db[name] for name in toolchain_libs if name in package_db] + ) + + packagedb_args.add(package_db_tset.project_as_args("package_db")) + else: packagedb_args.add("-package-db", haskell_toolchain.packages.package_db) # Expose only the packages we depend on directly @@ -733,6 +751,8 @@ def compile( return [DynamicCompileResultInfo(modules = module_tsets)] + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + interfaces = [interface for module in modules.values() for interface in module.interfaces] objects = [object for module in modules.values() for object in module.objects] stub_dirs = [module.stub_dir for module in modules.values()] @@ -748,7 +768,7 @@ def compile( if enable_profiling else lib.info[link_style] ] - ], + ] + [ haskell_toolchain.packages.dynamic ], inputs = ctx.attrs.srcs, outputs = [o.as_output() for o in interfaces + objects + stub_dirs + abi_hashes], f = do_compile) From dae4cdcee885e7c4fca7723f98eedd1eb234711c Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 7 Jun 2024 10:44:56 +0200 Subject: [PATCH 0901/1133] Pass resolved package info to get_packages_info for metadata This needs a dynamic_output in order to 
retrieve the resolved promises. --- haskell/compile.bzl | 91 ++++++++++++++++++++++++++++----------------- 1 file changed, 57 insertions(+), 34 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 985bdbca7..cd953ea51 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -243,16 +243,6 @@ def target_metadata( "generate_toolchain_library_catalog": ctx.attrs._generate_toolchain_library_catalog, }) - # Add -package-db and -package/-expose-package flags for each Haskell - # library dependency. - packages_info = get_packages_info( - ctx, - LinkStyle("shared"), - specify_pkg_version = False, - enable_profiling = False, - use_empty_lib = True, - ) - # The object and interface file paths are depending on the real module name # as inferred by GHC, not the source file path; currently this requires the # module name to correspond to the source file path as otherwise GHC will @@ -261,31 +251,64 @@ def target_metadata( # # (module X.Y.Z must be defined in a file at X/Y/Z.hs) - package_flag = _package_flag(haskell_toolchain) - ghc_args = cmd_args() - ghc_args.add("-hide-all-packages") - ghc_args.add(package_flag, "base") - ghc_args.add(cmd_args(toolchain_libs, prepend=package_flag)) - ghc_args.add(cmd_args(packages_info.exposed_package_args)) - ghc_args.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) - ghc_args.add(ctx.attrs.compiler_flags) - - md_args = cmd_args(md_gen) - md_args.add("--output", md_file.as_output()) - md_args.add("--toolchain-libs", toolchain_libs_catalog.artifact("catalog")) - md_args.add("--ghc", haskell_toolchain.compiler) - md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) - md_args.add( - "--source-prefix", - _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)), - ) - md_args.add(cmd_args(sources, format="--source={}")) + catalog = toolchain_libs_catalog.artifact("catalog") - md_args.add( - _attr_deps_haskell_lib_package_name_and_prefix(ctx), - ) + def get_metadata(ctx, _artifacts, resolved, 
outputs, catalog=catalog): + + pkg_deps = resolved[haskell_toolchain.packages.dynamic] + package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages - ctx.actions.run(md_args, category = "haskell_metadata", identifier = suffix if suffix else None) + # Add -package-db and -package/-expose-package flags for each Haskell + # library dependency. + + packages_info = get_packages_info( + ctx, + LinkStyle("shared"), + specify_pkg_version = False, + enable_profiling = False, + use_empty_lib = True, + resolved = resolved, + ) + package_flag = _package_flag(haskell_toolchain) + ghc_args = cmd_args() + ghc_args.add("-hide-all-packages") + ghc_args.add(package_flag, "base") + + package_dbs = ctx.actions.tset( + HaskellPackageDbTSet, + children = [package_db[name] for name in toolchain_libs if name in package_db] + ) + + ghc_args.add(cmd_args(toolchain_libs, prepend=package_flag)) + ghc_args.add(cmd_args(packages_info.exposed_package_args)) + ghc_args.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) + ghc_args.add(cmd_args(package_dbs.project_as_args("package_db"), prepend="-package-db")) + ghc_args.add(ctx.attrs.compiler_flags) + + md_args = cmd_args(md_gen) + md_args.add("--toolchain-libs", catalog) + md_args.add("--ghc", haskell_toolchain.compiler) + md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) + md_args.add( + "--source-prefix", + _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)), + ) + md_args.add(cmd_args(sources, format="--source={}")) + + md_args.add( + _attr_deps_haskell_lib_package_name_and_prefix(ctx), + ) + md_args.add("--output", outputs[md_file].as_output()) + + ctx.actions.run(md_args, category = "haskell_metadata", identifier = suffix if suffix else None) + + ctx.actions.dynamic_output( + dynamic = [], + promises = [haskell_toolchain.packages.dynamic], + inputs = [], + outputs = [md_file.as_output()], + f = get_metadata, + ) return md_file @@ -390,7 +413,7 @@ def get_packages_info( 
packagedb_args.add(package_db_tset.project_as_args("package_db")) else: - packagedb_args.add("-package-db", haskell_toolchain.packages.package_db) + packagedb_args.add(haskell_toolchain.packages.package_db) # Expose only the packages we depend on directly for lib in haskell_direct_deps_lib_infos: From 85ce27f499136e4a9de4ec94317dbb7fa52d8beb Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 7 Jun 2024 11:29:11 +0200 Subject: [PATCH 0902/1133] Use individual package db in haskell_binary rule --- haskell/haskell.bzl | 64 +++++++++++++++++++++++++++++++-------------- 1 file changed, 44 insertions(+), 20 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 9fc6ecef4..65407161b 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -78,6 +78,8 @@ load( "@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo", "HaskellToolchainLibrary", + "HaskellPackageDbTSet", + "DynamicHaskellPackageDbInfo", ) load( "@prelude//haskell:util.bzl", @@ -1040,27 +1042,8 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] - # Add -package-db and -package/-expose-package flags for each Haskell - # library dependency. 
- packages_info = get_packages_info( - ctx, - link_style, - specify_pkg_version = False, - enable_profiling = enable_profiling, - use_empty_lib = False, - ) - output = ctx.actions.declare_output(ctx.attrs.name) link = cmd_args(haskell_toolchain.compiler) - link.add("-hide-all-packages") - link.add(cmd_args(toolchain_libs, prepend = "-package")) - link.add(cmd_args(packages_info.exposed_package_args)) - link.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) - link.add("-o", output.as_output()) - link.add(haskell_toolchain.linker_flags) - link.add(ctx.attrs.linker_flags) - - link.hidden(packages_info.exposed_package_libs) objects = {} @@ -1241,7 +1224,48 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: else: link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) - ctx.actions.run(link, category = "haskell_link") + def do_link(ctx, artifacts, resolved, outputs, output=output, objects=objects): + link_cmd = link.copy() # link is already frozen, make a copy + + pkg_deps = resolved[haskell_toolchain.packages.dynamic] + package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages + + # Add -package-db and -package/-expose-package flags for each Haskell + # library dependency. 
+ packages_info = get_packages_info( + ctx, + link_style, + resolved = resolved, + specify_pkg_version = False, + enable_profiling = enable_profiling, + use_empty_lib = False, + ) + + link_cmd.add("-hide-all-packages") + link_cmd.add(cmd_args(toolchain_libs, prepend = "-package")) + link_cmd.add(cmd_args(packages_info.exposed_package_args)) + link_cmd.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) + link_cmd.add(haskell_toolchain.linker_flags) + link_cmd.add(ctx.attrs.linker_flags) + + link_cmd.hidden(packages_info.exposed_package_libs) + + package_db_tset = ctx.actions.tset( + HaskellPackageDbTSet, + children = [package_db[name] for name in toolchain_libs if name in package_db] + ) + + link_cmd.add("-o", outputs[output].as_output()) + + ctx.actions.run(link_cmd, category = "haskell_link") + + ctx.actions.dynamic_output( + dynamic = [], + promises = [haskell_toolchain.packages.dynamic], + inputs = objects.values(), + outputs = [output.as_output()], + f = do_link, + ) run = cmd_args(output) From bd2a159346798045899134af221762d43e692b39 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 7 Jun 2024 11:36:00 +0200 Subject: [PATCH 0903/1133] Use individual package db in haskell_library rule --- haskell/haskell.bzl | 70 ++++++++++++++++++++++++++++----------------- 1 file changed, 43 insertions(+), 27 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 65407161b..08d7167a6 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -557,36 +557,52 @@ def _build_haskell_lib( if link_style == LinkStyle("shared"): lib = ctx.actions.declare_output(lib_short_path) - link = cmd_args(haskell_toolchain.linker) - link.add(haskell_toolchain.linker_flags) - link.add(ctx.attrs.linker_flags) - link.add("-hide-all-packages") - link.add(cmd_args(toolchain_libs, prepend = "-package")) - link.add("-o", lib.as_output()) - link.add( - get_shared_library_flags(linker_info.type), - "-dynamic", - cmd_args( - 
_get_haskell_shared_library_name_linker_flags(linker_info.type, libfile), - prepend = "-optl", - ), - ) - # TODO[CB] use individual package db for each package - if haskell_toolchain.packages: - link.add("-package-db", haskell_toolchain.packages.package_db) + def do_link(ctx, artifacts, resolved, outputs, lib=lib, objects=compiled.objects): + pkg_deps = resolved[haskell_toolchain.packages.dynamic] + package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages - link.add(compiled.objects) + package_db_tset = ctx.actions.tset( + HaskellPackageDbTSet, + children = [package_db[name] for name in toolchain_libs if name in package_db] + ) - infos = get_link_args_for_strategy( - ctx, - nlis, - to_link_strategy(link_style), - ) - link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) - ctx.actions.run( - link, - category = "haskell_link" + artifact_suffix.replace("-", "_"), + link = cmd_args(haskell_toolchain.linker) + link.add(haskell_toolchain.linker_flags) + link.add(ctx.attrs.linker_flags) + link.add("-hide-all-packages") + link.add(cmd_args(toolchain_libs, prepend = "-package")) + link.add(cmd_args(package_db_tset.project_as_args("package_db"), prepend="-package-db")) + link.add("-o", outputs[lib].as_output()) + link.add( + get_shared_library_flags(linker_info.type), + "-dynamic", + cmd_args( + _get_haskell_shared_library_name_linker_flags(linker_info.type, libfile), + prepend = "-optl", + ), + ) + + link.add(objects) + + infos = get_link_args_for_strategy( + ctx, + nlis, + to_link_strategy(link_style), + ) + link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) + + ctx.actions.run( + link, + category = "haskell_link" + artifact_suffix.replace("-", "_"), + ) + + ctx.actions.dynamic_output( + dynamic = [], + promises = [haskell_toolchain.packages.dynamic], + inputs = compiled.objects, + outputs = [lib.as_output()], + f = do_link, ) solibs[libfile] = LinkedObject(output = lib, unstripped_output = lib) From a748dc8afab4aba5fef937d99981ae90e710bc7c Mon Sep 
17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 17:39:50 +0200 Subject: [PATCH 0904/1133] Remove None options on _compile_module_args The function is only invoked in one place and always with these arguments defined. --- haskell/compile.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index cd953ea51..2022481ca 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -539,8 +539,8 @@ def _compile_module_args( enable_th: bool, outputs: dict[Artifact, Artifact], resolved: dict[DynamicValue, ResolvedDynamicValue], - pkgname = None, - package_deps: None | dict[str, list[str]] = None) -> CompileArgsInfo: + pkgname: str, + package_deps: dict[str, list[str]]) -> CompileArgsInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] compile_cmd = cmd_args() From cdeee436f78d1df17f8404b46a24dbbc55b1ac01 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 17:43:42 +0200 Subject: [PATCH 0905/1133] Remove None options on _common_compile_args That function is only invoked in one place with these args always set. 
--- haskell/compile.bzl | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 2022481ca..f48d0d3ce 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -440,11 +440,10 @@ def _common_compile_args( link_style: LinkStyle, enable_profiling: bool, enable_th: bool, - pkgname: str | None, + pkgname: str, modname: str, - resolved: None | dict[DynamicValue, ResolvedDynamicValue] = None, - package_deps: None | dict[str, list[str]] = None, - use_empty_lib = True) -> (None | list[CompiledModuleTSet], cmd_args, ArtifactTag, list[Artifact]): + resolved: dict[DynamicValue, ResolvedDynamicValue], + package_deps: dict[str, list[str]]) -> (None | list[CompiledModuleTSet], cmd_args, ArtifactTag, list[Artifact]): compile_args = cmd_args() compile_args.add("-no-link", "-i") compile_args.add("-hide-all-packages") @@ -467,7 +466,7 @@ def _common_compile_args( link_style, specify_pkg_version = False, enable_profiling = enable_profiling, - use_empty_lib = use_empty_lib, + use_empty_lib = True, resolved = resolved, package_deps = package_deps, ) @@ -513,8 +512,6 @@ def _common_compile_args( if enable_th: compile_args.add(packages_info.exposed_package_libs) - if not modname: - compile_args.hidden(packages_info.exposed_package_objects) # Add args from preprocess-able inputs. 
inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) @@ -522,8 +519,7 @@ def _common_compile_args( pre_args = pre.set.project_as_args("args") compile_args.add(cmd_args(pre_args, format = "-optP={}")) - if pkgname: - compile_args.add(["-this-unit-id", pkgname]) + compile_args.add(["-this-unit-id", pkgname]) module_tsets = packages_info.exposed_package_modules From e75c9cbf61dcf159f2dd2a1be0b4f6ca6ba056d6 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 17:46:20 +0200 Subject: [PATCH 0906/1133] Fix: pkgname can be None --- haskell/compile.bzl | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index f48d0d3ce..628fd7115 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -440,7 +440,7 @@ def _common_compile_args( link_style: LinkStyle, enable_profiling: bool, enable_th: bool, - pkgname: str, + pkgname: str | None, modname: str, resolved: dict[DynamicValue, ResolvedDynamicValue], package_deps: dict[str, list[str]]) -> (None | list[CompiledModuleTSet], cmd_args, ArtifactTag, list[Artifact]): @@ -519,7 +519,8 @@ def _common_compile_args( pre_args = pre.set.project_as_args("args") compile_args.add(cmd_args(pre_args, format = "-optP={}")) - compile_args.add(["-this-unit-id", pkgname]) + if pkgname: + compile_args.add(["-this-unit-id", pkgname]) module_tsets = packages_info.exposed_package_modules @@ -535,7 +536,7 @@ def _compile_module_args( enable_th: bool, outputs: dict[Artifact, Artifact], resolved: dict[DynamicValue, ResolvedDynamicValue], - pkgname: str, + pkgname: str | None, package_deps: dict[str, list[str]]) -> CompileArgsInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] From f34a994d05a116052f2c8457e814fd6e6443bb80 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 17:49:12 +0200 Subject: [PATCH 0907/1133] Remove None from _common_compile_args return type --- haskell/compile.bzl | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 628fd7115..a8214b0af 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -443,7 +443,7 @@ def _common_compile_args( pkgname: str | None, modname: str, resolved: dict[DynamicValue, ResolvedDynamicValue], - package_deps: dict[str, list[str]]) -> (None | list[CompiledModuleTSet], cmd_args, ArtifactTag, list[Artifact]): + package_deps: dict[str, list[str]]) -> (list[CompiledModuleTSet], cmd_args, ArtifactTag, list[Artifact]): compile_args = cmd_args() compile_args.add("-no-link", "-i") compile_args.add("-hide-all-packages") From 1ba43fa5fd95acfbaa5b95b2a76034134469db3b Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 17:49:41 +0200 Subject: [PATCH 0908/1133] Remove None from CompileResultInfo --- haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index a8214b0af..97dc5d624 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -113,7 +113,7 @@ CompileResultInfo = record( stubs = field(Artifact), hashes = field(list[Artifact]), producing_indices = field(bool), - module_tsets = field(None | list[CompiledModuleTSet] | DynamicValue), + module_tsets = field(list[CompiledModuleTSet] | DynamicValue), ) CompileArgsInfo = record( From dda55487be4995afa3f98b72a3d3184ec296eaaa Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 5 Jun 2024 17:57:45 +0200 Subject: [PATCH 0909/1133] Inline _common_compile_args The function was only invoked in one location --- haskell/compile.bzl | 57 +++++++++++++++++---------------------------- 1 file changed, 22 insertions(+), 35 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 97dc5d624..e493ff5d6 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -434,16 +434,30 @@ def get_packages_info( transitive_deps = libs, ) - -def _common_compile_args( +def _compile_module_args( ctx: AnalysisContext, + module: 
_Module, link_style: LinkStyle, enable_profiling: bool, + enable_haddock: bool, enable_th: bool, - pkgname: str | None, - modname: str, + outputs: dict[Artifact, Artifact], resolved: dict[DynamicValue, ResolvedDynamicValue], - package_deps: dict[str, list[str]]) -> (list[CompiledModuleTSet], cmd_args, ArtifactTag, list[Artifact]): + pkgname: str | None, + package_deps: dict[str, list[str]]) -> CompileArgsInfo: + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + + compile_cmd = cmd_args() + compile_cmd.add(haskell_toolchain.compiler_flags) + + # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't + # be parsed when inside an argsfile. + compile_cmd.add(ctx.attrs.compiler_flags) + compile_cmd.add("-c") + + if enable_haddock: + compile_cmd.add("-haddock") + compile_args = cmd_args() compile_args.add("-no-link", "-i") compile_args.add("-hide-all-packages") @@ -473,6 +487,8 @@ def _common_compile_args( packagedb_tag = ctx.actions.artifact_tag() + modname = src_to_module_name(module.source.short_path) + # TODO[AH] Avoid duplicates and share identical env files. # The set of package-dbs can be known at the package level, not just the # module level. So, we could generate this file outside of the @@ -524,35 +540,6 @@ def _common_compile_args( module_tsets = packages_info.exposed_package_modules - return module_tsets, compile_args, packagedb_tag, packages_info.exposed_package_dbs - - -def _compile_module_args( - ctx: AnalysisContext, - module: _Module, - link_style: LinkStyle, - enable_profiling: bool, - enable_haddock: bool, - enable_th: bool, - outputs: dict[Artifact, Artifact], - resolved: dict[DynamicValue, ResolvedDynamicValue], - pkgname: str | None, - package_deps: dict[str, list[str]]) -> CompileArgsInfo: - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - - compile_cmd = cmd_args() - compile_cmd.add(haskell_toolchain.compiler_flags) - - # Some rules pass in RTS (e.g. `+RTS ... 
-RTS`) options for GHC, which can't - # be parsed when inside an argsfile. - compile_cmd.add(ctx.attrs.compiler_flags) - compile_cmd.add("-c") - - if enable_haddock: - compile_cmd.add("-haddock") - - module_tsets, compile_args, packagedb_tag, exposed_package_dbs = _common_compile_args(ctx, link_style, enable_profiling, enable_th, pkgname, modname = src_to_module_name(module.source.short_path), resolved = resolved, package_deps = package_deps) - objects = [outputs[obj] for obj in module.objects] his = [outputs[hi] for hi in module.interfaces] stubs = outputs[module.stub_dir] @@ -589,7 +576,7 @@ def _compile_module_args( args_for_cmd = compile_cmd, args_for_file = compile_args, packagedb_tag = packagedb_tag, - packagedbs = exposed_package_dbs, + packagedbs = packages_info.exposed_package_dbs, ) From e000ef2388ea95c69a06086c9330b312a697c7e3 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 11 Jun 2024 17:32:30 +0200 Subject: [PATCH 0910/1133] Document compile_args vs. compile_cmd --- haskell/compile.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index e493ff5d6..27baedde3 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -458,6 +458,7 @@ def _compile_module_args( if enable_haddock: compile_cmd.add("-haddock") + # These compiler arguments can be passed in a response file. compile_args = cmd_args() compile_args.add("-no-link", "-i") compile_args.add("-hide-all-packages") From 15e17a376356e4494c75fdd510c6cfa0ad8048c7 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 15:48:39 +0200 Subject: [PATCH 0911/1133] Inline _compile_module_args It was only used in one location and did not actually encapsulate the full argument generation. 
--- haskell/compile.bzl | 95 ++++++++++++++++++--------------------------- 1 file changed, 37 insertions(+), 58 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 27baedde3..b5b7c77a3 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -434,29 +434,45 @@ def get_packages_info( transitive_deps = libs, ) -def _compile_module_args( - ctx: AnalysisContext, - module: _Module, - link_style: LinkStyle, - enable_profiling: bool, - enable_haddock: bool, - enable_th: bool, - outputs: dict[Artifact, Artifact], - resolved: dict[DynamicValue, ResolvedDynamicValue], - pkgname: str | None, - package_deps: dict[str, list[str]]) -> CompileArgsInfo: +def _compile_module( + ctx: AnalysisContext, + *, + link_style: LinkStyle, + enable_profiling: bool, + enable_haddock: bool, + enable_th: bool, + module_name: str, + modules: dict[str, _Module], + module_tsets: dict[str, CompiledModuleTSet], + md_file: Artifact, + graph: dict[str, list[str]], + package_deps: dict[str, list[str]], + toolchain_deps: list[str], + outputs: dict[Artifact, Artifact], + resolved: dict[DynamicValue, ResolvedDynamicValue], + artifact_suffix: str, + pkgname: str | None = None, +) -> CompiledModuleTSet: + module = modules[module_name] + + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + compile_cmd = cmd_args(ctx.attrs._ghc_wrapper[RunInfo]) + compile_cmd.add("--ghc", haskell_toolchain.compiler) + + # ------------------------------------------------------------ + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - compile_cmd = cmd_args() - compile_cmd.add(haskell_toolchain.compiler_flags) + compile_cmd_ = cmd_args() + compile_cmd_.add(haskell_toolchain.compiler_flags) # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't # be parsed when inside an argsfile. 
- compile_cmd.add(ctx.attrs.compiler_flags) - compile_cmd.add("-c") + compile_cmd_.add(ctx.attrs.compiler_flags) + compile_cmd_.add("-c") if enable_haddock: - compile_cmd.add("-haddock") + compile_cmd_.add("-haddock") # These compiler arguments can be passed in a response file. compile_args = cmd_args() @@ -539,7 +555,7 @@ def _compile_module_args( if pkgname: compile_args.add(["-this-unit-id", pkgname]) - module_tsets = packages_info.exposed_package_modules + module_tsets_ = packages_info.exposed_package_modules objects = [outputs[obj] for obj in module.objects] his = [outputs[hi] for hi in module.interfaces] @@ -564,60 +580,23 @@ def _compile_module_args( producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags + haskell_toolchain.compiler_flags - return CompileArgsInfo( + args = CompileArgsInfo( result = CompileResultInfo( objects = objects, hi = his, hashes = [module.hash], stubs = stubs, producing_indices = producing_indices, - module_tsets = module_tsets, + module_tsets = module_tsets_, ), srcs = srcs, - args_for_cmd = compile_cmd, + args_for_cmd = compile_cmd_, args_for_file = compile_args, packagedb_tag = packagedb_tag, packagedbs = packages_info.exposed_package_dbs, ) - -def _compile_module( - ctx: AnalysisContext, - *, - link_style: LinkStyle, - enable_profiling: bool, - enable_haddock: bool, - enable_th: bool, - module_name: str, - modules: dict[str, _Module], - module_tsets: dict[str, CompiledModuleTSet], - md_file: Artifact, - graph: dict[str, list[str]], - package_deps: dict[str, list[str]], - toolchain_deps: list[str], - outputs: dict[Artifact, Artifact], - resolved: dict[DynamicValue, ResolvedDynamicValue], - artifact_suffix: str, - pkgname: str | None = None, -) -> CompiledModuleTSet: - module = modules[module_name] - - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - compile_cmd = cmd_args(ctx.attrs._ghc_wrapper[RunInfo]) - compile_cmd.add("--ghc", haskell_toolchain.compiler) - - args = _compile_module_args( - 
ctx, - module, - link_style, - enable_profiling, - enable_haddock, - enable_th, - outputs, - resolved, - pkgname, - package_deps = package_deps - ) + # ------------------------------------------------------------ if args.args_for_file: if haskell_toolchain.use_argsfile: From 0e2a50cc8d443737757781202771de1f5c9096da Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 15:51:34 +0200 Subject: [PATCH 0912/1133] Remove duplicate compile_cmd object --- haskell/compile.bzl | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index b5b7c77a3..efa748709 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -461,18 +461,15 @@ def _compile_module( # ------------------------------------------------------------ - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - - compile_cmd_ = cmd_args() - compile_cmd_.add(haskell_toolchain.compiler_flags) + compile_cmd.add(haskell_toolchain.compiler_flags) # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't # be parsed when inside an argsfile. - compile_cmd_.add(ctx.attrs.compiler_flags) - compile_cmd_.add("-c") + compile_cmd.add(ctx.attrs.compiler_flags) + compile_cmd.add("-c") if enable_haddock: - compile_cmd_.add("-haddock") + compile_cmd.add("-haddock") # These compiler arguments can be passed in a response file. 
compile_args = cmd_args() @@ -590,7 +587,7 @@ def _compile_module( module_tsets = module_tsets_, ), srcs = srcs, - args_for_cmd = compile_cmd_, + args_for_cmd = cmd_args(), args_for_file = compile_args, packagedb_tag = packagedb_tag, packagedbs = packages_info.exposed_package_dbs, @@ -611,8 +608,6 @@ def _compile_module( compile_cmd.add(args.args_for_file) compile_cmd.add(args.srcs) - compile_cmd.add(args.args_for_cmd) - compile_cmd.add( cmd_args( cmd_args(md_file, format = "-i{}").parent(), From b0aecaebdaed8dbc726cc45998c1b72c94d4f660 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 15:54:28 +0200 Subject: [PATCH 0913/1133] Inline compile_args_for_file --- haskell/compile.bzl | 46 ++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index efa748709..711514eab 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -472,20 +472,20 @@ def _compile_module( compile_cmd.add("-haddock") # These compiler arguments can be passed in a response file. - compile_args = cmd_args() - compile_args.add("-no-link", "-i") - compile_args.add("-hide-all-packages") + compile_args_for_file = cmd_args() + compile_args_for_file.add("-no-link", "-i") + compile_args_for_file.add("-hide-all-packages") if enable_profiling: - compile_args.add("-prof") + compile_args_for_file.add("-prof") if link_style == LinkStyle("shared"): - compile_args.add("-dynamic", "-fPIC") + compile_args_for_file.add("-dynamic", "-fPIC") elif link_style == LinkStyle("static_pic"): - compile_args.add("-fPIC", "-fexternal-dynamic-refs") + compile_args_for_file.add("-fPIC", "-fexternal-dynamic-refs") osuf, hisuf = output_extensions(link_style, enable_profiling) - compile_args.add("-osuf", osuf, "-hisuf", hisuf) + compile_args_for_file.add("-osuf", osuf, "-hisuf", hisuf) # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. 
@@ -524,7 +524,7 @@ def _compile_module( package_env_file, package_env, ) - compile_args.add(cmd_args( + compile_args_for_file.add(cmd_args( packagedb_tag.tag_artifacts(package_env_file), prepend = "-package-env", hidden = packagedb_args, @@ -538,19 +538,19 @@ def _compile_module( "dep", ])).as_output() tagged_dep_file = packagedb_tag.tag_artifacts(dep_file) - compile_args.add("--buck2-packagedb-dep", tagged_dep_file) + compile_args_for_file.add("--buck2-packagedb-dep", tagged_dep_file) if enable_th: - compile_args.add(packages_info.exposed_package_libs) + compile_args_for_file.add(packages_info.exposed_package_libs) # Add args from preprocess-able inputs. inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) pre_args = pre.set.project_as_args("args") - compile_args.add(cmd_args(pre_args, format = "-optP={}")) + compile_args_for_file.add(cmd_args(pre_args, format = "-optP={}")) if pkgname: - compile_args.add(["-this-unit-id", pkgname]) + compile_args_for_file.add(["-this-unit-id", pkgname]) module_tsets_ = packages_info.exposed_package_modules @@ -558,15 +558,15 @@ def _compile_module( his = [outputs[hi] for hi in module.interfaces] stubs = outputs[module.stub_dir] - compile_args.add("-outputdir", cmd_args([cmd_args(stubs.as_output()).parent(), module.prefix_dir], delimiter="/")) - compile_args.add("-o", objects[0].as_output()) - compile_args.add("-ohi", his[0].as_output()) - compile_args.add("-stubdir", stubs.as_output()) + compile_args_for_file.add("-outputdir", cmd_args([cmd_args(stubs.as_output()).parent(), module.prefix_dir], delimiter="/")) + compile_args_for_file.add("-o", objects[0].as_output()) + compile_args_for_file.add("-ohi", his[0].as_output()) + compile_args_for_file.add("-stubdir", stubs.as_output()) if link_style in [LinkStyle("static_pic"), LinkStyle("static")]: - compile_args.add("-dynamic-too") - compile_args.add("-dyno", objects[1].as_output()) - compile_args.add("-dynohi", 
his[1].as_output()) + compile_args_for_file.add("-dynamic-too") + compile_args_for_file.add("-dyno", objects[1].as_output()) + compile_args_for_file.add("-dynohi", his[1].as_output()) srcs = cmd_args(module.source) for (path, src) in srcs_to_pairs(ctx.attrs.srcs): @@ -588,24 +588,24 @@ def _compile_module( ), srcs = srcs, args_for_cmd = cmd_args(), - args_for_file = compile_args, + args_for_file = cmd_args(), packagedb_tag = packagedb_tag, packagedbs = packages_info.exposed_package_dbs, ) # ------------------------------------------------------------ - if args.args_for_file: + if compile_args_for_file: if haskell_toolchain.use_argsfile: argsfile = ctx.actions.declare_output( "haskell_compile_" + artifact_suffix + ".argsfile", ) - for_file = cmd_args(args.args_for_file).add(args.srcs) + for_file = cmd_args(compile_args_for_file).add(args.srcs) ctx.actions.write(argsfile.as_output(), for_file, allow_args = True) compile_cmd.add(cmd_args(argsfile, format = "@{}")) compile_cmd.hidden(for_file) else: - compile_cmd.add(args.args_for_file) + compile_cmd.add(compile_args_for_file) compile_cmd.add(args.srcs) compile_cmd.add( From b25cdd7a83acbcb7536f1c76e73172f2abc3591d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 15:56:49 +0200 Subject: [PATCH 0914/1133] Remove indirection for srcs --- haskell/compile.bzl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 711514eab..83b7f2c3f 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -586,7 +586,7 @@ def _compile_module( producing_indices = producing_indices, module_tsets = module_tsets_, ), - srcs = srcs, + srcs = cmd_args(), args_for_cmd = cmd_args(), args_for_file = cmd_args(), packagedb_tag = packagedb_tag, @@ -600,13 +600,13 @@ def _compile_module( argsfile = ctx.actions.declare_output( "haskell_compile_" + artifact_suffix + ".argsfile", ) - for_file = cmd_args(compile_args_for_file).add(args.srcs) + for_file = 
cmd_args(compile_args_for_file).add(srcs) ctx.actions.write(argsfile.as_output(), for_file, allow_args = True) compile_cmd.add(cmd_args(argsfile, format = "@{}")) compile_cmd.hidden(for_file) else: compile_cmd.add(compile_args_for_file) - compile_cmd.add(args.srcs) + compile_cmd.add(srcs) compile_cmd.add( cmd_args( From 30b4b58522b7fe8ba8085ca45d8e3e9c5fed900e Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 15:59:15 +0200 Subject: [PATCH 0915/1133] Remove CompileArgsInfo The type was no longer used to encapsulate values across function boundaries. --- haskell/compile.bzl | 36 ++++++++++-------------------------- 1 file changed, 10 insertions(+), 26 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 83b7f2c3f..04d140c2a 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -116,15 +116,6 @@ CompileResultInfo = record( module_tsets = field(list[CompiledModuleTSet] | DynamicValue), ) -CompileArgsInfo = record( - result = field(CompileResultInfo), - srcs = field(cmd_args), - args_for_cmd = field(cmd_args), - args_for_file = field(cmd_args), - packagedb_tag = field(ArtifactTag), - packagedbs = field(list[Artifact]), -) - PackagesInfo = record( exposed_package_modules = field(None | list[CompiledModuleTSet]), exposed_package_imports = field(list[Artifact]), @@ -577,20 +568,13 @@ def _compile_module( producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags + haskell_toolchain.compiler_flags - args = CompileArgsInfo( - result = CompileResultInfo( - objects = objects, - hi = his, - hashes = [module.hash], - stubs = stubs, - producing_indices = producing_indices, - module_tsets = module_tsets_, - ), - srcs = cmd_args(), - args_for_cmd = cmd_args(), - args_for_file = cmd_args(), - packagedb_tag = packagedb_tag, - packagedbs = packages_info.exposed_package_dbs, + result = CompileResultInfo( + objects = objects, + hi = his, + hashes = [module.hash], + stubs = stubs, + producing_indices = producing_indices, + 
module_tsets = module_tsets_, ) # ------------------------------------------------------------ @@ -620,7 +604,7 @@ def _compile_module( # Transitive module dependencies from other packages. cross_package_modules = ctx.actions.tset( CompiledModuleTSet, - children = args.result.module_tsets, + children = result.module_tsets, ) # Transitive module dependencies from the same package. this_package_modules = [ @@ -660,7 +644,7 @@ def _compile_module( compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, dep_files = { "abi": abi_tag, - "packagedb": args.packagedb_tag, + "packagedb": packagedb_tag, } ) @@ -680,7 +664,7 @@ def _compile_module( dyn_object_dot_o = dyn_object_dot_o, package_deps = package_deps.keys(), toolchain_deps = toolchain_deps, - db_deps = args.packagedbs, + db_deps = packages_info.exposed_package_dbs, ), children = [cross_package_modules] + this_package_modules, ) From de6c51704e658e86372e9d7a8fc11b429219776f Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:00:53 +0200 Subject: [PATCH 0916/1133] Remove intermediate CompileResultInfo The struct was effectively unused. --- haskell/compile.bzl | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 04d140c2a..e9390a77b 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -568,15 +568,6 @@ def _compile_module( producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags + haskell_toolchain.compiler_flags - result = CompileResultInfo( - objects = objects, - hi = his, - hashes = [module.hash], - stubs = stubs, - producing_indices = producing_indices, - module_tsets = module_tsets_, - ) - # ------------------------------------------------------------ if compile_args_for_file: @@ -604,7 +595,7 @@ def _compile_module( # Transitive module dependencies from other packages. 
cross_package_modules = ctx.actions.tset( CompiledModuleTSet, - children = result.module_tsets, + children = module_tsets_, ) # Transitive module dependencies from the same package. this_package_modules = [ From 52c60ff794b52fd23d390afe088da20efebfebcd Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:03:36 +0200 Subject: [PATCH 0917/1133] Remove unused field type variant --- haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index e9390a77b..2900396ab 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -113,7 +113,7 @@ CompileResultInfo = record( stubs = field(Artifact), hashes = field(list[Artifact]), producing_indices = field(bool), - module_tsets = field(list[CompiledModuleTSet] | DynamicValue), + module_tsets = field(DynamicValue), ) PackagesInfo = record( From 8361d3a51fcbf57263ca11506a7c9e9477da65e9 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:12:10 +0200 Subject: [PATCH 0918/1133] inline packages_info.exposed_package_modules --- haskell/compile.bzl | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 2900396ab..19a89ff16 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -543,8 +543,6 @@ def _compile_module( if pkgname: compile_args_for_file.add(["-this-unit-id", pkgname]) - module_tsets_ = packages_info.exposed_package_modules - objects = [outputs[obj] for obj in module.objects] his = [outputs[hi] for hi in module.interfaces] stubs = outputs[module.stub_dir] @@ -595,7 +593,7 @@ def _compile_module( # Transitive module dependencies from other packages. cross_package_modules = ctx.actions.tset( CompiledModuleTSet, - children = module_tsets_, + children = packages_info.exposed_package_modules, ) # Transitive module dependencies from the same package. 
this_package_modules = [ From 1d5a626402b72b54b089a15c4babbbbd61a8b83c Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:13:56 +0200 Subject: [PATCH 0919/1133] Remove redundant branch The condition was never false --- haskell/compile.bzl | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 19a89ff16..f5af136b3 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -568,18 +568,17 @@ def _compile_module( # ------------------------------------------------------------ - if compile_args_for_file: - if haskell_toolchain.use_argsfile: - argsfile = ctx.actions.declare_output( - "haskell_compile_" + artifact_suffix + ".argsfile", - ) - for_file = cmd_args(compile_args_for_file).add(srcs) - ctx.actions.write(argsfile.as_output(), for_file, allow_args = True) - compile_cmd.add(cmd_args(argsfile, format = "@{}")) - compile_cmd.hidden(for_file) - else: - compile_cmd.add(compile_args_for_file) - compile_cmd.add(srcs) + if haskell_toolchain.use_argsfile: + argsfile = ctx.actions.declare_output( + "haskell_compile_" + artifact_suffix + ".argsfile", + ) + for_file = cmd_args(compile_args_for_file).add(srcs) + ctx.actions.write(argsfile.as_output(), for_file, allow_args = True) + compile_cmd.add(cmd_args(argsfile, format = "@{}")) + compile_cmd.hidden(for_file) + else: + compile_cmd.add(compile_args_for_file) + compile_cmd.add(srcs) compile_cmd.add( cmd_args( From 7d6b2cefa8e671f9bdb7194e9b207b23d217055c Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:15:57 +0200 Subject: [PATCH 0920/1133] TODO note --- haskell/compile.bzl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index f5af136b3..4fdf02cb6 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -561,6 +561,8 @@ def _compile_module( for (path, src) in srcs_to_pairs(ctx.attrs.srcs): # hs-boot files aren't expected to be an argument to 
compiler but does need # to be included in the directory of the associated src file + # TODO(ah) We should not indiscriminately include all non-hs sources, + # but only those that this module actually depends on. if not is_haskell_src(path): srcs.hidden(src) From a4744d6c7aaffc3bac32ed7add948b1542604515 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:18:21 +0200 Subject: [PATCH 0921/1133] Remove redundant intermediate variable --- haskell/compile.bzl | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 4fdf02cb6..3cd1c7c23 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -557,14 +557,14 @@ def _compile_module( compile_args_for_file.add("-dyno", objects[1].as_output()) compile_args_for_file.add("-dynohi", his[1].as_output()) - srcs = cmd_args(module.source) + compile_args_for_file.add(module.source) for (path, src) in srcs_to_pairs(ctx.attrs.srcs): # hs-boot files aren't expected to be an argument to compiler but does need # to be included in the directory of the associated src file # TODO(ah) We should not indiscriminately include all non-hs sources, # but only those that this module actually depends on. 
if not is_haskell_src(path): - srcs.hidden(src) + compile_args_for_file.hidden(src) producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags + haskell_toolchain.compiler_flags @@ -574,13 +574,11 @@ def _compile_module( argsfile = ctx.actions.declare_output( "haskell_compile_" + artifact_suffix + ".argsfile", ) - for_file = cmd_args(compile_args_for_file).add(srcs) - ctx.actions.write(argsfile.as_output(), for_file, allow_args = True) + ctx.actions.write(argsfile.as_output(), compile_args_for_file, allow_args = True) compile_cmd.add(cmd_args(argsfile, format = "@{}")) - compile_cmd.hidden(for_file) + compile_cmd.hidden(compile_args_for_file) else: compile_cmd.add(compile_args_for_file) - compile_cmd.add(srcs) compile_cmd.add( cmd_args( From 15566d8f29ff169fc7556013e3a667f613414589 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:21:50 +0200 Subject: [PATCH 0922/1133] remove inline markers --- haskell/compile.bzl | 4 ---- 1 file changed, 4 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 3cd1c7c23..2db5d32b1 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -450,8 +450,6 @@ def _compile_module( compile_cmd = cmd_args(ctx.attrs._ghc_wrapper[RunInfo]) compile_cmd.add("--ghc", haskell_toolchain.compiler) - # ------------------------------------------------------------ - compile_cmd.add(haskell_toolchain.compiler_flags) # Some rules pass in RTS (e.g. `+RTS ... 
-RTS`) options for GHC, which can't @@ -568,8 +566,6 @@ def _compile_module( producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags + haskell_toolchain.compiler_flags - # ------------------------------------------------------------ - if haskell_toolchain.use_argsfile: argsfile = ctx.actions.declare_output( "haskell_compile_" + artifact_suffix + ".argsfile", From 2c9f8b7242a1001682bf71f4e0a875e9563dd81b Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:24:54 +0200 Subject: [PATCH 0923/1133] remove redundant modname --- haskell/compile.bzl | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 2db5d32b1..673c75052 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -490,15 +490,13 @@ def _compile_module( packagedb_tag = ctx.actions.artifact_tag() - modname = src_to_module_name(module.source.short_path) - # TODO[AH] Avoid duplicates and share identical env files. # The set of package-dbs can be known at the package level, not just the # module level. So, we could generate this file outside of the # dynamic_output action. 
package_env_file = ctx.actions.declare_output(".".join([ ctx.label.name, - modname or "pkg", + module_name or "pkg", "package-db", output_extensions(link_style, enable_profiling)[1], "env", @@ -521,7 +519,7 @@ def _compile_module( dep_file = ctx.actions.declare_output(".".join([ ctx.label.name, - modname or "pkg", + module_name or "pkg", "package-db", output_extensions(link_style, enable_profiling)[1], "dep", From 44989602b716f8022740eaa0bd4b37b477e5cc32 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:33:10 +0200 Subject: [PATCH 0924/1133] Remove unused variable --- haskell/compile.bzl | 2 -- 1 file changed, 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 673c75052..8fc952a53 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -562,8 +562,6 @@ def _compile_module( if not is_haskell_src(path): compile_args_for_file.hidden(src) - producing_indices = "-fwrite-ide-info" in ctx.attrs.compiler_flags + haskell_toolchain.compiler_flags - if haskell_toolchain.use_argsfile: argsfile = ctx.actions.declare_output( "haskell_compile_" + artifact_suffix + ".argsfile", From b816494fec5139502b52f321bece50c2db477141 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 11 Jun 2024 14:57:36 +0200 Subject: [PATCH 0925/1133] Remove duplicate variable --- haskell/haskell.bzl | 2 -- 1 file changed, 2 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 08d7167a6..bb3498b86 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -523,8 +523,6 @@ def _build_haskell_lib( non_profiling_hlib: [HaskellLibBuildOutput, None] = None) -> HaskellLibBuildOutput: linker_info = ctx.attrs._cxx_toolchain[CxxToolchainInfo].linker_info - toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] - # Link the objects into a library haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] From 1d02ff639468459287b1d7239767faafff8bde01 Mon Sep 17 
00:00:00 2001 From: Claudio Bley Date: Tue, 11 Jun 2024 16:14:13 +0200 Subject: [PATCH 0926/1133] Remove superfluous `-package-db` flags passed to ghc The same flags are already part of `packages_info.packagedb_args`. --- haskell/compile.bzl | 9 --------- 1 file changed, 9 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 8fc952a53..31a913a73 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -246,9 +246,6 @@ def target_metadata( def get_metadata(ctx, _artifacts, resolved, outputs, catalog=catalog): - pkg_deps = resolved[haskell_toolchain.packages.dynamic] - package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages - # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. @@ -265,15 +262,9 @@ def target_metadata( ghc_args.add("-hide-all-packages") ghc_args.add(package_flag, "base") - package_dbs = ctx.actions.tset( - HaskellPackageDbTSet, - children = [package_db[name] for name in toolchain_libs if name in package_db] - ) - ghc_args.add(cmd_args(toolchain_libs, prepend=package_flag)) ghc_args.add(cmd_args(packages_info.exposed_package_args)) ghc_args.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) - ghc_args.add(cmd_args(package_dbs.project_as_args("package_db"), prepend="-package-db")) ghc_args.add(ctx.attrs.compiler_flags) md_args = cmd_args(md_gen) From da813a32279431c55d5d4df98b2f1ad533708c98 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 11 Jun 2024 14:58:39 +0200 Subject: [PATCH 0927/1133] Keep track of the path for each haskell package db - add package `path` projection to `HaskellPackageDbTSet` --- haskell/toolchain.bzl | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/haskell/toolchain.bzl b/haskell/toolchain.bzl index 8961e9838..b7f4d636f 100644 --- a/haskell/toolchain.bzl +++ b/haskell/toolchain.bzl @@ -52,12 +52,21 @@ HaskellPackagesInfo = record( dynamic = DynamicValue, ) -def _haskell_package_info_as_package_db(p: 
Artifact): - return cmd_args(p) +HaskellPackage = record( + db = Artifact, + path = Artifact, +) + +def _haskell_package_info_as_package_db(p: HaskellPackage): + return cmd_args(p.db) + +def _haskell_package_info_as_package_path(p: HaskellPackage): + return cmd_args(p.path) HaskellPackageDbTSet = transitive_set( args_projections = { "package_db": _haskell_package_info_as_package_db, + "path": _haskell_package_info_as_package_path, } ) From c5f99d7aebee5269b1fee0fbec21179a1324d329 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 11 Jun 2024 17:20:49 +0200 Subject: [PATCH 0928/1133] Add `--bin-path` argument to `ghc_wrapper.py` and `generate_target_metadata.py` This adds the given directory to the `PATH` of the environment when calling ghc. --- haskell/tools/generate_target_metadata.py | 19 ++++++++++++++++--- haskell/tools/ghc_wrapper.py | 14 +++++++++++++- 2 files changed, 29 insertions(+), 4 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 7de325fe9..2c533c582 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -66,6 +66,13 @@ def main(): action="append", default=[], help="Package dependencies formated as `NAME:PREFIX_PATH`.") + parser.add_argument( + "--bin-path", + type=Path, + action="append", + default=[], + help="Add given path to PATH.", + ) args = parser.parse_args() result = obtain_target_metadata(args) @@ -82,7 +89,8 @@ def json_default_handler(o): def obtain_target_metadata(args): toolchain_packages = load_toolchain_packages(args.toolchain_libs) ghc_args = fix_ghc_args(args.ghc_arg, toolchain_packages) - ghc_depends, ghc_options = run_ghc_depends(args.ghc, ghc_args, args.source) + paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] + ghc_depends, ghc_options = run_ghc_depends(args.ghc, ghc_args, args.source, paths) th_modules = determine_th_modules(ghc_options, args.source_prefix) package_prefixes = 
calc_package_prefixes(args.package) module_mapping, module_graph, package_deps, toolchain_deps = interpret_ghc_depends( @@ -151,7 +159,7 @@ def fix_ghc_args(ghc_args, toolchain_packages): return result -def run_ghc_depends(ghc, ghc_args, sources): +def run_ghc_depends(ghc, ghc_args, sources, aux_paths): with tempfile.TemporaryDirectory() as dname: json_fname = os.path.join(dname, "depends.json") opt_json_fname = os.path.join(dname, "options.json") @@ -165,7 +173,12 @@ def run_ghc_depends(ghc, ghc_args, sources): "-opt-json", opt_json_fname, "-dep-makefile", make_fname, ] + ghc_args + sources - subprocess.run(args, check=True) + + env = os.environ.copy() + path = env.get("PATH", "") + env["PATH"] = os.pathsep.join([path] + aux_paths) + + subprocess.run(args, env=env, check=True) with open(json_fname) as f, open(opt_json_fname) as o: return json.load(f), json.load(o) diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index d7f84b744..567f99f43 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -46,12 +46,24 @@ def main(): type=Path, help="Output path of the abi file to create.", ) + parser.add_argument( + "--bin-path", + type=Path, + action="append", + default=[], + help="Add given path to PATH.", + ) args, ghc_args = parser.parse_known_args() cmd = [args.ghc] + ghc_args - subprocess.check_call(cmd) + aux_paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] + env = os.environ.copy() + path = env.get("PATH", "") + env["PATH"] = os.pathsep.join([path] + aux_paths) + + subprocess.check_call(cmd, env=env) recompute_abi_hash(args.ghc, args.abi_out) From 912c4f6dd3340de66973b26cdf3dfea05e675f55 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 11 Jun 2024 17:24:21 +0200 Subject: [PATCH 0929/1133] Pass `--bin-path` flag for each toolchain library dependency --- haskell/compile.bzl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 
31a913a73..18c003f4f 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -125,6 +125,7 @@ PackagesInfo = record( exposed_package_dbs = field(list[Artifact]), packagedb_args = cmd_args, transitive_deps = field(HaskellLibraryInfoTSet), + bin_paths = cmd_args, ) _Module = record( @@ -268,6 +269,7 @@ def target_metadata( ghc_args.add(ctx.attrs.compiler_flags) md_args = cmd_args(md_gen) + md_args.add(packages_info.bin_paths) md_args.add("--toolchain-libs", catalog) md_args.add("--ghc", haskell_toolchain.compiler) md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) @@ -394,8 +396,10 @@ def get_packages_info( ) packagedb_args.add(package_db_tset.project_as_args("package_db")) + bin_paths = cmd_args(package_db_tset.project_as_args("path"), format="--bin-path={}/bin") else: packagedb_args.add(haskell_toolchain.packages.package_db) + bin_paths = cmd_args() # Expose only the packages we depend on directly for lib in haskell_direct_deps_lib_infos: @@ -414,6 +418,7 @@ def get_packages_info( exposed_package_dbs = exposed_package_dbs, packagedb_args = packagedb_args, transitive_deps = libs, + bin_paths = bin_paths, ) def _compile_module( @@ -538,6 +543,7 @@ def _compile_module( compile_args_for_file.add("-o", objects[0].as_output()) compile_args_for_file.add("-ohi", his[0].as_output()) compile_args_for_file.add("-stubdir", stubs.as_output()) + compile_args_for_file.add(packages_info.bin_paths) if link_style in [LinkStyle("static_pic"), LinkStyle("static")]: compile_args_for_file.add("-dynamic-too") From 516bfdfd49c245e5c795a4228e40dd5ac56bd0b1 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 13 Jun 2024 13:26:11 +0200 Subject: [PATCH 0930/1133] Allow to declare dependencies for haskell sources manually --- decls/haskell_common.bzl | 3 +++ haskell/compile.bzl | 5 +++++ 2 files changed, 8 insertions(+) diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index 8bcf122be..6a8f15998 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ 
-24,6 +24,9 @@ def _deps_arg(): from which this rules sources import modules or native linkable rules exporting symbols this rules sources call into. """), + "srcs_deps": attrs.dict(attrs.source(), attrs.list(attrs.source()), default = {}, doc = """ + Allows to declare dependencies for sources manually, additionally to the dependencies automatically detected. + """), } def _compiler_flags_arg(): diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 18c003f4f..a0f8852e0 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -551,6 +551,11 @@ def _compile_module( compile_args_for_file.add("-dynohi", his[1].as_output()) compile_args_for_file.add(module.source) + + aux_deps = ctx.attrs.srcs_deps.get(module.source) + if aux_deps: + compile_args_for_file.hidden(aux_deps) + for (path, src) in srcs_to_pairs(ctx.attrs.srcs): # hs-boot files aren't expected to be an argument to compiler but does need # to be included in the directory of the associated src file From e9ef00fdf674187e043829e30759902b54509b9e Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 13 Jun 2024 17:14:46 +0200 Subject: [PATCH 0931/1133] Warn about non-haskell source files in `srcs`, advice to use `srcs_deps` --- haskell/compile.bzl | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index a0f8852e0..58c29cc82 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -556,13 +556,12 @@ def _compile_module( if aux_deps: compile_args_for_file.hidden(aux_deps) - for (path, src) in srcs_to_pairs(ctx.attrs.srcs): - # hs-boot files aren't expected to be an argument to compiler but does need - # to be included in the directory of the associated src file - # TODO(ah) We should not indiscriminately include all non-hs sources, - # but only those that this module actually depends on. 
- if not is_haskell_src(path): - compile_args_for_file.hidden(src) + non_haskell_sources = [src for (path, src) in srcs_to_pairs(ctx.attrs.srcs) if not is_haskell_src(path)] + + if non_haskell_sources: + warning("{} specifies non-haskell file in `srcs`, consider using `srcs_deps` instead".format(ctx.label)) + + compile_args_for_file.hidden(non_haskell_sources) if haskell_toolchain.use_argsfile: argsfile = ctx.actions.declare_output( From 307e26e766adab0a6e19c90ba05a94abfb1e38f7 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:55:57 +0200 Subject: [PATCH 0932/1133] inline get_packages_info for _compile_module --- haskell/compile.bzl | 109 +++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 103 insertions(+), 6 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 58c29cc82..4771efc09 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -472,18 +472,115 @@ def _compile_module( osuf, hisuf = output_extensions(link_style, enable_profiling) compile_args_for_file.add("-osuf", osuf, "-hisuf", hisuf) + # ------------------------------------------------------------ + # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. - packages_info = get_packages_info( + #packages_info = get_packages_info( + # ctx, + # link_style, + # specify_pkg_version = False, + # enable_profiling = enable_profiling, + # use_empty_lib = True, + # resolved = resolved, + # package_deps = package_deps, + #) + specify_pkg_version = False, + use_empty_lib = True, + + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + + # Collect library dependencies. Note that these don't need to be in a + # particular order. 
+ direct_deps_link_info = attr_deps_haskell_link_infos(ctx) + libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = [ + lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + for lib in direct_deps_link_info + ]) + + # base is special and gets exposed by default + package_flag = _package_flag(haskell_toolchain) + exposed_package_modules = None + exposed_package_imports = [] + exposed_package_objects = [] + exposed_package_libs = cmd_args() + exposed_package_args = cmd_args([package_flag, "base"]) + exposed_package_dbs = [] + + if True: + exposed_package_modules = [] + + for lib in direct_deps_link_info: + info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + direct = info.value + dynamic = direct.dynamic[enable_profiling] + dynamic_info = resolved[dynamic][DynamicCompileResultInfo] + + for mod in package_deps.get(direct.name, []): + exposed_package_modules.append(dynamic_info.modules[mod]) + + if direct.name in package_deps: + db = direct.empty_db if use_empty_lib else direct.db + exposed_package_dbs.append(db) + else: + for lib in libs.traverse(): + exposed_package_imports.extend(lib.import_dirs[enable_profiling]) + exposed_package_objects.extend(lib.objects[enable_profiling]) + # libs of dependencies might be needed at compile time if + # we're using Template Haskell: + exposed_package_libs.hidden(lib.libs) + + packagedb_args = cmd_args(libs.project_as_args( + "empty_package_db" if use_empty_lib else "package_db", + )) + + haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( ctx, link_style, - specify_pkg_version = False, - enable_profiling = enable_profiling, - use_empty_lib = True, - resolved = resolved, - package_deps = package_deps, + enable_profiling, ) + if haskell_toolchain.packages and True: + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + pkg_deps = resolved[haskell_toolchain.packages.dynamic] + package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages + + 
toolchain_libs = [ + dep[HaskellToolchainLibrary].name + for dep in ctx.attrs.deps + if HaskellToolchainLibrary in dep + ] + libs.reduce("packages") + + package_db_tset = ctx.actions.tset( + HaskellPackageDbTSet, + children = [package_db[name] for name in toolchain_libs if name in package_db] + ) + + packagedb_args.add(package_db_tset.project_as_args("package_db")) + else: + packagedb_args.add(haskell_toolchain.packages.package_db) + + # Expose only the packages we depend on directly + for lib in haskell_direct_deps_lib_infos: + pkg_name = lib.name + if (specify_pkg_version): + pkg_name += "-{}".format(lib.version) + + exposed_package_args.add(package_flag, pkg_name) + + packages_info = PackagesInfo( + exposed_package_modules = exposed_package_modules, + exposed_package_imports = exposed_package_imports, + exposed_package_objects = exposed_package_objects, + exposed_package_libs = exposed_package_libs, + exposed_package_args = exposed_package_args, + exposed_package_dbs = exposed_package_dbs, + packagedb_args = packagedb_args, + transitive_deps = libs, + ) + + # ------------------------------------------------------------ + packagedb_tag = ctx.actions.artifact_tag() # TODO[AH] Avoid duplicates and share identical env files. 
From bce4d37ecdb05d09c1a2e8dd2c2a632a64f65a2b Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:57:00 +0200 Subject: [PATCH 0933/1133] Inline bool constants --- haskell/compile.bzl | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 4771efc09..3328ba6dc 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -485,8 +485,6 @@ def _compile_module( # resolved = resolved, # package_deps = package_deps, #) - specify_pkg_version = False, - use_empty_lib = True, haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -520,7 +518,7 @@ def _compile_module( exposed_package_modules.append(dynamic_info.modules[mod]) if direct.name in package_deps: - db = direct.empty_db if use_empty_lib else direct.db + db = direct.empty_db if True else direct.db exposed_package_dbs.append(db) else: for lib in libs.traverse(): @@ -531,7 +529,7 @@ def _compile_module( exposed_package_libs.hidden(lib.libs) packagedb_args = cmd_args(libs.project_as_args( - "empty_package_db" if use_empty_lib else "package_db", + "empty_package_db" if True else "package_db", )) haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( @@ -563,8 +561,6 @@ def _compile_module( # Expose only the packages we depend on directly for lib in haskell_direct_deps_lib_infos: pkg_name = lib.name - if (specify_pkg_version): - pkg_name += "-{}".format(lib.version) exposed_package_args.add(package_flag, pkg_name) From d78eba69f443ffe23840c6621b266403c74f2cf2 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:58:45 +0200 Subject: [PATCH 0934/1133] Resolve branches under bool constants --- haskell/compile.bzl | 36 +++++++++++++----------------------- 1 file changed, 13 insertions(+), 23 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 3328ba6dc..0ada8cf53 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -505,32 +505,22 @@ def _compile_module( 
exposed_package_args = cmd_args([package_flag, "base"]) exposed_package_dbs = [] - if True: - exposed_package_modules = [] + exposed_package_modules = [] - for lib in direct_deps_link_info: - info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] - direct = info.value - dynamic = direct.dynamic[enable_profiling] - dynamic_info = resolved[dynamic][DynamicCompileResultInfo] + for lib in direct_deps_link_info: + info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + direct = info.value + dynamic = direct.dynamic[enable_profiling] + dynamic_info = resolved[dynamic][DynamicCompileResultInfo] - for mod in package_deps.get(direct.name, []): - exposed_package_modules.append(dynamic_info.modules[mod]) + for mod in package_deps.get(direct.name, []): + exposed_package_modules.append(dynamic_info.modules[mod]) - if direct.name in package_deps: - db = direct.empty_db if True else direct.db - exposed_package_dbs.append(db) - else: - for lib in libs.traverse(): - exposed_package_imports.extend(lib.import_dirs[enable_profiling]) - exposed_package_objects.extend(lib.objects[enable_profiling]) - # libs of dependencies might be needed at compile time if - # we're using Template Haskell: - exposed_package_libs.hidden(lib.libs) + if direct.name in package_deps: + db = direct.empty_db + exposed_package_dbs.append(db) - packagedb_args = cmd_args(libs.project_as_args( - "empty_package_db" if True else "package_db", - )) + packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( ctx, @@ -538,7 +528,7 @@ def _compile_module( enable_profiling, ) - if haskell_toolchain.packages and True: + if haskell_toolchain.packages: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] pkg_deps = resolved[haskell_toolchain.packages.dynamic] package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages From 01556eb96fda0a3a542ff1c69f98c99d23818061 Mon Sep 17 00:00:00 2001 
From: Andreas Herrmann Date: Wed, 12 Jun 2024 16:59:07 +0200 Subject: [PATCH 0935/1133] Remove redundant if If `haskell_toolchain.packages` is falsy then the `else` branch would fail. --- haskell/compile.bzl | 29 +++++++++++++---------------- 1 file changed, 13 insertions(+), 16 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 0ada8cf53..2a5f3f70e 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -528,25 +528,22 @@ def _compile_module( enable_profiling, ) - if haskell_toolchain.packages: - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - pkg_deps = resolved[haskell_toolchain.packages.dynamic] - package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + pkg_deps = resolved[haskell_toolchain.packages.dynamic] + package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages - toolchain_libs = [ - dep[HaskellToolchainLibrary].name - for dep in ctx.attrs.deps - if HaskellToolchainLibrary in dep - ] + libs.reduce("packages") + toolchain_libs = [ + dep[HaskellToolchainLibrary].name + for dep in ctx.attrs.deps + if HaskellToolchainLibrary in dep + ] + libs.reduce("packages") - package_db_tset = ctx.actions.tset( - HaskellPackageDbTSet, - children = [package_db[name] for name in toolchain_libs if name in package_db] - ) + package_db_tset = ctx.actions.tset( + HaskellPackageDbTSet, + children = [package_db[name] for name in toolchain_libs if name in package_db] + ) - packagedb_args.add(package_db_tset.project_as_args("package_db")) - else: - packagedb_args.add(haskell_toolchain.packages.package_db) + packagedb_args.add(package_db_tset.project_as_args("package_db")) # Expose only the packages we depend on directly for lib in haskell_direct_deps_lib_infos: From ee811959f79b8a5e9da5cf9cda8066dddcb93feb Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 17:39:03 +0200 Subject: [PATCH 0936/1133] Remove duplicate variables --- 
haskell/compile.bzl | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 2a5f3f70e..96cde0d3b 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -486,8 +486,6 @@ def _compile_module( # package_deps = package_deps, #) - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - # Collect library dependencies. Note that these don't need to be in a # particular order. direct_deps_link_info = attr_deps_haskell_link_infos(ctx) @@ -498,15 +496,13 @@ def _compile_module( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) - exposed_package_modules = None + exposed_package_modules = [] exposed_package_imports = [] exposed_package_objects = [] exposed_package_libs = cmd_args() exposed_package_args = cmd_args([package_flag, "base"]) exposed_package_dbs = [] - exposed_package_modules = [] - for lib in direct_deps_link_info: info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] direct = info.value @@ -528,7 +524,6 @@ def _compile_module( enable_profiling, ) - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] pkg_deps = resolved[haskell_toolchain.packages.dynamic] package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages From eb3467543c72b53665987fdf1b698121d0021e26 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 17:39:22 +0200 Subject: [PATCH 0937/1133] Inline pkg_name To avoid collision with pkgname --- haskell/compile.bzl | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 96cde0d3b..7759cdf8a 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -542,9 +542,7 @@ def _compile_module( # Expose only the packages we depend on directly for lib in haskell_direct_deps_lib_infos: - pkg_name = lib.name - - exposed_package_args.add(package_flag, pkg_name) + exposed_package_args.add(package_flag, lib.name) 
packages_info = PackagesInfo( exposed_package_modules = exposed_package_modules, From 0421ee6e44035ccbab53f446ed4d74754d7e38a1 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 17:45:05 +0200 Subject: [PATCH 0938/1133] Inline packages_info.packagedb_args --- haskell/compile.bzl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 7759cdf8a..0bda7f10f 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -551,7 +551,7 @@ def _compile_module( exposed_package_libs = exposed_package_libs, exposed_package_args = exposed_package_args, exposed_package_dbs = exposed_package_dbs, - packagedb_args = packagedb_args, + packagedb_args = cmd_args(), transitive_deps = libs, ) @@ -571,9 +571,9 @@ def _compile_module( "env", ])) package_env = cmd_args(delimiter = "\n") - packagedb_args = packagedb_tag.tag_artifacts(packages_info.packagedb_args) + packagedb_args_tagged = packagedb_tag.tag_artifacts(packagedb_args) package_env.add(cmd_args( - packagedb_args, + packagedb_args_tagged, format = "package-db {}", ).relative_to(package_env_file, parent = 1)) ctx.actions.write( @@ -583,7 +583,7 @@ def _compile_module( compile_args_for_file.add(cmd_args( packagedb_tag.tag_artifacts(package_env_file), prepend = "-package-env", - hidden = packagedb_args, + hidden = packagedb_args_tagged, )) dep_file = ctx.actions.declare_output(".".join([ From be0af5f54fc1dab1d0dc121b22497b1a5594f136 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 17:47:35 +0200 Subject: [PATCH 0939/1133] Remove unused exposed_package_libs --- haskell/compile.bzl | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 0bda7f10f..a62589c4f 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -499,7 +499,6 @@ def _compile_module( exposed_package_modules = [] exposed_package_imports = [] exposed_package_objects = [] - exposed_package_libs = 
cmd_args() exposed_package_args = cmd_args([package_flag, "base"]) exposed_package_dbs = [] @@ -548,7 +547,7 @@ def _compile_module( exposed_package_modules = exposed_package_modules, exposed_package_imports = exposed_package_imports, exposed_package_objects = exposed_package_objects, - exposed_package_libs = exposed_package_libs, + exposed_package_libs = cmd_args(), exposed_package_args = exposed_package_args, exposed_package_dbs = exposed_package_dbs, packagedb_args = cmd_args(), @@ -596,9 +595,6 @@ def _compile_module( tagged_dep_file = packagedb_tag.tag_artifacts(dep_file) compile_args_for_file.add("--buck2-packagedb-dep", tagged_dep_file) - if enable_th: - compile_args_for_file.add(packages_info.exposed_package_libs) - # Add args from preprocess-able inputs. inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) From e6079b8f036ed7fae1766cabd605fe23a905c8fe Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 17:48:54 +0200 Subject: [PATCH 0940/1133] inline exposed_package_modules --- haskell/compile.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index a62589c4f..87c82d9b9 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -544,7 +544,7 @@ def _compile_module( exposed_package_args.add(package_flag, lib.name) packages_info = PackagesInfo( - exposed_package_modules = exposed_package_modules, + exposed_package_modules = [], exposed_package_imports = exposed_package_imports, exposed_package_objects = exposed_package_objects, exposed_package_libs = cmd_args(), @@ -654,7 +654,7 @@ def _compile_module( # Transitive module dependencies from other packages. cross_package_modules = ctx.actions.tset( CompiledModuleTSet, - children = packages_info.exposed_package_modules, + children = exposed_package_modules, ) # Transitive module dependencies from the same package. 
this_package_modules = [ From 6cab823e4f9efb60dea30684112f63db0016972c Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 17:49:49 +0200 Subject: [PATCH 0941/1133] inline exposed_package_dbs --- haskell/compile.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 87c82d9b9..931e2916f 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -549,7 +549,7 @@ def _compile_module( exposed_package_objects = exposed_package_objects, exposed_package_libs = cmd_args(), exposed_package_args = exposed_package_args, - exposed_package_dbs = exposed_package_dbs, + exposed_package_dbs = [], packagedb_args = cmd_args(), transitive_deps = libs, ) @@ -714,7 +714,7 @@ def _compile_module( dyn_object_dot_o = dyn_object_dot_o, package_deps = package_deps.keys(), toolchain_deps = toolchain_deps, - db_deps = packages_info.exposed_package_dbs, + db_deps = exposed_package_dbs, ), children = [cross_package_modules] + this_package_modules, ) From 537b49e92a41790b613b72487110a8711ffc462c Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 17:53:27 +0200 Subject: [PATCH 0942/1133] Remove unused PackagesInfo --- haskell/compile.bzl | 27 --------------------------- 1 file changed, 27 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 931e2916f..bd6990a6c 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -476,15 +476,6 @@ def _compile_module( # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. - #packages_info = get_packages_info( - # ctx, - # link_style, - # specify_pkg_version = False, - # enable_profiling = enable_profiling, - # use_empty_lib = True, - # resolved = resolved, - # package_deps = package_deps, - #) # Collect library dependencies. Note that these don't need to be in a # particular order. 
@@ -497,9 +488,6 @@ def _compile_module( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) exposed_package_modules = [] - exposed_package_imports = [] - exposed_package_objects = [] - exposed_package_args = cmd_args([package_flag, "base"]) exposed_package_dbs = [] for lib in direct_deps_link_info: @@ -539,21 +527,6 @@ def _compile_module( packagedb_args.add(package_db_tset.project_as_args("package_db")) - # Expose only the packages we depend on directly - for lib in haskell_direct_deps_lib_infos: - exposed_package_args.add(package_flag, lib.name) - - packages_info = PackagesInfo( - exposed_package_modules = [], - exposed_package_imports = exposed_package_imports, - exposed_package_objects = exposed_package_objects, - exposed_package_libs = cmd_args(), - exposed_package_args = exposed_package_args, - exposed_package_dbs = [], - packagedb_args = cmd_args(), - transitive_deps = libs, - ) - # ------------------------------------------------------------ packagedb_tag = ctx.actions.artifact_tag() From 16cb90bc12a4bf9c795c308a7b030ea7906a6d8b Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 17:58:27 +0200 Subject: [PATCH 0943/1133] get_packages_info remove unused package_deps --- haskell/compile.bzl | 31 +++++++------------------------ 1 file changed, 7 insertions(+), 24 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index bd6990a6c..a94dce662 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -325,8 +325,7 @@ def get_packages_info( specify_pkg_version: bool, enable_profiling: bool, use_empty_lib: bool, - resolved: None | dict[DynamicValue, ResolvedDynamicValue] = None, - package_deps: None | dict[str, list[str]] = None) -> PackagesInfo: + resolved: None | dict[DynamicValue, ResolvedDynamicValue] = None) -> PackagesInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] # Collect library dependencies. 
Note that these don't need to be in a @@ -346,28 +345,12 @@ def get_packages_info( exposed_package_args = cmd_args([package_flag, "base"]) exposed_package_dbs = [] - if resolved != None and package_deps != None: - exposed_package_modules = [] - - for lib in direct_deps_link_info: - info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] - direct = info.value - dynamic = direct.dynamic[enable_profiling] - dynamic_info = resolved[dynamic][DynamicCompileResultInfo] - - for mod in package_deps.get(direct.name, []): - exposed_package_modules.append(dynamic_info.modules[mod]) - - if direct.name in package_deps: - db = direct.empty_db if use_empty_lib else direct.db - exposed_package_dbs.append(db) - else: - for lib in libs.traverse(): - exposed_package_imports.extend(lib.import_dirs[enable_profiling]) - exposed_package_objects.extend(lib.objects[enable_profiling]) - # libs of dependencies might be needed at compile time if - # we're using Template Haskell: - exposed_package_libs.hidden(lib.libs) + for lib in libs.traverse(): + exposed_package_imports.extend(lib.import_dirs[enable_profiling]) + exposed_package_objects.extend(lib.objects[enable_profiling]) + # libs of dependencies might be needed at compile time if + # we're using Template Haskell: + exposed_package_libs.hidden(lib.libs) packagedb_args = cmd_args(libs.project_as_args( "empty_package_db" if use_empty_lib else "package_db", From b31371db0fa545780f3f1eb660a33a1c94f251fe Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 18:00:05 +0200 Subject: [PATCH 0944/1133] get_packages_info consistently use no empty lib It does not make a difference for the metadata generation use-case and it reduces an implementation difference. 
--- haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index a94dce662..900feb4e3 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -255,7 +255,7 @@ def target_metadata( LinkStyle("shared"), specify_pkg_version = False, enable_profiling = False, - use_empty_lib = True, + use_empty_lib = False, resolved = resolved, ) package_flag = _package_flag(haskell_toolchain) From 67e57ac52465beec92b4e4a778080b4bcb14e4fa Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 18:01:24 +0200 Subject: [PATCH 0945/1133] Revert "get_packages_info consistently use no empty lib" This reverts commit 57d78c539f623d14bf2d602e6c3bb8121e72fb62. It does make a difference on the dependency graph. Module compilation always uses the empty package-db. Using the non-empty one for metadata generation means that two separate package-db generation steps are on the critical path for module compilation. --- haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 900feb4e3..a94dce662 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -255,7 +255,7 @@ def target_metadata( LinkStyle("shared"), specify_pkg_version = False, enable_profiling = False, - use_empty_lib = False, + use_empty_lib = True, resolved = resolved, ) package_flag = _package_flag(haskell_toolchain) From f3c43015e731ac7b134e815850556b2c5638b2c4 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 18:04:01 +0200 Subject: [PATCH 0946/1133] Remove unused exposed_package_modules --- haskell/compile.bzl | 3 --- 1 file changed, 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index a94dce662..52a735d5d 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -117,7 +117,6 @@ CompileResultInfo = record( ) PackagesInfo = record( - exposed_package_modules = field(None | list[CompiledModuleTSet]),
exposed_package_imports = field(list[Artifact]), exposed_package_objects = field(list[Artifact]), exposed_package_libs = cmd_args, @@ -338,7 +337,6 @@ def get_packages_info( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) - exposed_package_modules = None exposed_package_imports = [] exposed_package_objects = [] exposed_package_libs = cmd_args() @@ -393,7 +391,6 @@ def get_packages_info( exposed_package_args.add(package_flag, pkg_name) return PackagesInfo( - exposed_package_modules = exposed_package_modules, exposed_package_imports = exposed_package_imports, exposed_package_objects = exposed_package_objects, exposed_package_libs = exposed_package_libs, From 6781f87ac9da123f1a71f909a9e8065aa5777aba Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 18:05:01 +0200 Subject: [PATCH 0947/1133] Remove unused exposed_package_imports --- haskell/compile.bzl | 4 ---- 1 file changed, 4 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 52a735d5d..34bf985c9 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -117,7 +117,6 @@ CompileResultInfo = record( ) PackagesInfo = record( - exposed_package_imports = field(list[Artifact]), exposed_package_objects = field(list[Artifact]), exposed_package_libs = cmd_args, exposed_package_args = cmd_args, @@ -337,14 +336,12 @@ def get_packages_info( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) - exposed_package_imports = [] exposed_package_objects = [] exposed_package_libs = cmd_args() exposed_package_args = cmd_args([package_flag, "base"]) exposed_package_dbs = [] for lib in libs.traverse(): - exposed_package_imports.extend(lib.import_dirs[enable_profiling]) exposed_package_objects.extend(lib.objects[enable_profiling]) # libs of dependencies might be needed at compile time if # we're using Template Haskell: @@ -391,7 +388,6 @@ def get_packages_info( exposed_package_args.add(package_flag, 
pkg_name) return PackagesInfo( - exposed_package_imports = exposed_package_imports, exposed_package_objects = exposed_package_objects, exposed_package_libs = exposed_package_libs, exposed_package_args = exposed_package_args, From a765e7adca175d9292725d2e4d539f863758f18c Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 18:05:44 +0200 Subject: [PATCH 0948/1133] Remove unused exposed_package_objects --- haskell/compile.bzl | 4 ---- 1 file changed, 4 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 34bf985c9..6e0de46cd 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -117,7 +117,6 @@ CompileResultInfo = record( ) PackagesInfo = record( - exposed_package_objects = field(list[Artifact]), exposed_package_libs = cmd_args, exposed_package_args = cmd_args, exposed_package_dbs = field(list[Artifact]), @@ -336,13 +335,11 @@ def get_packages_info( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) - exposed_package_objects = [] exposed_package_libs = cmd_args() exposed_package_args = cmd_args([package_flag, "base"]) exposed_package_dbs = [] for lib in libs.traverse(): - exposed_package_objects.extend(lib.objects[enable_profiling]) # libs of dependencies might be needed at compile time if # we're using Template Haskell: exposed_package_libs.hidden(lib.libs) @@ -388,7 +385,6 @@ def get_packages_info( exposed_package_args.add(package_flag, pkg_name) return PackagesInfo( - exposed_package_objects = exposed_package_objects, exposed_package_libs = exposed_package_libs, exposed_package_args = exposed_package_args, exposed_package_dbs = exposed_package_dbs, From 39e3140b32b447b203f6354ebe2070e75092bb8e Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 18:06:30 +0200 Subject: [PATCH 0949/1133] Remove outdated comment These libs are only used for linking now --- haskell/compile.bzl | 2 -- 1 file changed, 2 deletions(-) diff --git a/haskell/compile.bzl 
b/haskell/compile.bzl index 6e0de46cd..24e35d3b7 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -340,8 +340,6 @@ def get_packages_info( exposed_package_dbs = [] for lib in libs.traverse(): - # libs of dependencies might be needed at compile time if - # we're using Template Haskell: exposed_package_libs.hidden(lib.libs) packagedb_args = cmd_args(libs.project_as_args( From 1369e33332ea349f6f2f6502984bb3cee9d17adc Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 12 Jun 2024 18:07:21 +0200 Subject: [PATCH 0950/1133] Remove unused exposed_package_dbs --- haskell/compile.bzl | 3 --- 1 file changed, 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 24e35d3b7..05de562a7 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -119,7 +119,6 @@ CompileResultInfo = record( PackagesInfo = record( exposed_package_libs = cmd_args, exposed_package_args = cmd_args, - exposed_package_dbs = field(list[Artifact]), packagedb_args = cmd_args, transitive_deps = field(HaskellLibraryInfoTSet), bin_paths = cmd_args, @@ -337,7 +336,6 @@ def get_packages_info( package_flag = _package_flag(haskell_toolchain) exposed_package_libs = cmd_args() exposed_package_args = cmd_args([package_flag, "base"]) - exposed_package_dbs = [] for lib in libs.traverse(): exposed_package_libs.hidden(lib.libs) @@ -385,7 +383,6 @@ def get_packages_info( return PackagesInfo( exposed_package_libs = exposed_package_libs, exposed_package_args = exposed_package_args, - exposed_package_dbs = exposed_package_dbs, packagedb_args = packagedb_args, transitive_deps = libs, bin_paths = bin_paths, From 19186dce0c3fd8591030907f8eef432fa9ec1deb Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:03:37 +0200 Subject: [PATCH 0951/1133] reorder binding --- haskell/compile.bzl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 05de562a7..918521a41 100644 --- a/haskell/compile.bzl +++ 
b/haskell/compile.bzl @@ -451,6 +451,7 @@ def _compile_module( lib.prof_info[link_style] if enable_profiling else lib.info[link_style] for lib in direct_deps_link_info ]) + packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) @@ -470,8 +471,6 @@ def _compile_module( db = direct.empty_db exposed_package_dbs.append(db) - packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) - haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( ctx, link_style, From 84f51650d33c37057816fb02d65053cd75c0bf03 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:04:22 +0200 Subject: [PATCH 0952/1133] Remove unused binding --- haskell/compile.bzl | 6 ------ 1 file changed, 6 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 918521a41..4896708e0 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -471,12 +471,6 @@ def _compile_module( db = direct.empty_db exposed_package_dbs.append(db) - haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( - ctx, - link_style, - enable_profiling, - ) - pkg_deps = resolved[haskell_toolchain.packages.dynamic] package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages From 71529677be2aa40ecfe25307358c4644cfe3bc50 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:05:17 +0200 Subject: [PATCH 0953/1133] Reorder bindings --- haskell/compile.bzl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 4896708e0..947c62340 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -344,12 +344,6 @@ def get_packages_info( "empty_package_db" if use_empty_lib else "package_db", )) - haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( - ctx, - link_style, - enable_profiling, - ) - if haskell_toolchain.packages and resolved != None: haskell_toolchain = 
ctx.attrs._haskell_toolchain[HaskellToolchainInfo] pkg_deps = resolved[haskell_toolchain.packages.dynamic] @@ -372,6 +366,12 @@ def get_packages_info( packagedb_args.add(haskell_toolchain.packages.package_db) bin_paths = cmd_args() + haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( + ctx, + link_style, + enable_profiling, + ) + # Expose only the packages we depend on directly for lib in haskell_direct_deps_lib_infos: pkg_name = lib.name From 863391eb413b00c3711a98d7ce70e512e435cd89 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:06:48 +0200 Subject: [PATCH 0954/1133] Remove unused binding --- haskell/compile.bzl | 1 - 1 file changed, 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 947c62340..1df7e072e 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -454,7 +454,6 @@ def _compile_module( packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) # base is special and gets exposed by default - package_flag = _package_flag(haskell_toolchain) exposed_package_modules = [] exposed_package_dbs = [] From 115d0f88af5dbf14ce305be7aed92470f957e86f Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:10:01 +0200 Subject: [PATCH 0955/1133] Reorder bindings --- haskell/compile.bzl | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 1df7e072e..0b43d25f3 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -447,11 +447,6 @@ def _compile_module( # Collect library dependencies. Note that these don't need to be in a # particular order. 
direct_deps_link_info = attr_deps_haskell_link_infos(ctx) - libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = [ - lib.prof_info[link_style] if enable_profiling else lib.info[link_style] - for lib in direct_deps_link_info - ]) - packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) # base is special and gets exposed by default exposed_package_modules = [] @@ -470,20 +465,24 @@ def _compile_module( db = direct.empty_db exposed_package_dbs.append(db) - pkg_deps = resolved[haskell_toolchain.packages.dynamic] - package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages - + libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = [ + lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + for lib in direct_deps_link_info + ]) toolchain_libs = [ dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep ] + libs.reduce("packages") + pkg_deps = resolved[haskell_toolchain.packages.dynamic] + package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages package_db_tset = ctx.actions.tset( HaskellPackageDbTSet, children = [package_db[name] for name in toolchain_libs if name in package_db] ) + packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) packagedb_args.add(package_db_tset.project_as_args("package_db")) # ------------------------------------------------------------ From 1b643c5014ae18f48e6dc62caadc193c38bd3cb3 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:10:36 +0200 Subject: [PATCH 0956/1133] Remove inlining markers --- haskell/compile.bzl | 4 ---- 1 file changed, 4 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 0b43d25f3..517074452 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -439,8 +439,6 @@ def _compile_module( osuf, hisuf = output_extensions(link_style, enable_profiling) compile_args_for_file.add("-osuf", osuf, "-hisuf", hisuf) - # ------------------------------------------------------------ - # Add 
-package-db and -package/-expose-package flags for each Haskell # library dependency. @@ -485,8 +483,6 @@ def _compile_module( packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) packagedb_args.add(package_db_tset.project_as_args("package_db")) - # ------------------------------------------------------------ - packagedb_tag = ctx.actions.artifact_tag() # TODO[AH] Avoid duplicates and share identical env files. From b6f92c54488b6779bdbe9c11b8c01af0317f177e Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:17:40 +0200 Subject: [PATCH 0957/1133] Iterate over module's package deps Instead of iterating over all the package's package deps and filtering out the ones that are module deps. --- haskell/compile.bzl | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 517074452..58e30ee8d 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -450,18 +450,22 @@ def _compile_module( exposed_package_modules = [] exposed_package_dbs = [] + libs_by_name = {} for lib in direct_deps_link_info: info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] direct = info.value dynamic = direct.dynamic[enable_profiling] dynamic_info = resolved[dynamic][DynamicCompileResultInfo] - for mod in package_deps.get(direct.name, []): - exposed_package_modules.append(dynamic_info.modules[mod]) + libs_by_name[direct.name] = struct( + package_db = direct.empty_db, + modules = dynamic_info.modules, + ) - if direct.name in package_deps: - db = direct.empty_db - exposed_package_dbs.append(db) + for dep_pkgname, dep_modules in package_deps.items(): + exposed_package_dbs.append(libs_by_name[dep_pkgname].package_db) + for dep_modname in dep_modules: + exposed_package_modules.append(libs_by_name[dep_pkgname].modules[dep_modname]) libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = [ lib.prof_info[link_style] if enable_profiling else lib.info[link_style] From 
a99fdb0ec3985c4649c718c467d345dc89464c7c Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:19:35 +0200 Subject: [PATCH 0958/1133] remove outdated comment --- haskell/compile.bzl | 1 - 1 file changed, 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 58e30ee8d..da36174c7 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -446,7 +446,6 @@ def _compile_module( # particular order. direct_deps_link_info = attr_deps_haskell_link_infos(ctx) - # base is special and gets exposed by default exposed_package_modules = [] exposed_package_dbs = [] From a8d23c1379788cbd87422936890cf104f312b6e2 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:23:15 +0200 Subject: [PATCH 0959/1133] binding order --- haskell/compile.bzl | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index da36174c7..4d6800e0f 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -446,9 +446,6 @@ def _compile_module( # particular order. 
direct_deps_link_info = attr_deps_haskell_link_infos(ctx) - exposed_package_modules = [] - exposed_package_dbs = [] - libs_by_name = {} for lib in direct_deps_link_info: info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] @@ -461,6 +458,8 @@ def _compile_module( modules = dynamic_info.modules, ) + exposed_package_modules = [] + exposed_package_dbs = [] for dep_pkgname, dep_modules in package_deps.items(): exposed_package_dbs.append(libs_by_name[dep_pkgname].package_db) for dep_modname in dep_modules: From ceb8404178541de1fa4af17cb59134de51e57b1b Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 14 Jun 2024 13:49:30 +0200 Subject: [PATCH 0960/1133] Fix bin_paths error due to rebase --- haskell/compile.bzl | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 4d6800e0f..2483ef9ac 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -482,6 +482,12 @@ def _compile_module( children = [package_db[name] for name in toolchain_libs if name in package_db] ) + # TODO(ah) breaks recompilation avoidance on transitive toolchain library changes. 
+ compile_args_for_file.add(cmd_args( + package_db_tset.project_as_args("path"), + format="--bin-path={}/bin", + )) + packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) packagedb_args.add(package_db_tset.project_as_args("package_db")) @@ -541,7 +547,6 @@ def _compile_module( compile_args_for_file.add("-o", objects[0].as_output()) compile_args_for_file.add("-ohi", his[0].as_output()) compile_args_for_file.add("-stubdir", stubs.as_output()) - compile_args_for_file.add(packages_info.bin_paths) if link_style in [LinkStyle("static_pic"), LinkStyle("static")]: compile_args_for_file.add("-dynamic-too") From abf52ddb164ffc68c19e97dda1e1f9798ab078da Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:42:13 +0200 Subject: [PATCH 0961/1133] Pass only the current module to _compile_module Previously the entire module graph was passed, but not actually used. --- haskell/compile.bzl | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 2483ef9ac..de25411dd 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -396,7 +396,7 @@ def _compile_module( enable_haddock: bool, enable_th: bool, module_name: str, - modules: dict[str, _Module], + module: _Module, module_tsets: dict[str, CompiledModuleTSet], md_file: Artifact, graph: dict[str, list[str]], @@ -407,8 +407,6 @@ def _compile_module( artifact_suffix: str, pkgname: str | None = None, ) -> CompiledModuleTSet: - module = modules[module_name] - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] compile_cmd = cmd_args(ctx.attrs._ghc_wrapper[RunInfo]) compile_cmd.add("--ghc", haskell_toolchain.compiler) @@ -687,7 +685,7 @@ def compile( enable_haddock = enable_haddock, enable_th = module_name in th_modules, module_name = module_name, - modules = mapped_modules, + module = mapped_modules[module_name], module_tsets = module_tsets, graph = graph, package_deps = package_deps.get(module_name, {}), From 
8df1495079286b22b2943c8413902f67f2664b4d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:46:07 +0200 Subject: [PATCH 0962/1133] bubble up non module specific args --- haskell/compile.bzl | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index de25411dd..e81c24a64 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -444,25 +444,6 @@ def _compile_module( # particular order. direct_deps_link_info = attr_deps_haskell_link_infos(ctx) - libs_by_name = {} - for lib in direct_deps_link_info: - info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] - direct = info.value - dynamic = direct.dynamic[enable_profiling] - dynamic_info = resolved[dynamic][DynamicCompileResultInfo] - - libs_by_name[direct.name] = struct( - package_db = direct.empty_db, - modules = dynamic_info.modules, - ) - - exposed_package_modules = [] - exposed_package_dbs = [] - for dep_pkgname, dep_modules in package_deps.items(): - exposed_package_dbs.append(libs_by_name[dep_pkgname].package_db) - for dep_modname in dep_modules: - exposed_package_modules.append(libs_by_name[dep_pkgname].modules[dep_modname]) - libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = [ lib.prof_info[link_style] if enable_profiling else lib.info[link_style] for lib in direct_deps_link_info @@ -489,6 +470,25 @@ def _compile_module( packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) packagedb_args.add(package_db_tset.project_as_args("package_db")) + libs_by_name = {} + for lib in direct_deps_link_info: + info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + direct = info.value + dynamic = direct.dynamic[enable_profiling] + dynamic_info = resolved[dynamic][DynamicCompileResultInfo] + + libs_by_name[direct.name] = struct( + package_db = direct.empty_db, + modules = dynamic_info.modules, + ) + + exposed_package_modules = [] + 
exposed_package_dbs = [] + for dep_pkgname, dep_modules in package_deps.items(): + exposed_package_dbs.append(libs_by_name[dep_pkgname].package_db) + for dep_modname in dep_modules: + exposed_package_modules.append(libs_by_name[dep_pkgname].modules[dep_modname]) + packagedb_tag = ctx.actions.artifact_tag() # TODO[AH] Avoid duplicates and share identical env files. From 0aae557a0ff129327905af2511e8e99543ad6e57 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:48:00 +0200 Subject: [PATCH 0963/1133] bubble cpp flags --- haskell/compile.bzl | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index e81c24a64..8f67b0c04 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -437,6 +437,15 @@ def _compile_module( osuf, hisuf = output_extensions(link_style, enable_profiling) compile_args_for_file.add("-osuf", osuf, "-hisuf", hisuf) + # Add args from preprocess-able inputs. + inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) + pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) + pre_args = pre.set.project_as_args("args") + compile_args_for_file.add(cmd_args(pre_args, format = "-optP={}")) + + if pkgname: + compile_args_for_file.add(["-this-unit-id", pkgname]) + # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. @@ -528,15 +537,6 @@ def _compile_module( tagged_dep_file = packagedb_tag.tag_artifacts(dep_file) compile_args_for_file.add("--buck2-packagedb-dep", tagged_dep_file) - # Add args from preprocess-able inputs. 
- inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) - pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) - pre_args = pre.set.project_as_args("args") - compile_args_for_file.add(cmd_args(pre_args, format = "-optP={}")) - - if pkgname: - compile_args_for_file.add(["-this-unit-id", pkgname]) - objects = [outputs[obj] for obj in module.objects] his = [outputs[hi] for hi in module.interfaces] stubs = outputs[module.stub_dir] From bfdbb9f65858f441c7862c58e300a9cb3c1219d9 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 14:49:45 +0200 Subject: [PATCH 0964/1133] bubble up non-module specific extra source files --- haskell/compile.bzl | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 8f67b0c04..fcfab1f32 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -437,6 +437,13 @@ def _compile_module( osuf, hisuf = output_extensions(link_style, enable_profiling) compile_args_for_file.add("-osuf", osuf, "-hisuf", hisuf) + non_haskell_sources = [src for (path, src) in srcs_to_pairs(ctx.attrs.srcs) if not is_haskell_src(path)] + + if non_haskell_sources: + warning("{} specifies non-haskell file in `srcs`, consider using `srcs_deps` instead".format(ctx.label)) + + compile_args_for_file.hidden(non_haskell_sources) + # Add args from preprocess-able inputs. 
inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) @@ -557,13 +564,6 @@ def _compile_module( if aux_deps: compile_args_for_file.hidden(aux_deps) - non_haskell_sources = [src for (path, src) in srcs_to_pairs(ctx.attrs.srcs) if not is_haskell_src(path)] - - if non_haskell_sources: - warning("{} specifies non-haskell file in `srcs`, consider using `srcs_deps` instead".format(ctx.label)) - - compile_args_for_file.hidden(non_haskell_sources) - if haskell_toolchain.use_argsfile: argsfile = ctx.actions.declare_output( "haskell_compile_" + artifact_suffix + ".argsfile", From 550eeedd87b640da48b7ea2b0a8e743fe91f4aa0 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 13 Jun 2024 15:00:07 +0200 Subject: [PATCH 0965/1133] Factor out common command line arguments --- haskell/compile.bzl | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index fcfab1f32..8c3c6ab8d 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -388,9 +388,24 @@ def get_packages_info( bin_paths = bin_paths, ) +CommonCompileModuleArgs = record( + command = field(cmd_args), + args_for_file = field(cmd_args), +) + +def _common_compile_module_args( + ctx: AnalysisContext, + *, +) -> CommonCompileModuleArgs: + return CommonCompileModuleArgs( + command = cmd_args(), + args_for_file = cmd_args(), + ) + def _compile_module( ctx: AnalysisContext, *, + common_args: CommonCompileModuleArgs, link_style: LinkStyle, enable_profiling: bool, enable_haddock: bool, @@ -407,8 +422,12 @@ def _compile_module( artifact_suffix: str, pkgname: str | None = None, ) -> CompiledModuleTSet: + compile_cmd = cmd_args(common_args.command) + # These compiler arguments can be passed in a response file. 
+ compile_args_for_file = cmd_args(common_args.args_for_file) + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - compile_cmd = cmd_args(ctx.attrs._ghc_wrapper[RunInfo]) + compile_cmd.add(ctx.attrs._ghc_wrapper[RunInfo]) compile_cmd.add("--ghc", haskell_toolchain.compiler) compile_cmd.add(haskell_toolchain.compiler_flags) @@ -421,8 +440,6 @@ def _compile_module( if enable_haddock: compile_cmd.add("-haddock") - # These compiler arguments can be passed in a response file. - compile_args_for_file = cmd_args() compile_args_for_file.add("-no-link", "-i") compile_args_for_file.add("-hide-all-packages") @@ -667,6 +684,8 @@ def compile( modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) def do_compile(ctx, artifacts, resolved, outputs, md_file=md_file, modules=modules): + common_args = _common_compile_module_args(ctx) + md = artifacts[md_file].read_json() th_modules = md["th_modules"] module_map = md["module_mapping"] @@ -680,6 +699,7 @@ def compile( for module_name in post_order_traversal(graph): module_tsets[module_name] = _compile_module( ctx, + common_args = common_args, link_style = link_style, enable_profiling = enable_profiling, enable_haddock = enable_haddock, From 108fee334468b5eede3631601f44a44fd762ca33 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 14 Jun 2024 15:06:32 +0200 Subject: [PATCH 0966/1133] Move common compile flags --- haskell/compile.bzl | 109 +++++++++++++++++++++++++------------------- 1 file changed, 63 insertions(+), 46 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 8c3c6ab8d..b2a80d56a 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -396,79 +396,91 @@ CommonCompileModuleArgs = record( def _common_compile_module_args( ctx: AnalysisContext, *, -) -> CommonCompileModuleArgs: - return CommonCompileModuleArgs( - command = cmd_args(), - args_for_file = cmd_args(), - ) - -def 
_compile_module( - ctx: AnalysisContext, - *, - common_args: CommonCompileModuleArgs, - link_style: LinkStyle, - enable_profiling: bool, enable_haddock: bool, - enable_th: bool, - module_name: str, - module: _Module, - module_tsets: dict[str, CompiledModuleTSet], - md_file: Artifact, - graph: dict[str, list[str]], - package_deps: dict[str, list[str]], - toolchain_deps: list[str], - outputs: dict[Artifact, Artifact], - resolved: dict[DynamicValue, ResolvedDynamicValue], - artifact_suffix: str, + enable_profiling: bool, + link_style: LinkStyle, pkgname: str | None = None, -) -> CompiledModuleTSet: - compile_cmd = cmd_args(common_args.command) - # These compiler arguments can be passed in a response file. - compile_args_for_file = cmd_args(common_args.args_for_file) - +) -> CommonCompileModuleArgs: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - compile_cmd.add(ctx.attrs._ghc_wrapper[RunInfo]) - compile_cmd.add("--ghc", haskell_toolchain.compiler) - compile_cmd.add(haskell_toolchain.compiler_flags) + command = cmd_args(ctx.attrs._ghc_wrapper[RunInfo]) + command.add("--ghc", haskell_toolchain.compiler) # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't # be parsed when inside an argsfile. 
- compile_cmd.add(ctx.attrs.compiler_flags) - compile_cmd.add("-c") + command.add(haskell_toolchain.compiler_flags) + command.add(ctx.attrs.compiler_flags) + + command.add("-c") if enable_haddock: - compile_cmd.add("-haddock") + command.add("-haddock") + + args_for_file = cmd_args() - compile_args_for_file.add("-no-link", "-i") - compile_args_for_file.add("-hide-all-packages") + args_for_file.add("-no-link", "-i") + args_for_file.add("-hide-all-packages") if enable_profiling: - compile_args_for_file.add("-prof") + args_for_file.add("-prof") if link_style == LinkStyle("shared"): - compile_args_for_file.add("-dynamic", "-fPIC") + args_for_file.add("-dynamic", "-fPIC") elif link_style == LinkStyle("static_pic"): - compile_args_for_file.add("-fPIC", "-fexternal-dynamic-refs") + args_for_file.add("-fPIC", "-fexternal-dynamic-refs") osuf, hisuf = output_extensions(link_style, enable_profiling) - compile_args_for_file.add("-osuf", osuf, "-hisuf", hisuf) + args_for_file.add("-osuf", osuf, "-hisuf", hisuf) - non_haskell_sources = [src for (path, src) in srcs_to_pairs(ctx.attrs.srcs) if not is_haskell_src(path)] + non_haskell_sources = [ + src + for (path, src) in srcs_to_pairs(ctx.attrs.srcs) + if not is_haskell_src(path) + ] if non_haskell_sources: warning("{} specifies non-haskell file in `srcs`, consider using `srcs_deps` instead".format(ctx.label)) - compile_args_for_file.hidden(non_haskell_sources) + args_for_file.hidden(non_haskell_sources) # Add args from preprocess-able inputs. 
inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) pre_args = pre.set.project_as_args("args") - compile_args_for_file.add(cmd_args(pre_args, format = "-optP={}")) + args_for_file.add(cmd_args(pre_args, format = "-optP={}")) if pkgname: - compile_args_for_file.add(["-this-unit-id", pkgname]) + args_for_file.add(["-this-unit-id", pkgname]) + + return CommonCompileModuleArgs( + command = command, + args_for_file = args_for_file, + ) + +def _compile_module( + ctx: AnalysisContext, + *, + common_args: CommonCompileModuleArgs, + link_style: LinkStyle, + enable_profiling: bool, + enable_th: bool, + module_name: str, + module: _Module, + module_tsets: dict[str, CompiledModuleTSet], + md_file: Artifact, + graph: dict[str, list[str]], + package_deps: dict[str, list[str]], + toolchain_deps: list[str], + outputs: dict[Artifact, Artifact], + resolved: dict[DynamicValue, ResolvedDynamicValue], + artifact_suffix: str, + pkgname: str | None = None, +) -> CompiledModuleTSet: + compile_cmd = cmd_args(common_args.command) + # These compiler arguments can be passed in a response file. + compile_args_for_file = cmd_args(common_args.args_for_file) + + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. 
@@ -684,7 +696,13 @@ def compile( modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) def do_compile(ctx, artifacts, resolved, outputs, md_file=md_file, modules=modules): - common_args = _common_compile_module_args(ctx) + common_args = _common_compile_module_args( + ctx, + enable_haddock = enable_haddock, + enable_profiling = enable_profiling, + link_style = link_style, + pkgname = pkgname, + ) md = artifacts[md_file].read_json() th_modules = md["th_modules"] @@ -702,7 +720,6 @@ def compile( common_args = common_args, link_style = link_style, enable_profiling = enable_profiling, - enable_haddock = enable_haddock, enable_th = module_name in th_modules, module_name = module_name, module = mapped_modules[module_name], From 4b73cef8d48d7a62c0f258e7da12885beec33b9a Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 14 Jun 2024 15:23:40 +0200 Subject: [PATCH 0967/1133] Factor out direct_deps_info --- haskell/compile.bzl | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index b2a80d56a..1c403f4c0 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -474,6 +474,7 @@ def _compile_module( outputs: dict[Artifact, Artifact], resolved: dict[DynamicValue, ResolvedDynamicValue], artifact_suffix: str, + direct_deps_info: list[HaskellLibraryInfoTSet], pkgname: str | None = None, ) -> CompiledModuleTSet: compile_cmd = cmd_args(common_args.command) @@ -485,14 +486,7 @@ def _compile_module( # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. - # Collect library dependencies. Note that these don't need to be in a - # particular order. 
- direct_deps_link_info = attr_deps_haskell_link_infos(ctx) - - libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = [ - lib.prof_info[link_style] if enable_profiling else lib.info[link_style] - for lib in direct_deps_link_info - ]) + libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = direct_deps_info) toolchain_libs = [ dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps @@ -516,8 +510,7 @@ def _compile_module( packagedb_args.add(package_db_tset.project_as_args("package_db")) libs_by_name = {} - for lib in direct_deps_link_info: - info = lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + for info in direct_deps_info: direct = info.value dynamic = direct.dynamic[enable_profiling] dynamic_info = resolved[dynamic][DynamicCompileResultInfo] @@ -696,6 +689,12 @@ def compile( modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) def do_compile(ctx, artifacts, resolved, outputs, md_file=md_file, modules=modules): + # Collect library dependencies. Note that these don't need to be in a + # particular order. 
+ direct_deps_info = [ + lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + for lib in attr_deps_haskell_link_infos(ctx) + ] common_args = _common_compile_module_args( ctx, enable_haddock = enable_haddock, @@ -731,6 +730,7 @@ def compile( resolved = resolved, md_file=md_file, artifact_suffix = artifact_suffix, + direct_deps_info = direct_deps_info, pkgname = pkgname, ) From 09ad9ff5c45c58acd22c0cac410a254866f1bac4 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 14 Jun 2024 15:34:00 +0200 Subject: [PATCH 0968/1133] Factor out direct_deps_by_name --- haskell/compile.bzl | 24 +++++++++++------------- 1 file changed, 11 insertions(+), 13 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 1c403f4c0..a08a64436 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -475,6 +475,7 @@ def _compile_module( resolved: dict[DynamicValue, ResolvedDynamicValue], artifact_suffix: str, direct_deps_info: list[HaskellLibraryInfoTSet], + direct_deps_by_name: dict[str, typing.Any], pkgname: str | None = None, ) -> CompiledModuleTSet: compile_cmd = cmd_args(common_args.command) @@ -509,23 +510,12 @@ def _compile_module( packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) packagedb_args.add(package_db_tset.project_as_args("package_db")) - libs_by_name = {} - for info in direct_deps_info: - direct = info.value - dynamic = direct.dynamic[enable_profiling] - dynamic_info = resolved[dynamic][DynamicCompileResultInfo] - - libs_by_name[direct.name] = struct( - package_db = direct.empty_db, - modules = dynamic_info.modules, - ) - exposed_package_modules = [] exposed_package_dbs = [] for dep_pkgname, dep_modules in package_deps.items(): - exposed_package_dbs.append(libs_by_name[dep_pkgname].package_db) + exposed_package_dbs.append(direct_deps_by_name[dep_pkgname].package_db) for dep_modname in dep_modules: - exposed_package_modules.append(libs_by_name[dep_pkgname].modules[dep_modname]) + 
exposed_package_modules.append(direct_deps_by_name[dep_pkgname].modules[dep_modname]) packagedb_tag = ctx.actions.artifact_tag() @@ -695,6 +685,13 @@ def compile( lib.prof_info[link_style] if enable_profiling else lib.info[link_style] for lib in attr_deps_haskell_link_infos(ctx) ] + direct_deps_by_name = { + info.value.name: struct( + package_db = info.value.empty_db, + modules = resolved[info.value.dynamic[enable_profiling]][DynamicCompileResultInfo].modules, + ) + for info in direct_deps_info + } common_args = _common_compile_module_args( ctx, enable_haddock = enable_haddock, @@ -731,6 +728,7 @@ def compile( md_file=md_file, artifact_suffix = artifact_suffix, direct_deps_info = direct_deps_info, + direct_deps_by_name = direct_deps_by_name, pkgname = pkgname, ) From 26bf6167371378e85b09125c6978989335815eb5 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 14 Jun 2024 17:17:45 +0200 Subject: [PATCH 0969/1133] pull down per module analysis --- haskell/compile.bzl | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index a08a64436..a67f6c8b5 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -510,13 +510,6 @@ def _compile_module( packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) packagedb_args.add(package_db_tset.project_as_args("package_db")) - exposed_package_modules = [] - exposed_package_dbs = [] - for dep_pkgname, dep_modules in package_deps.items(): - exposed_package_dbs.append(direct_deps_by_name[dep_pkgname].package_db) - for dep_modname in dep_modules: - exposed_package_modules.append(direct_deps_by_name[dep_pkgname].modules[dep_modname]) - packagedb_tag = ctx.actions.artifact_tag() # TODO[AH] Avoid duplicates and share identical env files. 
@@ -595,6 +588,13 @@ def _compile_module( ) ) + exposed_package_modules = [] + exposed_package_dbs = [] + for dep_pkgname, dep_modules in package_deps.items(): + exposed_package_dbs.append(direct_deps_by_name[dep_pkgname].package_db) + for dep_modname in dep_modules: + exposed_package_modules.append(direct_deps_by_name[dep_pkgname].modules[dep_modname]) + # Transitive module dependencies from other packages. cross_package_modules = ctx.actions.tset( CompiledModuleTSet, From c19828e1be358c8fe293d3937331e336c11058f2 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 14 Jun 2024 17:39:00 +0200 Subject: [PATCH 0970/1133] Factor out packagedb tagging --- haskell/compile.bzl | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index a67f6c8b5..f84fa7372 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -510,8 +510,6 @@ def _compile_module( packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) packagedb_args.add(package_db_tset.project_as_args("package_db")) - packagedb_tag = ctx.actions.artifact_tag() - # TODO[AH] Avoid duplicates and share identical env files. # The set of package-dbs can be known at the package level, not just the # module level. 
So, we could generate this file outside of the @@ -524,20 +522,22 @@ def _compile_module( "env", ])) package_env = cmd_args(delimiter = "\n") - packagedb_args_tagged = packagedb_tag.tag_artifacts(packagedb_args) package_env.add(cmd_args( - packagedb_args_tagged, + packagedb_args, format = "package-db {}", ).relative_to(package_env_file, parent = 1)) ctx.actions.write( package_env_file, package_env, ) - compile_args_for_file.add(cmd_args( - packagedb_tag.tag_artifacts(package_env_file), + package_env_arg = cmd_args( + package_env_file, prepend = "-package-env", - hidden = packagedb_args_tagged, - )) + hidden = packagedb_args, + ) + + packagedb_tag = ctx.actions.artifact_tag() + compile_args_for_file.add(packagedb_tag.tag_artifacts(package_env_arg)) dep_file = ctx.actions.declare_output(".".join([ ctx.label.name, From f0138684cfc37fbe9a9a1e5c3ecc1124a6d3a53d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 17 Jun 2024 11:46:00 +0200 Subject: [PATCH 0971/1133] Factor out common package-db flags We expose all package-dbs of transitive package dependencies of the current package and use dep-files to achieve recompilation avoidance on changes to packages that are not transitive dependencies of a given module in the current package. This means that the set of package-dbs can be determined once for the whole package rather than once per module and we can share the generated package environment file. 
--- haskell/compile.bzl | 77 ++++++++++++++++++++++++--------------------- 1 file changed, 41 insertions(+), 36 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index f84fa7372..484cdf38c 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -391,14 +391,17 @@ def get_packages_info( CommonCompileModuleArgs = record( command = field(cmd_args), args_for_file = field(cmd_args), + package_env_args = field(cmd_args), ) def _common_compile_module_args( ctx: AnalysisContext, *, + resolved: dict[DynamicValue, ResolvedDynamicValue], enable_haddock: bool, enable_profiling: bool, link_style: LinkStyle, + direct_deps_info: list[HaskellLibraryInfoTSet], pkgname: str | None = None, ) -> CommonCompileModuleArgs: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -452,38 +455,6 @@ def _common_compile_module_args( if pkgname: args_for_file.add(["-this-unit-id", pkgname]) - return CommonCompileModuleArgs( - command = command, - args_for_file = args_for_file, - ) - -def _compile_module( - ctx: AnalysisContext, - *, - common_args: CommonCompileModuleArgs, - link_style: LinkStyle, - enable_profiling: bool, - enable_th: bool, - module_name: str, - module: _Module, - module_tsets: dict[str, CompiledModuleTSet], - md_file: Artifact, - graph: dict[str, list[str]], - package_deps: dict[str, list[str]], - toolchain_deps: list[str], - outputs: dict[Artifact, Artifact], - resolved: dict[DynamicValue, ResolvedDynamicValue], - artifact_suffix: str, - direct_deps_info: list[HaskellLibraryInfoTSet], - direct_deps_by_name: dict[str, typing.Any], - pkgname: str | None = None, -) -> CompiledModuleTSet: - compile_cmd = cmd_args(common_args.command) - # These compiler arguments can be passed in a response file. - compile_args_for_file = cmd_args(common_args.args_for_file) - - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. 
@@ -502,7 +473,7 @@ def _compile_module( ) # TODO(ah) breaks recompilation avoidance on transitive toolchain library changes. - compile_args_for_file.add(cmd_args( + args_for_file.add(cmd_args( package_db_tset.project_as_args("path"), format="--bin-path={}/bin", )) @@ -516,7 +487,6 @@ def _compile_module( # dynamic_output action. package_env_file = ctx.actions.declare_output(".".join([ ctx.label.name, - module_name or "pkg", "package-db", output_extensions(link_style, enable_profiling)[1], "env", @@ -530,14 +500,47 @@ def _compile_module( package_env_file, package_env, ) - package_env_arg = cmd_args( + package_env_args = cmd_args( package_env_file, prepend = "-package-env", hidden = packagedb_args, ) + return CommonCompileModuleArgs( + command = command, + args_for_file = args_for_file, + package_env_args = package_env_args, + ) + +def _compile_module( + ctx: AnalysisContext, + *, + common_args: CommonCompileModuleArgs, + link_style: LinkStyle, + enable_profiling: bool, + enable_th: bool, + module_name: str, + module: _Module, + module_tsets: dict[str, CompiledModuleTSet], + md_file: Artifact, + graph: dict[str, list[str]], + package_deps: dict[str, list[str]], + toolchain_deps: list[str], + outputs: dict[Artifact, Artifact], + resolved: dict[DynamicValue, ResolvedDynamicValue], + artifact_suffix: str, + direct_deps_info: list[HaskellLibraryInfoTSet], + direct_deps_by_name: dict[str, typing.Any], + pkgname: str | None = None, +) -> CompiledModuleTSet: + compile_cmd = cmd_args(common_args.command) + # These compiler arguments can be passed in a response file. 
+ compile_args_for_file = cmd_args(common_args.args_for_file) + + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + packagedb_tag = ctx.actions.artifact_tag() - compile_args_for_file.add(packagedb_tag.tag_artifacts(package_env_arg)) + compile_args_for_file.add(packagedb_tag.tag_artifacts(common_args.package_env_args)) dep_file = ctx.actions.declare_output(".".join([ ctx.label.name, @@ -694,9 +697,11 @@ def compile( } common_args = _common_compile_module_args( ctx, + resolved = resolved, enable_haddock = enable_haddock, enable_profiling = enable_profiling, link_style = link_style, + direct_deps_info = direct_deps_info, pkgname = pkgname, ) From d5e7dc05fcc66425d5be9cd8eaf27ca40f05f271 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Mon, 17 Jun 2024 11:51:30 +0200 Subject: [PATCH 0972/1133] remove unused arguments --- haskell/compile.bzl | 6 ------ 1 file changed, 6 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 484cdf38c..e76971a78 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -527,11 +527,8 @@ def _compile_module( package_deps: dict[str, list[str]], toolchain_deps: list[str], outputs: dict[Artifact, Artifact], - resolved: dict[DynamicValue, ResolvedDynamicValue], artifact_suffix: str, - direct_deps_info: list[HaskellLibraryInfoTSet], direct_deps_by_name: dict[str, typing.Any], - pkgname: str | None = None, ) -> CompiledModuleTSet: compile_cmd = cmd_args(common_args.command) # These compiler arguments can be passed in a response file. 
@@ -729,12 +726,9 @@ def compile( package_deps = package_deps.get(module_name, {}), toolchain_deps = toolchain_deps.get(module_name, []), outputs = outputs, - resolved = resolved, md_file=md_file, artifact_suffix = artifact_suffix, - direct_deps_info = direct_deps_info, direct_deps_by_name = direct_deps_by_name, - pkgname = pkgname, ) return [DynamicCompileResultInfo(modules = module_tsets)] From dc554a467e04bebdd696907d0de69929254b2ae4 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 17 Jun 2024 11:48:56 +0200 Subject: [PATCH 0973/1133] Avoid recompilation for binaries from haskell packages --- haskell/compile.bzl | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index e76971a78..7c6083fbb 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -349,11 +349,13 @@ def get_packages_info( pkg_deps = resolved[haskell_toolchain.packages.dynamic] package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages - toolchain_libs = [ + direct_toolchain_libs = [ dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep - ] + libs.reduce("packages") + ] + + toolchain_libs = direct_toolchain_libs + libs.reduce("packages") package_db_tset = ctx.actions.tset( HaskellPackageDbTSet, @@ -361,7 +363,9 @@ def get_packages_info( ) packagedb_args.add(package_db_tset.project_as_args("package_db")) - bin_paths = cmd_args(package_db_tset.project_as_args("path"), format="--bin-path={}/bin") + + direct_package_paths = [package_db[name].value.path for name in direct_toolchain_libs if name in package_db] + bin_paths = cmd_args(direct_package_paths, format="--bin-path={}/bin") else: packagedb_args.add(haskell_toolchain.packages.package_db) bin_paths = cmd_args() @@ -459,11 +463,13 @@ def _common_compile_module_args( # library dependency. 
libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = direct_deps_info) - toolchain_libs = [ + + direct_toolchain_libs = [ dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep - ] + libs.reduce("packages") + ] + toolchain_libs = direct_toolchain_libs + libs.reduce("packages") pkg_deps = resolved[haskell_toolchain.packages.dynamic] package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages @@ -472,9 +478,9 @@ def _common_compile_module_args( children = [package_db[name] for name in toolchain_libs if name in package_db] ) - # TODO(ah) breaks recompilation avoidance on transitive toolchain library changes. + direct_package_paths = [package_db[name].value.path for name in direct_toolchain_libs if name in package_db] args_for_file.add(cmd_args( - package_db_tset.project_as_args("path"), + direct_package_paths, format="--bin-path={}/bin", )) From ec8f4f1e962f5ed8213992fcfd13b654006ba651 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 17 Jun 2024 11:58:24 +0200 Subject: [PATCH 0974/1133] Remove `path` projection for HaskellPackageDbTSet --- haskell/toolchain.bzl | 4 ---- 1 file changed, 4 deletions(-) diff --git a/haskell/toolchain.bzl b/haskell/toolchain.bzl index b7f4d636f..335fa23cd 100644 --- a/haskell/toolchain.bzl +++ b/haskell/toolchain.bzl @@ -60,13 +60,9 @@ HaskellPackage = record( def _haskell_package_info_as_package_db(p: HaskellPackage): return cmd_args(p.db) -def _haskell_package_info_as_package_path(p: HaskellPackage): - return cmd_args(p.path) - HaskellPackageDbTSet = transitive_set( args_projections = { "package_db": _haskell_package_info_as_package_db, - "path": _haskell_package_info_as_package_path, } ) From a598146bd56eda36186aadeaf82f2e1bc688fa6f Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 19 Jun 2024 10:17:02 +0200 Subject: [PATCH 0975/1133] TMP: capture raw GHC -M output --- haskell/tools/generate_target_metadata.py | 2 ++ 1 file changed, 2 insertions(+) diff --git 
a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 2c533c582..205a5ec10 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -101,6 +101,8 @@ def obtain_target_metadata(args): "module_graph": module_graph, "package_deps": package_deps, "toolchain_deps": toolchain_deps, + "ghc_depends": ghc_depends, + "ghc_options": ghc_options, } From e956579ce243281de62deb3d9a91d943e57ad7bb Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 19 Jun 2024 11:53:43 +0200 Subject: [PATCH 0976/1133] The -opt-json GHC flag was removed --- haskell/tools/generate_target_metadata.py | 27 ++++++++++------------- 1 file changed, 12 insertions(+), 15 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 205a5ec10..6b8a769cb 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -90,19 +90,18 @@ def obtain_target_metadata(args): toolchain_packages = load_toolchain_packages(args.toolchain_libs) ghc_args = fix_ghc_args(args.ghc_arg, toolchain_packages) paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] - ghc_depends, ghc_options = run_ghc_depends(args.ghc, ghc_args, args.source, paths) - th_modules = determine_th_modules(ghc_options, args.source_prefix) - package_prefixes = calc_package_prefixes(args.package) - module_mapping, module_graph, package_deps, toolchain_deps = interpret_ghc_depends( - ghc_depends, args.source_prefix, package_prefixes, toolchain_packages) + ghc_depends = run_ghc_depends(args.ghc, ghc_args, args.source, paths) + #th_modules = determine_th_modules(ghc_options, args.source_prefix) + #package_prefixes = calc_package_prefixes(args.package) + #module_mapping, module_graph, package_deps, toolchain_deps = interpret_ghc_depends( + # ghc_depends, args.source_prefix, package_prefixes, toolchain_packages) return { - "th_modules": th_modules, - 
"module_mapping": module_mapping, - "module_graph": module_graph, - "package_deps": package_deps, - "toolchain_deps": toolchain_deps, + #"th_modules": th_modules, + #"module_mapping": module_mapping, + #"module_graph": module_graph, + #"package_deps": package_deps, + #"toolchain_deps": toolchain_deps, "ghc_depends": ghc_depends, - "ghc_options": ghc_options, } @@ -164,7 +163,6 @@ def fix_ghc_args(ghc_args, toolchain_packages): def run_ghc_depends(ghc, ghc_args, sources, aux_paths): with tempfile.TemporaryDirectory() as dname: json_fname = os.path.join(dname, "depends.json") - opt_json_fname = os.path.join(dname, "options.json") make_fname = os.path.join(dname, "depends.make") args = [ ghc, "-M", "-include-pkg-deps", @@ -172,7 +170,6 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): # backend/src/Foo/Util. => Foo/Util. "-outputdir", ".", "-dep-json", json_fname, - "-opt-json", opt_json_fname, "-dep-makefile", make_fname, ] + ghc_args + sources @@ -182,8 +179,8 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): subprocess.run(args, env=env, check=True) - with open(json_fname) as f, open(opt_json_fname) as o: - return json.load(f), json.load(o) + with open(json_fname) as f: + return json.load(f) def calc_package_prefixes(package_specs): From 1bae1593574aedeac9d243d5e81ce92ea3d9a9ae Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 19 Jun 2024 11:59:16 +0200 Subject: [PATCH 0977/1133] update TH detection --- haskell/tools/generate_target_metadata.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 6b8a769cb..12ff4978e 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -91,12 +91,13 @@ def obtain_target_metadata(args): ghc_args = fix_ghc_args(args.ghc_arg, toolchain_packages) paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] ghc_depends = 
run_ghc_depends(args.ghc, ghc_args, args.source, paths) - #th_modules = determine_th_modules(ghc_options, args.source_prefix) + th_modules = determine_th_modules(ghc_depends) + module_mapping = determine_module_mapping(ghc_depends) #package_prefixes = calc_package_prefixes(args.package) #module_mapping, module_graph, package_deps, toolchain_deps = interpret_ghc_depends( # ghc_depends, args.source_prefix, package_prefixes, toolchain_packages) return { - #"th_modules": th_modules, + "th_modules": th_modules, #"module_mapping": module_mapping, #"module_graph": module_graph, #"package_deps": package_deps, @@ -110,11 +111,11 @@ def load_toolchain_packages(filepath): return json.load(f) -def determine_th_modules(ghc_options, source_prefix): +def determine_th_modules(ghc_depends): return [ - src_to_module_name(strip_prefix_(source_prefix, fname).lstrip("/")) - for fname, opts in ghc_options.items() - if uses_th(opts) + modname + for modname, properties in ghc_depends.items() + if uses_th(properties.get("options", [])) ] @@ -126,6 +127,9 @@ def uses_th(opts): return any([f"-X{ext}" in opts for ext in __TH_EXTENSIONS]) +def determine_module_mapping(ghc_depends + + def fix_ghc_args(ghc_args, toolchain_packages): """Replaces -package flags by -package-id where applicable. 
From 87d9a5170f7cb615e140e7ce942488ac3c412112 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 19 Jun 2024 13:39:52 +0200 Subject: [PATCH 0978/1133] calculate module mapping --- haskell/tools/generate_target_metadata.py | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 12ff4978e..994df11c4 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -92,13 +92,13 @@ def obtain_target_metadata(args): paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] ghc_depends = run_ghc_depends(args.ghc, ghc_args, args.source, paths) th_modules = determine_th_modules(ghc_depends) - module_mapping = determine_module_mapping(ghc_depends) + module_mapping = determine_module_mapping(ghc_depends, args.source_prefix) #package_prefixes = calc_package_prefixes(args.package) #module_mapping, module_graph, package_deps, toolchain_deps = interpret_ghc_depends( # ghc_depends, args.source_prefix, package_prefixes, toolchain_packages) return { "th_modules": th_modules, - #"module_mapping": module_mapping, + "module_mapping": module_mapping, #"module_graph": module_graph, #"package_deps": package_deps, #"toolchain_deps": toolchain_deps, @@ -127,7 +127,21 @@ def uses_th(opts): return any([f"-X{ext}" in opts for ext in __TH_EXTENSIONS]) -def determine_module_mapping(ghc_depends +def determine_module_mapping(ghc_depends, source_prefix): + result = {} + + for modname, properties in ghc_depends.items(): + sources = list(filter(is_haskell_src, properties.get("sources", []))) + + if len(sources) != 1: + raise RuntimeError(f"Expected exactly one Haskell source for module '{modname}' but got '{sources}'.") + + apparent_name = src_to_module_name(strip_prefix_(source_prefix, sources[0]).lstrip("/")) + + if apparent_name != modname: + result[apparent_name] = modname + + return result def 
fix_ghc_args(ghc_args, toolchain_packages): From 0704a08997b4158167c0c5ef9bd3eb1464bf0eda Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 19 Jun 2024 13:43:03 +0200 Subject: [PATCH 0979/1133] determine module graph --- haskell/tools/generate_target_metadata.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 994df11c4..e4755e67b 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -93,13 +93,15 @@ def obtain_target_metadata(args): ghc_depends = run_ghc_depends(args.ghc, ghc_args, args.source, paths) th_modules = determine_th_modules(ghc_depends) module_mapping = determine_module_mapping(ghc_depends, args.source_prefix) + # TODO(ah) handle .hi-boot dependencies + module_graph = determine_module_graph(ghc_depends) #package_prefixes = calc_package_prefixes(args.package) #module_mapping, module_graph, package_deps, toolchain_deps = interpret_ghc_depends( # ghc_depends, args.source_prefix, package_prefixes, toolchain_packages) return { "th_modules": th_modules, "module_mapping": module_mapping, - #"module_graph": module_graph, + "module_graph": module_graph, #"package_deps": package_deps, #"toolchain_deps": toolchain_deps, "ghc_depends": ghc_depends, @@ -144,6 +146,13 @@ def determine_module_mapping(ghc_depends, source_prefix): return result +def determine_module_graph(ghc_depends): + return { + modname: description.get("modules", []) + for modname, description in ghc_depends.items() + } + + def fix_ghc_args(ghc_args, toolchain_packages): """Replaces -package flags by -package-id where applicable. 
From c7013a449e56cae5f096c5e3f873373cbbeb913e Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 19 Jun 2024 14:00:40 +0200 Subject: [PATCH 0980/1133] determine package and toolchain dependencies --- haskell/tools/generate_target_metadata.py | 27 ++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index e4755e67b..4c42f801d 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -12,7 +12,7 @@ * `module_mapping`: Mapping from source inferred module name to actual module name, if different. * `module_graph`: Intra-package module dependencies, `dict[modname, list[modname]]`. * `package_deps`": Cross-package module dependencies, `dict[modname, dict[pkgname, list[modname]]`. -* `toolchain_deps`": Toolchain library dependencies, `dict[modname, pkgname]`. +* `toolchain_deps`": Toolchain library dependencies, `dict[modname, list[pkgname]]`. 
""" import argparse @@ -98,12 +98,13 @@ def obtain_target_metadata(args): #package_prefixes = calc_package_prefixes(args.package) #module_mapping, module_graph, package_deps, toolchain_deps = interpret_ghc_depends( # ghc_depends, args.source_prefix, package_prefixes, toolchain_packages) + package_deps, toolchain_deps = determine_package_deps(ghc_depends, toolchain_packages) return { "th_modules": th_modules, "module_mapping": module_mapping, "module_graph": module_graph, - #"package_deps": package_deps, - #"toolchain_deps": toolchain_deps, + "package_deps": package_deps, + "toolchain_deps": toolchain_deps, "ghc_depends": ghc_depends, } @@ -153,6 +154,26 @@ def determine_module_graph(ghc_depends): } +def determine_package_deps(ghc_depends, toolchain_packages): + toolchain_by_name = toolchain_packages["by-package-name"] + package_deps = {} + toolchain_deps = {} + + for modname, description in ghc_depends.items(): + for pkgdep in description.get("packages", {}): + pkgname = pkgdep.get("name") + pkgid = pkgdep.get("id") + + if pkgname in toolchain_by_name: + if pkgid == toolchain_by_name[pkgname]: + toolchain_deps.setdefault(modname, []).append(pkgname) + # TODO(ah) is this an error? + else: + package_deps.setdefault(modname, {})[pkgname] = pkgdep.get("modules", []) + + return package_deps, toolchain_deps + + def fix_ghc_args(ghc_args, toolchain_packages): """Replaces -package flags by -package-id where applicable. 
From f4d21c3f1ca94069c2eaa0954da8d03974a222b9 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 19 Jun 2024 14:10:49 +0200 Subject: [PATCH 0981/1133] resolve package-ids of package dependencies --- haskell/tools/generate_target_metadata.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 4c42f801d..0cc82673f 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -12,7 +12,7 @@ * `module_mapping`: Mapping from source inferred module name to actual module name, if different. * `module_graph`: Intra-package module dependencies, `dict[modname, list[modname]]`. * `package_deps`": Cross-package module dependencies, `dict[modname, dict[pkgname, list[modname]]`. -* `toolchain_deps`": Toolchain library dependencies, `dict[modname, list[pkgname]]`. +* `toolchain_deps`": Toolchain library dependencies, `dict[modname, list[pkgid]]`. """ import argparse @@ -166,7 +166,10 @@ def determine_package_deps(ghc_depends, toolchain_packages): if pkgname in toolchain_by_name: if pkgid == toolchain_by_name[pkgname]: - toolchain_deps.setdefault(modname, []).append(pkgname) + toolchain_deps.setdefault(modname, []).append(pkgid) + elif pkgid == "base": + # TODO(ah) why is base's package-id cropped to `base`? + toolchain_deps.setdefault(modname, []).append("base") # TODO(ah) is this an error? 
else: package_deps.setdefault(modname, {})[pkgname] = pkgdep.get("modules", []) From 8b7aa6e6e528d53c0b6961aaf27645ea06625ff7 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 19 Jun 2024 14:16:28 +0200 Subject: [PATCH 0982/1133] obtain base package id from catalog --- haskell/tools/generate_target_metadata.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 0cc82673f..a0b67f14f 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -167,9 +167,9 @@ def determine_package_deps(ghc_depends, toolchain_packages): if pkgname in toolchain_by_name: if pkgid == toolchain_by_name[pkgname]: toolchain_deps.setdefault(modname, []).append(pkgid) - elif pkgid == "base": + elif pkgid == pkgname: # TODO(ah) why is base's package-id cropped to `base`? - toolchain_deps.setdefault(modname, []).append("base") + toolchain_deps.setdefault(modname, []).append(toolchain_by_name.get(pkgid, pkgid)) # TODO(ah) is this an error? 
else: package_deps.setdefault(modname, {})[pkgname] = pkgdep.get("modules", []) From 31455ef882ffe9b9df14d1ea0713e7e08ecafe05 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 19 Jun 2024 14:29:54 +0200 Subject: [PATCH 0983/1133] Remove unused code --- haskell/tools/generate_target_metadata.py | 111 ---------------------- 1 file changed, 111 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index a0b67f14f..1fc39a270 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -95,9 +95,6 @@ def obtain_target_metadata(args): module_mapping = determine_module_mapping(ghc_depends, args.source_prefix) # TODO(ah) handle .hi-boot dependencies module_graph = determine_module_graph(ghc_depends) - #package_prefixes = calc_package_prefixes(args.package) - #module_mapping, module_graph, package_deps, toolchain_deps = interpret_ghc_depends( - # ghc_depends, args.source_prefix, package_prefixes, toolchain_packages) package_deps, toolchain_deps = determine_package_deps(ghc_depends, toolchain_packages) return { "th_modules": th_modules, @@ -234,114 +231,6 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): return json.load(f) -def calc_package_prefixes(package_specs): - """Creates a trie to look up modules in dependency packages. - - Package names are stored under the marker key `//pkgname`. This is - unambiguous since path components may not contain `/` characters. 
- """ - result = {} - for pkgname, path in (spec.split(":", 1) for spec in package_specs): - layer = result - for part in Path(path).parts: - layer = layer.setdefault(part, {}) - layer["//pkgname"] = pkgname - return result - - -def lookup_toolchain_dep(module_dep, toolchain_packages): - module_path = Path(module_dep) - layer = toolchain_packages["by-import-dirs"] - for part in module_path.parts: - if (layer := layer.get(part)) is None: - return None - - if (pkgid := layer.get("//pkgid")) is not None: - return pkgid - - -def lookup_package_dep(module_dep, package_prefixes): - """Look up a cross-packge module dependency. - - Assumes that `module_dep` is a relative path to an interface file of the form - `buck-out/.../__my_package__/mod-shared/Some/Package.hi`. - """ - module_path = Path(module_dep) - layer = package_prefixes - for offset, part in enumerate(module_path.parts): - if (layer := layer.get(part)) is None: - return None - - if (pkgname := layer.get("//pkgname")) is not None: - modname = src_to_module_name("/".join(module_path.parts[offset+2:])) - return pkgname, modname - - -def interpret_ghc_depends(ghc_depends, source_prefix, package_prefixes, toolchain_packages): - mapping = {} - graph = {} - extgraph = {} - toolchaingraph = {} - - for k, vs in ghc_depends.items(): - module_name = src_to_module_name(k) - intdeps, extdeps, toolchaindeps = parse_module_deps(vs, package_prefixes, toolchain_packages) - - graph.setdefault(module_name, []).extend(intdeps) - for pkg, mods in extdeps.items(): - extgraph.setdefault(module_name, {}).setdefault(pkg, []).extend(mods) - for pkg in toolchaindeps: - toolchaingraph.setdefault(module_name, set()).add(pkg) - - ext = os.path.splitext(k)[1] - - if ext != ".o": - continue - - sources = list(filter(is_haskell_src, vs)) - - if not sources: - continue - - assert len(sources) == 1, "one object file must correspond to exactly one haskell source " - - hs_file = sources[0] - - hs_module_name = src_to_module_name( - 
strip_prefix_(source_prefix, hs_file).lstrip("/")) - - if hs_module_name != module_name: - mapping[hs_module_name] = module_name - - return mapping, graph, extgraph, toolchaingraph - - -def parse_module_deps(module_deps, package_prefixes, toolchain_packages): - internal_deps = [] - external_deps = {} - toolchain_deps = set() - - for module_dep in module_deps: - if is_haskell_src(module_dep): - continue - - if (tooldep := lookup_toolchain_dep(module_dep, toolchain_packages)) is not None: - toolchain_deps.add(tooldep) - continue - - if os.path.isabs(module_dep): - raise RuntimeError(f"Unexpected module dependency `{module_dep}`. Perhaps a missing `haskell_toolchain_library`?") - - if (pkgdep := lookup_package_dep(module_dep, package_prefixes)) is not None: - pkgname, modname = pkgdep - external_deps.setdefault(pkgname, []).append(modname) - continue - - internal_deps.append(src_to_module_name(module_dep)) - - return internal_deps, external_deps, toolchain_deps - - def src_to_module_name(x): base, _ = os.path.splitext(x) return base.replace("/", ".") From 0bd7c79230fc9a9e44a516c25c08050cce89356f Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 19 Jun 2024 14:35:11 +0200 Subject: [PATCH 0984/1133] Remove raw ghc_depends output --- haskell/tools/generate_target_metadata.py | 1 - 1 file changed, 1 deletion(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 1fc39a270..f1fd638e3 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -102,7 +102,6 @@ def obtain_target_metadata(args): "module_graph": module_graph, "package_deps": package_deps, "toolchain_deps": toolchain_deps, - "ghc_depends": ghc_depends, } From e22bb61b5dc3a27b35a0c4d6565f2a91bc076d10 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 19 Jun 2024 14:41:54 +0200 Subject: [PATCH 0985/1133] Remove unused import-dirs trie --- .../tools/generate_toolchain_library_catalog.py | 15 
--------------- 1 file changed, 15 deletions(-) diff --git a/haskell/tools/generate_toolchain_library_catalog.py b/haskell/tools/generate_toolchain_library_catalog.py index 33649573f..4b86a6f62 100755 --- a/haskell/tools/generate_toolchain_library_catalog.py +++ b/haskell/tools/generate_toolchain_library_catalog.py @@ -48,7 +48,6 @@ def _ghc_pkg_command(ghc_pkg, package_db): "--global", "--no-user-package-db", "--simple-output", - "--expand-pkgroot", ] + (["--package-db", package_db] if package_db else []) @@ -73,17 +72,11 @@ def _parse_ghc_pkg_dump(lines): current_key = "id" if value: current_package["id"] = value - elif key == "import-dirs": - current_key = "import-dirs" - if value: - current_package.setdefault("import-dirs", []).append(value) else: current_key = None elif line.strip(): if current_key in ["name", "id"]: current_package[current_key] = line.strip() - elif current_key == "import-dirs": - current_package.setdefault("import-dirs", []).append(line.strip()) if current_package: yield current_package @@ -91,19 +84,11 @@ def _parse_ghc_pkg_dump(lines): def _construct_package_mappings(packages): result = { - "by-import-dirs": {}, "by-package-name": {}, } for package in packages: result["by-package-name"][package["name"]] = package["id"] - for import_dir in package.get("import-dirs", []): - layer = result["by-import-dirs"] - - for part in Path(import_dir).parts: - layer = layer.setdefault(part, {}) - - layer["//pkgid"] = package["id"] return result From f967596baa354725517fee4746d2050cadd3aa87 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 28 Jun 2024 15:33:53 +0200 Subject: [PATCH 0986/1133] Remove toolchain library catalog At this point it was only needed to map package names to package ids, which was only needed due to GHC not handling package sub-libraries as expected with the `-package` flag, see [GHC#25025]. 
https://gitlab.haskell.org/ghc/ghc/-/merge_requests/11994/diffs?commit_id=6f82305420f4e64b3c365997447e86b8d1765977 addresses this issue. [GHC#25025]: https://gitlab.haskell.org/ghc/ghc/-/issues/25025 --- decls/haskell_common.bzl | 4 - haskell/compile.bzl | 74 +++++--------- haskell/tools/BUCK.v2 | 6 -- haskell/tools/generate_target_metadata.py | 65 +------------ .../generate_toolchain_library_catalog.py | 97 ------------------- haskell/util.bzl | 13 +++ 6 files changed, 41 insertions(+), 218 deletions(-) delete mode 100755 haskell/tools/generate_toolchain_library_catalog.py diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index 6a8f15998..09c64e492 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ -49,10 +49,6 @@ def _scripts_arg(): providers = [RunInfo], default = "prelude//haskell/tools:generate_target_metadata", ), - "_generate_toolchain_library_catalog": attrs.dep( - providers = [RunInfo], - default = "prelude//haskell/tools:generate_toolchain_library_catalog", - ), "_ghc_wrapper": attrs.dep( providers = [RunInfo], default = "prelude//haskell/tools:ghc_wrapper", diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 7c6083fbb..d9b496383 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -27,6 +27,7 @@ load( "attr_deps", "attr_deps_haskell_lib_infos", "attr_deps_haskell_link_infos", + "attr_deps_haskell_toolchain_libraries", "get_artifact_suffix", "is_haskell_src", "output_extensions", @@ -178,38 +179,6 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl return modules -def _toolchain_library_catalog_impl(ctx: AnalysisContext) -> list[Provider]: - haskell_toolchain = ctx.attrs.toolchain[HaskellToolchainInfo] - - ghc_pkg = haskell_toolchain.packager - - catalog_gen = ctx.attrs.generate_toolchain_library_catalog[RunInfo] - catalog = ctx.actions.declare_output("haskell_toolchain_libraries.json") - - cmd = cmd_args(catalog_gen, "--ghc-pkg", ghc_pkg, "--output", 
catalog.as_output()) - - if haskell_toolchain.packages: - cmd.add("--package-db", haskell_toolchain.packages.package_db) - - ctx.actions.run(cmd, category = "haskell_toolchain_library_catalog") - - return [DefaultInfo(default_output = catalog)] - -_toolchain_library_catalog = anon_rule( - impl = _toolchain_library_catalog_impl, - attrs = { - "toolchain": attrs.dep( - providers = [HaskellToolchainInfo], - ), - "generate_toolchain_library_catalog": attrs.dep( - providers = [RunInfo], - ), - }, - artifact_promise_mappings = { - "catalog": lambda x: x[DefaultInfo].default_outputs[0], - } -) - def target_metadata( ctx: AnalysisContext, *, @@ -226,11 +195,6 @@ def target_metadata( if HaskellToolchainLibrary in dep ] - toolchain_libs_catalog = ctx.actions.anon_target(_toolchain_library_catalog, { - "toolchain": ctx.attrs._haskell_toolchain, - "generate_toolchain_library_catalog": ctx.attrs._generate_toolchain_library_catalog, - }) - # The object and interface file paths are depending on the real module name # as inferred by GHC, not the source file path; currently this requires the # module name to correspond to the source file path as otherwise GHC will @@ -239,9 +203,7 @@ def target_metadata( # # (module X.Y.Z must be defined in a file at X/Y/Z.hs) - catalog = toolchain_libs_catalog.artifact("catalog") - - def get_metadata(ctx, _artifacts, resolved, outputs, catalog=catalog): + def get_metadata(ctx, _artifacts, resolved, outputs): # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. 
@@ -266,7 +228,6 @@ def target_metadata( md_args = cmd_args(md_gen) md_args.add(packages_info.bin_paths) - md_args.add("--toolchain-libs", catalog) md_args.add("--ghc", haskell_toolchain.compiler) md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) md_args.add( @@ -531,10 +492,10 @@ def _compile_module( md_file: Artifact, graph: dict[str, list[str]], package_deps: dict[str, list[str]], - toolchain_deps: list[str], outputs: dict[Artifact, Artifact], artifact_suffix: str, direct_deps_by_name: dict[str, typing.Any], + toolchain_deps_by_name: dict[str, None], ) -> CompiledModuleTSet: compile_cmd = cmd_args(common_args.command) # These compiler arguments can be passed in a response file. @@ -594,12 +555,20 @@ def _compile_module( ) ) + toolchain_deps = [] + library_deps = [] exposed_package_modules = [] exposed_package_dbs = [] for dep_pkgname, dep_modules in package_deps.items(): - exposed_package_dbs.append(direct_deps_by_name[dep_pkgname].package_db) - for dep_modname in dep_modules: - exposed_package_modules.append(direct_deps_by_name[dep_pkgname].modules[dep_modname]) + if dep_pkgname in toolchain_deps_by_name: + toolchain_deps.append(dep_pkgname) + elif dep_pkgname in direct_deps_by_name: + library_deps.append(dep_pkgname) + exposed_package_dbs.append(direct_deps_by_name[dep_pkgname].package_db) + for dep_modname in dep_modules: + exposed_package_modules.append(direct_deps_by_name[dep_pkgname].modules[dep_modname]) + else: + fail("Unknown library dependency '{}'. Add the library to the `deps` attribute".format(dep_pkgname)) # Transitive module dependencies from other packages. 
cross_package_modules = ctx.actions.tset( @@ -617,8 +586,8 @@ def _compile_module( children = [cross_package_modules] + this_package_modules, ) - compile_cmd.add(cmd_args(toolchain_deps, prepend = "-package-id")) - compile_cmd.add(cmd_args(package_deps.keys(), prepend = "-package")) + compile_cmd.add(cmd_args(library_deps, prepend = "-package")) + compile_cmd.add(cmd_args(toolchain_deps, prepend = "-package")) abi_tag = ctx.actions.artifact_tag() @@ -628,7 +597,7 @@ def _compile_module( compile_cmd.hidden(dependency_modules.project_as_args("objects")) compile_cmd.add(dependency_modules.project_as_args("dyn_objects_dot_o")) compile_cmd.add(cmd_args(dependency_modules.reduce("package_deps").keys(), prepend = "-package")) - compile_cmd.add(cmd_args(dependency_modules.reduce("toolchain_deps").keys(), prepend = "-package-id")) + compile_cmd.add(cmd_args(dependency_modules.reduce("toolchain_deps").keys(), prepend = "-package")) compile_cmd.add(cmd_args(dependency_modules.reduce("packagedb_deps").keys(), prepend = "--buck2-package-db")) @@ -662,7 +631,7 @@ def _compile_module( interfaces = module.interfaces, objects = module.objects, dyn_object_dot_o = dyn_object_dot_o, - package_deps = package_deps.keys(), + package_deps = library_deps, toolchain_deps = toolchain_deps, db_deps = exposed_package_dbs, ), @@ -687,6 +656,10 @@ def compile( def do_compile(ctx, artifacts, resolved, outputs, md_file=md_file, modules=modules): # Collect library dependencies. Note that these don't need to be in a # particular order. 
+ toolchain_deps_by_name = { + lib.name: None + for lib in attr_deps_haskell_toolchain_libraries(ctx) + } direct_deps_info = [ lib.prof_info[link_style] if enable_profiling else lib.info[link_style] for lib in attr_deps_haskell_link_infos(ctx) @@ -713,7 +686,6 @@ def compile( module_map = md["module_mapping"] graph = md["module_graph"] package_deps = md["package_deps"] - toolchain_deps = md["toolchain_deps"] mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } module_tsets = {} @@ -730,11 +702,11 @@ def compile( module_tsets = module_tsets, graph = graph, package_deps = package_deps.get(module_name, {}), - toolchain_deps = toolchain_deps.get(module_name, []), outputs = outputs, md_file=md_file, artifact_suffix = artifact_suffix, direct_deps_by_name = direct_deps_by_name, + toolchain_deps_by_name = toolchain_deps_by_name, ) return [DynamicCompileResultInfo(modules = module_tsets)] diff --git a/haskell/tools/BUCK.v2 b/haskell/tools/BUCK.v2 index 6c7b58c3f..610e419c8 100644 --- a/haskell/tools/BUCK.v2 +++ b/haskell/tools/BUCK.v2 @@ -12,12 +12,6 @@ prelude.python_bootstrap_binary( visibility = ["PUBLIC"], ) -prelude.python_bootstrap_binary( - name = "generate_toolchain_library_catalog", - main = "generate_toolchain_library_catalog.py", - visibility = ["PUBLIC"], -) - prelude.python_bootstrap_binary( name = "ghc_wrapper", main = "ghc_wrapper.py", diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index f1fd638e3..bd173e7ee 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -12,7 +12,6 @@ * `module_mapping`: Mapping from source inferred module name to actual module name, if different. * `module_graph`: Intra-package module dependencies, `dict[modname, list[modname]]`. * `package_deps`": Cross-package module dependencies, `dict[modname, dict[pkgname, list[modname]]`. -* `toolchain_deps`": Toolchain library dependencies, `dict[modname, list[pkgid]]`. 
""" import argparse @@ -32,11 +31,6 @@ def main(): required=True, type=argparse.FileType("w"), help="Write package metadata to this file in JSON format.") - parser.add_argument( - "--toolchain-libs", - required=True, - type=str, - help="Path to the toolchain libraries catalog file.") parser.add_argument( "--ghc", required=True, @@ -87,21 +81,18 @@ def json_default_handler(o): def obtain_target_metadata(args): - toolchain_packages = load_toolchain_packages(args.toolchain_libs) - ghc_args = fix_ghc_args(args.ghc_arg, toolchain_packages) paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] - ghc_depends = run_ghc_depends(args.ghc, ghc_args, args.source, paths) + ghc_depends = run_ghc_depends(args.ghc, args.ghc_arg, args.source, paths) th_modules = determine_th_modules(ghc_depends) module_mapping = determine_module_mapping(ghc_depends, args.source_prefix) # TODO(ah) handle .hi-boot dependencies module_graph = determine_module_graph(ghc_depends) - package_deps, toolchain_deps = determine_package_deps(ghc_depends, toolchain_packages) + package_deps = determine_package_deps(ghc_depends) return { "th_modules": th_modules, "module_mapping": module_mapping, "module_graph": module_graph, "package_deps": package_deps, - "toolchain_deps": toolchain_deps, } @@ -150,61 +141,15 @@ def determine_module_graph(ghc_depends): } -def determine_package_deps(ghc_depends, toolchain_packages): - toolchain_by_name = toolchain_packages["by-package-name"] +def determine_package_deps(ghc_depends): package_deps = {} - toolchain_deps = {} for modname, description in ghc_depends.items(): for pkgdep in description.get("packages", {}): pkgname = pkgdep.get("name") - pkgid = pkgdep.get("id") - - if pkgname in toolchain_by_name: - if pkgid == toolchain_by_name[pkgname]: - toolchain_deps.setdefault(modname, []).append(pkgid) - elif pkgid == pkgname: - # TODO(ah) why is base's package-id cropped to `base`? 
- toolchain_deps.setdefault(modname, []).append(toolchain_by_name.get(pkgid, pkgid)) - # TODO(ah) is this an error? - else: - package_deps.setdefault(modname, {})[pkgname] = pkgdep.get("modules", []) - - return package_deps, toolchain_deps - - -def fix_ghc_args(ghc_args, toolchain_packages): - """Replaces -package flags by -package-id where applicable. + package_deps.setdefault(modname, {})[pkgname] = pkgdep.get("modules", []) - Packages that have hidden internal packages cause failures of the form: - - Could not load module ‘Data.Attoparsec.Text’. - It is a member of the hidden package ‘attoparsec-0.14.4’. - - This can be avoided by specifying the corresponding packages by package-id - rather than package name. - - The toolchain libraries catalog tracks a mapping from package name to - package id. We apply it here to any toolchain library dependencies. - """ - result = [] - mapping = toolchain_packages["by-package-name"] - - args_iter = iter(ghc_args) - for arg in args_iter: - if arg == "-package": - package_name = next(args_iter) - if package_name is None: - raise RuntimeError("Missing package name argument for -package flag") - - if (package_id := mapping.get(package_name, None)) is not None: - result.extend(["-package-id", package_id]) - else: - result.extend(["-package", package_name]) - else: - result.append(arg) - - return result + return package_deps def run_ghc_depends(ghc, ghc_args, sources, aux_paths): diff --git a/haskell/tools/generate_toolchain_library_catalog.py b/haskell/tools/generate_toolchain_library_catalog.py deleted file mode 100755 index 4b86a6f62..000000000 --- a/haskell/tools/generate_toolchain_library_catalog.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python3 - -"""Helper script to generate a mapping from interface paths to toolchain library names. - -The result is a JSON object with the following fields: -* `by-import-dirs`: A trie mapping import directory prefixes to package names. 
Encoded as nested dictionaries with leafs denoted by the special key `//pkgid`. -* `by-package-name`: A mapping from package name to package id. -""" - -import argparse -import json -from pathlib import Path -import subprocess - - -def main(): - parser = argparse.ArgumentParser( - description=__doc__, - fromfile_prefix_chars="@") - parser.add_argument( - "--output", - required=True, - type=argparse.FileType("w"), - help="Write package mapping to this file in JSON format.") - parser.add_argument( - "--ghc-pkg", - required=True, - type=str, - help="Path to the Haskell compiler's ghc-pkg utilty.") - parser.add_argument( - "--package-db", - required=False, - type=str, - help="Path to the package db including all haskell libraries.") - args = parser.parse_args() - - with subprocess.Popen(_ghc_pkg_command(args.ghc_pkg, args.package_db), stdout=subprocess.PIPE, text=True) as proc: - packages = list(_parse_ghc_pkg_dump(proc.stdout)) - result = _construct_package_mappings(packages) - - json.dump(result, args.output) - - -def _ghc_pkg_command(ghc_pkg, package_db): - return [ - ghc_pkg, - "dump", - "--global", - "--no-user-package-db", - "--simple-output", - ] + (["--package-db", package_db] if package_db else []) - - -def _parse_ghc_pkg_dump(lines): - current_package = {} - current_key = None - - for line in lines: - if "---" == line.strip(): - if current_package: - yield(current_package) - - current_package = {} - elif ":" in line: - key, value = map(str.strip, line.split(":", 1)) - - if key == "name": - current_key = "name" - if value: - current_package["name"] = value - elif key == "id": - current_key = "id" - if value: - current_package["id"] = value - else: - current_key = None - elif line.strip(): - if current_key in ["name", "id"]: - current_package[current_key] = line.strip() - - if current_package: - yield current_package - - -def _construct_package_mappings(packages): - result = { - "by-package-name": {}, - } - - for package in packages: - 
result["by-package-name"][package["name"]] = package["id"] - - return result - - -if __name__ == "__main__": - main() diff --git a/haskell/util.bzl b/haskell/util.bzl index 89545e7af..7ed573395 100644 --- a/haskell/util.bzl +++ b/haskell/util.bzl @@ -10,6 +10,10 @@ load( "@prelude//cxx:cxx_toolchain_types.bzl", "CxxPlatformInfo", ) +load( + "@prelude//haskell:toolchain.bzl", + "HaskellToolchainLibrary", +) load( "@prelude//haskell:library_info.bzl", "HaskellLibraryInfo", @@ -73,6 +77,15 @@ def attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo]: ], ) +def attr_deps_haskell_toolchain_libraries(ctx: AnalysisContext) -> list[HaskellToolchainLibrary]: + return filter( + None, + [ + d.get(HaskellToolchainLibrary) + for d in attr_deps(ctx) + ctx.attrs.template_deps + ], + ) + # DONT CALL THIS FUNCTION, you want attr_deps_haskell_link_infos instead def attr_deps_haskell_link_infos_sans_template_deps(ctx: AnalysisContext) -> list[HaskellLinkInfo]: return filter( From e3e2571a8b9db123d52eda53042089f67ba2e09a Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 27 Jun 2024 13:54:45 +0200 Subject: [PATCH 0987/1133] Use ArgLike for `db` in `HaskellPackage` The db does not need to be an artifact, but should be something useful as an argument. --- haskell/toolchain.bzl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/haskell/toolchain.bzl b/haskell/toolchain.bzl index 335fa23cd..977c1ff19 100644 --- a/haskell/toolchain.bzl +++ b/haskell/toolchain.bzl @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//utils:arglike.bzl", "ArgLike") + HaskellPlatformInfo = provider(fields = { "name": provider_field(typing.Any, default = None), }) @@ -53,7 +55,7 @@ HaskellPackagesInfo = record( ) HaskellPackage = record( - db = Artifact, + db = ArgLike, path = Artifact, ) From 878263b083e21a61e898b28ead8954f61b22cd7c Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 1 Jul 2024 12:07:33 +0200 Subject: [PATCH 0988/1133] Remove `package_db` from `HaskellPackagesInfo` It was used for the package catalog, but is no longer needed. --- haskell/compile.bzl | 1 - haskell/toolchain.bzl | 1 - 2 files changed, 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index d9b496383..f7ea46f38 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -328,7 +328,6 @@ def get_packages_info( direct_package_paths = [package_db[name].value.path for name in direct_toolchain_libs if name in package_db] bin_paths = cmd_args(direct_package_paths, format="--bin-path={}/bin") else: - packagedb_args.add(haskell_toolchain.packages.package_db) bin_paths = cmd_args() haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( diff --git a/haskell/toolchain.bzl b/haskell/toolchain.bzl index 977c1ff19..f57d6d161 100644 --- a/haskell/toolchain.bzl +++ b/haskell/toolchain.bzl @@ -50,7 +50,6 @@ HaskellToolchainLibrary = provider( ) HaskellPackagesInfo = record( - package_db = Artifact, dynamic = DynamicValue, ) From 0c629b884d2909c716a495fb11ce9639f0194525 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Jul 2024 17:00:29 +0200 Subject: [PATCH 0989/1133] remove objects from TH compile actions --- haskell/compile.bzl | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index f7ea46f38..47c3e74e9 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -592,11 +592,12 @@ def _compile_module( compile_cmd.hidden( abi_tag.tag_artifacts(dependency_modules.project_as_args("interfaces"))) - if 
enable_th: - compile_cmd.hidden(dependency_modules.project_as_args("objects")) - compile_cmd.add(dependency_modules.project_as_args("dyn_objects_dot_o")) - compile_cmd.add(cmd_args(dependency_modules.reduce("package_deps").keys(), prepend = "-package")) - compile_cmd.add(cmd_args(dependency_modules.reduce("toolchain_deps").keys(), prepend = "-package")) + # TODO remove redundant data and dead code + #if enable_th: + # compile_cmd.hidden(dependency_modules.project_as_args("objects")) + # compile_cmd.add(dependency_modules.project_as_args("dyn_objects_dot_o")) + # compile_cmd.add(cmd_args(dependency_modules.reduce("package_deps").keys(), prepend = "-package")) + # compile_cmd.add(cmd_args(dependency_modules.reduce("toolchain_deps").keys(), prepend = "-package")) compile_cmd.add(cmd_args(dependency_modules.reduce("packagedb_deps").keys(), prepend = "--buck2-package-db")) From 1cb9b2d834d03b05e84fab5ed06ae89c882ca27d Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Jul 2024 17:05:27 +0200 Subject: [PATCH 0990/1133] Set -fprefer-byte-code for TH splices Also sets `-fbyte-code-and-object-code` on every compile action. 
--- haskell/compile.bzl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 47c3e74e9..0aa13c80b 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -592,6 +592,9 @@ def _compile_module( compile_cmd.hidden( abi_tag.tag_artifacts(dependency_modules.project_as_args("interfaces"))) + compile_cmd.add("-fbyte-code-and-object-code") + if enable_th: + compile_cmd.add("-fprefer-byte-code") # TODO remove redundant data and dead code #if enable_th: # compile_cmd.hidden(dependency_modules.project_as_args("objects")) From fbbc43b941e8611e29a51ae63df77dd7163d4757 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Jul 2024 17:09:38 +0200 Subject: [PATCH 0991/1133] Remove .dyn_o to .o symlinks --- haskell/compile.bzl | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 0aa13c80b..703afd37a 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -46,7 +46,6 @@ CompiledModuleInfo = provider(fields = { "abi": provider_field(Artifact), "interfaces": provider_field(list[Artifact]), "objects": provider_field(list[Artifact]), - "dyn_object_dot_o": provider_field(Artifact), # TODO[AH] track this module's package-name/id & package-db instead. "db_deps": provider_field(list[Artifact]), "package_deps": provider_field(list[str]), @@ -62,9 +61,6 @@ def _compiled_module_project_as_interfaces(mod: CompiledModuleInfo) -> cmd_args: def _compiled_module_project_as_objects(mod: CompiledModuleInfo) -> cmd_args: return cmd_args(mod.objects) -def _compiled_module_project_as_dyn_objects_dot_o(mod: CompiledModuleInfo) -> cmd_args: - return cmd_args(mod.dyn_object_dot_o) - def _compiled_module_reduce_as_package_deps(children: list[dict[str, None]], mod: CompiledModuleInfo | None) -> dict[str, None]: # TODO[AH] is there a better way to avoid duplicate -package flags? # Using a projection instead would produce duplicates. 
@@ -94,7 +90,6 @@ CompiledModuleTSet = transitive_set( "abi": _compiled_module_project_as_abi, "interfaces": _compiled_module_project_as_interfaces, "objects": _compiled_module_project_as_objects, - "dyn_objects_dot_o": _compiled_module_project_as_dyn_objects_dot_o, }, reductions = { "package_deps": _compiled_module_reduce_as_package_deps, @@ -598,7 +593,6 @@ def _compile_module( # TODO remove redundant data and dead code #if enable_th: # compile_cmd.hidden(dependency_modules.project_as_args("objects")) - # compile_cmd.add(dependency_modules.project_as_args("dyn_objects_dot_o")) # compile_cmd.add(cmd_args(dependency_modules.reduce("package_deps").keys(), prepend = "-package")) # compile_cmd.add(cmd_args(dependency_modules.reduce("toolchain_deps").keys(), prepend = "-package")) @@ -620,20 +614,12 @@ def _compile_module( } ) - object = module.objects[-1] - if object.extension == ".o": - dyn_object_dot_o = object - else: - dyn_object_dot_o = ctx.actions.declare_output("dot-o", paths.replace_extension(object.short_path, ".o")) - ctx.actions.symlink_file(dyn_object_dot_o, object) - module_tset = ctx.actions.tset( CompiledModuleTSet, value = CompiledModuleInfo( abi = module.hash, interfaces = module.interfaces, objects = module.objects, - dyn_object_dot_o = dyn_object_dot_o, package_deps = library_deps, toolchain_deps = toolchain_deps, db_deps = exposed_package_dbs, From 8f345131afc54a9d3348077f4c5fd39e9b222293 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Jul 2024 17:16:16 +0200 Subject: [PATCH 0992/1133] Remove module package dependency tracking It was only used for TH splice compilation with object files. 
--- haskell/compile.bzl | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 703afd37a..99e4980c2 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -48,7 +48,6 @@ CompiledModuleInfo = provider(fields = { "objects": provider_field(list[Artifact]), # TODO[AH] track this module's package-name/id & package-db instead. "db_deps": provider_field(list[Artifact]), - "package_deps": provider_field(list[str]), "toolchain_deps": provider_field(list[str]), }) @@ -61,14 +60,6 @@ def _compiled_module_project_as_interfaces(mod: CompiledModuleInfo) -> cmd_args: def _compiled_module_project_as_objects(mod: CompiledModuleInfo) -> cmd_args: return cmd_args(mod.objects) -def _compiled_module_reduce_as_package_deps(children: list[dict[str, None]], mod: CompiledModuleInfo | None) -> dict[str, None]: - # TODO[AH] is there a better way to avoid duplicate -package flags? - # Using a projection instead would produce duplicates. - result = {pkg: None for pkg in mod.package_deps} if mod else {} - for child in children: - result.update(child) - return result - def _compiled_module_reduce_as_packagedb_deps(children: list[dict[Artifact, None]], mod: CompiledModuleInfo | None) -> dict[Artifact, None]: # TODO[AH] is there a better way to avoid duplicate package-dbs? # Using a projection instead would produce duplicates. 
@@ -92,7 +83,6 @@ CompiledModuleTSet = transitive_set( "objects": _compiled_module_project_as_objects, }, reductions = { - "package_deps": _compiled_module_reduce_as_package_deps, "packagedb_deps": _compiled_module_reduce_as_packagedb_deps, "toolchain_deps": _compiled_module_reduce_as_toolchain_deps, }, @@ -593,7 +583,6 @@ def _compile_module( # TODO remove redundant data and dead code #if enable_th: # compile_cmd.hidden(dependency_modules.project_as_args("objects")) - # compile_cmd.add(cmd_args(dependency_modules.reduce("package_deps").keys(), prepend = "-package")) # compile_cmd.add(cmd_args(dependency_modules.reduce("toolchain_deps").keys(), prepend = "-package")) compile_cmd.add(cmd_args(dependency_modules.reduce("packagedb_deps").keys(), prepend = "--buck2-package-db")) @@ -620,7 +609,6 @@ def _compile_module( abi = module.hash, interfaces = module.interfaces, objects = module.objects, - package_deps = library_deps, toolchain_deps = toolchain_deps, db_deps = exposed_package_dbs, ), From f8c0a3bbc114a234b3c5b045f8da635686d49f04 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Jul 2024 17:25:02 +0200 Subject: [PATCH 0993/1133] Remove module toolchain lib dependency tracking It was only used for TH splice compilation with object files. --- haskell/compile.bzl | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 99e4980c2..8312bd9bd 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -48,7 +48,6 @@ CompiledModuleInfo = provider(fields = { "objects": provider_field(list[Artifact]), # TODO[AH] track this module's package-name/id & package-db instead. 
"db_deps": provider_field(list[Artifact]), - "toolchain_deps": provider_field(list[str]), }) def _compiled_module_project_as_abi(mod: CompiledModuleInfo) -> cmd_args: @@ -68,14 +67,6 @@ def _compiled_module_reduce_as_packagedb_deps(children: list[dict[Artifact, None result.update(child) return result -def _compiled_module_reduce_as_toolchain_deps(children: list[dict[str, None]], mod: CompiledModuleInfo | None) -> dict[str, None]: - # TODO[AH] is there a better way to avoid duplicate -package-id flags? - # Using a projection instead would produce duplicates. - result = {pkg: None for pkg in mod.toolchain_deps} if mod else {} - for child in children: - result.update(child) - return result - CompiledModuleTSet = transitive_set( args_projections = { "abi": _compiled_module_project_as_abi, @@ -84,7 +75,6 @@ CompiledModuleTSet = transitive_set( }, reductions = { "packagedb_deps": _compiled_module_reduce_as_packagedb_deps, - "toolchain_deps": _compiled_module_reduce_as_toolchain_deps, }, ) @@ -583,7 +573,6 @@ def _compile_module( # TODO remove redundant data and dead code #if enable_th: # compile_cmd.hidden(dependency_modules.project_as_args("objects")) - # compile_cmd.add(cmd_args(dependency_modules.reduce("toolchain_deps").keys(), prepend = "-package")) compile_cmd.add(cmd_args(dependency_modules.reduce("packagedb_deps").keys(), prepend = "--buck2-package-db")) @@ -609,7 +598,6 @@ def _compile_module( abi = module.hash, interfaces = module.interfaces, objects = module.objects, - toolchain_deps = toolchain_deps, db_deps = exposed_package_dbs, ), children = [cross_package_modules] + this_package_modules, From e75357af3f98cf03d376776b265c9cc51dd60230 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 16 Jul 2024 17:28:35 +0200 Subject: [PATCH 0994/1133] Remove module objects reduction It was only used for TH splice compilation with object files. 
--- haskell/compile.bzl | 9 --------- 1 file changed, 9 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 8312bd9bd..abce144b6 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -45,7 +45,6 @@ load("@prelude//utils:strings.bzl", "strip_prefix") CompiledModuleInfo = provider(fields = { "abi": provider_field(Artifact), "interfaces": provider_field(list[Artifact]), - "objects": provider_field(list[Artifact]), # TODO[AH] track this module's package-name/id & package-db instead. "db_deps": provider_field(list[Artifact]), }) @@ -56,9 +55,6 @@ def _compiled_module_project_as_abi(mod: CompiledModuleInfo) -> cmd_args: def _compiled_module_project_as_interfaces(mod: CompiledModuleInfo) -> cmd_args: return cmd_args(mod.interfaces) -def _compiled_module_project_as_objects(mod: CompiledModuleInfo) -> cmd_args: - return cmd_args(mod.objects) - def _compiled_module_reduce_as_packagedb_deps(children: list[dict[Artifact, None]], mod: CompiledModuleInfo | None) -> dict[Artifact, None]: # TODO[AH] is there a better way to avoid duplicate package-dbs? # Using a projection instead would produce duplicates. 
@@ -71,7 +67,6 @@ CompiledModuleTSet = transitive_set( args_projections = { "abi": _compiled_module_project_as_abi, "interfaces": _compiled_module_project_as_interfaces, - "objects": _compiled_module_project_as_objects, }, reductions = { "packagedb_deps": _compiled_module_reduce_as_packagedb_deps, @@ -570,9 +565,6 @@ def _compile_module( compile_cmd.add("-fbyte-code-and-object-code") if enable_th: compile_cmd.add("-fprefer-byte-code") - # TODO remove redundant data and dead code - #if enable_th: - # compile_cmd.hidden(dependency_modules.project_as_args("objects")) compile_cmd.add(cmd_args(dependency_modules.reduce("packagedb_deps").keys(), prepend = "--buck2-package-db")) @@ -597,7 +589,6 @@ def _compile_module( value = CompiledModuleInfo( abi = module.hash, interfaces = module.interfaces, - objects = module.objects, db_deps = exposed_package_dbs, ), children = [cross_package_modules] + this_package_modules, From d618d7bdbe2c427b44ad848ee732e936d56abde9 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 24 Jul 2024 08:18:00 -0700 Subject: [PATCH 0995/1133] No more md_json dependency per module --- haskell/compile.bzl | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index abce144b6..d392d2042 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -458,7 +458,6 @@ def _compile_module( module_name: str, module: _Module, module_tsets: dict[str, CompiledModuleTSet], - md_file: Artifact, graph: dict[str, list[str]], package_deps: dict[str, list[str]], outputs: dict[Artifact, Artifact], @@ -515,15 +514,6 @@ def _compile_module( else: compile_cmd.add(compile_args_for_file) - compile_cmd.add( - cmd_args( - cmd_args(md_file, format = "-i{}").parent(), - "/", - module.prefix_dir, - delimiter="" - ) - ) - toolchain_deps = [] library_deps = [] exposed_package_modules = [] @@ -659,7 +649,6 @@ def compile( graph = graph, package_deps = package_deps.get(module_name, {}), outputs = outputs, - md_file=md_file, 
artifact_suffix = artifact_suffix, direct_deps_by_name = direct_deps_by_name, toolchain_deps_by_name = toolchain_deps_by_name, From 94d50d4d9a111f119872a534bffc1ca17d0c1448 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 24 Jul 2024 08:42:19 -0700 Subject: [PATCH 0996/1133] try to fix -i(dir) --- haskell/compile.bzl | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index d392d2042..a26a460b8 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -514,6 +514,15 @@ def _compile_module( else: compile_cmd.add(compile_args_for_file) + compile_cmd.add( + cmd_args( + cmd_args(stubs.as_output(), format = "-i{}").parent(), + "/", + module.prefix_dir, + delimiter="" + ) + ) + toolchain_deps = [] library_deps = [] exposed_package_modules = [] From 596683f9bd13aae0976522ced2fd71e9342f2944 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 16:05:31 +0200 Subject: [PATCH 0997/1133] Extract boot module dependencies --- haskell/tools/generate_target_metadata.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index bd173e7ee..d092d5f67 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -11,6 +11,7 @@ * `th_modules`: List of modules that require Template Haskell. * `module_mapping`: Mapping from source inferred module name to actual module name, if different. * `module_graph`: Intra-package module dependencies, `dict[modname, list[modname]]`. +* `boot_deps`: Intra-package dependencies on boot-modules, `dict[modname, list[modname]]`. * `package_deps`": Cross-package module dependencies, `dict[modname, dict[pkgname, list[modname]]`. 
""" @@ -86,12 +87,13 @@ def obtain_target_metadata(args): th_modules = determine_th_modules(ghc_depends) module_mapping = determine_module_mapping(ghc_depends, args.source_prefix) # TODO(ah) handle .hi-boot dependencies - module_graph = determine_module_graph(ghc_depends) + module_graph, boot_deps = determine_module_graph(ghc_depends) package_deps = determine_package_deps(ghc_depends) return { "th_modules": th_modules, "module_mapping": module_mapping, "module_graph": module_graph, + "boot_deps": boot_deps, "package_deps": package_deps, } @@ -135,10 +137,15 @@ def determine_module_mapping(ghc_depends, source_prefix): def determine_module_graph(ghc_depends): - return { + module_deps = { modname: description.get("modules", []) for modname, description in ghc_depends.items() } + boot_deps = { + modname: description.get("modules-boot", []) + for modname, description in ghc_depends.items() + } + return module_deps, boot_deps def determine_package_deps(ghc_depends): From 2095e58a403ffc64649a09e93c294adefb02a205 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 16:22:13 +0200 Subject: [PATCH 0998/1133] Generate module objects for boot files --- haskell/compile.bzl | 16 ++++++++++------ haskell/haskell_haddock.bzl | 1 + haskell/util.bzl | 9 +++++++++ 3 files changed, 20 insertions(+), 6 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index a26a460b8..44f17a28e 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -29,6 +29,7 @@ load( "attr_deps_haskell_link_infos", "attr_deps_haskell_toolchain_libraries", "get_artifact_suffix", + "is_haskell_boot", "is_haskell_src", "output_extensions", "src_to_module_name", @@ -117,24 +118,27 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl osuf, hisuf = output_extensions(link_style, enable_profiling) for src in sources: - if not is_haskell_src(src.short_path): + bootsuf = "" + if is_haskell_boot(src.short_path): + bootsuf = "-boot" + elif not 
is_haskell_src(src.short_path): continue - module_name = src_to_module_name(src.short_path) - interface_path = paths.replace_extension(src.short_path, "." + hisuf) + module_name = src_to_module_name(src.short_path) + bootsuf + interface_path = paths.replace_extension(src.short_path, "." + hisuf + bootsuf) interface = ctx.actions.declare_output("mod-" + suffix, interface_path) interfaces = [interface] - object_path = paths.replace_extension(src.short_path, "." + osuf) + object_path = paths.replace_extension(src.short_path, "." + osuf + bootsuf) object = ctx.actions.declare_output("mod-" + suffix, object_path) objects = [object] hash = ctx.actions.declare_output("mod-" + suffix, interface_path + ".hash") if link_style in [LinkStyle("static"), LinkStyle("static_pic")]: dyn_osuf, dyn_hisuf = output_extensions(LinkStyle("shared"), enable_profiling) - interface_path = paths.replace_extension(src.short_path, "." + dyn_hisuf) + interface_path = paths.replace_extension(src.short_path, "." + dyn_hisuf + bootsuf) interface = ctx.actions.declare_output("mod-" + suffix, interface_path) interfaces.append(interface) - object_path = paths.replace_extension(src.short_path, "." + dyn_osuf) + object_path = paths.replace_extension(src.short_path, "." 
+ dyn_osuf + bootsuf) object = ctx.actions.declare_output("mod-" + suffix, object_path) objects.append(object) diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index fe7ca6039..2e49761f8 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -124,6 +124,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes html = ctx.actions.declare_output("haddock-html/{}.html".format(src_to_module_name(hi.short_path).replace(".", "-"))), ) for hi in compiled.hi + if not hi.extension.endswith("-boot") } direct_deps_link_info = attr_deps_haskell_link_infos(ctx) diff --git a/haskell/util.bzl b/haskell/util.bzl index 7ed573395..eab727a6a 100644 --- a/haskell/util.bzl +++ b/haskell/util.bzl @@ -45,6 +45,11 @@ HASKELL_EXTENSIONS = [ ".y", ] +HASKELL_BOOT_EXTENSIONS = [ + ".hs-boot", + ".lhs-boot", +] + # We take a named_set for srcs, which is sometimes a list, sometimes a dict. # In future we should only accept a list, but for now, cope with both. 
def srcs_to_pairs(srcs) -> list[(str, Artifact)]: @@ -57,6 +62,10 @@ def is_haskell_src(x: str) -> bool: _, ext = paths.split_extension(x) return ext in HASKELL_EXTENSIONS +def is_haskell_boot(x: str) -> bool: + _, ext = paths.split_extension(x) + return ext in HASKELL_BOOT_EXTENSIONS + def src_to_module_name(x: str) -> str: base, _ext = paths.split_extension(x) return base.replace("/", ".") From 8a7473eab3fccb9dd6b608a1916d51e01931925f Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 16:27:03 +0200 Subject: [PATCH 0999/1133] Extend module dep graph by boot deps --- haskell/compile.bzl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 44f17a28e..cdf6bed13 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -644,8 +644,14 @@ def compile( th_modules = md["th_modules"] module_map = md["module_mapping"] graph = md["module_graph"] + boot_deps = md["boot_deps"] package_deps = md["package_deps"] + for module_name, boot_deps in md["boot_deps"].items(): + for boot_dep in boot_deps: + graph.setdefault(module_name, []).append(boot_dep + "-boot") + graph.setdefault(boot_dep + "-boot", []) + mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } module_tsets = {} From 055378c3f9b7a23da08c59c6d01bb550701ebd62 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 16:29:15 +0200 Subject: [PATCH 1000/1133] Copy package deps onto boot module --- haskell/compile.bzl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index cdf6bed13..f96993b0f 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -651,6 +651,8 @@ def compile( for boot_dep in boot_deps: graph.setdefault(module_name, []).append(boot_dep + "-boot") graph.setdefault(boot_dep + "-boot", []) + if boot_dep + "-boot" not in package_deps: + package_deps[boot_dep + "-boot"] = package_deps[boot_dep] mapped_modules = { module_map.get(k, k): v for k, v in 
modules.items() } module_tsets = {} From fa1a20dda39c2d7b54430bc652131e9a2469fede Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 16:40:02 +0200 Subject: [PATCH 1001/1133] Use md_file for `-outputdir` Instead of `stubs` which is not generated for boot modules. We set `ignore_artifacts` to not incur a dependency. --- haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index f96993b0f..30354f87c 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -492,7 +492,7 @@ def _compile_module( his = [outputs[hi] for hi in module.interfaces] stubs = outputs[module.stub_dir] - compile_args_for_file.add("-outputdir", cmd_args([cmd_args(stubs.as_output()).parent(), module.prefix_dir], delimiter="/")) + compile_args_for_file.add("-outputdir", cmd_args([cmd_args(md_file, ignore_artifacts=True).parent(), module.prefix_dir], delimiter="/")) compile_args_for_file.add("-o", objects[0].as_output()) compile_args_for_file.add("-ohi", his[0].as_output()) compile_args_for_file.add("-stubdir", stubs.as_output()) From d67535e81a97663218722b719f052d2663a7f0e1 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 16:44:26 +0200 Subject: [PATCH 1002/1133] Skip stubdir output for boot modules --- haskell/compile.bzl | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 30354f87c..2e249c3bb 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -101,7 +101,7 @@ _Module = record( interfaces = field(list[Artifact]), hash = field(Artifact), objects = field(list[Artifact]), - stub_dir = field(Artifact), + stub_dir = field(Artifact | None), prefix_dir = field(str), ) @@ -142,7 +142,11 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl object = ctx.actions.declare_output("mod-" + suffix, object_path) objects.append(object) - stub_dir = 
ctx.actions.declare_output("stub-" + suffix + "-" + module_name, dir=True) + if bootsuf == "": + stub_dir = ctx.actions.declare_output("stub-" + suffix + "-" + module_name, dir=True) + else: + stub_dir = None + modules[module_name] = _Module( source = src, interfaces = interfaces, @@ -490,12 +494,14 @@ def _compile_module( objects = [outputs[obj] for obj in module.objects] his = [outputs[hi] for hi in module.interfaces] - stubs = outputs[module.stub_dir] + if module.stub_dir != None: + stubs = outputs[module.stub_dir] compile_args_for_file.add("-outputdir", cmd_args([cmd_args(md_file, ignore_artifacts=True).parent(), module.prefix_dir], delimiter="/")) compile_args_for_file.add("-o", objects[0].as_output()) compile_args_for_file.add("-ohi", his[0].as_output()) - compile_args_for_file.add("-stubdir", stubs.as_output()) + if module.stub_dir != None: + compile_args_for_file.add("-stubdir", stubs.as_output()) if link_style in [LinkStyle("static_pic"), LinkStyle("static")]: compile_args_for_file.add("-dynamic-too") @@ -681,7 +687,11 @@ def compile( interfaces = [interface for module in modules.values() for interface in module.interfaces] objects = [object for module in modules.values() for object in module.objects] - stub_dirs = [module.stub_dir for module in modules.values()] + stub_dirs = [ + module.stub_dir + for module in modules.values() + if module.stub_dir != None + ] abi_hashes = [module.hash for module in modules.values()] dyn_module_tsets = ctx.actions.dynamic_output( From e17d3f2c0a6aaba3d078865394c084e27e7638f7 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 16:48:03 +0200 Subject: [PATCH 1003/1133] Skip .o-boot files when linking --- haskell/haskell.bzl | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index bb3498b86..2033f92e8 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -555,8 +555,13 @@ def _build_haskell_lib( if link_style == LinkStyle("shared"): 
lib = ctx.actions.declare_output(lib_short_path) + objects = [ + object + for object in compiled.objects + if not object.extension.endswith("-boot") + ] - def do_link(ctx, artifacts, resolved, outputs, lib=lib, objects=compiled.objects): + def do_link(ctx, artifacts, resolved, outputs, lib=lib, objects=objects): pkg_deps = resolved[haskell_toolchain.packages.dynamic] package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages @@ -598,7 +603,7 @@ def _build_haskell_lib( ctx.actions.dynamic_output( dynamic = [], promises = [haskell_toolchain.packages.dynamic], - inputs = compiled.objects, + inputs = objects, outputs = [lib.as_output()], f = do_link, ) From c143bcac222106d74cb97dc5802d8606ab67d6af Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 16:53:23 +0200 Subject: [PATCH 1004/1133] Simplify boot module dep graph integration --- haskell/compile.bzl | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 2e249c3bb..292dbf7e9 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -653,12 +653,20 @@ def compile( boot_deps = md["boot_deps"] package_deps = md["package_deps"] + # TODO GHC --dep-json should integrate boot modules directly into the dependency graph. + for module_name, module in modules.items(): + if not module_name.endswith("-boot"): + continue + + # Add boot modules to the module graph + graph[module_name] = [] + # Add package dependencies for the boot module + # TODO GHC --dep-json should report boot module dependencies. 
+ package_deps[module_name] = package_deps.get(module_name[:-5], []) + for module_name, boot_deps in md["boot_deps"].items(): for boot_dep in boot_deps: graph.setdefault(module_name, []).append(boot_dep + "-boot") - graph.setdefault(boot_dep + "-boot", []) - if boot_dep + "-boot" not in package_deps: - package_deps[boot_dep + "-boot"] = package_deps[boot_dep] mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } module_tsets = {} From f34c43b276b3576b38af976703e8d2d6ab7802b9 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 17:00:38 +0200 Subject: [PATCH 1005/1133] Calculate reverse boot file dep graph --- haskell/compile.bzl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 292dbf7e9..c6ebdb316 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -650,9 +650,13 @@ def compile( th_modules = md["th_modules"] module_map = md["module_mapping"] graph = md["module_graph"] - boot_deps = md["boot_deps"] package_deps = md["package_deps"] + boot_rev_deps = {} + for module_name, boot_deps in md["boot_deps"].items(): + for boot_dep in boot_deps: + boot_rev_deps.setdefault(boot_dep, []).append(module_name) + # TODO GHC --dep-json should integrate boot modules directly into the dependency graph. 
for module_name, module in modules.items(): if not module_name.endswith("-boot"): From c5457ce0d93df19c7e05187a86fb1ed07795207e Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 17:09:50 +0200 Subject: [PATCH 1006/1133] Add boot module dependencies --- haskell/compile.bzl | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index c6ebdb316..41a0addb7 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -655,7 +655,7 @@ def compile( boot_rev_deps = {} for module_name, boot_deps in md["boot_deps"].items(): for boot_dep in boot_deps: - boot_rev_deps.setdefault(boot_dep, []).append(module_name) + boot_rev_deps.setdefault(boot_dep + "-boot", []).append(module_name) # TODO GHC --dep-json should integrate boot modules directly into the dependency graph. for module_name, module in modules.items(): @@ -664,8 +664,19 @@ def compile( # Add boot modules to the module graph graph[module_name] = [] - # Add package dependencies for the boot module # TODO GHC --dep-json should report boot module dependencies. + # The following is a naive approximation of the boot module's dependencies, + # taking the corresponding module's dependencies + # minus those that depend on the boot module. + + # Add module dependencies for the boot module + graph[module_name].extend([ + dep + for dep in graph[module_name[:-5]] + if not dep in boot_rev_deps[module_name] + ]) + + # Add package dependencies for the boot module package_deps[module_name] = package_deps.get(module_name[:-5], []) for module_name, boot_deps in md["boot_deps"].items(): From 9a176da2106b96fec65f1161a7484ef3629feafb Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 17:13:17 +0200 Subject: [PATCH 1007/1133] Revert "try to fix -i(dir)" This reverts commit 94d50d4d9a111f119872a534bffc1ca17d0c1448. 
--- haskell/compile.bzl | 9 --------- 1 file changed, 9 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 41a0addb7..8f784889a 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -524,15 +524,6 @@ def _compile_module( else: compile_cmd.add(compile_args_for_file) - compile_cmd.add( - cmd_args( - cmd_args(stubs.as_output(), format = "-i{}").parent(), - "/", - module.prefix_dir, - delimiter="" - ) - ) - toolchain_deps = [] library_deps = [] exposed_package_modules = [] From 0d82d971cc3cc2223bb861196028c03631b691fe Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 17:13:19 +0200 Subject: [PATCH 1008/1133] Revert "No more md_json dependency per module" This reverts commit d618d7bdbe2c427b44ad848ee732e936d56abde9. --- haskell/compile.bzl | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 8f784889a..0da2dfc50 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -466,6 +466,7 @@ def _compile_module( module_name: str, module: _Module, module_tsets: dict[str, CompiledModuleTSet], + md_file: Artifact, graph: dict[str, list[str]], package_deps: dict[str, list[str]], outputs: dict[Artifact, Artifact], @@ -524,6 +525,15 @@ def _compile_module( else: compile_cmd.add(compile_args_for_file) + compile_cmd.add( + cmd_args( + cmd_args(md_file, format = "-i{}").parent(), + "/", + module.prefix_dir, + delimiter="" + ) + ) + toolchain_deps = [] library_deps = [] exposed_package_modules = [] @@ -690,6 +700,7 @@ def compile( graph = graph, package_deps = package_deps.get(module_name, {}), outputs = outputs, + md_file=md_file, artifact_suffix = artifact_suffix, direct_deps_by_name = direct_deps_by_name, toolchain_deps_by_name = toolchain_deps_by_name, From 6a18a8d094ed20b8674eb8b7dd16921d7d787cd8 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 17:13:51 +0200 Subject: [PATCH 1009/1133] Set ignore_artifacts on md_file input I confirmed that a 
change of the md_file alone that does not affect the dependency graph does not trigger a rerun of the module compile action. --- haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 0da2dfc50..cf70576a2 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -527,7 +527,7 @@ def _compile_module( compile_cmd.add( cmd_args( - cmd_args(md_file, format = "-i{}").parent(), + cmd_args(md_file, format = "-i{}", ignore_artifacts=True).parent(), "/", module.prefix_dir, delimiter="" From fe24a6d8da4526cf1127b956ca5a19acfcf1df44 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Wed, 14 Aug 2024 17:29:03 +0200 Subject: [PATCH 1010/1133] Avoid non-haskell source warning for boot files --- haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index cf70576a2..1a3935df7 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -380,7 +380,7 @@ def _common_compile_module_args( non_haskell_sources = [ src for (path, src) in srcs_to_pairs(ctx.attrs.srcs) - if not is_haskell_src(path) + if not is_haskell_src(path) and not is_haskell_boot(path) ] if non_haskell_sources: From 08bea3ecbb34789723f204b4a2ca34fc8968dcf1 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 14 Aug 2024 09:45:53 +0200 Subject: [PATCH 1011/1133] Do not require haskell toolchain packages This makes a haskell toolchain configured with `system_haskell_toolchain` work again. 
--- haskell/compile.bzl | 12 ++++++++---- haskell/haskell.bzl | 9 ++++++--- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 1a3935df7..80f454c78 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -223,7 +223,7 @@ def target_metadata( ctx.actions.dynamic_output( dynamic = [], - promises = [haskell_toolchain.packages.dynamic], + promises = [haskell_toolchain.packages.dynamic] if haskell_toolchain.packages else [], inputs = [], outputs = [md_file.as_output()], f = get_metadata, @@ -409,8 +409,12 @@ def _common_compile_module_args( ] toolchain_libs = direct_toolchain_libs + libs.reduce("packages") - pkg_deps = resolved[haskell_toolchain.packages.dynamic] - package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages + if haskell_toolchain.packages: + pkg_deps = resolved[haskell_toolchain.packages.dynamic] + package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages + else: + package_db = [] + package_db_tset = ctx.actions.tset( HaskellPackageDbTSet, children = [package_db[name] for name in toolchain_libs if name in package_db] @@ -729,7 +733,7 @@ def compile( if enable_profiling else lib.info[link_style] ] - ] + [ haskell_toolchain.packages.dynamic ], + ] + ([ haskell_toolchain.packages.dynamic ] if haskell_toolchain.packages else [ ]), inputs = ctx.attrs.srcs, outputs = [o.as_output() for o in interfaces + objects + stub_dirs + abi_hashes], f = do_compile) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 2033f92e8..8f4a834cd 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -1246,8 +1246,11 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: def do_link(ctx, artifacts, resolved, outputs, output=output, objects=objects): link_cmd = link.copy() # link is already frozen, make a copy - pkg_deps = resolved[haskell_toolchain.packages.dynamic] - package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages + if haskell_toolchain.packages: + pkg_deps = 
resolved[haskell_toolchain.packages.dynamic] + package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages + else: + package_db = [] # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. @@ -1280,7 +1283,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: ctx.actions.dynamic_output( dynamic = [], - promises = [haskell_toolchain.packages.dynamic], + promises = [haskell_toolchain.packages.dynamic] if haskell_toolchain.packages else [ ], inputs = objects.values(), outputs = [output.as_output()], f = do_link, From e664b5ccc63f0e4e2e88d97baf5f6317544fe6e0 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 14 Aug 2024 09:46:52 +0200 Subject: [PATCH 1012/1133] Handle `main` attribute for haskell binaries --- haskell/compile.bzl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 80f454c78..33e87e137 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -358,6 +358,9 @@ def _common_compile_module_args( command.add("-c") + if getattr(ctx.attrs, "main", None) != None: + command.add(["-main-is", ctx.attrs.main]) + if enable_haddock: command.add("-haddock") From a525b5508b23ff7769e4b7a12cd6f4f4eb028674 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 14 Aug 2024 11:47:01 +0200 Subject: [PATCH 1013/1133] Do not use `-outputdir` This flag is a shorthand for setting several output directories. We want to set these explicitly to avoid creating files inside the project directory. Using `-outputdir` also sets `-hidir`. Unfortunately, setting `-hidir` effectively disables the use of the search path when looking up interface files as ghc expects all interface files in that directory. 
--- haskell/compile.bzl | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 33e87e137..999271aa4 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -505,9 +505,17 @@ def _compile_module( if module.stub_dir != None: stubs = outputs[module.stub_dir] - compile_args_for_file.add("-outputdir", cmd_args([cmd_args(md_file, ignore_artifacts=True).parent(), module.prefix_dir], delimiter="/")) compile_args_for_file.add("-o", objects[0].as_output()) compile_args_for_file.add("-ohi", his[0].as_output()) + + # Set the output directories. We do not use the -outputdir flag, but set the directories individually. + # Note, the -outputdir option is shorthand for the combination of -odir, -hidir, -hiedir, -stubdir and -dumpdir. + # But setting -hidir effectively disables the use of the search path to look up interface files, + # as ghc exclusively looks in that directory when it is set. + for dir in ["o", "hie", "dump"]: + compile_args_for_file.add( + "-{}dir".format(dir), cmd_args([cmd_args(stubs.as_output(), parent=1), module.prefix_dir], delimiter="/"), + ) if module.stub_dir != None: compile_args_for_file.add("-stubdir", stubs.as_output()) From 45b724ff417f7156e1b9903fe7d2843751be552c Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 14 Aug 2024 15:19:20 +0200 Subject: [PATCH 1014/1133] Add `src_strip_prefix` attribute --- decls/haskell_common.bzl | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index 09c64e492..074d29a9d 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ -17,6 +17,13 @@ def _srcs_arg(): """), } +def _src_strip_prefix_arg(): + return { + "src_strip_prefix": attrs.string(default = "", doc = """ + A prefix to strip from the source files when compiling. 
+"""), + } + def _deps_arg(): return { "deps": attrs.list(attrs.dep(), default = [], doc = """ @@ -57,6 +64,7 @@ def _scripts_arg(): haskell_common = struct( srcs_arg = _srcs_arg, + src_strip_prefix = _src_strip_prefix_arg, deps_arg = _deps_arg, compiler_flags_arg = _compiler_flags_arg, exported_linker_flags_arg = _exported_linker_flags_arg, From 1e204c1a778d5d8ad1dd3f6ae5dd1ece325c8df6 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 14 Aug 2024 15:25:54 +0200 Subject: [PATCH 1015/1133] Add `src_strip_prefix` to `haskell_library` rule --- decls/haskell_rules.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/decls/haskell_rules.bzl b/decls/haskell_rules.bzl index c11ab6040..eec1d1022 100644 --- a/decls/haskell_rules.bzl +++ b/decls/haskell_rules.bzl @@ -164,6 +164,7 @@ haskell_library = prelude_rule( attrs = ( # @unsorted-dict-items haskell_common.srcs_arg() | + haskell_common.src_strip_prefix() | haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | haskell_common.scripts_arg() | From c1964b3b54142d8b55a6d7db44df39facb731de3 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 14 Aug 2024 15:42:22 +0200 Subject: [PATCH 1016/1133] Append `src_strip_prefix` to module prefix_dir The prefix_dir is added to the search path so that .hi files can be found properly. 
--- haskell/compile.bzl | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 999271aa4..37fea1c6c 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -147,13 +147,20 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl else: stub_dir = None + prefix_dir = "mod-" + suffix + + src_strip_prefix = getattr(ctx.attrs, "src_strip_prefix", None) + + if src_strip_prefix: + prefix_dir += "/" + src_strip_prefix + modules[module_name] = _Module( source = src, interfaces = interfaces, hash = hash, objects = objects, stub_dir = stub_dir, - prefix_dir = "mod-" + suffix) + prefix_dir = prefix_dir) return modules From 400e62445725f3e8c5609fd65945e2d3a61dd3e3 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 14 Aug 2024 15:45:13 +0200 Subject: [PATCH 1017/1133] Strip src prefix from exposed-modules in package config --- haskell/haskell.bzl | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 8f4a834cd..41395f43e 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -423,8 +423,16 @@ def _make_package( use_empty_lib: bool) -> Artifact: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) + def strip_prefix_dir(path, prefix): + if not prefix: + return path + prefix = prefix if prefix.endswith("/") else prefix + "/" + if path.startswith(prefix): + return path[len(prefix):] + return path + # Don't expose boot sources, as they're only meant to be used for compiling. 
- modules = [src_to_module_name(x) for x, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(x)] + modules = [src_to_module_name(strip_prefix_dir(x, ctx.attrs.src_strip_prefix)) for x, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(x)] def mk_artifact_dir(dir_prefix: str, profiled: bool) -> str: art_suff = get_artifact_suffix(link_style, profiled) From f8530ec8ed251e45c2fc9c6c5b5fbee6c75f8e5e Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 14 Aug 2024 16:08:17 +0200 Subject: [PATCH 1018/1133] Append src prefix to import-dirs of package conf --- haskell/haskell.bzl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 41395f43e..af98e3366 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -434,9 +434,13 @@ def _make_package( # Don't expose boot sources, as they're only meant to be used for compiling. modules = [src_to_module_name(strip_prefix_dir(x, ctx.attrs.src_strip_prefix)) for x, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(x)] + src_prefix = getattr(ctx.attrs, "src_strip_prefix", "") + if src_prefix: + src_prefix = "/" + src_prefix + def mk_artifact_dir(dir_prefix: str, profiled: bool) -> str: art_suff = get_artifact_suffix(link_style, profiled) - return "\"${pkgroot}/" + dir_prefix + "-" + art_suff + "\"" + return "\"${pkgroot}/" + dir_prefix + "-" + art_suff + src_prefix + "\"" import_dirs = [mk_artifact_dir("mod", profiled) for profiled in profiling] From ab08a62bfe38a53994d3268750cadd916a2f5804 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 15 Aug 2024 08:14:14 +0200 Subject: [PATCH 1019/1133] Do not append `src_prefix` to `library-dirs` in package conf --- haskell/haskell.bzl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index af98e3366..1ed66c31c 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -434,15 +434,15 @@ def _make_package( # Don't expose boot sources, as 
they're only meant to be used for compiling. modules = [src_to_module_name(strip_prefix_dir(x, ctx.attrs.src_strip_prefix)) for x, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(x)] + def mk_artifact_dir(dir_prefix: str, profiled: bool, subdir: str = "") -> str: + art_suff = get_artifact_suffix(link_style, profiled) + return "\"${pkgroot}/" + dir_prefix + "-" + art_suff + subdir + "\"" + src_prefix = getattr(ctx.attrs, "src_strip_prefix", "") if src_prefix: src_prefix = "/" + src_prefix - def mk_artifact_dir(dir_prefix: str, profiled: bool) -> str: - art_suff = get_artifact_suffix(link_style, profiled) - return "\"${pkgroot}/" + dir_prefix + "-" + art_suff + src_prefix + "\"" - - import_dirs = [mk_artifact_dir("mod", profiled) for profiled in profiling] + import_dirs = [mk_artifact_dir("mod", profiled, src_prefix) for profiled in profiling] conf = [ "name: " + pkgname, From 3365ba046c39946c25903c0c7a2f803fd723fa35 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 15 Aug 2024 09:30:27 +0200 Subject: [PATCH 1020/1133] Extend `src_to_module` function to handle source prefix --- haskell/haskell.bzl | 10 +--------- haskell/util.bzl | 8 +++++++- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 1ed66c31c..778ec3734 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -423,16 +423,8 @@ def _make_package( use_empty_lib: bool) -> Artifact: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) - def strip_prefix_dir(path, prefix): - if not prefix: - return path - prefix = prefix if prefix.endswith("/") else prefix + "/" - if path.startswith(prefix): - return path[len(prefix):] - return path - # Don't expose boot sources, as they're only meant to be used for compiling. 
- modules = [src_to_module_name(strip_prefix_dir(x, ctx.attrs.src_strip_prefix)) for x, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(x)] + modules = [src_to_module_name(x, ctx.attrs.src_strip_prefix) for x, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(x)] def mk_artifact_dir(dir_prefix: str, profiled: bool, subdir: str = "") -> str: art_suff = get_artifact_suffix(link_style, profiled) diff --git a/haskell/util.bzl b/haskell/util.bzl index eab727a6a..dbf5144c7 100644 --- a/haskell/util.bzl +++ b/haskell/util.bzl @@ -35,6 +35,7 @@ load( ) load("@prelude//utils:platform_flavors_util.bzl", "by_platform") load("@prelude//utils:utils.bzl", "flatten") +load("@prelude//utils:strings.bzl", "strip_prefix") HASKELL_EXTENSIONS = [ ".hs", @@ -66,8 +67,13 @@ def is_haskell_boot(x: str) -> bool: _, ext = paths.split_extension(x) return ext in HASKELL_BOOT_EXTENSIONS -def src_to_module_name(x: str) -> str: +def src_to_module_name(x: str, src_prefix: str = "") -> str: base, _ext = paths.split_extension(x) + if src_prefix: + prefix = src_prefix if src_prefix.endswith("/") else src_prefix + "/" + stripped = strip_prefix(prefix, base) + if stripped: base = stripped + return base.replace("/", ".") def _by_platform(ctx: AnalysisContext, xs: list[(str, list[typing.Any])]) -> list[typing.Any]: From 97157d4f59094295b4c0d4c3c6a3d6f1c61142b9 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 15 Aug 2024 09:32:39 +0200 Subject: [PATCH 1021/1133] Strip source prefix in haskell_haddock rule --- haskell/compile.bzl | 2 ++ haskell/haskell_haddock.bzl | 6 +++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 37fea1c6c..0940b0c90 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -86,6 +86,7 @@ CompileResultInfo = record( hashes = field(list[Artifact]), producing_indices = field(bool), module_tsets = field(DynamicValue), + src_prefix = field(str), ) PackagesInfo = record( @@ -783,4 +784,5 @@ def compile( 
stubs = stubs_dir, producing_indices = False, module_tsets = dyn_module_tsets, + src_prefix = getattr(ctx.attrs, "src_strip_prefix", ""), ) diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index 2e49761f8..8ebc56377 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -118,10 +118,10 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes cmd.add("--source-entity", source_entity) haddock_infos = { - src_to_module_name(hi.short_path): _HaddockInfo( + src_to_module_name(hi.short_path, compiled.src_prefix): _HaddockInfo( interface = hi, - haddock = ctx.actions.declare_output("haddock-interface/{}.haddock".format(src_to_module_name(hi.short_path))), - html = ctx.actions.declare_output("haddock-html/{}.html".format(src_to_module_name(hi.short_path).replace(".", "-"))), + haddock = ctx.actions.declare_output("haddock-interface/{}.haddock".format(src_to_module_name(hi.short_path, compiled.src_prefix))), + html = ctx.actions.declare_output("haddock-html/{}.html".format(src_to_module_name(hi.short_path, compiled.src_prefix).replace(".", "-"))), ) for hi in compiled.hi if not hi.extension.endswith("-boot") From 0c79a6c3a0607d5ce4048966d484010ded0fe800 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 16 Aug 2024 10:06:50 +0200 Subject: [PATCH 1022/1133] Filter boot files from GHC -M command line Otherwise the boot file metadata will not be generated. 
See https://github.com/MercuryTechnologies/the-culture-repo/issues/200#issuecomment-2293029404 --- haskell/tools/generate_target_metadata.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index d092d5f67..d3cff7eff 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -163,6 +163,7 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): with tempfile.TemporaryDirectory() as dname: json_fname = os.path.join(dname, "depends.json") make_fname = os.path.join(dname, "depends.make") + haskell_sources = list(filter(is_haskell_src, sources)) args = [ ghc, "-M", "-include-pkg-deps", # Note: `-outputdir '.'` removes the prefix of all targets: @@ -170,7 +171,7 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): "-outputdir", ".", "-dep-json", json_fname, "-dep-makefile", make_fname, - ] + ghc_args + sources + ] + ghc_args + haskell_sources env = os.environ.copy() path = env.get("PATH", "") From 0c997ebc05ca11803a2f4b6ceffac6d18ff4c9ef Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 16 Aug 2024 10:17:44 +0200 Subject: [PATCH 1023/1133] Handle boot files in module_mapping --- haskell/tools/generate_target_metadata.py | 31 ++++++++++++++++++++--- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index d3cff7eff..3b2f3ded5 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -7,11 +7,12 @@ * The cross-package module dependencies. * Which modules require Template Haskell. +Note, boot files will be represented by the module name with a `-boot` suffix. + The result is a JSON object with the following fields: * `th_modules`: List of modules that require Template Haskell. 
* `module_mapping`: Mapping from source inferred module name to actual module name, if different. * `module_graph`: Intra-package module dependencies, `dict[modname, list[modname]]`. -* `boot_deps`: Intra-package dependencies on boot-modules, `dict[modname, list[modname]]`. * `package_deps`": Cross-package module dependencies, `dict[modname, dict[pkgname, list[modname]]`. """ @@ -86,14 +87,12 @@ def obtain_target_metadata(args): ghc_depends = run_ghc_depends(args.ghc, args.ghc_arg, args.source, paths) th_modules = determine_th_modules(ghc_depends) module_mapping = determine_module_mapping(ghc_depends, args.source_prefix) - # TODO(ah) handle .hi-boot dependencies - module_graph, boot_deps = determine_module_graph(ghc_depends) + module_graph = determine_module_graph(ghc_depends) package_deps = determine_package_deps(ghc_depends) return { "th_modules": th_modules, "module_mapping": module_mapping, "module_graph": module_graph, - "boot_deps": boot_deps, "package_deps": package_deps, } @@ -133,6 +132,19 @@ def determine_module_mapping(ghc_depends, source_prefix): if apparent_name != modname: result[apparent_name] = modname + boot_properties = properties["boot"] + if boot_properties != None: + boot_modname = modname + "-boot" + boot_sources = list(filter(is_haskell_boot, boot_properties.get("sources", []))) + + if len(boot_sources) != 1: + raise RuntimeError(f"Expected at most one Haskell boot file for module '{modname}' but got '{boot_sources}'.") + + boot_apparent_name = src_to_module_name(strip_prefix_(source_prefix, sources[0]).lstrip("/")) + "-boot" + + if boot_apparent_name != boot_modname: + result[boot_apparent_name] = boot_modname + return result @@ -193,6 +205,11 @@ def is_haskell_src(x): return ext in HASKELL_EXTENSIONS +def is_haskell_boot(x): + _, ext = os.path.splitext(x) + return ext in HASKELL_BOOT_EXTENSIONS + + HASKELL_EXTENSIONS = [ ".hs", ".lhs", @@ -203,6 +220,12 @@ def is_haskell_src(x): ] +HASKELL_BOOT_EXTENSIONS = [ + ".hs-boot", + ".lhs-boot", +] 
+ + def strip_prefix_(prefix, s): stripped = strip_prefix(prefix, s) From 9488b2316a89fc293113d7adc56912b9f3cd9a1a Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 16 Aug 2024 10:23:04 +0200 Subject: [PATCH 1024/1133] Handle boot files in module graph --- haskell/tools/generate_target_metadata.py | 26 ++++++++++++++--------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 3b2f3ded5..32d99da95 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -132,7 +132,7 @@ def determine_module_mapping(ghc_depends, source_prefix): if apparent_name != modname: result[apparent_name] = modname - boot_properties = properties["boot"] + boot_properties = properties.get("boot", None) if boot_properties != None: boot_modname = modname + "-boot" boot_sources = list(filter(is_haskell_boot, boot_properties.get("sources", []))) @@ -149,15 +149,21 @@ def determine_module_mapping(ghc_depends, source_prefix): def determine_module_graph(ghc_depends): - module_deps = { - modname: description.get("modules", []) - for modname, description in ghc_depends.items() - } - boot_deps = { - modname: description.get("modules-boot", []) - for modname, description in ghc_depends.items() - } - return module_deps, boot_deps + module_deps = {} + for modname, description in ghc_depends.items(): + module_deps[modname] = description.get("modules", []) + [ + dep + "-boot" + for dep in description.get("modules-boot", []) + ] + + boot_description = description.get("boot", None) + if boot_description != None: + module_deps[modname + "-boot"] = boot_description.get("modules", []) + [ + dep + "-boot" + for dep in boot_description.get("modules-boot", []) + ] + + return module_deps def determine_package_deps(ghc_depends): From c3a1a2af8ce679ff84c841213a536af23c0ad888 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 16 Aug 2024 10:25:07 +0200 
Subject: [PATCH 1025/1133] Handle boot files in package deps --- haskell/tools/generate_target_metadata.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 32d99da95..1d2770f00 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -174,6 +174,12 @@ def determine_package_deps(ghc_depends): pkgname = pkgdep.get("name") package_deps.setdefault(modname, {})[pkgname] = pkgdep.get("modules", []) + boot_description = description.get("boot", None) + if boot_description != None: + for pkgdep in boot_description.get("packages", {}): + pkgname = pkgdep.get("name") + package_deps.setdefault(modname + "-boot", {})[pkgname] = pkgdep.get("modules", []) + return package_deps From 46ff39460438b88faa445ce60fa541987e81548a Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 16 Aug 2024 10:29:33 +0200 Subject: [PATCH 1026/1133] Support imports in boot files --- haskell/compile.bzl | 31 ------------------------------- 1 file changed, 31 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 1a3935df7..f66edf764 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -653,37 +653,6 @@ def compile( graph = md["module_graph"] package_deps = md["package_deps"] - boot_rev_deps = {} - for module_name, boot_deps in md["boot_deps"].items(): - for boot_dep in boot_deps: - boot_rev_deps.setdefault(boot_dep + "-boot", []).append(module_name) - - # TODO GHC --dep-json should integrate boot modules directly into the dependency graph. - for module_name, module in modules.items(): - if not module_name.endswith("-boot"): - continue - - # Add boot modules to the module graph - graph[module_name] = [] - # TODO GHC --dep-json should report boot module dependencies. 
- # The following is a naive approximation of the boot module's dependencies, - # taking the corresponding module's dependencies - # minus those that depend on the boot module. - - # Add module dependencies for the boot module - graph[module_name].extend([ - dep - for dep in graph[module_name[:-5]] - if not dep in boot_rev_deps[module_name] - ]) - - # Add package dependencies for the boot module - package_deps[module_name] = package_deps.get(module_name[:-5], []) - - for module_name, boot_deps in md["boot_deps"].items(): - for boot_dep in boot_deps: - graph.setdefault(module_name, []).append(boot_dep + "-boot") - mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } module_tsets = {} From 97626650d19fffe9f34118b67e913ccfd68818cf Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 16 Aug 2024 10:31:58 +0200 Subject: [PATCH 1027/1133] wording --- haskell/tools/generate_target_metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 1d2770f00..f109eba2e 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -7,7 +7,7 @@ * The cross-package module dependencies. * Which modules require Template Haskell. -Note, boot files will be represented by the module name with a `-boot` suffix. +Note, boot files will be represented by a `-boot` suffix in the module name. The result is a JSON object with the following fields: * `th_modules`: List of modules that require Template Haskell. From b811942e63a4dbf2c6810fc7cec5b195b30104f0 Mon Sep 17 00:00:00 2001 From: Rebecca Turner Date: Tue, 14 May 2024 10:47:21 -0700 Subject: [PATCH 1028/1133] Improve error messages for GHC compilation failures Before, failed `ghc_wrapper` invocations would print a Python stack trace including the entire failed command (typically hundreds of lines long). 
By just returning a non-zero integer and not producing the requested output, Buck2 will realize the build failed and just print stdout/stderr (and the full command can be retrieved with `buck2 log what-failed`). --- haskell/tools/ghc_wrapper.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 567f99f43..d658d6f9c 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -9,11 +9,9 @@ """ import argparse -import json import os from pathlib import Path import subprocess -import sys def main(): @@ -63,7 +61,9 @@ def main(): path = env.get("PATH", "") env["PATH"] = os.pathsep.join([path] + aux_paths) - subprocess.check_call(cmd, env=env) + returncode = subprocess.call(cmd, env=env) + if returncode != 0: + return returncode recompute_abi_hash(args.ghc, args.abi_out) @@ -90,6 +90,8 @@ def main(): os.remove(args.buck2_packagedb_dep) raise e + return 0 + def recompute_abi_hash(ghc, abi_out): """Call ghc on the hi file and write the ABI hash to abi_out.""" @@ -102,7 +104,7 @@ def recompute_abi_hash(ghc, abi_out): with open(abi_out, "w") as outfile: print(hash, file=outfile) return - raise "ABI hash not found in ghc output" + raise RuntimeError("ABI hash not found in ghc output") if __name__ == "__main__": From 020127fd0f538a6b2c91cc0179b7e19a29ee027b Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 16 Aug 2024 11:36:02 +0200 Subject: [PATCH 1029/1133] Improve error messages for metadata generation --- haskell/tools/generate_target_metadata.py | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index d092d5f67..7f4dcd9e4 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -16,9 +16,11 @@ """ import argparse +import sys import json import os from pathlib import Path +import shlex 
import subprocess import tempfile @@ -176,7 +178,18 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): path = env.get("PATH", "") env["PATH"] = os.pathsep.join([path] + aux_paths) - subprocess.run(args, env=env, check=True) + res = subprocess.run(args, env=env, capture_output=True) + if res.returncode != 0: + # Write the GHC command on failure. + print(shlex.join(args), file=sys.stderr) + + # Always forward stdout/stderr. + sys.stdout.buffer.write(res.stdout) + sys.stderr.buffer.write(res.stderr) + + if res.returncode != 0: + # Fail if GHC failed. + sys.exit(res.returncode) with open(json_fname) as f: return json.load(f) From 6b39d3c2fc5f70dd803bd5cb2754b23cc080c3d3 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 16 Aug 2024 11:47:13 +0200 Subject: [PATCH 1030/1133] Redirect GHC stdout to stderr This is to work around Buck2 swallowing stdout on successful builds. --- haskell/tools/generate_target_metadata.py | 4 +++- haskell/tools/ghc_wrapper.py | 5 ++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 7f4dcd9e4..5215c382e 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -184,7 +184,9 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): print(shlex.join(args), file=sys.stderr) # Always forward stdout/stderr. - sys.stdout.buffer.write(res.stdout) + # Note, Buck2 swallows stdout on successful builds. + # Redirect to stderr to avoid this. 
+ sys.stderr.buffer.write(res.stdout) sys.stderr.buffer.write(res.stderr) if res.returncode != 0: diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index d658d6f9c..f5bdc3df0 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -12,6 +12,7 @@ import os from pathlib import Path import subprocess +import sys def main(): @@ -61,7 +62,9 @@ def main(): path = env.get("PATH", "") env["PATH"] = os.pathsep.join([path] + aux_paths) - returncode = subprocess.call(cmd, env=env) + # Note, Buck2 swallows stdout on successful builds. + # Redirect to stderr to avoid this. + returncode = subprocess.call(cmd, env=env, stdout=sys.stderr.buffer) if returncode != 0: return returncode From 8dbef29d05002c70f6af2d6687bbde938c3d5845 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Fri, 16 Aug 2024 14:15:08 +0200 Subject: [PATCH 1031/1133] Fix stubs merge conflict Conflict between a525b5508b23ff7769e4b7a12cd6f4f4eb028674 and fa1a20dda39c2d7b54430bc652131e9a2469fede. --- haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 0940b0c90..46bd21a5f 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -522,7 +522,7 @@ def _compile_module( # as ghc exclusively looks in that directory when it is set. for dir in ["o", "hie", "dump"]: compile_args_for_file.add( - "-{}dir".format(dir), cmd_args([cmd_args(stubs.as_output(), parent=1), module.prefix_dir], delimiter="/"), + "-{}dir".format(dir), cmd_args([cmd_args(md_file, ignore_artifacts=True, parent=1), module.prefix_dir], delimiter="/"), ) if module.stub_dir != None: compile_args_for_file.add("-stubdir", stubs.as_output()) From 8c7e1a1a0f5bedd2521d6bebc14edf5dcf68f9f3 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Sun, 18 Aug 2024 08:57:59 -0700 Subject: [PATCH 1032/1133] Add external_tools to haskell_library (#27) non-cabal build tool dependency can be added. 
* added external_tools * --bin-exe --- decls/haskell_common.bzl | 8 ++++++++ decls/haskell_rules.bzl | 1 + haskell/compile.bzl | 6 ++++++ haskell/tools/ghc_wrapper.py | 10 +++++++++- 4 files changed, 24 insertions(+), 1 deletion(-) diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index 074d29a9d..b51f4ae39 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ -62,6 +62,13 @@ def _scripts_arg(): ), } +def _external_tools_arg(): + return { + "external_tools": attrs.list(attrs.dep(providers = [RunInfo]), default = [], doc = """ + External executables called from Haskell compiler during preprocessing or compilation. +"""), + } + haskell_common = struct( srcs_arg = _srcs_arg, src_strip_prefix = _src_strip_prefix_arg, @@ -69,4 +76,5 @@ haskell_common = struct( compiler_flags_arg = _compiler_flags_arg, exported_linker_flags_arg = _exported_linker_flags_arg, scripts_arg = _scripts_arg, + external_tools_arg = _external_tools_arg, ) diff --git a/decls/haskell_rules.bzl b/decls/haskell_rules.bzl index eec1d1022..95de89585 100644 --- a/decls/haskell_rules.bzl +++ b/decls/haskell_rules.bzl @@ -165,6 +165,7 @@ haskell_library = prelude_rule( # @unsorted-dict-items haskell_common.srcs_arg() | haskell_common.src_strip_prefix() | + haskell_common.external_tools_arg() | haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | haskell_common.scripts_arg() | diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 46bd21a5f..df54a5cea 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -437,6 +437,12 @@ def _common_compile_module_args( format="--bin-path={}/bin", )) + external_tool_paths = [tool[RunInfo] for tool in ctx.attrs.external_tools] + args_for_file.add(cmd_args( + external_tool_paths, + format="--bin-exe={}", + )) + packagedb_args = cmd_args(libs.project_as_args("empty_package_db")) packagedb_args.add(package_db_tset.project_as_args("package_db")) diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py 
index f5bdc3df0..05f077f78 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -53,11 +53,19 @@ def main(): help="Add given path to PATH.", ) + parser.add_argument( + "--bin-exe", + type=Path, + action="append", + default=[], + help="Add given exe (more specific than bin-path)", + ) + args, ghc_args = parser.parse_known_args() cmd = [args.ghc] + ghc_args - aux_paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] + aux_paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] + [str(os.path.dirname(binexepath)) for binexepath in args.bin_exe] env = os.environ.copy() path = env.get("PATH", "") env["PATH"] = os.pathsep.join([path] + aux_paths) From d78eab969733bf3ed5ae8560ab1ea66a9b72c293 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Tue, 20 Aug 2024 15:16:32 +0200 Subject: [PATCH 1033/1133] Fix: Missing instance of external_tools_arg See https://github.com/MercuryTechnologies/buck2-prelude/pull/27 --- decls/haskell_rules.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/decls/haskell_rules.bzl b/decls/haskell_rules.bzl index 95de89585..eeec65274 100644 --- a/decls/haskell_rules.bzl +++ b/decls/haskell_rules.bzl @@ -46,6 +46,7 @@ haskell_binary = prelude_rule( native_common.link_group_public_deps_label() | native_common.link_style() | haskell_common.srcs_arg() | + haskell_common.external_tools_arg() | haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | haskell_common.scripts_arg() | From 8d567b60593d6b5f424d38426bac54c86f0f166f Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 15 Aug 2024 16:49:43 +0200 Subject: [PATCH 1034/1133] Determine source prefix automatically in _compile_module --- haskell/compile.bzl | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 7405bce05..42dc4ab30 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -48,6 +48,7 @@ CompiledModuleInfo = provider(fields = { 
"interfaces": provider_field(list[Artifact]), # TODO[AH] track this module's package-name/id & package-db instead. "db_deps": provider_field(list[Artifact]), + "src_prefix": provider_field(str), }) def _compiled_module_project_as_abi(mod: CompiledModuleInfo) -> cmd_args: @@ -64,6 +65,14 @@ def _compiled_module_reduce_as_packagedb_deps(children: list[dict[Artifact, None result.update(child) return result +def _compiled_module_reduce_as_source_prefixes(children: list[dict[str, None]], mod: CompiledModuleInfo | None) -> dict[str, None]: + d = { k: None for c in children for k in c.keys() } + + if mod: + d[mod.src_prefix] = None + + return d + CompiledModuleTSet = transitive_set( args_projections = { "abi": _compiled_module_project_as_abi, @@ -71,6 +80,7 @@ CompiledModuleTSet = transitive_set( }, reductions = { "packagedb_deps": _compiled_module_reduce_as_packagedb_deps, + "source_prefixes": _compiled_module_reduce_as_source_prefixes, }, ) @@ -554,6 +564,7 @@ def _compile_module( else: compile_cmd.add(compile_args_for_file) + # add each module dir prefix to search path compile_cmd.add( cmd_args( cmd_args(md_file, format = "-i{}", ignore_artifacts=True).parent(), @@ -623,12 +634,26 @@ def _compile_module( } ) + source_path = paths.replace_extension(module.source.short_path, "") + module_name_for_file = src_to_module_name(source_path) + + # assert that source_path (without extension) and its module name have the same length + if len(source_path) != len(module_name_for_file): + fail("{} should have the same length as {}".format(source_path, module_name_for_file)) + + if module_name != module_name_for_file and module_name_for_file.endswith("." + module_name): + # N.B. the prefix could have some '.' 
characters in it, use the source_path to determine the prefix + src_prefix = source_path[0:-len(module_name) - 1] + else: + src_prefix = "" + module_tset = ctx.actions.tset( CompiledModuleTSet, value = CompiledModuleInfo( abi = module.hash, interfaces = module.interfaces, db_deps = exposed_package_dbs, + src_prefix = src_prefix, ), children = [cross_package_modules] + this_package_modules, ) From d9dced8f4f1e53f28dbd18e4d6250e3ea5bb2d07 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 07:59:37 +0200 Subject: [PATCH 1035/1133] Use inferred source_prefixes in _compile_module --- haskell/compile.bzl | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 42dc4ab30..384e81023 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -160,11 +160,6 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl prefix_dir = "mod-" + suffix - src_strip_prefix = getattr(ctx.attrs, "src_strip_prefix", None) - - if src_strip_prefix: - prefix_dir += "/" + src_strip_prefix - modules[module_name] = _Module( source = src, interfaces = interfaces, @@ -564,16 +559,6 @@ def _compile_module( else: compile_cmd.add(compile_args_for_file) - # add each module dir prefix to search path - compile_cmd.add( - cmd_args( - cmd_args(md_file, format = "-i{}", ignore_artifacts=True).parent(), - "/", - module.prefix_dir, - delimiter="" - ) - ) - toolchain_deps = [] library_deps = [] exposed_package_modules = [] @@ -605,6 +590,20 @@ def _compile_module( children = [cross_package_modules] + this_package_modules, ) + # add each module dir prefix to search path + source_prefixes = dependency_modules.reduce("source_prefixes").keys() + + for prefix in source_prefixes: + compile_cmd.add( + cmd_args( + cmd_args(md_file, format = "-i{}", ignore_artifacts=True).parent(), + "/", + paths.join(module.prefix_dir, prefix), + delimiter="" + ) + ) + + 
compile_cmd.add(cmd_args(library_deps, prepend = "-package")) compile_cmd.add(cmd_args(toolchain_deps, prepend = "-package")) From b52fc03334015339ad7de6206730e9e0e9fa2c8a Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 08:29:44 +0200 Subject: [PATCH 1036/1133] Move package conf actions into dynamic output --- haskell/haskell.bzl | 117 +++++++++++++++++++++++++------------------- 1 file changed, 66 insertions(+), 51 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 778ec3734..3ac23c1ee 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -420,7 +420,8 @@ def _make_package( hlis: list[HaskellLibraryInfo], profiling: list[bool], enable_profiling: bool, - use_empty_lib: bool) -> Artifact: + use_empty_lib: bool, + md_file: Artifact) -> Artifact: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) # Don't expose boot sources, as they're only meant to be used for compiling. @@ -430,65 +431,77 @@ def _make_package( art_suff = get_artifact_suffix(link_style, profiled) return "\"${pkgroot}/" + dir_prefix + "-" + art_suff + subdir + "\"" - src_prefix = getattr(ctx.attrs, "src_strip_prefix", "") - if src_prefix: - src_prefix = "/" + src_prefix - - import_dirs = [mk_artifact_dir("mod", profiled, src_prefix) for profiled in profiling] - - conf = [ - "name: " + pkgname, - "version: 1.0.0", - "id: " + pkgname, - "key: " + pkgname, - "exposed: False", - "exposed-modules: " + ", ".join(modules), - "import-dirs:" + ", ".join(import_dirs), - "depends: " + ", ".join([lib.id for lib in hlis]), - ] - if use_empty_lib: - pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + "_empty.conf", conf) + pkg_conf = ctx.actions.declare_output("pkg-" + artifact_suffix + "_empty.conf") db = ctx.actions.declare_output("db-" + artifact_suffix + "_empty", dir = True) else: - if not libname: - fail("argument `libname` cannot be empty, when use_empty_lib == False") + pkg_conf = ctx.actions.declare_output("pkg-" + artifact_suffix + 
".conf") + db = ctx.actions.declare_output("db-" + artifact_suffix, dir = True) - if enable_profiling: - # Add the `-p` suffix otherwise ghc will look for objects - # following this logic (https://fburl.com/code/3gmobm5x) and will fail. - libname += "_p" + def write_package_conf(ctx, artifacts, resolved, outputs, md_file=md_file, libname=libname): + src_prefix = getattr(ctx.attrs, "src_strip_prefix", "") + if src_prefix: + src_prefix = "/" + src_prefix + + import_dirs = [mk_artifact_dir("mod", profiled, src_prefix) for profiled in profiling] + + conf = [ + "name: " + pkgname, + "version: 1.0.0", + "id: " + pkgname, + "key: " + pkgname, + "exposed: False", + "exposed-modules: " + ", ".join(modules), + "import-dirs:" + ", ".join(import_dirs), + "depends: " + ", ".join([lib.id for lib in hlis]), + ] - library_dirs = [mk_artifact_dir("lib", profiled) for profiled in profiling] - conf.append("library-dirs:" + ", ".join(library_dirs)) - conf.append("extra-libraries: " + libname) + if not use_empty_lib: + if not libname: + fail("argument `libname` cannot be empty, when use_empty_lib == False") - pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + ".conf", conf) - db = ctx.actions.declare_output("db-" + artifact_suffix, dir = True) + if enable_profiling: + # Add the `-p` suffix otherwise ghc will look for objects + # following this logic (https://fburl.com/code/3gmobm5x) and will fail. + libname += "_p" - db_deps = [x.db for x in hlis] + library_dirs = [mk_artifact_dir("lib", profiled) for profiled in profiling] + conf.append("library-dirs:" + ", ".join(library_dirs)) + conf.append("extra-libraries: " + libname) - # So that ghc-pkg can find the DBs for the dependencies. We might - # be able to use flags for this instead, but this works. 
- ghc_package_path = cmd_args( - db_deps, - delimiter = ":", - ) + ctx.actions.write(outputs[pkg_conf].as_output(), conf) - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - ctx.actions.run( - cmd_args([ - "sh", - "-c", - _REGISTER_PACKAGE, - "", - haskell_toolchain.packager, - db.as_output(), - pkg_conf, - ]), - category = "haskell_package_" + artifact_suffix.replace("-", "_"), - identifier = "empty" if use_empty_lib else "final", - env = {"GHC_PACKAGE_PATH": ghc_package_path} if db_deps else {}, + db_deps = [x.db for x in hlis] + + # So that ghc-pkg can find the DBs for the dependencies. We might + # be able to use flags for this instead, but this works. + ghc_package_path = cmd_args( + db_deps, + delimiter = ":", + ) + + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + ctx.actions.run( + cmd_args([ + "sh", + "-c", + _REGISTER_PACKAGE, + "", + haskell_toolchain.packager, + outputs[db].as_output(), + pkg_conf, + ]), + category = "haskell_package_" + artifact_suffix.replace("-", "_"), + identifier = "empty" if use_empty_lib else "final", + env = {"GHC_PACKAGE_PATH": ghc_package_path} if db_deps else {}, + ) + + ctx.actions.dynamic_output( + dynamic = [md_file], + promises = [], + inputs = [], + outputs = [pkg_conf.as_output(), db.as_output()], + f = write_package_conf ) return db @@ -676,6 +689,7 @@ def _build_haskell_lib( import_artifacts.keys(), enable_profiling = enable_profiling, use_empty_lib = False, + md_file = md_file, ) empty_db = _make_package( ctx, @@ -686,6 +700,7 @@ def _build_haskell_lib( import_artifacts.keys(), enable_profiling = enable_profiling, use_empty_lib = True, + md_file = md_file, ) hlib = HaskellLibraryInfo( From 8ca86cf3e4aea067ea9b47ab89228af00af8f939 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 09:59:48 +0200 Subject: [PATCH 1037/1133] Infer source prefixes for exposed-modules and import-dirs ... 
in package conf files --- haskell/haskell.bzl | 41 ++++++++++++++++++++++++++++++++--------- 1 file changed, 32 insertions(+), 9 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 3ac23c1ee..ca419e68e 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -424,12 +424,11 @@ def _make_package( md_file: Artifact) -> Artifact: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) - # Don't expose boot sources, as they're only meant to be used for compiling. - modules = [src_to_module_name(x, ctx.attrs.src_strip_prefix) for x, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(x)] - def mk_artifact_dir(dir_prefix: str, profiled: bool, subdir: str = "") -> str: - art_suff = get_artifact_suffix(link_style, profiled) - return "\"${pkgroot}/" + dir_prefix + "-" + art_suff + subdir + "\"" + suffix = get_artifact_suffix(link_style, profiled) + if subdir: + suffix = paths.join(suffix, subdir) + return "\"${pkgroot}/" + dir_prefix + "-" + suffix + "\"" if use_empty_lib: pkg_conf = ctx.actions.declare_output("pkg-" + artifact_suffix + "_empty.conf") @@ -439,11 +438,35 @@ def _make_package( db = ctx.actions.declare_output("db-" + artifact_suffix, dir = True) def write_package_conf(ctx, artifacts, resolved, outputs, md_file=md_file, libname=libname): - src_prefix = getattr(ctx.attrs, "src_strip_prefix", "") - if src_prefix: - src_prefix = "/" + src_prefix + md = artifacts[md_file].read_json() + module_map = md["module_mapping"] + + def source_prefix(module_path): + name = src_to_module_name(module_path) + ghc_name = module_map.get(name) + + if ghc_name and name.endswith("." 
+ ghc_name): + start = len(name) - len(ghc_name) + else: + start = 0 + + return module_path[0:start] + + def path_to_module_name(module_path): + prefix = source_prefix(module_path) + + return src_to_module_name(module_path[len(prefix):]) + + haskell_sources = [src for src, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(src)] + + # Don't expose boot sources, as they're only meant to be used for compiling. + modules = [path_to_module_name(x) for x in haskell_sources] + + source_prefixes = { + source_prefix(mod): None for mod in haskell_sources + }.keys() - import_dirs = [mk_artifact_dir("mod", profiled, src_prefix) for profiled in profiling] + import_dirs = [mk_artifact_dir("mod", profiled, src_prefix) for profiled in profiling for src_prefix in source_prefixes] conf = [ "name: " + pkgname, From 66794b4b9ef9d983180d6f48fda199e6e3a6ad5a Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 13:37:30 +0200 Subject: [PATCH 1038/1133] Revert "Strip source prefix in haskell_haddock rule" This reverts commit 97157d4f59094295b4c0d4c3c6a3d6f1c61142b9. 
--- haskell/compile.bzl | 2 -- haskell/haskell_haddock.bzl | 6 +++--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 384e81023..047b11e31 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -96,7 +96,6 @@ CompileResultInfo = record( hashes = field(list[Artifact]), producing_indices = field(bool), module_tsets = field(DynamicValue), - src_prefix = field(str), ) PackagesInfo = record( @@ -783,5 +782,4 @@ def compile( stubs = stubs_dir, producing_indices = False, module_tsets = dyn_module_tsets, - src_prefix = getattr(ctx.attrs, "src_strip_prefix", ""), ) diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index 8ebc56377..2e49761f8 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -118,10 +118,10 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes cmd.add("--source-entity", source_entity) haddock_infos = { - src_to_module_name(hi.short_path, compiled.src_prefix): _HaddockInfo( + src_to_module_name(hi.short_path): _HaddockInfo( interface = hi, - haddock = ctx.actions.declare_output("haddock-interface/{}.haddock".format(src_to_module_name(hi.short_path, compiled.src_prefix))), - html = ctx.actions.declare_output("haddock-html/{}.html".format(src_to_module_name(hi.short_path, compiled.src_prefix).replace(".", "-"))), + haddock = ctx.actions.declare_output("haddock-interface/{}.haddock".format(src_to_module_name(hi.short_path))), + html = ctx.actions.declare_output("haddock-html/{}.html".format(src_to_module_name(hi.short_path).replace(".", "-"))), ) for hi in compiled.hi if not hi.extension.endswith("-boot") From cbba36d637d5f5b67127f1993907544d52a91e1a Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 11:48:29 +0200 Subject: [PATCH 1039/1133] Properly handle html output with source prefixes --- haskell/haskell_haddock.bzl | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git 
a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index 2e49761f8..05039b3ca 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -65,9 +65,21 @@ def _haddock_dump_interface( for dep_name in graph[module_name] ] + interface_path = haddock_info.interface.short_path + + module_name_for_file = src_to_module_name(interface_path) + + if module_name != module_name_for_file and module_name_for_file.endswith("." + module_name): + start = len(module_name_for_file) - len(module_name) + html_output = ctx.actions.declare_output("haddock-html/{}.html".format(src_to_module_name(interface_path[start:]).replace(".", "-"))) + make_copy = True + else: + html_output = outputs[haddock_info.html] + make_copy = False + ctx.actions.run( cmd.copy().add( - "--odir", cmd_args(outputs[haddock_info.html].as_output(), parent = 1), + "--odir", cmd_args(html_output.as_output(), parent = 1), "--dump-interface", outputs[haddock_info.haddock].as_output(), "--html", "--hoogle", @@ -85,6 +97,8 @@ def _haddock_dump_interface( identifier = module_name, no_outputs_cleanup = True, ) + if make_copy: + ctx.actions.copy_file(outputs[haddock_info.html].as_output(), html_output) return ctx.actions.tset( _HaddockInfoTSet, From 46e84397caa276cf9cab6b52d9bdce8f2f4dc0a1 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 13:23:47 +0200 Subject: [PATCH 1040/1133] Handle source prefix for boot modules --- haskell/compile.bzl | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 047b11e31..9c7553b70 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -705,6 +705,48 @@ def compile( graph = md["module_graph"] package_deps = md["package_deps"] + # boot_rev_deps = {} + # for module_name, boot_deps in md["boot_deps"].items(): + # for boot_dep in boot_deps: + # boot_rev_deps.setdefault(boot_dep + "-boot", []).append(module_name) + + # # TODO GHC --dep-json should integrate 
boot modules directly into the dependency graph. + # for module_name, module in modules.items(): + # if not module_name.endswith("-boot"): + # continue + + # # deduce the real name from the corresponding non-boot module + # non_boot_module_name = module_name[:-5] + # non_boot_module_name = module_map.get(non_boot_module_name, non_boot_module_name) + + # boot_module_name = non_boot_module_name + "-boot" + + # if module_name != boot_module_name: + # module_map[module_name] = boot_module_name + + # # Add boot modules to the module graph + # graph[boot_module_name] = [] + # # TODO GHC --dep-json should report boot module dependencies. + # # The following is a naive approximation of the boot module's dependencies, + # # taking the corresponding module's dependencies + # # minus those that depend on the boot module. + + # # Add module dependencies for the boot module + # graph[boot_module_name].extend([ + # dep + # for dep in graph[non_boot_module_name] + # if not dep in boot_rev_deps[boot_module_name] + # ]) + + # # Add package dependencies for the boot module + # package_deps[boot_module_name] = package_deps.get(non_boot_module_name, []) + + # for module_name, boot_deps in md["boot_deps"].items(): + # for boot_dep in boot_deps: + # graph.setdefault(module_name, []).append(boot_dep + "-boot") + + #>>>>>>> 8a06e69b (Handle source prefix for boot modules) + mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } module_tsets = {} From b86e03f6ced147d1ae29ed8ad37fb4afac8d6f18 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 13:31:19 +0200 Subject: [PATCH 1041/1133] Revert "Add `src_strip_prefix` to `haskell_library` rule" This reverts commit 1e204c1a778d5d8ad1dd3f6ae5dd1ece325c8df6. 
--- decls/haskell_rules.bzl | 1 - 1 file changed, 1 deletion(-) diff --git a/decls/haskell_rules.bzl b/decls/haskell_rules.bzl index eeec65274..e9ba7e9c0 100644 --- a/decls/haskell_rules.bzl +++ b/decls/haskell_rules.bzl @@ -165,7 +165,6 @@ haskell_library = prelude_rule( attrs = ( # @unsorted-dict-items haskell_common.srcs_arg() | - haskell_common.src_strip_prefix() | haskell_common.external_tools_arg() | haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | From fde1250911cac577b696aea4535a7262805c295c Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 13:31:30 +0200 Subject: [PATCH 1042/1133] Revert "Add `src_strip_prefix` attribute" This reverts commit 45b724ff417f7156e1b9903fe7d2843751be552c. --- decls/haskell_common.bzl | 8 -------- 1 file changed, 8 deletions(-) diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index b51f4ae39..e92dadf08 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ -17,13 +17,6 @@ def _srcs_arg(): """), } -def _src_strip_prefix_arg(): - return { - "src_strip_prefix": attrs.string(default = "", doc = """ - A prefix to strip from the source files when compiling. 
-"""), - } - def _deps_arg(): return { "deps": attrs.list(attrs.dep(), default = [], doc = """ @@ -71,7 +64,6 @@ def _external_tools_arg(): haskell_common = struct( srcs_arg = _srcs_arg, - src_strip_prefix = _src_strip_prefix_arg, deps_arg = _deps_arg, compiler_flags_arg = _compiler_flags_arg, exported_linker_flags_arg = _exported_linker_flags_arg, From 1c00e460bbd6cae1b7ab7eb5d4a4c2d7e51e752b Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 13:46:54 +0200 Subject: [PATCH 1043/1133] Refactor: move assignment to `stubs` --- haskell/compile.bzl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 9c7553b70..42195a9b3 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -520,8 +520,6 @@ def _compile_module( objects = [outputs[obj] for obj in module.objects] his = [outputs[hi] for hi in module.interfaces] - if module.stub_dir != None: - stubs = outputs[module.stub_dir] compile_args_for_file.add("-o", objects[0].as_output()) compile_args_for_file.add("-ohi", his[0].as_output()) @@ -535,6 +533,7 @@ def _compile_module( "-{}dir".format(dir), cmd_args([cmd_args(md_file, ignore_artifacts=True, parent=1), module.prefix_dir], delimiter="/"), ) if module.stub_dir != None: + stubs = outputs[module.stub_dir] compile_args_for_file.add("-stubdir", stubs.as_output()) if link_style in [LinkStyle("static_pic"), LinkStyle("static")]: From 1ccbb6485b9b1a3de23179ed491b323ebd017612 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 14:08:34 +0200 Subject: [PATCH 1044/1133] Revert "Extend `src_to_module` function to handle source prefix" This reverts commit 3365ba046c39946c25903c0c7a2f803fd723fa35. 
--- haskell/util.bzl | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/haskell/util.bzl b/haskell/util.bzl index dbf5144c7..eab727a6a 100644 --- a/haskell/util.bzl +++ b/haskell/util.bzl @@ -35,7 +35,6 @@ load( ) load("@prelude//utils:platform_flavors_util.bzl", "by_platform") load("@prelude//utils:utils.bzl", "flatten") -load("@prelude//utils:strings.bzl", "strip_prefix") HASKELL_EXTENSIONS = [ ".hs", @@ -67,13 +66,8 @@ def is_haskell_boot(x: str) -> bool: _, ext = paths.split_extension(x) return ext in HASKELL_BOOT_EXTENSIONS -def src_to_module_name(x: str, src_prefix: str = "") -> str: +def src_to_module_name(x: str) -> str: base, _ext = paths.split_extension(x) - if src_prefix: - prefix = src_prefix if src_prefix.endswith("/") else src_prefix + "/" - stripped = strip_prefix(prefix, base) - if stripped: base = stripped - return base.replace("/", ".") def _by_platform(ctx: AnalysisContext, xs: list[(str, list[typing.Any])]) -> list[typing.Any]: From 8596ff243a144d68354ef4d2b4a3051921873d3b Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 14:13:58 +0200 Subject: [PATCH 1045/1133] Add `source_prefix` helper function --- haskell/util.bzl | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/haskell/util.bzl b/haskell/util.bzl index eab727a6a..875fd2eb0 100644 --- a/haskell/util.bzl +++ b/haskell/util.bzl @@ -171,3 +171,20 @@ def get_artifact_suffix(link_style: LinkStyle, enable_profiling: bool, suffix: s if enable_profiling: artifact_suffix += "-prof" return artifact_suffix + suffix + +def source_prefix(source: Artifact, module_name: str) -> str: + """Determine the directory prefix of the given artifact, considering that ghc has determined `module_name` for that file.""" + source_path = paths.replace_extension(source.short_path, "") + + module_name_for_file = src_to_module_name(source_path) + + # assert that source_path (without extension) and its module name have the same length + if len(source_path) != 
len(module_name_for_file): + fail("{} should have the same length as {}".format(source_path, module_name_for_file)) + + if module_name != module_name_for_file and module_name_for_file.endswith("." + module_name): + # N.B. the prefix could have some '.' characters in it, use the source_path to determine the prefix + return source_path[0:-len(module_name) - 1] + + return "" + From c2e19cbcd638a38fa03387c8836bc5e20499182b Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 14:18:58 +0200 Subject: [PATCH 1046/1133] Use source_prefix function in _compile_module --- haskell/compile.bzl | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 42195a9b3..8478736c3 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -42,6 +42,7 @@ load( load("@prelude//:paths.bzl", "paths") load("@prelude//utils:graph_utils.bzl", "post_order_traversal") load("@prelude//utils:strings.bzl", "strip_prefix") +load("@prelude//haskell:util.bzl", "source_prefix") CompiledModuleInfo = provider(fields = { "abi": provider_field(Artifact), @@ -631,18 +632,7 @@ def _compile_module( } ) - source_path = paths.replace_extension(module.source.short_path, "") - module_name_for_file = src_to_module_name(source_path) - - # assert that source_path (without extension) and its module name have the same length - if len(source_path) != len(module_name_for_file): - fail("{} should have the same length as {}".format(source_path, module_name_for_file)) - - if module_name != module_name_for_file and module_name_for_file.endswith("." + module_name): - # N.B. the prefix could have some '.' 
characters in it, use the source_path to determine the prefix - src_prefix = source_path[0:-len(module_name) - 1] - else: - src_prefix = "" + src_prefix = source_prefix(module.source, module_name) module_tset = ctx.actions.tset( CompiledModuleTSet, From 7f460c3cfaee2397a2c15541b8b1b090c8038bfd Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 14:51:59 +0200 Subject: [PATCH 1047/1133] Use source_prefix function in _make_package --- haskell/haskell.bzl | 33 ++++++++++++--------------------- 1 file changed, 12 insertions(+), 21 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index ca419e68e..774e7e36d 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -93,6 +93,7 @@ load( "output_extensions", "src_to_module_name", "srcs_to_pairs", + "source_prefix", ) load( "@prelude//linking:link_groups.bzl", @@ -441,30 +442,20 @@ def _make_package( md = artifacts[md_file].read_json() module_map = md["module_mapping"] - def source_prefix(module_path): - name = src_to_module_name(module_path) - ghc_name = module_map.get(name) - - if ghc_name and name.endswith("." + ghc_name): - start = len(name) - len(ghc_name) - else: - start = 0 - - return module_path[0:start] - - def path_to_module_name(module_path): - prefix = source_prefix(module_path) - - return src_to_module_name(module_path[len(prefix):]) + modules = [] + source_prefixes = {} + for path, src in srcs_to_pairs(ctx.attrs.srcs): + # Don't expose boot sources, as they're only meant to be used for compiling. + if not is_haskell_src(path): + continue - haskell_sources = [src for src, _ in srcs_to_pairs(ctx.attrs.srcs) if is_haskell_src(src)] + name = src_to_module_name(path) + prefix = source_prefix(src, module_map.get(name, name)) + source_prefixes[prefix] = None - # Don't expose boot sources, as they're only meant to be used for compiling. 
- modules = [path_to_module_name(x) for x in haskell_sources] + modules.append(src_to_module_name(path[len(prefix) + 1:])) - source_prefixes = { - source_prefix(mod): None for mod in haskell_sources - }.keys() + source_prefixes = source_prefixes.keys() import_dirs = [mk_artifact_dir("mod", profiled, src_prefix) for profiled in profiling for src_prefix in source_prefixes] From ebf0d6b50b0217cdbe5c991d3e634dcd4bd742cf Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 16 Aug 2024 15:01:10 +0200 Subject: [PATCH 1048/1133] Use source_prefix function in _haddock_dump_interface --- haskell/haskell_haddock.bzl | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index 05039b3ca..a91f20fdf 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -16,6 +16,7 @@ load( "attr_deps", "attr_deps_haskell_link_infos", "src_to_module_name", + "source_prefix", ) load("@prelude//utils:graph_utils.bzl", "post_order_traversal") @@ -65,13 +66,13 @@ def _haddock_dump_interface( for dep_name in graph[module_name] ] - interface_path = haddock_info.interface.short_path + prefix = source_prefix(haddock_info.interface, module_name) - module_name_for_file = src_to_module_name(interface_path) - - if module_name != module_name_for_file and module_name_for_file.endswith("." 
+ module_name): - start = len(module_name_for_file) - len(module_name) - html_output = ctx.actions.declare_output("haddock-html/{}.html".format(src_to_module_name(interface_path[start:]).replace(".", "-"))) + if prefix: + interface_path = haddock_info.interface.short_path + html_output = ctx.actions.declare_output( + "haddock-html/{}.html".format(src_to_module_name(interface_path[len(prefix) + 1:]).replace(".", "-")) + ) make_copy = True else: html_output = outputs[haddock_info.html] From ce6a47b58301aa2f1ac532c3e941fc069adb8c83 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 19 Aug 2024 10:18:48 +0200 Subject: [PATCH 1049/1133] Simplify handling of haddock html output generation --- haskell/haskell_haddock.bzl | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index a91f20fdf..b225f954b 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -16,9 +16,9 @@ load( "attr_deps", "attr_deps_haskell_link_infos", "src_to_module_name", - "source_prefix", ) load("@prelude//utils:graph_utils.bzl", "post_order_traversal") +load("@prelude//:paths.bzl", "paths") HaskellHaddockInfo = provider( fields = { @@ -43,6 +43,9 @@ _HaddockInfoTSet = transitive_set( } ) +def _haddock_module_to_html(module_name: str) -> str: + return module_name.replace(".", "-") + ".html" + def _haddock_dump_interface( ctx: AnalysisContext, cmd: cmd_args, @@ -66,16 +69,14 @@ def _haddock_dump_interface( for dep_name in graph[module_name] ] - prefix = source_prefix(haddock_info.interface, module_name) + expected_html = outputs[haddock_info.html] + module_html = _haddock_module_to_html(module_name) - if prefix: - interface_path = haddock_info.interface.short_path - html_output = ctx.actions.declare_output( - "haddock-html/{}.html".format(src_to_module_name(interface_path[len(prefix) + 1:]).replace(".", "-")) - ) + if paths.basename(expected_html.short_path) != module_html: + 
html_output = ctx.actions.declare_output("haddock-html", module_html) make_copy = True else: - html_output = outputs[haddock_info.html] + html_output = expected_html make_copy = False ctx.actions.run( @@ -99,7 +100,9 @@ def _haddock_dump_interface( no_outputs_cleanup = True, ) if make_copy: - ctx.actions.copy_file(outputs[haddock_info.html].as_output(), html_output) + # XXX might as well use `symlink_file`` but that does not work with buck2 RE + # (see https://github.com/facebook/buck2/issues/222) + ctx.actions.copy_file(expected_html.as_output(), html_output) return ctx.actions.tset( _HaddockInfoTSet, @@ -136,7 +139,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes src_to_module_name(hi.short_path): _HaddockInfo( interface = hi, haddock = ctx.actions.declare_output("haddock-interface/{}.haddock".format(src_to_module_name(hi.short_path))), - html = ctx.actions.declare_output("haddock-html/{}.html".format(src_to_module_name(hi.short_path).replace(".", "-"))), + html = ctx.actions.declare_output("haddock-html", _haddock_module_to_html(src_to_module_name(hi.short_path))), ) for hi in compiled.hi if not hi.extension.endswith("-boot") From f3c8782d20d9b9ec2aad4f0f490510086c75c834 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 19 Aug 2024 10:19:49 +0200 Subject: [PATCH 1050/1133] Add `get_source_prefixes` function to haskell/utils and use it --- haskell/compile.bzl | 20 ++++---------------- haskell/haskell.bzl | 20 +++----------------- haskell/util.bzl | 16 +++++++++++++++- 3 files changed, 22 insertions(+), 34 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 8478736c3..4a7677726 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -29,6 +29,7 @@ load( "attr_deps_haskell_link_infos", "attr_deps_haskell_toolchain_libraries", "get_artifact_suffix", + "get_source_prefixes", "is_haskell_boot", "is_haskell_src", "output_extensions", @@ -42,14 +43,12 @@ load( load("@prelude//:paths.bzl", "paths") 
load("@prelude//utils:graph_utils.bzl", "post_order_traversal") load("@prelude//utils:strings.bzl", "strip_prefix") -load("@prelude//haskell:util.bzl", "source_prefix") CompiledModuleInfo = provider(fields = { "abi": provider_field(Artifact), "interfaces": provider_field(list[Artifact]), # TODO[AH] track this module's package-name/id & package-db instead. "db_deps": provider_field(list[Artifact]), - "src_prefix": provider_field(str), }) def _compiled_module_project_as_abi(mod: CompiledModuleInfo) -> cmd_args: @@ -66,14 +65,6 @@ def _compiled_module_reduce_as_packagedb_deps(children: list[dict[Artifact, None result.update(child) return result -def _compiled_module_reduce_as_source_prefixes(children: list[dict[str, None]], mod: CompiledModuleInfo | None) -> dict[str, None]: - d = { k: None for c in children for k in c.keys() } - - if mod: - d[mod.src_prefix] = None - - return d - CompiledModuleTSet = transitive_set( args_projections = { "abi": _compiled_module_project_as_abi, @@ -81,7 +72,6 @@ CompiledModuleTSet = transitive_set( }, reductions = { "packagedb_deps": _compiled_module_reduce_as_packagedb_deps, - "source_prefixes": _compiled_module_reduce_as_source_prefixes, }, ) @@ -499,6 +489,7 @@ def _compile_module( artifact_suffix: str, direct_deps_by_name: dict[str, typing.Any], toolchain_deps_by_name: dict[str, None], + source_prefixes: list[str], ) -> CompiledModuleTSet: compile_cmd = cmd_args(common_args.command) # These compiler arguments can be passed in a response file. 
@@ -590,8 +581,6 @@ def _compile_module( ) # add each module dir prefix to search path - source_prefixes = dependency_modules.reduce("source_prefixes").keys() - for prefix in source_prefixes: compile_cmd.add( cmd_args( @@ -632,15 +621,12 @@ def _compile_module( } ) - src_prefix = source_prefix(module.source, module_name) - module_tset = ctx.actions.tset( CompiledModuleTSet, value = CompiledModuleInfo( abi = module.hash, interfaces = module.interfaces, db_deps = exposed_package_dbs, - src_prefix = src_prefix, ), children = [cross_package_modules] + this_package_modules, ) @@ -738,6 +724,7 @@ def compile( mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } module_tsets = {} + source_prefixes = get_source_prefixes(ctx.attrs.srcs, module_map) for module_name in post_order_traversal(graph): module_tsets[module_name] = _compile_module( @@ -756,6 +743,7 @@ def compile( artifact_suffix = artifact_suffix, direct_deps_by_name = direct_deps_by_name, toolchain_deps_by_name = toolchain_deps_by_name, + source_prefixes = source_prefixes, ) return [DynamicCompileResultInfo(modules = module_tsets)] diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 774e7e36d..57c4d72a2 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -89,11 +89,9 @@ load( "attr_deps_profiling_link_infos", "attr_deps_shared_library_infos", "get_artifact_suffix", - "is_haskell_src", "output_extensions", "src_to_module_name", - "srcs_to_pairs", - "source_prefix", + "get_source_prefixes", ) load( "@prelude//linking:link_groups.bzl", @@ -442,21 +440,9 @@ def _make_package( md = artifacts[md_file].read_json() module_map = md["module_mapping"] - modules = [] - source_prefixes = {} - for path, src in srcs_to_pairs(ctx.attrs.srcs): - # Don't expose boot sources, as they're only meant to be used for compiling. 
- if not is_haskell_src(path): - continue - - name = src_to_module_name(path) - prefix = source_prefix(src, module_map.get(name, name)) - source_prefixes[prefix] = None - - modules.append(src_to_module_name(path[len(prefix) + 1:])) - - source_prefixes = source_prefixes.keys() + source_prefixes = get_source_prefixes(ctx.attrs.srcs, module_map) + modules = md["module_graph"].keys() import_dirs = [mk_artifact_dir("mod", profiled, src_prefix) for profiled in profiling for src_prefix in source_prefixes] conf = [ diff --git a/haskell/util.bzl b/haskell/util.bzl index 875fd2eb0..6d43b09b8 100644 --- a/haskell/util.bzl +++ b/haskell/util.bzl @@ -172,7 +172,7 @@ def get_artifact_suffix(link_style: LinkStyle, enable_profiling: bool, suffix: s artifact_suffix += "-prof" return artifact_suffix + suffix -def source_prefix(source: Artifact, module_name: str) -> str: +def _source_prefix(source: Artifact, module_name: str) -> str: """Determine the directory prefix of the given artifact, considering that ghc has determined `module_name` for that file.""" source_path = paths.replace_extension(source.short_path, "") @@ -188,3 +188,17 @@ def source_prefix(source: Artifact, module_name: str) -> str: return "" + +def get_source_prefixes(srcs: list[Artifact], module_map: dict[str, str]) -> list[str]: + """Determine source prefixes for the given haskell files and a mapping from source file module name to module name.""" + source_prefixes = {} + for path, src in srcs_to_pairs(srcs): + if not is_haskell_src(path): + continue + + name = src_to_module_name(path) + real_name = module_map.get(name) + prefix = _source_prefix(src, real_name) if real_name else "" + source_prefixes[prefix] = None + + return source_prefixes.keys() From 1d1b83320b900138f41d89857ee208633eb67e91 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 21 Aug 2024 07:34:05 +0200 Subject: [PATCH 1051/1133] lint: unused assignment --- haskell/haskell.bzl | 11 ----------- 1 file changed, 11 deletions(-) diff --git 
a/haskell/haskell.bzl b/haskell/haskell.bzl index 57c4d72a2..697faed86 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -1265,12 +1265,6 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: def do_link(ctx, artifacts, resolved, outputs, output=output, objects=objects): link_cmd = link.copy() # link is already frozen, make a copy - if haskell_toolchain.packages: - pkg_deps = resolved[haskell_toolchain.packages.dynamic] - package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages - else: - package_db = [] - # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. packages_info = get_packages_info( @@ -1291,11 +1285,6 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link_cmd.hidden(packages_info.exposed_package_libs) - package_db_tset = ctx.actions.tset( - HaskellPackageDbTSet, - children = [package_db[name] for name in toolchain_libs if name in package_db] - ) - link_cmd.add("-o", outputs[output].as_output()) ctx.actions.run(link_cmd, category = "haskell_link") From e975fdb0ebfd451b49ba134e9f2fe3a87f87f2d6 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 21 Aug 2024 07:37:12 +0200 Subject: [PATCH 1052/1133] lint: unused parameters --- haskell/haskell.bzl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 697faed86..78068a7be 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -436,7 +436,7 @@ def _make_package( pkg_conf = ctx.actions.declare_output("pkg-" + artifact_suffix + ".conf") db = ctx.actions.declare_output("db-" + artifact_suffix, dir = True) - def write_package_conf(ctx, artifacts, resolved, outputs, md_file=md_file, libname=libname): + def write_package_conf(ctx, artifacts, _resolved, outputs, md_file=md_file, libname=libname): md = artifacts[md_file].read_json() module_map = md["module_mapping"] @@ -578,7 +578,7 @@ def _build_haskell_lib( if not object.extension.endswith("-boot") ] - def 
do_link(ctx, artifacts, resolved, outputs, lib=lib, objects=objects): + def do_link(ctx, _artifacts, resolved, outputs, lib=lib, objects=objects): pkg_deps = resolved[haskell_toolchain.packages.dynamic] package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages @@ -1262,7 +1262,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: else: link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) - def do_link(ctx, artifacts, resolved, outputs, output=output, objects=objects): + def do_link(ctx, _artifacts, resolved, outputs, output=output): link_cmd = link.copy() # link is already frozen, make a copy # Add -package-db and -package/-expose-package flags for each Haskell From 60346fa415422dd23542f218b19aeadb8a2bbc41 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 21 Aug 2024 07:38:14 +0200 Subject: [PATCH 1053/1133] Remove left-over comment --- haskell/compile.bzl | 42 ------------------------------------------ 1 file changed, 42 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 4a7677726..1ccdaae2b 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -680,48 +680,6 @@ def compile( graph = md["module_graph"] package_deps = md["package_deps"] - # boot_rev_deps = {} - # for module_name, boot_deps in md["boot_deps"].items(): - # for boot_dep in boot_deps: - # boot_rev_deps.setdefault(boot_dep + "-boot", []).append(module_name) - - # # TODO GHC --dep-json should integrate boot modules directly into the dependency graph. 
- # for module_name, module in modules.items(): - # if not module_name.endswith("-boot"): - # continue - - # # deduce the real name from the corresponding non-boot module - # non_boot_module_name = module_name[:-5] - # non_boot_module_name = module_map.get(non_boot_module_name, non_boot_module_name) - - # boot_module_name = non_boot_module_name + "-boot" - - # if module_name != boot_module_name: - # module_map[module_name] = boot_module_name - - # # Add boot modules to the module graph - # graph[boot_module_name] = [] - # # TODO GHC --dep-json should report boot module dependencies. - # # The following is a naive approximation of the boot module's dependencies, - # # taking the corresponding module's dependencies - # # minus those that depend on the boot module. - - # # Add module dependencies for the boot module - # graph[boot_module_name].extend([ - # dep - # for dep in graph[non_boot_module_name] - # if not dep in boot_rev_deps[boot_module_name] - # ]) - - # # Add package dependencies for the boot module - # package_deps[boot_module_name] = package_deps.get(non_boot_module_name, []) - - # for module_name, boot_deps in md["boot_deps"].items(): - # for boot_dep in boot_deps: - # graph.setdefault(module_name, []).append(boot_dep + "-boot") - - #>>>>>>> 8a06e69b (Handle source prefix for boot modules) - mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } module_tsets = {} source_prefixes = get_source_prefixes(ctx.attrs.srcs, module_map) From e5caf3cefeb9cd9f821cb46cd478b2a0162ae158 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 20 Aug 2024 11:24:14 +0200 Subject: [PATCH 1054/1133] Use a separate package db when gathering package metadata Using multiple source directories results in multiple entries in the `import-dirs` field of the package conf file. With multiple entries in that field, GHC tries to find all interface files for the exposed modules (see [1] and [2]). 
For the dependency analysis we do not want to depend on any interface files. To make this work we have to ensure there only is a single entry. But also, the multiple entries are needed for compilation (using the empty package dbs). That is why yet another package db is introduced here. [1]: https://gitlab.haskell.org/ghc/ghc/-/blob/3a145315052d6f66f9682ecff87b522011165d59/compiler/GHC/Unit/Finder.hs#L491-499 [2]: https://github.com/tweag/mercury-ghc-internal/issues/51 --- haskell/compile.bzl | 13 ++++++++++--- haskell/haskell.bzl | 30 +++++++++++++++++++++++++++--- haskell/library_info.bzl | 6 ++++++ 3 files changed, 43 insertions(+), 6 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 1ccdaae2b..e5b89301d 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -195,6 +195,7 @@ def target_metadata( specify_pkg_version = False, enable_profiling = False, use_empty_lib = True, + for_deps = True, resolved = resolved, ) package_flag = _package_flag(haskell_toolchain) @@ -263,6 +264,7 @@ def get_packages_info( specify_pkg_version: bool, enable_profiling: bool, use_empty_lib: bool, + for_deps: bool = False, resolved: None | dict[DynamicValue, ResolvedDynamicValue] = None) -> PackagesInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -282,9 +284,14 @@ def get_packages_info( for lib in libs.traverse(): exposed_package_libs.hidden(lib.libs) - packagedb_args = cmd_args(libs.project_as_args( - "empty_package_db" if use_empty_lib else "package_db", - )) + if for_deps: + package_db_projection = "deps_package_db" + elif use_empty_lib: + package_db_projection = "empty_package_db" + else: + package_db_projection = "package_db" + + packagedb_args = cmd_args(libs.project_as_args(package_db_projection)) if haskell_toolchain.packages and resolved != None: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 78068a7be..c181f654e 100644 --- 
a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -420,7 +420,8 @@ def _make_package( profiling: list[bool], enable_profiling: bool, use_empty_lib: bool, - md_file: Artifact) -> Artifact: + md_file: Artifact, + for_deps: bool = False) -> Artifact: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) def mk_artifact_dir(dir_prefix: str, profiled: bool, subdir: str = "") -> str: @@ -429,7 +430,10 @@ def _make_package( suffix = paths.join(suffix, subdir) return "\"${pkgroot}/" + dir_prefix + "-" + suffix + "\"" - if use_empty_lib: + if for_deps: + pkg_conf = ctx.actions.declare_output("pkg-" + artifact_suffix + "_deps.conf") + db = ctx.actions.declare_output("db-" + artifact_suffix + "_deps", dir = True) + elif use_empty_lib: pkg_conf = ctx.actions.declare_output("pkg-" + artifact_suffix + "_empty.conf") db = ctx.actions.declare_output("db-" + artifact_suffix + "_empty", dir = True) else: @@ -443,7 +447,13 @@ def _make_package( source_prefixes = get_source_prefixes(ctx.attrs.srcs, module_map) modules = md["module_graph"].keys() - import_dirs = [mk_artifact_dir("mod", profiled, src_prefix) for profiled in profiling for src_prefix in source_prefixes] + + # XXX use a single import dir when this package db is used for resolving dependencies with ghc -M, + # which works around an issue with multiple import dirs resulting in GHC trying to locate interface files + # for each exposed module + import_dirs = ["."] if for_deps else [ + mk_artifact_dir("mod", profiled, src_prefix) for profiled in profiling for src_prefix in source_prefixes + ] conf = [ "name: " + pkgname, @@ -702,11 +712,25 @@ def _build_haskell_lib( use_empty_lib = True, md_file = md_file, ) + deps_db = _make_package( + ctx, + link_style, + pkgname, + None, + uniq_infos, + import_artifacts.keys(), + enable_profiling = enable_profiling, + use_empty_lib = True, + md_file = md_file, + for_deps = True, + ) + hlib = HaskellLibraryInfo( name = pkgname, db = db, empty_db = empty_db, + deps_db = deps_db, 
id = pkgname, dynamic = dynamic, # TODO(ah) refine with dynamic projections import_dirs = import_artifacts, diff --git a/haskell/library_info.bzl b/haskell/library_info.bzl index b6672a5dc..5eb033a84 100644 --- a/haskell/library_info.bzl +++ b/haskell/library_info.bzl @@ -27,6 +27,8 @@ HaskellLibraryInfo = record( db = Artifact, # package config database, referring to the empty lib which is only used for compilation empty_db = Artifact, + # pacakge config database, used for ghc -M + deps_db = Artifact, # e.g. "base-4.13.0.0" id = str, # dynamic dependency information @@ -58,6 +60,9 @@ def _project_as_package_db(lib: HaskellLibraryInfo): def _project_as_empty_package_db(lib: HaskellLibraryInfo): return cmd_args(lib.empty_db) +def _project_as_deps_package_db(lib: HaskellLibraryInfo): + return cmd_args(lib.deps_db) + def _get_package_deps(children: list[list[str]], lib: HaskellLibraryInfo | None): flatted = flatten(children) if lib: @@ -68,6 +73,7 @@ HaskellLibraryInfoTSet = transitive_set( args_projections = { "package_db": _project_as_package_db, "empty_package_db": _project_as_empty_package_db, + "deps_package_db": _project_as_deps_package_db, }, reductions = { "packages": _get_package_deps, From e55bba1f25c60db2d264c0b33fa39d2c1783bf86 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 21 Aug 2024 09:14:23 -0700 Subject: [PATCH 1055/1133] Add srcs_envs. configure environment variable per source. (#28) For some cases where TH needs environment variable, the srcs_envs field is added, and user can set an env variable for a specific module. ghc_wrapper takes --extra-env-key and --extra-env-value to set up for GHC. Note that the value is attrs.arg() type so that Buck2 macro like $(location //:target) can be used. * Add srcs_envs. configure environment variable per source. 
* handle --extra-env-key and --extra-env-value * remove debug print * key-value match assertion Co-authored-by: Andreas Herrmann --- decls/haskell_common.bzl | 8 ++++++++ decls/haskell_rules.bzl | 1 + haskell/compile.bzl | 13 +++++++++++++ haskell/tools/ghc_wrapper.py | 25 ++++++++++++++++++++++++- 4 files changed, 46 insertions(+), 1 deletion(-) diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index e92dadf08..0fec72e9f 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ -62,6 +62,13 @@ def _external_tools_arg(): """), } +def _srcs_envs_arg(): + return { + "srcs_envs": attrs.dict(attrs.source(), attrs.list(attrs.tuple(attrs.string(), attrs.arg())), default = {}, doc = """ + Individual run-time env for each source compilation. +"""), + } + haskell_common = struct( srcs_arg = _srcs_arg, deps_arg = _deps_arg, @@ -69,4 +76,5 @@ haskell_common = struct( exported_linker_flags_arg = _exported_linker_flags_arg, scripts_arg = _scripts_arg, external_tools_arg = _external_tools_arg, + srcs_envs_arg = _srcs_envs_arg, ) diff --git a/decls/haskell_rules.bzl b/decls/haskell_rules.bzl index e9ba7e9c0..31dbdf65e 100644 --- a/decls/haskell_rules.bzl +++ b/decls/haskell_rules.bzl @@ -166,6 +166,7 @@ haskell_library = prelude_rule( # @unsorted-dict-items haskell_common.srcs_arg() | haskell_common.external_tools_arg() | + haskell_common.srcs_envs_arg() | haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | haskell_common.scripts_arg() | diff --git a/haskell/compile.bzl b/haskell/compile.bzl index e5b89301d..65d156770 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -546,6 +546,19 @@ def _compile_module( if aux_deps: compile_args_for_file.hidden(aux_deps) + src_envs = ctx.attrs.srcs_envs.get(module.source) + if src_envs: + for k, v in src_envs: + compile_args_for_file.add(cmd_args( + k, + format="--extra-env-key={}", + )) + compile_args_for_file.add(cmd_args( + v, + format="--extra-env-value={}", + )) + + if 
haskell_toolchain.use_argsfile: argsfile = ctx.actions.declare_output( "haskell_compile_" + artifact_suffix + ".argsfile", diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 05f077f78..241fa991b 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -52,7 +52,6 @@ def main(): default=[], help="Add given path to PATH.", ) - parser.add_argument( "--bin-exe", type=Path, @@ -60,6 +59,20 @@ def main(): default=[], help="Add given exe (more specific than bin-path)", ) + parser.add_argument( + "--extra-env-key", + type=str, + action="append", + default=[], + help="Extra environment variable name", + ) + parser.add_argument( + "--extra-env-value", + type=str, + action="append", + default=[], + help="Extra environment variable value", + ) args, ghc_args = parser.parse_known_args() @@ -70,6 +83,16 @@ def main(): path = env.get("PATH", "") env["PATH"] = os.pathsep.join([path] + aux_paths) + extra_env_keys = [str(k) for k in args.extra_env_key] + extra_env_values = [str(v) for v in args.extra_env_value] + assert len(extra_env_keys) == len(extra_env_values), "number of --extra-env-key and --extra-env-value flags must match" + n_extra_env = len(extra_env_keys) + if n_extra_env > 0: + for i in range(0, n_extra_env): + k = extra_env_keys[i] + v = extra_env_values[i] + env[k] = v + # Note, Buck2 swallows stdout on successful builds. # Redirect to stderr to avoid this. returncode = subprocess.call(cmd, env=env, stdout=sys.stderr.buffer) From 2719e2fdda1d0a9aad981b1672c8db1f47060943 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 22 Aug 2024 10:14:10 +0200 Subject: [PATCH 1056/1133] Add hidden inputs when constructing cmd_args The API changed, there no longer is a `.hidden()` method on the `cmd_args` type. 
--- haskell/compile.bzl | 54 ++++++++++++++++++++++----------------------- 1 file changed, 27 insertions(+), 27 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 4a7677726..098204039 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -276,11 +276,11 @@ def get_packages_info( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) - exposed_package_libs = cmd_args() - exposed_package_args = cmd_args([package_flag, "base"]) - for lib in libs.traverse(): - exposed_package_libs.hidden(lib.libs) + hidden_args = [l for lib in libs.traverse() for l in lib.libs] + + exposed_package_libs = cmd_args() + exposed_package_args = cmd_args([package_flag, "base"], hidden = hidden_args) packagedb_args = cmd_args(libs.project_as_args( "empty_package_db" if use_empty_lib else "package_db", @@ -367,7 +367,10 @@ def _common_compile_module_args( if enable_haddock: command.add("-haddock") - args_for_file = cmd_args() + if non_haskell_sources: + warning("{} specifies non-haskell file in `srcs`, consider using `srcs_deps` instead".format(ctx.label)) + + args_for_file = cmd_args(hidden = non_haskell_sources) args_for_file.add("-no-link", "-i") args_for_file.add("-hide-all-packages") @@ -389,11 +392,6 @@ def _common_compile_module_args( if not is_haskell_src(path) and not is_haskell_boot(path) ] - if non_haskell_sources: - warning("{} specifies non-haskell file in `srcs`, consider using `srcs_deps` instead".format(ctx.label)) - - args_for_file.hidden(non_haskell_sources) - # Add args from preprocess-able inputs. 
inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) @@ -491,9 +489,10 @@ def _compile_module( toolchain_deps_by_name: dict[str, None], source_prefixes: list[str], ) -> CompiledModuleTSet: - compile_cmd = cmd_args(common_args.command) + aux_deps = ctx.attrs.srcs_deps.get(module.source) + # These compiler arguments can be passed in a response file. - compile_args_for_file = cmd_args(common_args.args_for_file) + compile_args_for_file = cmd_args(common_args.args_for_file, hidden = aux_deps) haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -535,9 +534,21 @@ def _compile_module( compile_args_for_file.add(module.source) - aux_deps = ctx.attrs.srcs_deps.get(module.source) - if aux_deps: - compile_args_for_file.hidden(aux_deps) + abi_tag = ctx.actions.artifact_tag() + + dependency_modules = ctx.actions.tset( + CompiledModuleTSet, + children = [cross_package_modules] + this_package_modules, + ) + + compile_cmd = cmd_args( + common_args.command, + hidden = [ + compile_args_for_file, + abi_tag.tag_artifacts(dependency_modules.project_as_args("interfaces")), + dependency_modules.project_as_args("abi"), + ] + ) if haskell_toolchain.use_argsfile: argsfile = ctx.actions.declare_output( @@ -545,7 +556,6 @@ def _compile_module( ) ctx.actions.write(argsfile.as_output(), compile_args_for_file, allow_args = True) compile_cmd.add(cmd_args(argsfile, format = "@{}")) - compile_cmd.hidden(compile_args_for_file) else: compile_cmd.add(compile_args_for_file) @@ -575,16 +585,11 @@ def _compile_module( for dep_name in graph[module_name] ] - dependency_modules = ctx.actions.tset( - CompiledModuleTSet, - children = [cross_package_modules] + this_package_modules, - ) - # add each module dir prefix to search path for prefix in source_prefixes: compile_cmd.add( cmd_args( - cmd_args(md_file, format = "-i{}", ignore_artifacts=True).parent(), + cmd_args(md_file, format = "-i{}", ignore_artifacts=True, 
parent=1), "/", paths.join(module.prefix_dir, prefix), delimiter="" @@ -595,10 +600,6 @@ def _compile_module( compile_cmd.add(cmd_args(library_deps, prepend = "-package")) compile_cmd.add(cmd_args(toolchain_deps, prepend = "-package")) - abi_tag = ctx.actions.artifact_tag() - - compile_cmd.hidden( - abi_tag.tag_artifacts(dependency_modules.project_as_args("interfaces"))) compile_cmd.add("-fbyte-code-and-object-code") if enable_th: compile_cmd.add("-fprefer-byte-code") @@ -611,7 +612,6 @@ def _compile_module( compile_cmd.add("--buck2-dep", tagged_dep_file) compile_cmd.add("--abi-out", outputs[module.hash].as_output()) - compile_cmd.hidden(dependency_modules.project_as_args("abi")) ctx.actions.run( compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, From 35a1c856ee03e709f952e1f3e15551b869ca2234 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 22 Aug 2024 10:15:54 +0200 Subject: [PATCH 1057/1133] Adapt to new dynamic actions API --- haskell/compile.bzl | 105 ++++++++++++++++++++++++-------------------- 1 file changed, 57 insertions(+), 48 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 098204039..cf0545e95 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -160,6 +160,47 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl return modules +def _dynamic_target_metadata_impl(actions, artifacts, dynamic_values, outputs, arg): + # Add -package-db and -package/-expose-package flags for each Haskell + # library dependency. 
+ + packages_info = get_packages_info( + actions, + LinkStyle("shared"), + specify_pkg_version = False, + enable_profiling = False, + use_empty_lib = True, + resolved = dynamic_values, + ) + package_flag = _package_flag(arg.haskell_toolchain) + ghc_args = cmd_args() + ghc_args.add("-hide-all-packages") + ghc_args.add(package_flag, "base") + + ghc_args.add(cmd_args(arg.toolchain_libs, prepend=package_flag)) + ghc_args.add(cmd_args(packages_info.exposed_package_args)) + ghc_args.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) + ghc_args.add(arg.compiler_flags) + + md_args = cmd_args(arg.md_gen) + md_args.add(packages_info.bin_paths) + md_args.add("--ghc", arg.haskell_toolchain.compiler) + md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) + md_args.add( + "--source-prefix", + arg.strip_prefix, + ) + md_args.add(cmd_args(arg.sources, format="--source={}")) + + md_args.add( + _attr_deps_haskell_lib_package_name_and_prefix(arg.ctx), + ) + md_args.add("--output", outputs[arg.md_file].as_output()) + + actions.run(md_args, category = "haskell_metadata", identifier = arg.suffix if arg.suffix else None) + +_dynamic_target_metadata = dynamic_actions(impl = _dynamic_target_metadata_impl) + def target_metadata( ctx: AnalysisContext, *, @@ -184,53 +225,21 @@ def target_metadata( # # (module X.Y.Z must be defined in a file at X/Y/Z.hs) - def get_metadata(ctx, _artifacts, resolved, outputs): - - # Add -package-db and -package/-expose-package flags for each Haskell - # library dependency. 
- - packages_info = get_packages_info( - ctx, - LinkStyle("shared"), - specify_pkg_version = False, - enable_profiling = False, - use_empty_lib = True, - resolved = resolved, - ) - package_flag = _package_flag(haskell_toolchain) - ghc_args = cmd_args() - ghc_args.add("-hide-all-packages") - ghc_args.add(package_flag, "base") - - ghc_args.add(cmd_args(toolchain_libs, prepend=package_flag)) - ghc_args.add(cmd_args(packages_info.exposed_package_args)) - ghc_args.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) - ghc_args.add(ctx.attrs.compiler_flags) - - md_args = cmd_args(md_gen) - md_args.add(packages_info.bin_paths) - md_args.add("--ghc", haskell_toolchain.compiler) - md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) - md_args.add( - "--source-prefix", - _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)), - ) - md_args.add(cmd_args(sources, format="--source={}")) - - md_args.add( - _attr_deps_haskell_lib_package_name_and_prefix(ctx), - ) - md_args.add("--output", outputs[md_file].as_output()) - - ctx.actions.run(md_args, category = "haskell_metadata", identifier = suffix if suffix else None) - - ctx.actions.dynamic_output( - dynamic = [], - promises = [haskell_toolchain.packages.dynamic] if haskell_toolchain.packages else [], - inputs = [], + ctx.actions.dynamic_output_new(_dynamic_target_metadata( + dynamic_values = [haskell_toolchain.packages.dynamic] if haskell_toolchain.packages else [], outputs = [md_file.as_output()], - f = get_metadata, - ) + arg = struct( + compiler_flags = ctx.attrs.compiler_flags, + ctx = ctx, + haskell_toolchain = haskell_toolchain, + md_file = md_file, + md_gen = md_gen, + strip_prefix = _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)), + sources = sources, + suffix = suffix, + toolchain_libs = toolchain_libs, + ), + )) return md_file @@ -289,7 +298,7 @@ def get_packages_info( if haskell_toolchain.packages and resolved != None: haskell_toolchain = 
ctx.attrs._haskell_toolchain[HaskellToolchainInfo] pkg_deps = resolved[haskell_toolchain.packages.dynamic] - package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages + package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages direct_toolchain_libs = [ dep[HaskellToolchainLibrary].name @@ -415,7 +424,7 @@ def _common_compile_module_args( if haskell_toolchain.packages: pkg_deps = resolved[haskell_toolchain.packages.dynamic] - package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages + package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages else: package_db = [] From 947d80c6691cc23b1b25ba252cfe652cd83b1d66 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 22 Aug 2024 10:20:17 +0200 Subject: [PATCH 1058/1133] WIP adapt to dynamic actions API --- haskell/compile.bzl | 180 ++++++++++++++++++-------------------------- 1 file changed, 75 insertions(+), 105 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index cf0545e95..7e76d2336 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -642,6 +642,70 @@ def _compile_module( return module_tset +def _dynamic_do_compile_impl(actions, artifacts, dynamic_values, outputs, arg): + #def do_compile(ctx, artifacts, resolved, outputs, md_file=md_file, modules=modules): + # Collect library dependencies. Note that these don't need to be in a + # particular order. 
+ toolchain_deps_by_name = { + lib.name: None + for lib in attr_deps_haskell_toolchain_libraries(arg.ctx) + } + direct_deps_info = [ + lib.prof_info[arg.link_style] if arg.enable_profiling else lib.info[arg.link_style] + for lib in attr_deps_haskell_link_infos(arg.ctx) + ] + direct_deps_by_name = { + info.value.name: struct( + package_db = info.value.empty_db, + modules = dynamic_values[info.value.dynamic[arg.enable_profiling]][DynamicCompileResultInfo].modules, + ) + for info in direct_deps_info + } + common_args = _common_compile_module_args( + arg.ctx, + resolved = dynamic_values, + enable_haddock = arg.enable_haddock, + enable_profiling = arg.enable_profiling, + link_style = arg.link_style, + direct_deps_info = direct_deps_info, + pkgname = arg.pkgname, + ) + + md = artifacts[arg.md_file].read_json() + th_modules = md["th_modules"] + module_map = md["module_mapping"] + graph = md["module_graph"] + package_deps = md["package_deps"] + + mapped_modules = { module_map.get(k, k): v for k, v in arg.modules.items() } + module_tsets = {} + source_prefixes = get_source_prefixes(arg.ctx.attrs.srcs, module_map) + + for module_name in post_order_traversal(graph): + module_tsets[module_name] = _compile_module( + arg.ctx, + common_args = common_args, + link_style = arg.link_style, + enable_profiling = arg.enable_profiling, + enable_th = module_name in th_modules, + module_name = module_name, + module = mapped_modules[module_name], + module_tsets = module_tsets, + graph = graph, + package_deps = package_deps.get(module_name, {}), + outputs = outputs, + md_file = arg.md_file, + artifact_suffix = arg.artifact_suffix, + direct_deps_by_name = direct_deps_by_name, + toolchain_deps_by_name = toolchain_deps_by_name, + source_prefixes = source_prefixes, + ) + + return [DynamicCompileResultInfo(modules = module_tsets)] + + + +_dynamic_do_compile = dynamic_actions(impl = _dynamic_do_compile_impl) # Compile all the context's sources. 
def compile( @@ -655,108 +719,6 @@ def compile( modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) - def do_compile(ctx, artifacts, resolved, outputs, md_file=md_file, modules=modules): - # Collect library dependencies. Note that these don't need to be in a - # particular order. - toolchain_deps_by_name = { - lib.name: None - for lib in attr_deps_haskell_toolchain_libraries(ctx) - } - direct_deps_info = [ - lib.prof_info[link_style] if enable_profiling else lib.info[link_style] - for lib in attr_deps_haskell_link_infos(ctx) - ] - direct_deps_by_name = { - info.value.name: struct( - package_db = info.value.empty_db, - modules = resolved[info.value.dynamic[enable_profiling]][DynamicCompileResultInfo].modules, - ) - for info in direct_deps_info - } - common_args = _common_compile_module_args( - ctx, - resolved = resolved, - enable_haddock = enable_haddock, - enable_profiling = enable_profiling, - link_style = link_style, - direct_deps_info = direct_deps_info, - pkgname = pkgname, - ) - - md = artifacts[md_file].read_json() - th_modules = md["th_modules"] - module_map = md["module_mapping"] - graph = md["module_graph"] - package_deps = md["package_deps"] - - # boot_rev_deps = {} - # for module_name, boot_deps in md["boot_deps"].items(): - # for boot_dep in boot_deps: - # boot_rev_deps.setdefault(boot_dep + "-boot", []).append(module_name) - - # # TODO GHC --dep-json should integrate boot modules directly into the dependency graph. 
- # for module_name, module in modules.items(): - # if not module_name.endswith("-boot"): - # continue - - # # deduce the real name from the corresponding non-boot module - # non_boot_module_name = module_name[:-5] - # non_boot_module_name = module_map.get(non_boot_module_name, non_boot_module_name) - - # boot_module_name = non_boot_module_name + "-boot" - - # if module_name != boot_module_name: - # module_map[module_name] = boot_module_name - - # # Add boot modules to the module graph - # graph[boot_module_name] = [] - # # TODO GHC --dep-json should report boot module dependencies. - # # The following is a naive approximation of the boot module's dependencies, - # # taking the corresponding module's dependencies - # # minus those that depend on the boot module. - - # # Add module dependencies for the boot module - # graph[boot_module_name].extend([ - # dep - # for dep in graph[non_boot_module_name] - # if not dep in boot_rev_deps[boot_module_name] - # ]) - - # # Add package dependencies for the boot module - # package_deps[boot_module_name] = package_deps.get(non_boot_module_name, []) - - # for module_name, boot_deps in md["boot_deps"].items(): - # for boot_dep in boot_deps: - # graph.setdefault(module_name, []).append(boot_dep + "-boot") - - #>>>>>>> 8a06e69b (Handle source prefix for boot modules) - - mapped_modules = { module_map.get(k, k): v for k, v in modules.items() } - module_tsets = {} - source_prefixes = get_source_prefixes(ctx.attrs.srcs, module_map) - - for module_name in post_order_traversal(graph): - module_tsets[module_name] = _compile_module( - ctx, - common_args = common_args, - link_style = link_style, - enable_profiling = enable_profiling, - enable_th = module_name in th_modules, - module_name = module_name, - module = mapped_modules[module_name], - module_tsets = module_tsets, - graph = graph, - package_deps = package_deps.get(module_name, {}), - outputs = outputs, - md_file=md_file, - artifact_suffix = artifact_suffix, - direct_deps_by_name = 
direct_deps_by_name, - toolchain_deps_by_name = toolchain_deps_by_name, - source_prefixes = source_prefixes, - ) - - return [DynamicCompileResultInfo(modules = module_tsets)] - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] interfaces = [interface for module in modules.values() for interface in module.interfaces] @@ -768,9 +730,9 @@ def compile( ] abi_hashes = [module.hash for module in modules.values()] - dyn_module_tsets = ctx.actions.dynamic_output( + dyn_module_tsets = ctx.actions.dynamic_output_new(_dynamic_do_compile( dynamic = [md_file], - promises = [ + dynamic_values = [ info.value.dynamic[enable_profiling] for lib in attr_deps_haskell_link_infos(ctx) for info in [ @@ -781,7 +743,15 @@ def compile( ] + ([ haskell_toolchain.packages.dynamic ] if haskell_toolchain.packages else [ ]), inputs = ctx.attrs.srcs, outputs = [o.as_output() for o in interfaces + objects + stub_dirs + abi_hashes], - f = do_compile) + arg = struct( + artifact_suffix = artifact_suffix, + enable_profiling = enable_profiling, + link_style = link_style, + md_file = md_file, + modules = modules, + pkgname = pkgname, + ), + )) stubs_dir = ctx.actions.declare_output("stubs-" + artifact_suffix, dir=True) From dcd1c3ba8f324e032d637ca88b59041086843e65 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Tue, 16 Apr 2024 10:53:29 -0700 Subject: [PATCH 1059/1133] Make shared lib soname opaque (take 2) Summary: Use a new, opaque type for the`soname` field on `SharedLibrary`, which can hold either a static `str` or an `Artifact` that contains a build-time generated SONAME, and updated users that require a static SONAME to enumerate this clearly in the code. This will allow migrating some rules to support SONAMEs which aren't known at analysis time (see D55000520). 
Reviewed By: dtolnay Differential Revision: D55369940 fbshipit-source-id: eaa97bdf4483af1d97634807429f2783e534a07e (cherry picked from commit 854ed17ffa80c63bf584f190c9e40444cd60baa0) --- haskell/haskell_ghci.bzl | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/haskell/haskell_ghci.bzl b/haskell/haskell_ghci.bzl index 4bb8ada36..418e67b41 100644 --- a/haskell/haskell_ghci.bzl +++ b/haskell/haskell_ghci.bzl @@ -51,8 +51,9 @@ load( load( "@prelude//linking:shared_libraries.bzl", "SharedLibraryInfo", + "create_shlib_symlink_tree", "traverse_shared_library_info", - "with_unique_sonames", + "with_unique_str_sonames", ) load("@prelude//linking:types.bzl", "Linkage") load( @@ -274,7 +275,7 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: # Handle third-party dependencies of the omnibus SO tp_deps_shared_link_infos = {} - so_symlinks = {} + prebuilt_shlibs = [] for node_label in prebuilt_so_deps.keys(): node = graph_nodes[node_label] @@ -288,14 +289,14 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: shared_li = node.link_infos.get(output_style, None) if shared_li != None: tp_deps_shared_link_infos[node_label] = shared_li.default - for shlib in node.shared_libs.libraries: - so_symlinks[shlib.soname] = shlib.lib.output + prebuilt_shlibs.extend(node.shared_libs.libraries) # Create symlinks to the TP dependencies' SOs so_symlinks_root_path = ctx.label.name + ".so-symlinks" - so_symlinks_root = ctx.actions.symlinked_dir( - so_symlinks_root_path, - so_symlinks, + so_symlinks_root = create_shlib_symlink_tree( + actions = ctx.actions, + out = so_symlinks_root_path, + shared_libs = prebuilt_shlibs, ) linker_info = get_cxx_toolchain_info(ctx).linker_info @@ -485,7 +486,7 @@ def _build_preload_deps_root( shlib = traverse_shared_library_info(slib_info) - for soname, shared_lib in with_unique_sonames(shlib).items(): + for soname, shared_lib in with_unique_str_sonames(shlib).items(): 
preload_symlinks[soname] = shared_lib.lib.output # TODO(T150785851): build or get SO for direct preload_deps From c1d12e612a693dc5d945ac3d361bd0ea4212be17 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 22 Aug 2024 12:13:29 +0200 Subject: [PATCH 1060/1133] hidden --- haskell/haskell_ghci.bzl | 2 +- haskell/haskell_haddock.bzl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/haskell/haskell_ghci.bzl b/haskell/haskell_ghci.bzl index 418e67b41..fd8d1f874 100644 --- a/haskell/haskell_ghci.bzl +++ b/haskell/haskell_ghci.bzl @@ -727,7 +727,7 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: "__{}__".format(ctx.label.name), output_artifacts, ) - run = cmd_args(final_ghci_script, hidden=ctx.attrs.ghci_bin_dep.get(RunInfo) or []).hidden(outputs) + run = cmd_args(final_ghci_script, hidden=(ctx.attrs.ghci_bin_dep.get(RunInfo) or []) + outputs) return [ DefaultInfo(default_outputs = [root_output_dir]), diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index b225f954b..a363745c0 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -252,7 +252,7 @@ def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: ) ctx.actions.run( - cmd_args(script).hidden(script_args), + cmd_args(script, hidden = script_args), category = "haskell_haddock", no_outputs_cleanup = True, ) From 9bbe2bf12c04cc4975ba2aad22a78c17d0343db0 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 22 Aug 2024 12:14:25 +0200 Subject: [PATCH 1061/1133] Use original dynamic_actions API --- haskell/haskell.bzl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 61c69e32e..85d3383f2 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -436,7 +436,7 @@ def _make_package( pkg_conf = ctx.actions.declare_output("pkg-" + artifact_suffix + ".conf") db = ctx.actions.declare_output("db-" + artifact_suffix, dir = True) - def write_package_conf(ctx, artifacts, 
resolved, outputs, md_file=md_file, libname=libname): + def write_package_conf(ctx, artifacts, outputs, md_file=md_file, libname=libname): md = artifacts[md_file].read_json() module_map = md["module_mapping"] @@ -498,7 +498,6 @@ def _make_package( ctx.actions.dynamic_output( dynamic = [md_file], - promises = [], inputs = [], outputs = [pkg_conf.as_output(), db.as_output()], f = write_package_conf From 0e9d6190c89ac796a9038c0e37bcad9bda30875b Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 22 Aug 2024 13:33:35 +0200 Subject: [PATCH 1062/1133] Adapt to new dynamic actions API --- haskell/haskell_haddock.bzl | 88 ++++++++++++++++++++----------------- 1 file changed, 47 insertions(+), 41 deletions(-) diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index a363745c0..54ca6b13c 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -110,6 +110,44 @@ def _haddock_dump_interface( children = this_package_modules, ) +def _dynamic_haddock_dump_interfaces_impl(actions, artifacts, dynamic_values, outputs, arg): + md = artifacts[arg.md_file].read_json() + module_map = md["module_mapping"] + graph = md["module_graph"] + package_deps = md["package_deps"] + + dynamic_info_lib = {} + + for lib in arg.direct_deps_link_info: + info = lib.info[arg.link_style] + direct = info.value + dynamic = direct.dynamic[False] + dynamic_info = dynamic_values[dynamic].providers[DynamicCompileResultInfo] + + dynamic_info_lib[direct.name] = dynamic_info + + haddock_infos = { module_map.get(k, k): v for k, v in haddock_infos.items() } + module_tsets = {} + + for module_name in post_order_traversal(graph): + module_deps = [ + info.modules[mod] + for lib, info in dynamic_info_lib.items() + for mod in package_deps.get(module_name, {}).get(lib, []) + ] + + module_tsets[module_name] = _haddock_dump_interface( + actions, + arg.dyn_cmd.copy(), + module_name = module_name, + module_tsets = module_tsets, + haddock_info = haddock_infos[module_name], + 
module_deps = module_deps, + graph = graph, + outputs = outputs, + ) + +_dynamic_haddock_dump_interfaces = dynamic_actions(impl = _dynamic_haddock_dump_interfaces_impl) def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileResultInfo, md_file: Artifact) -> HaskellHaddockInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -147,46 +185,9 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes direct_deps_link_info = attr_deps_haskell_link_infos(ctx) - def haddock_dump_interfaces(ctx, artifacts, resolved, outputs, md_file=md_file, dyn_cmd=cmd.copy(), haddock_infos=haddock_infos): - md = artifacts[md_file].read_json() - module_map = md["module_mapping"] - graph = md["module_graph"] - package_deps = md["package_deps"] - - dynamic_info_lib = {} - - for lib in direct_deps_link_info: - info = lib.info[link_style] - direct = info.value - dynamic = direct.dynamic[False] - dynamic_info = resolved[dynamic][DynamicCompileResultInfo] - - dynamic_info_lib[direct.name] = dynamic_info - - haddock_infos = { module_map.get(k, k): v for k, v in haddock_infos.items() } - module_tsets = {} - - for module_name in post_order_traversal(graph): - module_deps = [ - info.modules[mod] - for lib, info in dynamic_info_lib.items() - for mod in package_deps.get(module_name, {}).get(lib, []) - ] - - module_tsets[module_name] = _haddock_dump_interface( - ctx, - dyn_cmd.copy(), - module_name = module_name, - module_tsets = module_tsets, - haddock_info = haddock_infos[module_name], - module_deps = module_deps, - graph = graph, - outputs = outputs, - ) - - ctx.actions.dynamic_output( + ctx.actions.dynamic_output_new(_dynamic_haddock_dump_interfaces( dynamic = [md_file], - promises = [ + dynamic_values = [ info.value.dynamic[False] for lib in direct_deps_link_info for info in [ @@ -197,8 +198,13 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes ], inputs = compiled.hi, outputs = 
[output.as_output() for info in haddock_infos.values() for output in [info.haddock, info.html]], - f = haddock_dump_interfaces, - ) + arg = struct( + md_file = md_file, + direct_deps_link_info = direct_deps_link_info, + dyn_cmd = cmd.copy(), + haddock_infos = haddock_infos + ), + )) return HaskellHaddockInfo( interfaces = [i.haddock for i in haddock_infos.values()], From 9e8d097d83e5a648128a6eff04f2356366d9eb61 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 22 Aug 2024 13:37:18 +0200 Subject: [PATCH 1063/1133] Adapt to renaming breadth_first_traversal -> depth_first_traversal See commit f4887a31e96da840b3fa2371c27511aead967ea9 --- haskell/haskell_ghci.bzl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/haskell/haskell_ghci.bzl b/haskell/haskell_ghci.bzl index fd8d1f874..2535106da 100644 --- a/haskell/haskell_ghci.bzl +++ b/haskell/haskell_ghci.bzl @@ -62,8 +62,8 @@ load( ) load( "@prelude//utils:graph_utils.bzl", - "breadth_first_traversal", - "breadth_first_traversal_by", + "depth_first_traversal", + "depth_first_traversal_by", ) load("@prelude//utils:utils.bzl", "flatten") @@ -187,7 +187,7 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: dep_graph[ctx.label] = all_direct_deps # Need to exclude all transitive deps of excluded deps - all_nodes_to_exclude = breadth_first_traversal( + all_nodes_to_exclude = depth_first_traversal( dep_graph, [dep.label for dep in preload_deps], ) @@ -232,7 +232,7 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: # This is not the final set of body nodes, because it still includes # nodes that don't support omnibus (e.g. 
haskell_library nodes) - breadth_first_traversal_by( + depth_first_traversal_by( dep_graph, [ctx.label], find_deps_for_body, From ec9edfe85fad5911ee4d65e4ad83093c43f1d496 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 22 Aug 2024 14:17:45 +0200 Subject: [PATCH 1064/1133] Adapt to hidden API change --- haskell/haskell.bzl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 85d3383f2..1a8153474 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -1258,7 +1258,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link.add(cmd_args(db, prepend = "-package-db")) link.add("-package", pkgname) - link.hidden(linkable_artifacts) + link.add(cmd_args(hidden = linkable_artifacts)) else: link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) @@ -1289,7 +1289,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link_cmd.add(haskell_toolchain.linker_flags) link_cmd.add(ctx.attrs.linker_flags) - link_cmd.hidden(packages_info.exposed_package_libs) + link_cmd.add(cmd_args(hidden = packages_info.exposed_package_libs)) package_db_tset = ctx.actions.tset( HaskellPackageDbTSet, @@ -1308,8 +1308,6 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: f = do_link, ) - run = cmd_args(output) - if link_style == LinkStyle("shared") or link_group_info != None: sos_dir = "__{}__shared_libs_symlink_tree".format(ctx.attrs.name) rpath_ref = get_rpath_origin(get_cxx_toolchain_info(ctx).linker_info.type) @@ -1320,7 +1318,9 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: out = sos_dir, shared_libs = sos, ) - run.hidden(symlink_dir) + run = cmd_args(output, hidden = symlink_dir) + else: + run = cmd_args(output) sub_targets = {} sub_targets.update(_haskell_module_sub_targets( From f5ff27dd3672c1fbbb767b98c8849022275fecf9 Mon Sep 17 00:00:00 2001 From: Andrew Gallagher Date: Thu, 20 Jun 2024 12:44:39 -0700 Subject: [PATCH 1065/1133] 
Support `exported_post_linker_flags` in `prebuilt_haskell_library` Summary: As per other rules, support linker flags to put at the end of the linkable Reviewed By: shayne-fletcher Differential Revision: D58766267 fbshipit-source-id: 6663c4b8e8ae01aac313a08b83d706289824eaff (cherry picked from commit 96aafd3f88d46265fe525c6d49792503c868c11a) --- haskell/haskell.bzl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 1a8153474..7abe7a204 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -293,12 +293,14 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: link_infos[link_style] = LinkInfos( default = LinkInfo( pre_flags = ctx.attrs.exported_linker_flags, + post_flags = ctx.attrs.exported_post_linker_flags, linkables = linkables, ), ) prof_link_infos[link_style] = LinkInfos( default = LinkInfo( pre_flags = ctx.attrs.exported_linker_flags, + post_flags = ctx.attrs.exported_post_linker_flags, linkables = prof_linkables, ), ) From 7f585d623866b5894a95debec2016c6614c62096 Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Mon, 19 Aug 2024 13:13:08 -0700 Subject: [PATCH 1066/1133] Propagate link strategy to link groups linking Summary: This was hardcoded to `static_pic`. For proper dynamic linking setup with link groups we want to derive strategy from binary configuration. 
Reviewed By: capickett Differential Revision: D58787721 fbshipit-source-id: de5a9a30ab91a2f26c9b6286af8fb0a4a5797abd (cherry picked from commit ab268f0700d12183120a90a3128be807938ddc7f) --- haskell/haskell.bzl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 7abe7a204..9c704baa9 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -1112,6 +1112,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: sos = [] + link_strategy = to_link_strategy(link_style) if link_group_info != None: own_binary_link_flags = [] auto_link_groups = {} @@ -1134,6 +1135,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: if auto_link_group_specs != None: linked_link_groups = create_link_groups( ctx = ctx, + link_strategy = link_strategy, link_group_mappings = link_group_info.mappings, link_group_preferred_linkage = link_group_preferred_linkage, executable_deps = executable_deps, @@ -1172,7 +1174,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: name: (lib.label, lib.shared_link_infos) for name, lib in link_group_libs.items() }, - link_strategy = to_link_strategy(link_style), + link_strategy = link_strategy, roots = ( [ d.linkable_graph.nodes.value.label From 1057d7053447cbb9ebd1e8180a0215570b845f1b Mon Sep 17 00:00:00 2001 From: Nikita Patskov Date: Mon, 19 Aug 2024 06:08:34 -0700 Subject: [PATCH 1067/1133] Added record for filtered labels to links Summary: This record will be extended in further diffs with targets consumed by link group. 
Reviewed By: artempyanykh Differential Revision: D60959583 fbshipit-source-id: f357012361b1c153cb391907fddca580a0ce5cd6 (cherry picked from commit 1de13458a7e49535904f1ab542ae673d741115ab) --- haskell/haskell.bzl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 9c704baa9..745ae3042 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -1163,7 +1163,7 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: roots = get_dedupped_roots_from_groups(link_group_info.groups.values()), ) - labels_to_links_map = get_filtered_labels_to_links_map( + labels_to_links = get_filtered_labels_to_links_map( public_nodes = public_nodes, linkable_graph_node_map = linkable_graph_node_map, link_group = None, @@ -1206,14 +1206,14 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: pre_flags = own_binary_link_flags, ), ) - link_infos.extend(get_filtered_links(labels_to_links_map, set(public_nodes))) + link_infos.extend(get_filtered_links(labels_to_links.map, set(public_nodes))) infos = LinkArgs(infos = link_infos) link_group_ctx = LinkGroupContext( link_group_mappings = link_group_info.mappings, link_group_libs = link_group_libs, link_group_preferred_linkage = link_group_preferred_linkage, - labels_to_links_map = labels_to_links_map, + labels_to_links_map = labels_to_links.map, targets_consumed_by_link_groups = {}, ) From 485ec2b0c4d613c47cc9d7e096fa4720ef0e9012 Mon Sep 17 00:00:00 2001 From: Andreas Herrmann Date: Thu, 22 Aug 2024 16:30:59 +0200 Subject: [PATCH 1068/1133] Do not expose boot modules Filter out boot modules when generating the exposed-modules entry in the package database. 
--- haskell/haskell.bzl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index c181f654e..8fd3e5d59 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -446,7 +446,11 @@ def _make_package( source_prefixes = get_source_prefixes(ctx.attrs.srcs, module_map) - modules = md["module_graph"].keys() + modules = [ + module + for module in md["module_graph"].keys() + if not module.endswith("-boot") + ] # XXX use a single import dir when this package db is used for resolving dependencies with ghc -M, # which works around an issue with multiple import dirs resulting in GHC trying to locate interface files From a19fe16439c7816fc8fb914eb8cb7fd7d5a7f9d1 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Thu, 22 Aug 2024 09:18:40 -0700 Subject: [PATCH 1069/1133] add missing srcs_envs in haskell_binary (#32) haskell_binary should have the same attribute. --- decls/haskell_rules.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/decls/haskell_rules.bzl b/decls/haskell_rules.bzl index 31dbdf65e..05665b6e8 100644 --- a/decls/haskell_rules.bzl +++ b/decls/haskell_rules.bzl @@ -47,6 +47,7 @@ haskell_binary = prelude_rule( native_common.link_style() | haskell_common.srcs_arg() | haskell_common.external_tools_arg() | + haskell_common.srcs_envs_arg () | haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | haskell_common.scripts_arg() | From 368ede99b2be6b176bba101aaff1ada70046a5c5 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 23 Aug 2024 08:29:33 +0200 Subject: [PATCH 1070/1133] Remove invalid args --- haskell/compile.bzl | 4 ++-- haskell/haskell_haddock.bzl | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 7e76d2336..57bf98842 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -226,11 +226,11 @@ def target_metadata( # (module X.Y.Z must be defined in a file at X/Y/Z.hs) 
ctx.actions.dynamic_output_new(_dynamic_target_metadata( + dynamic = [], dynamic_values = [haskell_toolchain.packages.dynamic] if haskell_toolchain.packages else [], outputs = [md_file.as_output()], arg = struct( compiler_flags = ctx.attrs.compiler_flags, - ctx = ctx, haskell_toolchain = haskell_toolchain, md_file = md_file, md_gen = md_gen, @@ -741,7 +741,7 @@ def compile( lib.info[link_style] ] ] + ([ haskell_toolchain.packages.dynamic ] if haskell_toolchain.packages else [ ]), - inputs = ctx.attrs.srcs, + #inputs = ctx.attrs.srcs, outputs = [o.as_output() for o in interfaces + objects + stub_dirs + abi_hashes], arg = struct( artifact_suffix = artifact_suffix, diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index 54ca6b13c..43dd9173d 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -196,7 +196,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes lib.info[link_style], ] ], - inputs = compiled.hi, + #inputs = compiled.hi, outputs = [output.as_output() for info in haddock_infos.values() for output in [info.haddock, info.html]], arg = struct( md_file = md_file, From 8c6aee7bebec73926a115d96738c10b6854eddef Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 23 Aug 2024 08:29:50 +0200 Subject: [PATCH 1071/1133] Remove duplicate assignment to haskell_toolchain --- haskell/compile.bzl | 1 - 1 file changed, 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 57bf98842..cbaa48ba0 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -296,7 +296,6 @@ def get_packages_info( )) if haskell_toolchain.packages and resolved != None: - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] pkg_deps = resolved[haskell_toolchain.packages.dynamic] package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages From 9671dea2fcc856c5a638a8a52dde2d7c9661f736 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 23 Aug 2024 08:30:14 +0200 Subject: 
[PATCH 1072/1133] Adapt to new dynamic actions API --- haskell/haskell.bzl | 194 ++++++++++++++++++++++++-------------------- 1 file changed, 108 insertions(+), 86 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 745ae3042..3b7d6c803 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -526,6 +526,42 @@ def _get_haskell_shared_library_name_linker_flags(linker_type: str, soname: str) else: fail("Unknown linker type '{}'.".format(linker_type)) +def _dynamic_link_shared_impl(actions, artifacts, dynamic_values, outputs, arg): + pkg_deps = dynamic_values[arg.haskell_toolchain.packages.dynamic] + package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages + + package_db_tset = actions.tset( + HaskellPackageDbTSet, + children = [package_db[name] for name in arg.toolchain_libs if name in package_db] + ) + + link = cmd_args(arg.haskell_toolchain.linker) + link.add(arg.haskell_toolchain.linker_flags) + link.add(arg.linker_flags) + link.add("-hide-all-packages") + link.add(cmd_args(arg.toolchain_libs, prepend = "-package")) + link.add(cmd_args(package_db_tset.project_as_args("package_db"), prepend="-package-db")) + link.add("-o", outputs[arg.lib].as_output()) + link.add( + get_shared_library_flags(arg.linker_info.type), + "-dynamic", + cmd_args( + _get_haskell_shared_library_name_linker_flags(arg.linker_info.type, arg.libfile), + prepend = "-optl", + ), + ) + + link.add(arg.objects) + + link.add(cmd_args(unpack_link_args(arg.infos), prepend = "-optl")) + + actions.run( + link, + category = "haskell_link" + arg.artifact_suffix.replace("-", "_"), + ) + +_dynamic_link_shared = dynamic_actions(impl = _dynamic_link_shared_impl) + def _build_haskell_lib( ctx, libname: str, @@ -579,52 +615,29 @@ def _build_haskell_lib( if not object.extension.endswith("-boot") ] - def do_link(ctx, artifacts, resolved, outputs, lib=lib, objects=objects): - pkg_deps = resolved[haskell_toolchain.packages.dynamic] - package_db = 
pkg_deps[DynamicHaskellPackageDbInfo].packages - - package_db_tset = ctx.actions.tset( - HaskellPackageDbTSet, - children = [package_db[name] for name in toolchain_libs if name in package_db] - ) - - link = cmd_args(haskell_toolchain.linker) - link.add(haskell_toolchain.linker_flags) - link.add(ctx.attrs.linker_flags) - link.add("-hide-all-packages") - link.add(cmd_args(toolchain_libs, prepend = "-package")) - link.add(cmd_args(package_db_tset.project_as_args("package_db"), prepend="-package-db")) - link.add("-o", outputs[lib].as_output()) - link.add( - get_shared_library_flags(linker_info.type), - "-dynamic", - cmd_args( - _get_haskell_shared_library_name_linker_flags(linker_info.type, libfile), - prepend = "-optl", - ), - ) - - link.add(objects) - - infos = get_link_args_for_strategy( - ctx, - nlis, - to_link_strategy(link_style), - ) - link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) - - ctx.actions.run( - link, - category = "haskell_link" + artifact_suffix.replace("-", "_"), - ) + infos = get_link_args_for_strategy( + ctx, + nlis, + to_link_strategy(link_style), + ) - ctx.actions.dynamic_output( + ctx.actions.dynamic_output_new(_dynamic_link_shared( dynamic = [], - promises = [haskell_toolchain.packages.dynamic], - inputs = objects, + dynamic_values = [haskell_toolchain.packages.dynamic], + #inputs = objects, outputs = [lib.as_output()], - f = do_link, - ) + arg = struct( + artifact_suffix = artifact_suffix, + haskell_toolchain = haskell_toolchain, + infos = infos, + lib = lib, + libfile = libfile, + linker_flags = ctx.attrs.linker_flags, + linker_info = linker_info, + objects = objects, + toolchain_libs = toolchain_libs, + ), + )) solibs[libfile] = LinkedObject(output = lib, unstripped_output = lib) libs = [lib] @@ -1053,6 +1066,46 @@ def _make_link_package( return db +def _dynamic_link_binary_impl(actions, artifacts, dynamic_values, outputs, arg): + link_cmd = arg.link.copy() # link is already frozen, make a copy + + if 
arg.haskell_toolchain.packages: + pkg_deps = dynamic_values[arg.haskell_toolchain.packages.dynamic] + package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages + else: + package_db = [] + + # Add -package-db and -package/-expose-package flags for each Haskell + # library dependency. + packages_info = get_packages_info( + arg.ctx, + arg.link_style, + resolved = dynamic_values, + specify_pkg_version = False, + enable_profiling = arg.enable_profiling, + use_empty_lib = False, + ) + + link_cmd.add("-hide-all-packages") + link_cmd.add(cmd_args(arg.toolchain_libs, prepend = "-package")) + link_cmd.add(cmd_args(packages_info.exposed_package_args)) + link_cmd.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) + link_cmd.add(arg.haskell_toolchain.linker_flags) + link_cmd.add(arg.ctx.attrs.linker_flags) + + link_cmd.add(cmd_args(hidden = packages_info.exposed_package_libs)) + + package_db_tset = actions.tset( + HaskellPackageDbTSet, + children = [package_db[name] for name in arg.toolchain_libs if name in package_db] + ) + + link_cmd.add("-o", outputs[arg.output].as_output()) + + actions.run(link_cmd, category = "haskell_link") + +_dynamic_link_binary = dynamic_actions(impl = _dynamic_link_binary_impl) + def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: enable_profiling = ctx.attrs.enable_profiling @@ -1266,51 +1319,20 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: else: link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) - def do_link(ctx, artifacts, resolved, outputs, output=output, objects=objects): - link_cmd = link.copy() # link is already frozen, make a copy - - if haskell_toolchain.packages: - pkg_deps = resolved[haskell_toolchain.packages.dynamic] - package_db = pkg_deps[DynamicHaskellPackageDbInfo].packages - else: - package_db = [] - - # Add -package-db and -package/-expose-package flags for each Haskell - # library dependency. 
- packages_info = get_packages_info( - ctx, - link_style, - resolved = resolved, - specify_pkg_version = False, - enable_profiling = enable_profiling, - use_empty_lib = False, - ) - - link_cmd.add("-hide-all-packages") - link_cmd.add(cmd_args(toolchain_libs, prepend = "-package")) - link_cmd.add(cmd_args(packages_info.exposed_package_args)) - link_cmd.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) - link_cmd.add(haskell_toolchain.linker_flags) - link_cmd.add(ctx.attrs.linker_flags) - - link_cmd.add(cmd_args(hidden = packages_info.exposed_package_libs)) - - package_db_tset = ctx.actions.tset( - HaskellPackageDbTSet, - children = [package_db[name] for name in toolchain_libs if name in package_db] - ) - - link_cmd.add("-o", outputs[output].as_output()) - - ctx.actions.run(link_cmd, category = "haskell_link") - - ctx.actions.dynamic_output( + ctx.actions.dynamic_output_new(_dynamic_link_binary( dynamic = [], - promises = [haskell_toolchain.packages.dynamic] if haskell_toolchain.packages else [ ], - inputs = objects.values(), + dynamic_values = [haskell_toolchain.packages.dynamic] if haskell_toolchain.packages else [ ], + #inputs = objects.values(), outputs = [output.as_output()], - f = do_link, - ) + arg = struct( + enable_profiling = enable_profiling, + haskell_toolchain = haskell_toolchain, + link = link, + link_style = link_style, + output = output, + toolchain_libs = toolchain_libs, + ), + )) if link_style == LinkStyle("shared") or link_group_info != None: sos_dir = "__{}__shared_libs_symlink_tree".format(ctx.attrs.name) From f97ef595db2d4ebde7c25d437052fc4340f44f0a Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 23 Aug 2024 08:31:01 +0200 Subject: [PATCH 1073/1133] Fix error about haskell_toolchain_library_impl not existing --- rules_impl.bzl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rules_impl.bzl b/rules_impl.bzl index 9a6f02656..0518124cf 100644 --- a/rules_impl.bzl +++ b/rules_impl.bzl @@ -29,7 +29,7 @@ 
load("@prelude//go:go_library.bzl", "go_library_impl") load("@prelude//go:go_stdlib.bzl", "go_stdlib_impl") load("@prelude//go:go_test.bzl", "go_test_impl") load("@prelude//go/transitions:defs.bzl", "asan_attr", "cgo_enabled_attr", "coverage_mode_attr", "go_binary_transition", "go_exported_library_transition", "go_test_transition", "race_attr", "tags_attr") -load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl") +load("@prelude//haskell:haskell.bzl", "haskell_binary_impl", "haskell_library_impl", "haskell_prebuilt_library_impl", "haskell_toolchain_library_impl") load("@prelude//haskell:haskell_ghci.bzl", "haskell_ghci_impl") load("@prelude//haskell:haskell_haddock.bzl", "haskell_haddock_impl") load("@prelude//haskell:haskell_ide.bzl", "haskell_ide_impl") @@ -188,6 +188,7 @@ extra_implemented_rules = struct( haskell_haddock = haskell_haddock_impl, haskell_ide = haskell_ide_impl, haskell_prebuilt_library = haskell_prebuilt_library_impl, + haskell_toolchain_library = haskell_toolchain_library_impl, #lua cxx_lua_extension = cxx_lua_extension_impl, From 1fa99b3f015ad8fb8e4160fa8736538ee9ff41d8 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 23 Aug 2024 10:17:49 +0200 Subject: [PATCH 1074/1133] cxx: Add `cxx_merge_cpreprocessors_actions` This is useful inside of dynamic action implementations since it takes an actions instance as the first argument instead of an action context. 
--- cxx/preprocessor.bzl | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/cxx/preprocessor.bzl b/cxx/preprocessor.bzl index eb292337b..b530ed110 100644 --- a/cxx/preprocessor.bzl +++ b/cxx/preprocessor.bzl @@ -148,11 +148,14 @@ def cxx_inherited_preprocessor_infos(first_order_deps: list[Dependency]) -> list return filter(None, [x.get(CPreprocessorInfo) for x in first_order_deps]) def cxx_merge_cpreprocessors(ctx: AnalysisContext, own: list[CPreprocessor], xs: list[CPreprocessorInfo]) -> CPreprocessorInfo: + return cxx_merge_cpreprocessors_actions(ctx.actions, own, xs) + +def cxx_merge_cpreprocessors_actions(actions: AnalysisActions, own: list[CPreprocessor], xs: list[CPreprocessorInfo]) -> CPreprocessorInfo: kwargs = {"children": [x.set for x in xs]} if own: kwargs["value"] = own return CPreprocessorInfo( - set = ctx.actions.tset(CPreprocessorTSet, **kwargs), + set = actions.tset(CPreprocessorTSet, **kwargs), ) def _format_include_arg(flag: str, path: cmd_args, compiler_type: str) -> list[cmd_args]: From 5822aac4c8d7e785d50bca6c0ed26fc79ca28727 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 23 Aug 2024 11:58:28 +0200 Subject: [PATCH 1075/1133] Adapt to new dynamic actions API --- haskell/compile.bzl | 289 +++++++++++++++++++++++++++++--------------- haskell/haskell.bzl | 30 ++++- 2 files changed, 219 insertions(+), 100 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index cbaa48ba0..9c4527e05 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -8,13 +8,21 @@ load( "@prelude//cxx:preprocessor.bzl", "cxx_inherited_preprocessor_infos", - "cxx_merge_cpreprocessors", + "cxx_merge_cpreprocessors_actions", ) load( "@prelude//haskell:library_info.bzl", "HaskellLibraryProvider", "HaskellLibraryInfoTSet", ) +load( + "@prelude//haskell:library_info.bzl", + "HaskellLibraryInfo", +) +load( + "@prelude//haskell:link_info.bzl", + "HaskellLinkInfo", +) load( "@prelude//haskell:toolchain.bzl", "HaskellToolchainInfo", @@ 
-164,8 +172,12 @@ def _dynamic_target_metadata_impl(actions, artifacts, dynamic_values, outputs, a # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. - packages_info = get_packages_info( + packages_info = get_packages_info2( actions, + arg.deps, + arg.direct_deps_link_info, + arg.haskell_toolchain, + arg.haskell_direct_deps_lib_infos, LinkStyle("shared"), specify_pkg_version = False, enable_profiling = False, @@ -193,12 +205,14 @@ def _dynamic_target_metadata_impl(actions, artifacts, dynamic_values, outputs, a md_args.add(cmd_args(arg.sources, format="--source={}")) md_args.add( - _attr_deps_haskell_lib_package_name_and_prefix(arg.ctx), + arg.lib_package_name_and_prefix, ) md_args.add("--output", outputs[arg.md_file].as_output()) actions.run(md_args, category = "haskell_metadata", identifier = arg.suffix if arg.suffix else None) + return [] + _dynamic_target_metadata = dynamic_actions(impl = _dynamic_target_metadata_impl) def target_metadata( @@ -217,6 +231,12 @@ def target_metadata( if HaskellToolchainLibrary in dep ] + haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( + ctx, + LinkStyle("shared"), + enable_profiling = False, + ) + # The object and interface file paths are depending on the real module name # as inferred by GHC, not the source file path; currently this requires the # module name to correspond to the source file path as otherwise GHC will @@ -231,9 +251,13 @@ def target_metadata( outputs = [md_file.as_output()], arg = struct( compiler_flags = ctx.attrs.compiler_flags, + deps = ctx.attrs.deps, + direct_deps_link_info = attr_deps_haskell_link_infos(ctx), haskell_toolchain = haskell_toolchain, + haskell_direct_deps_lib_infos = haskell_direct_deps_lib_infos, md_file = md_file, md_gen = md_gen, + lib_package_name_and_prefix =_attr_deps_haskell_lib_package_name_and_prefix(ctx), strip_prefix = _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)), sources = sources, suffix = suffix, @@ -271,8 
+295,7 @@ def get_packages_info( link_style: LinkStyle, specify_pkg_version: bool, enable_profiling: bool, - use_empty_lib: bool, - resolved: None | dict[DynamicValue, ResolvedDynamicValue] = None) -> PackagesInfo: + use_empty_lib: bool) -> PackagesInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] # Collect library dependencies. Note that these don't need to be in a @@ -295,36 +318,80 @@ def get_packages_info( "empty_package_db" if use_empty_lib else "package_db", )) - if haskell_toolchain.packages and resolved != None: - pkg_deps = resolved[haskell_toolchain.packages.dynamic] - package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages + haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( + ctx, + link_style, + enable_profiling, + ) - direct_toolchain_libs = [ - dep[HaskellToolchainLibrary].name - for dep in ctx.attrs.deps - if HaskellToolchainLibrary in dep - ] + # Expose only the packages we depend on directly + for lib in haskell_direct_deps_lib_infos: + pkg_name = lib.name + if (specify_pkg_version): + pkg_name += "-{}".format(lib.version) - toolchain_libs = direct_toolchain_libs + libs.reduce("packages") + exposed_package_args.add(package_flag, pkg_name) - package_db_tset = ctx.actions.tset( - HaskellPackageDbTSet, - children = [package_db[name] for name in toolchain_libs if name in package_db] - ) + return PackagesInfo( + exposed_package_libs = exposed_package_libs, + exposed_package_args = exposed_package_args, + packagedb_args = packagedb_args, + transitive_deps = libs, + bin_paths = cmd_args(), + ) - packagedb_args.add(package_db_tset.project_as_args("package_db")) +def get_packages_info2( + actions: AnalysisActions, + deps: list[Dependency], + direct_deps_link_info: list[HaskellLinkInfo], + haskell_toolchain: HaskellToolchainInfo, + haskell_direct_deps_lib_infos: list[HaskellLibraryInfo], + link_style: LinkStyle, + specify_pkg_version: bool, + enable_profiling: bool, + use_empty_lib: bool, + resolved: 
dict[DynamicValue, ResolvedDynamicValue]) -> PackagesInfo: - direct_package_paths = [package_db[name].value.path for name in direct_toolchain_libs if name in package_db] - bin_paths = cmd_args(direct_package_paths, format="--bin-path={}/bin") - else: - bin_paths = cmd_args() + # Collect library dependencies. Note that these don't need to be in a + # particular order. + libs = actions.tset(HaskellLibraryInfoTSet, children = [ + lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + for lib in direct_deps_link_info + ]) - haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( - ctx, - link_style, - enable_profiling, + # base is special and gets exposed by default + package_flag = _package_flag(haskell_toolchain) + + hidden_args = [l for lib in libs.traverse() for l in lib.libs] + + exposed_package_libs = cmd_args() + exposed_package_args = cmd_args([package_flag, "base"], hidden = hidden_args) + + packagedb_args = cmd_args(libs.project_as_args( + "empty_package_db" if use_empty_lib else "package_db", + )) + + pkg_deps = resolved[haskell_toolchain.packages.dynamic] + package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages + + direct_toolchain_libs = [ + dep[HaskellToolchainLibrary].name + for dep in deps + if HaskellToolchainLibrary in dep + ] + + toolchain_libs = direct_toolchain_libs + libs.reduce("packages") + + package_db_tset = actions.tset( + HaskellPackageDbTSet, + children = [package_db[name] for name in toolchain_libs if name in package_db] ) + packagedb_args.add(package_db_tset.project_as_args("package_db")) + + direct_package_paths = [package_db[name].value.path for name in direct_toolchain_libs if name in package_db] + bin_paths = cmd_args(direct_package_paths, format="--bin-path={}/bin") + # Expose only the packages we depend on directly for lib in haskell_direct_deps_lib_infos: pkg_name = lib.name @@ -348,35 +415,49 @@ CommonCompileModuleArgs = record( ) def _common_compile_module_args( - ctx: AnalysisContext, + 
actions: AnalysisActions, *, + compiler_flags: list[str], + ghc_wrapper: RunInfo, + haskell_toolchain: HaskellToolchainInfo, resolved: dict[DynamicValue, ResolvedDynamicValue], enable_haddock: bool, enable_profiling: bool, link_style: LinkStyle, + main: None | str, + label: Label, + deps: list[Dependency], + external_tool_paths: list[RunInfo], + sources: list[Artifact], direct_deps_info: list[HaskellLibraryInfoTSet], pkgname: str | None = None, ) -> CommonCompileModuleArgs: - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + #haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - command = cmd_args(ctx.attrs._ghc_wrapper[RunInfo]) + command = cmd_args(ghc_wrapper) command.add("--ghc", haskell_toolchain.compiler) # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't # be parsed when inside an argsfile. command.add(haskell_toolchain.compiler_flags) - command.add(ctx.attrs.compiler_flags) + command.add(compiler_flags) command.add("-c") - if getattr(ctx.attrs, "main", None) != None: - command.add(["-main-is", ctx.attrs.main]) + if main != None: + command.add(["-main-is", main]) if enable_haddock: command.add("-haddock") + non_haskell_sources = [ + src + for (path, src) in srcs_to_pairs(sources) + if not is_haskell_src(path) and not is_haskell_boot(path) + ] + if non_haskell_sources: - warning("{} specifies non-haskell file in `srcs`, consider using `srcs_deps` instead".format(ctx.label)) + warning("{} specifies non-haskell file in `srcs`, consider using `srcs_deps` instead".format(label)) args_for_file = cmd_args(hidden = non_haskell_sources) @@ -394,15 +475,9 @@ def _common_compile_module_args( osuf, hisuf = output_extensions(link_style, enable_profiling) args_for_file.add("-osuf", osuf, "-hisuf", hisuf) - non_haskell_sources = [ - src - for (path, src) in srcs_to_pairs(ctx.attrs.srcs) - if not is_haskell_src(path) and not is_haskell_boot(path) - ] - # Add args from preprocess-able inputs. 
- inherited_pre = cxx_inherited_preprocessor_infos(ctx.attrs.deps) - pre = cxx_merge_cpreprocessors(ctx, [], inherited_pre) + inherited_pre = cxx_inherited_preprocessor_infos(deps) + pre = cxx_merge_cpreprocessors_actions(actions, [], inherited_pre) pre_args = pre.set.project_as_args("args") args_for_file.add(cmd_args(pre_args, format = "-optP={}")) @@ -412,11 +487,11 @@ def _common_compile_module_args( # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. - libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = direct_deps_info) + libs = actions.tset(HaskellLibraryInfoTSet, children = direct_deps_info) direct_toolchain_libs = [ dep[HaskellToolchainLibrary].name - for dep in ctx.attrs.deps + for dep in deps if HaskellToolchainLibrary in dep ] toolchain_libs = direct_toolchain_libs + libs.reduce("packages") @@ -427,7 +502,7 @@ def _common_compile_module_args( else: package_db = [] - package_db_tset = ctx.actions.tset( + package_db_tset = actions.tset( HaskellPackageDbTSet, children = [package_db[name] for name in toolchain_libs if name in package_db] ) @@ -451,8 +526,8 @@ def _common_compile_module_args( # The set of package-dbs can be known at the package level, not just the # module level. So, we could generate this file outside of the # dynamic_output action. 
- package_env_file = ctx.actions.declare_output(".".join([ - ctx.label.name, + package_env_file = actions.declare_output(".".join([ + label.name, "package-db", output_extensions(link_style, enable_profiling)[1], "env", @@ -462,7 +537,7 @@ def _common_compile_module_args( packagedb_args, format = "package-db {}", ).relative_to(package_env_file, parent = 1)) - ctx.actions.write( + actions.write( package_env_file, package_env, ) @@ -479,12 +554,14 @@ def _common_compile_module_args( ) def _compile_module( - ctx: AnalysisContext, + actions: AnalysisActions, *, common_args: CommonCompileModuleArgs, link_style: LinkStyle, enable_profiling: bool, enable_th: bool, + haskell_toolchain: HaskellToolchainInfo, + label: Label, module_name: str, module: _Module, module_tsets: dict[str, CompiledModuleTSet], @@ -495,20 +572,19 @@ def _compile_module( artifact_suffix: str, direct_deps_by_name: dict[str, typing.Any], toolchain_deps_by_name: dict[str, None], + aux_deps: None | list[Artifact], source_prefixes: list[str], ) -> CompiledModuleTSet: - aux_deps = ctx.attrs.srcs_deps.get(module.source) - # These compiler arguments can be passed in a response file. 
- compile_args_for_file = cmd_args(common_args.args_for_file, hidden = aux_deps) - - haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] + compile_args_for_file = cmd_args(common_args.args_for_file, hidden = aux_deps or []) - packagedb_tag = ctx.actions.artifact_tag() - compile_args_for_file.add(packagedb_tag.tag_artifacts(common_args.package_env_args)) + packagedb_tag = actions.artifact_tag() + compile_args_for_file.add(#packagedb_tag.tag_artifacts( + common_args.package_env_args #) + ) - dep_file = ctx.actions.declare_output(".".join([ - ctx.label.name, + dep_file = actions.declare_output(".".join([ + label.name, module_name or "pkg", "package-db", output_extensions(link_style, enable_profiling)[1], @@ -542,30 +618,7 @@ def _compile_module( compile_args_for_file.add(module.source) - abi_tag = ctx.actions.artifact_tag() - - dependency_modules = ctx.actions.tset( - CompiledModuleTSet, - children = [cross_package_modules] + this_package_modules, - ) - - compile_cmd = cmd_args( - common_args.command, - hidden = [ - compile_args_for_file, - abi_tag.tag_artifacts(dependency_modules.project_as_args("interfaces")), - dependency_modules.project_as_args("abi"), - ] - ) - - if haskell_toolchain.use_argsfile: - argsfile = ctx.actions.declare_output( - "haskell_compile_" + artifact_suffix + ".argsfile", - ) - ctx.actions.write(argsfile.as_output(), compile_args_for_file, allow_args = True) - compile_cmd.add(cmd_args(argsfile, format = "@{}")) - else: - compile_cmd.add(compile_args_for_file) + abi_tag = actions.artifact_tag() toolchain_deps = [] library_deps = [] @@ -583,7 +636,7 @@ def _compile_module( fail("Unknown library dependency '{}'. Add the library to the `deps` attribute".format(dep_pkgname)) # Transitive module dependencies from other packages. 
- cross_package_modules = ctx.actions.tset( + cross_package_modules = actions.tset( CompiledModuleTSet, children = exposed_package_modules, ) @@ -593,6 +646,29 @@ def _compile_module( for dep_name in graph[module_name] ] + dependency_modules = actions.tset( + CompiledModuleTSet, + children = [cross_package_modules] + this_package_modules, + ) + + compile_cmd = cmd_args( + common_args.command, + hidden = [ + compile_args_for_file, + abi_tag.tag_artifacts(dependency_modules.project_as_args("interfaces")), + dependency_modules.project_as_args("abi"), + ] + ) + + if haskell_toolchain.use_argsfile: + argsfile = actions.declare_output( + "haskell_compile_" + artifact_suffix + ".argsfile", + ) + actions.write(argsfile.as_output(), compile_args_for_file, allow_args = True) + compile_cmd.add(cmd_args(argsfile, format = "@{}")) + else: + compile_cmd.add(compile_args_for_file) + # add each module dir prefix to search path for prefix in source_prefixes: compile_cmd.add( @@ -614,22 +690,22 @@ def _compile_module( compile_cmd.add(cmd_args(dependency_modules.reduce("packagedb_deps").keys(), prepend = "--buck2-package-db")) - dep_file = ctx.actions.declare_output("dep-{}_{}".format(module_name, artifact_suffix)).as_output() + dep_file = actions.declare_output("dep-{}_{}".format(module_name, artifact_suffix)).as_output() tagged_dep_file = abi_tag.tag_artifacts(dep_file) compile_cmd.add("--buck2-dep", tagged_dep_file) compile_cmd.add("--abi-out", outputs[module.hash].as_output()) - ctx.actions.run( + actions.run( compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, dep_files = { "abi": abi_tag, - "packagedb": packagedb_tag, + #"packagedb": packagedb_tag, } ) - module_tset = ctx.actions.tset( + module_tset = actions.tset( CompiledModuleTSet, value = CompiledModuleInfo( abi = module.hash, @@ -642,27 +718,34 @@ def _compile_module( return module_tset def _dynamic_do_compile_impl(actions, artifacts, dynamic_values, outputs, arg): - 
#def do_compile(ctx, artifacts, resolved, outputs, md_file=md_file, modules=modules): # Collect library dependencies. Note that these don't need to be in a # particular order. toolchain_deps_by_name = { lib.name: None - for lib in attr_deps_haskell_toolchain_libraries(arg.ctx) + for lib in arg.attr_deps_haskell_toolchain_libraries } direct_deps_info = [ lib.prof_info[arg.link_style] if arg.enable_profiling else lib.info[arg.link_style] - for lib in attr_deps_haskell_link_infos(arg.ctx) + for lib in arg.attr_deps_haskell_link_infos ] direct_deps_by_name = { info.value.name: struct( package_db = info.value.empty_db, - modules = dynamic_values[info.value.dynamic[arg.enable_profiling]][DynamicCompileResultInfo].modules, + modules = dynamic_values[info.value.dynamic[arg.enable_profiling]].providers[DynamicCompileResultInfo].modules, ) for info in direct_deps_info } common_args = _common_compile_module_args( - arg.ctx, + actions, + compiler_flags = arg.compiler_flags, + deps = arg.deps, + external_tool_paths = arg.external_tool_paths, + ghc_wrapper = arg.ghc_wrapper, + haskell_toolchain = arg.haskell_toolchain, + label = arg.label, + main = arg.main, resolved = dynamic_values, + sources = arg.sources, enable_haddock = arg.enable_haddock, enable_profiling = arg.enable_profiling, link_style = arg.link_style, @@ -678,17 +761,21 @@ def _dynamic_do_compile_impl(actions, artifacts, dynamic_values, outputs, arg): mapped_modules = { module_map.get(k, k): v for k, v in arg.modules.items() } module_tsets = {} - source_prefixes = get_source_prefixes(arg.ctx.attrs.srcs, module_map) + source_prefixes = get_source_prefixes(arg.sources, module_map) for module_name in post_order_traversal(graph): + module = mapped_modules[module_name] module_tsets[module_name] = _compile_module( - arg.ctx, + actions, + aux_deps = arg.sources_deps.get(module.source), common_args = common_args, link_style = arg.link_style, enable_profiling = arg.enable_profiling, enable_th = module_name in th_modules, + 
haskell_toolchain = arg.haskell_toolchain, + label = arg.label, module_name = module_name, - module = mapped_modules[module_name], + module = module, module_tsets = module_tsets, graph = graph, package_deps = package_deps.get(module_name, {}), @@ -744,11 +831,23 @@ def compile( outputs = [o.as_output() for o in interfaces + objects + stub_dirs + abi_hashes], arg = struct( artifact_suffix = artifact_suffix, + attr_deps_haskell_toolchain_libraries = attr_deps_haskell_toolchain_libraries(ctx), + attr_deps_haskell_link_infos = attr_deps_haskell_link_infos(ctx), + compiler_flags = ctx.attrs.compiler_flags, + deps = ctx.attrs.deps, + ghc_wrapper = ctx.attrs._ghc_wrapper[RunInfo], + haskell_toolchain = haskell_toolchain, + external_tool_paths = [tool[RunInfo] for tool in ctx.attrs.external_tools], + enable_haddock = enable_haddock, enable_profiling = enable_profiling, + label = ctx.label, link_style = link_style, + main = getattr(ctx.attrs, "main", None), md_file = md_file, modules = modules, pkgname = pkgname, + sources = ctx.attrs.srcs, + sources_deps = ctx.attrs.srcs_deps, ), )) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 3b7d6c803..26cb8e15a 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -54,7 +54,7 @@ load( "@prelude//haskell:compile.bzl", "CompileResultInfo", "compile", - "get_packages_info", + "get_packages_info2", "target_metadata", ) load( @@ -85,6 +85,8 @@ load( "@prelude//haskell:util.bzl", "attr_deps", "attr_deps_haskell_link_infos_sans_template_deps", + "attr_deps_haskell_lib_infos", + "attr_deps_haskell_link_infos", "attr_deps_merged_link_infos", "attr_deps_profiling_link_infos", "attr_deps_shared_library_infos", @@ -560,6 +562,8 @@ def _dynamic_link_shared_impl(actions, artifacts, dynamic_values, outputs, arg): category = "haskell_link" + arg.artifact_suffix.replace("-", "_"), ) + return [] + _dynamic_link_shared = dynamic_actions(impl = _dynamic_link_shared_impl) def _build_haskell_lib( @@ -1077,9 +1081,13 @@ def 
_dynamic_link_binary_impl(actions, artifacts, dynamic_values, outputs, arg): # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. - packages_info = get_packages_info( - arg.ctx, - arg.link_style, + packages_info = get_packages_info2( + actions, + deps = arg.deps, + direct_deps_link_info = arg.direct_deps_link_info, + haskell_toolchain = arg.haskell_toolchain, + haskell_direct_deps_lib_infos = arg.haskell_direct_deps_lib_infos, + link_style = arg.link_style, resolved = dynamic_values, specify_pkg_version = False, enable_profiling = arg.enable_profiling, @@ -1091,7 +1099,7 @@ def _dynamic_link_binary_impl(actions, artifacts, dynamic_values, outputs, arg): link_cmd.add(cmd_args(packages_info.exposed_package_args)) link_cmd.add(cmd_args(packages_info.packagedb_args, prepend = "-package-db")) link_cmd.add(arg.haskell_toolchain.linker_flags) - link_cmd.add(arg.ctx.attrs.linker_flags) + link_cmd.add(arg.linker_flags) link_cmd.add(cmd_args(hidden = packages_info.exposed_package_libs)) @@ -1104,6 +1112,8 @@ def _dynamic_link_binary_impl(actions, artifacts, dynamic_values, outputs, arg): actions.run(link_cmd, category = "haskell_link") + return [] + _dynamic_link_binary = dynamic_actions(impl = _dynamic_link_binary_impl) def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: @@ -1319,16 +1329,26 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: else: link.add(cmd_args(unpack_link_args(infos), prepend = "-optl")) + haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( + ctx, + link_style, + enable_profiling = enable_profiling, + ) + ctx.actions.dynamic_output_new(_dynamic_link_binary( dynamic = [], dynamic_values = [haskell_toolchain.packages.dynamic] if haskell_toolchain.packages else [ ], #inputs = objects.values(), outputs = [output.as_output()], arg = struct( + deps = ctx.attrs.deps, enable_profiling = enable_profiling, + direct_deps_link_info = attr_deps_haskell_link_infos(ctx), haskell_toolchain = 
haskell_toolchain, + haskell_direct_deps_lib_infos = haskell_direct_deps_lib_infos, link = link, link_style = link_style, + linker_flags = ctx.attrs.linker_flags, output = output, toolchain_libs = toolchain_libs, ), From 29f8e1bc0ac56d2eb3513139bef57e0a09d0311b Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 23 Aug 2024 12:03:24 +0200 Subject: [PATCH 1076/1133] Cleanup --- haskell/compile.bzl | 2 -- haskell/haskell.bzl | 2 -- haskell/haskell_haddock.bzl | 1 - 3 files changed, 5 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 9c4527e05..f1b7578e5 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -513,7 +513,6 @@ def _common_compile_module_args( format="--bin-path={}/bin", )) - external_tool_paths = [tool[RunInfo] for tool in ctx.attrs.external_tools] args_for_file.add(cmd_args( external_tool_paths, format="--bin-exe={}", @@ -827,7 +826,6 @@ def compile( lib.info[link_style] ] ] + ([ haskell_toolchain.packages.dynamic ] if haskell_toolchain.packages else [ ]), - #inputs = ctx.attrs.srcs, outputs = [o.as_output() for o in interfaces + objects + stub_dirs + abi_hashes], arg = struct( artifact_suffix = artifact_suffix, diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 26cb8e15a..fb7bc6192 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -628,7 +628,6 @@ def _build_haskell_lib( ctx.actions.dynamic_output_new(_dynamic_link_shared( dynamic = [], dynamic_values = [haskell_toolchain.packages.dynamic], - #inputs = objects, outputs = [lib.as_output()], arg = struct( artifact_suffix = artifact_suffix, @@ -1338,7 +1337,6 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: ctx.actions.dynamic_output_new(_dynamic_link_binary( dynamic = [], dynamic_values = [haskell_toolchain.packages.dynamic] if haskell_toolchain.packages else [ ], - #inputs = objects.values(), outputs = [output.as_output()], arg = struct( deps = ctx.attrs.deps, diff --git a/haskell/haskell_haddock.bzl 
b/haskell/haskell_haddock.bzl index 43dd9173d..eb6cac975 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -196,7 +196,6 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes lib.info[link_style], ] ], - #inputs = compiled.hi, outputs = [output.as_output() for info in haddock_infos.values() for output in [info.haddock, info.html]], arg = struct( md_file = md_file, From f79ae13596a0922f3b5c13b33ba8481b20ddc4e0 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 23 Aug 2024 12:20:33 +0200 Subject: [PATCH 1077/1133] Refactor --- haskell/compile.bzl | 45 +++++++++++---------------------------------- 1 file changed, 11 insertions(+), 34 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index f1b7578e5..e9df0d1e9 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -298,46 +298,23 @@ def get_packages_info( use_empty_lib: bool) -> PackagesInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - # Collect library dependencies. Note that these don't need to be in a - # particular order. 
- direct_deps_link_info = attr_deps_haskell_link_infos(ctx) - libs = ctx.actions.tset(HaskellLibraryInfoTSet, children = [ - lib.prof_info[link_style] if enable_profiling else lib.info[link_style] - for lib in direct_deps_link_info - ]) - - # base is special and gets exposed by default - package_flag = _package_flag(haskell_toolchain) - - hidden_args = [l for lib in libs.traverse() for l in lib.libs] - - exposed_package_libs = cmd_args() - exposed_package_args = cmd_args([package_flag, "base"], hidden = hidden_args) - - packagedb_args = cmd_args(libs.project_as_args( - "empty_package_db" if use_empty_lib else "package_db", - )) - haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( ctx, link_style, enable_profiling, ) - # Expose only the packages we depend on directly - for lib in haskell_direct_deps_lib_infos: - pkg_name = lib.name - if (specify_pkg_version): - pkg_name += "-{}".format(lib.version) - - exposed_package_args.add(package_flag, pkg_name) - - return PackagesInfo( - exposed_package_libs = exposed_package_libs, - exposed_package_args = exposed_package_args, - packagedb_args = packagedb_args, - transitive_deps = libs, - bin_paths = cmd_args(), + return get_packages_info2( + actions = ctx.actions, + deps = [], + direct_deps_link_info = attr_deps_haskell_link_infos(ctx), + haskell_toolchain = haskell_toolchain, + haskell_direct_deps_lib_infos = haskell_direct_deps_lib_infos, + link_style = link_style, + specify_pkg_version = specify_pkg_version, + enable_profiling = enable_profiling, + use_empty_lib = use_empty_lib, + resolved = {}, ) def get_packages_info2( From 85d131bb0169e6e3cebba5c768ac27a4e760c31c Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 23 Aug 2024 13:34:39 +0200 Subject: [PATCH 1078/1133] lint: Remove duplicate load of `HaskellLibraryInfoTSet` --- haskell/compile.bzl | 2 ++ haskell/link_info.bzl | 4 ---- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 
e9df0d1e9..9d0711fbe 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -677,6 +677,8 @@ def _compile_module( compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, dep_files = { "abi": abi_tag, + # FIXME causes an error: `dep_files` value with key `packagedb` has an invalid count of associated outputs. Expected 1, got 2. + # #"packagedb": packagedb_tag, } ) diff --git a/haskell/link_info.bzl b/haskell/link_info.bzl index aa40eacfe..5cdc5f592 100644 --- a/haskell/link_info.bzl +++ b/haskell/link_info.bzl @@ -17,10 +17,6 @@ load( "@prelude//linking:link_info.bzl", "LinkStyle", ) -load( - "@prelude//haskell:library_info.bzl", - "HaskellLibraryInfoTSet", -) # A list of `HaskellLibraryInfo`s. HaskellLinkInfo = provider( From 258e81090cc2de015c6113293487b0020af223fd Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 26 Aug 2024 09:48:21 +0200 Subject: [PATCH 1079/1133] Check if dynamic values are available --- haskell/compile.bzl | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index f56fc3e0f..8d778588e 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -355,8 +355,11 @@ def get_packages_info2( packagedb_args = cmd_args(libs.project_as_args(package_db_projection)) - pkg_deps = resolved[haskell_toolchain.packages.dynamic] - package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages + if resolved: + pkg_deps = resolved[haskell_toolchain.packages.dynamic] + package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages + else: + package_db = {} direct_toolchain_libs = [ dep[HaskellToolchainLibrary].name From 1c2966eb6d98c63e766a5b6bd84ea037527d4ef6 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 26 Aug 2024 09:51:21 +0200 Subject: [PATCH 1080/1133] Fix invalid count of associated outputs for dep file The error was caused by adding the depfile twice to the cmd_args when compiling a module; once as a normal 
argument and also as a hidden argument. --- haskell/compile.bzl | 29 ++++++++++++----------------- 1 file changed, 12 insertions(+), 17 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 8d778588e..4ca620c51 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -565,9 +565,7 @@ def _compile_module( compile_args_for_file = cmd_args(common_args.args_for_file, hidden = aux_deps or []) packagedb_tag = actions.artifact_tag() - compile_args_for_file.add(#packagedb_tag.tag_artifacts( - common_args.package_env_args #) - ) + compile_args_for_file.add(packagedb_tag.tag_artifacts(common_args.package_env_args)) dep_file = actions.declare_output(".".join([ label.name, @@ -637,23 +635,22 @@ def _compile_module( children = [cross_package_modules] + this_package_modules, ) - compile_cmd = cmd_args( - common_args.command, - hidden = [ - compile_args_for_file, - abi_tag.tag_artifacts(dependency_modules.project_as_args("interfaces")), - dependency_modules.project_as_args("abi"), - ] - ) - + compile_cmd_args = [common_args.command] + compile_cmd_hidden = [ + abi_tag.tag_artifacts(dependency_modules.project_as_args("interfaces")), + dependency_modules.project_as_args("abi"), + ] if haskell_toolchain.use_argsfile: argsfile = actions.declare_output( "haskell_compile_" + artifact_suffix + ".argsfile", ) actions.write(argsfile.as_output(), compile_args_for_file, allow_args = True) - compile_cmd.add(cmd_args(argsfile, format = "@{}")) + compile_cmd_args.append(cmd_args(argsfile, format = "@{}")) + compile_cmd_hidden.append(compile_args_for_file) else: - compile_cmd.add(compile_args_for_file) + compile_cmd_args.append(compile_args_for_file) + + compile_cmd = cmd_args(compile_cmd_args, hidden = compile_cmd_hidden) # add each module dir prefix to search path for prefix in source_prefixes: @@ -687,9 +684,7 @@ def _compile_module( compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, dep_files = { "abi": abi_tag, 
- # FIXME causes an error: `dep_files` value with key `packagedb` has an invalid count of associated outputs. Expected 1, got 2. - # - #"packagedb": packagedb_tag, + "packagedb": packagedb_tag, } ) From fe0e369540b46a83d88743190139a2d0f953fe96 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 26 Aug 2024 09:53:20 +0200 Subject: [PATCH 1081/1133] Fix type mismatch for + operator --- haskell/haskell_ghci.bzl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/haskell/haskell_ghci.bzl b/haskell/haskell_ghci.bzl index 2535106da..d9005c3f0 100644 --- a/haskell/haskell_ghci.bzl +++ b/haskell/haskell_ghci.bzl @@ -727,7 +727,9 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: "__{}__".format(ctx.label.name), output_artifacts, ) - run = cmd_args(final_ghci_script, hidden=(ctx.attrs.ghci_bin_dep.get(RunInfo) or []) + outputs) + ghci_bin_dep = ctx.attrs.ghci_bin_dep.get(RunInfo) + hidden_dep = [ghci_bin_dep] if ghci_bin_dep else [] + run = cmd_args(final_ghci_script, hidden=hidden_dep + outputs) return [ DefaultInfo(default_outputs = [root_output_dir]), From 9612924a1100292e8487c7870c75e5a69b06785e Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 26 Aug 2024 10:58:33 +0200 Subject: [PATCH 1082/1133] Fix dynamic haddock implementation --- haskell/haskell_haddock.bzl | 89 +++++++++++++++++++------------------ 1 file changed, 46 insertions(+), 43 deletions(-) diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index eb6cac975..ec31f17f5 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -47,7 +47,7 @@ def _haddock_module_to_html(module_name: str) -> str: return module_name.replace(".", "-") + ".html" def _haddock_dump_interface( - ctx: AnalysisContext, + actions: AnalysisActions, cmd: cmd_args, module_name: str, module_tsets: dict[str, _HaddockInfoTSet], @@ -57,7 +57,7 @@ def _haddock_dump_interface( outputs: dict[Artifact, Artifact]) -> _HaddockInfoTSet: # Transitive module dependencies 
from other packages. - cross_package_modules = ctx.actions.tset( + cross_package_modules = actions.tset( CompiledModuleTSet, children = module_deps, ) @@ -73,13 +73,13 @@ def _haddock_dump_interface( module_html = _haddock_module_to_html(module_name) if paths.basename(expected_html.short_path) != module_html: - html_output = ctx.actions.declare_output("haddock-html", module_html) + html_output = actions.declare_output("haddock-html", module_html) make_copy = True else: - html_output = expected_html + html_output = expected_html make_copy = False - ctx.actions.run( + actions.run( cmd.copy().add( "--odir", cmd_args(html_output.as_output(), parent = 1), "--dump-interface", outputs[haddock_info.haddock].as_output(), @@ -102,50 +102,52 @@ def _haddock_dump_interface( if make_copy: # XXX might as well use `symlink_file`` but that does not work with buck2 RE # (see https://github.com/facebook/buck2/issues/222) - ctx.actions.copy_file(expected_html.as_output(), html_output) + actions.copy_file(expected_html.as_output(), html_output) - return ctx.actions.tset( + return actions.tset( _HaddockInfoTSet, value = _HaddockInfo(interface = haddock_info.interface, haddock = outputs[haddock_info.haddock], html = outputs[haddock_info.html]), children = this_package_modules, ) def _dynamic_haddock_dump_interfaces_impl(actions, artifacts, dynamic_values, outputs, arg): - md = artifacts[arg.md_file].read_json() - module_map = md["module_mapping"] - graph = md["module_graph"] - package_deps = md["package_deps"] - - dynamic_info_lib = {} - - for lib in arg.direct_deps_link_info: - info = lib.info[arg.link_style] - direct = info.value - dynamic = direct.dynamic[False] - dynamic_info = dynamic_values[dynamic].providers[DynamicCompileResultInfo] - - dynamic_info_lib[direct.name] = dynamic_info - - haddock_infos = { module_map.get(k, k): v for k, v in haddock_infos.items() } - module_tsets = {} - - for module_name in post_order_traversal(graph): - module_deps = [ - info.modules[mod] - for 
lib, info in dynamic_info_lib.items() - for mod in package_deps.get(module_name, {}).get(lib, []) - ] - - module_tsets[module_name] = _haddock_dump_interface( - actions, - arg.dyn_cmd.copy(), - module_name = module_name, - module_tsets = module_tsets, - haddock_info = haddock_infos[module_name], - module_deps = module_deps, + md = artifacts[arg.md_file].read_json() + module_map = md["module_mapping"] + graph = md["module_graph"] + package_deps = md["package_deps"] + + dynamic_info_lib = {} + + for lib in arg.direct_deps_link_info: + info = lib.info[arg.link_style] + direct = info.value + dynamic = direct.dynamic[False] + dynamic_info = dynamic_values[dynamic].providers[DynamicCompileResultInfo] + + dynamic_info_lib[direct.name] = dynamic_info + + haddock_infos = { module_map.get(k, k): v for k, v in arg.haddock_infos.items() } + module_tsets = {} + + for module_name in post_order_traversal(graph): + module_deps = [ + info.modules[mod] + for lib, info in dynamic_info_lib.items() + for mod in package_deps.get(module_name, {}).get(lib, []) + ] + + module_tsets[module_name] = _haddock_dump_interface( + actions, + arg.dyn_cmd.copy(), + module_name = module_name, + module_tsets = module_tsets, + haddock_info = haddock_infos[module_name], + module_deps = module_deps, graph = graph, - outputs = outputs, - ) + outputs = outputs, + ) + + return [] _dynamic_haddock_dump_interfaces = dynamic_actions(impl = _dynamic_haddock_dump_interfaces_impl) @@ -198,10 +200,11 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes ], outputs = [output.as_output() for info in haddock_infos.values() for output in [info.haddock, info.html]], arg = struct( - md_file = md_file, direct_deps_link_info = direct_deps_link_info, dyn_cmd = cmd.copy(), - haddock_infos = haddock_infos + haddock_infos = haddock_infos, + link_style = link_style, + md_file = md_file, ), )) From 2a6ce6f6a9b6611c4c6a68cf9eb7fd6ddb595d30 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 26 
Aug 2024 11:17:06 +0200 Subject: [PATCH 1083/1133] Cleanup --- haskell/compile.bzl | 14 +++++++------- haskell/haskell.bzl | 15 ++------------- 2 files changed, 9 insertions(+), 20 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 4ca620c51..c0e354042 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -254,13 +254,13 @@ def target_metadata( compiler_flags = ctx.attrs.compiler_flags, deps = ctx.attrs.deps, direct_deps_link_info = attr_deps_haskell_link_infos(ctx), - haskell_toolchain = haskell_toolchain, haskell_direct_deps_lib_infos = haskell_direct_deps_lib_infos, + haskell_toolchain = haskell_toolchain, + lib_package_name_and_prefix =_attr_deps_haskell_lib_package_name_and_prefix(ctx), md_file = md_file, md_gen = md_gen, - lib_package_name_and_prefix =_attr_deps_haskell_lib_package_name_and_prefix(ctx), - strip_prefix = _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)), sources = sources, + strip_prefix = _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)), suffix = suffix, toolchain_libs = toolchain_libs, ), @@ -813,15 +813,15 @@ def compile( outputs = [o.as_output() for o in interfaces + objects + stub_dirs + abi_hashes], arg = struct( artifact_suffix = artifact_suffix, - attr_deps_haskell_toolchain_libraries = attr_deps_haskell_toolchain_libraries(ctx), attr_deps_haskell_link_infos = attr_deps_haskell_link_infos(ctx), + attr_deps_haskell_toolchain_libraries = attr_deps_haskell_toolchain_libraries(ctx), compiler_flags = ctx.attrs.compiler_flags, deps = ctx.attrs.deps, - ghc_wrapper = ctx.attrs._ghc_wrapper[RunInfo], - haskell_toolchain = haskell_toolchain, - external_tool_paths = [tool[RunInfo] for tool in ctx.attrs.external_tools], enable_haddock = enable_haddock, enable_profiling = enable_profiling, + external_tool_paths = [tool[RunInfo] for tool in ctx.attrs.external_tools], + ghc_wrapper = ctx.attrs._ghc_wrapper[RunInfo], + haskell_toolchain = haskell_toolchain, label = ctx.label, link_style = 
link_style, main = getattr(ctx.attrs, "main", None), diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 59b8252c1..f0e451c40 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -1100,12 +1100,6 @@ def _make_link_package( def _dynamic_link_binary_impl(actions, artifacts, dynamic_values, outputs, arg): link_cmd = arg.link.copy() # link is already frozen, make a copy - if arg.haskell_toolchain.packages: - pkg_deps = dynamic_values[arg.haskell_toolchain.packages.dynamic] - package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages - else: - package_db = [] - # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. packages_info = get_packages_info2( @@ -1130,11 +1124,6 @@ def _dynamic_link_binary_impl(actions, artifacts, dynamic_values, outputs, arg): link_cmd.add(cmd_args(hidden = packages_info.exposed_package_libs)) - package_db_tset = actions.tset( - HaskellPackageDbTSet, - children = [package_db[name] for name in arg.toolchain_libs if name in package_db] - ) - link_cmd.add("-o", outputs[arg.output].as_output()) actions.run(link_cmd, category = "haskell_link") @@ -1368,10 +1357,10 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: outputs = [output.as_output()], arg = struct( deps = ctx.attrs.deps, - enable_profiling = enable_profiling, direct_deps_link_info = attr_deps_haskell_link_infos(ctx), - haskell_toolchain = haskell_toolchain, + enable_profiling = enable_profiling, haskell_direct_deps_lib_infos = haskell_direct_deps_lib_infos, + haskell_toolchain = haskell_toolchain, link = link, link_style = link_style, linker_flags = ctx.attrs.linker_flags, From 5bf1a83f613579b2830afe314bcaaa77505817bf Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 26 Aug 2024 11:50:41 +0200 Subject: [PATCH 1084/1133] Refactor --- haskell/compile.bzl | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 
c0e354042..8fdf4041a 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -701,22 +701,12 @@ def _compile_module( return module_tset def _dynamic_do_compile_impl(actions, artifacts, dynamic_values, outputs, arg): - # Collect library dependencies. Note that these don't need to be in a - # particular order. - toolchain_deps_by_name = { - lib.name: None - for lib in arg.attr_deps_haskell_toolchain_libraries - } - direct_deps_info = [ - lib.prof_info[arg.link_style] if arg.enable_profiling else lib.info[arg.link_style] - for lib in arg.attr_deps_haskell_link_infos - ] direct_deps_by_name = { info.value.name: struct( package_db = info.value.empty_db, modules = dynamic_values[info.value.dynamic[arg.enable_profiling]].providers[DynamicCompileResultInfo].modules, ) - for info in direct_deps_info + for info in arg.direct_deps_info } common_args = _common_compile_module_args( actions, @@ -732,7 +722,7 @@ def _dynamic_do_compile_impl(actions, artifacts, dynamic_values, outputs, arg): enable_haddock = arg.enable_haddock, enable_profiling = arg.enable_profiling, link_style = arg.link_style, - direct_deps_info = direct_deps_info, + direct_deps_info = arg.direct_deps_info, pkgname = arg.pkgname, ) @@ -766,7 +756,7 @@ def _dynamic_do_compile_impl(actions, artifacts, dynamic_values, outputs, arg): md_file = arg.md_file, artifact_suffix = arg.artifact_suffix, direct_deps_by_name = direct_deps_by_name, - toolchain_deps_by_name = toolchain_deps_by_name, + toolchain_deps_by_name = arg.toolchain_deps_by_name, source_prefixes = source_prefixes, ) @@ -799,6 +789,17 @@ def compile( ] abi_hashes = [module.hash for module in modules.values()] + # Collect library dependencies. Note that these don't need to be in a + # particular order. 
+ toolchain_deps_by_name = { + lib.name: None + for lib in attr_deps_haskell_toolchain_libraries(ctx) + } + direct_deps_info = [ + lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + for lib in attr_deps_haskell_link_infos(ctx) + ] + dyn_module_tsets = ctx.actions.dynamic_output_new(_dynamic_do_compile( dynamic = [md_file], dynamic_values = [ @@ -813,10 +814,9 @@ def compile( outputs = [o.as_output() for o in interfaces + objects + stub_dirs + abi_hashes], arg = struct( artifact_suffix = artifact_suffix, - attr_deps_haskell_link_infos = attr_deps_haskell_link_infos(ctx), - attr_deps_haskell_toolchain_libraries = attr_deps_haskell_toolchain_libraries(ctx), compiler_flags = ctx.attrs.compiler_flags, deps = ctx.attrs.deps, + direct_deps_info = direct_deps_info, enable_haddock = enable_haddock, enable_profiling = enable_profiling, external_tool_paths = [tool[RunInfo] for tool in ctx.attrs.external_tools], @@ -830,6 +830,7 @@ def compile( pkgname = pkgname, sources = ctx.attrs.srcs, sources_deps = ctx.attrs.srcs_deps, + toolchain_deps_by_name = toolchain_deps_by_name, ), )) From 6c3a1f34de3840afd7576ba846ba790fb10620af Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 21 Aug 2024 16:16:32 -0700 Subject: [PATCH 1085/1133] use --show-iface-abi-hash for extracting hash --- haskell/tools/ghc_wrapper.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 241fa991b..09f33d550 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -131,13 +131,12 @@ def recompute_abi_hash(ghc, abi_out): """Call ghc on the hi file and write the ABI hash to abi_out.""" hi_file = abi_out.with_suffix("") - cmd = [ghc, "--show-iface", hi_file] - for line in subprocess.check_output(cmd, text=True).splitlines(): - if "ABI hash:" in line: - hash = line.split(":", 1)[1] - with open(abi_out, "w") as outfile: - print(hash, file=outfile) - return + cmd = [ghc, 
"--show-iface-abi-hash", hi_file] + for line in subprocess.check_output(cmd, text=True): + hash = line[0] + with open(abi_out, "w") as outfile: + print(hash, file=outfile) + return raise RuntimeError("ABI hash not found in ghc output") From 515311a865be983db55aeaefd6c3e3daef57aa48 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Tue, 27 Aug 2024 13:32:31 -0700 Subject: [PATCH 1086/1133] fix bug, and make it concise. Co-authored-by: Claudio Bley --- haskell/tools/ghc_wrapper.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 09f33d550..efc51b9cb 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -132,12 +132,10 @@ def recompute_abi_hash(ghc, abi_out): hi_file = abi_out.with_suffix("") cmd = [ghc, "--show-iface-abi-hash", hi_file] - for line in subprocess.check_output(cmd, text=True): - hash = line[0] - with open(abi_out, "w") as outfile: - print(hash, file=outfile) - return - raise RuntimeError("ABI hash not found in ghc output") + + hash = subprocess.check_output(cmd, text=True).split(maxsplit=1)[0] + + abi_out.write_text(hash) if __name__ == "__main__": From 030479b9b66275e050b9672d29e41dc30f055ed5 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 28 Aug 2024 09:53:07 +0200 Subject: [PATCH 1087/1133] Prevent loading of default package environment, suppress messages --- haskell/tools/ghc_wrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index efc51b9cb..0a1c20f2d 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -131,7 +131,7 @@ def recompute_abi_hash(ghc, abi_out): """Call ghc on the hi file and write the ABI hash to abi_out.""" hi_file = abi_out.with_suffix("") - cmd = [ghc, "--show-iface-abi-hash", hi_file] + cmd = [ghc, "-v0", "-package-env=-", "--show-iface-abi-hash", hi_file] hash = subprocess.check_output(cmd, 
text=True).split(maxsplit=1)[0] From 84f970d89a9b0e1d435640ca1bc5227049575b96 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Thu, 29 Aug 2024 19:47:49 -0700 Subject: [PATCH 1088/1133] make compiler_flags be of arg type (#34) compiler_flags often needs macro like $(location :target). --- decls/haskell_common.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index cf532ec20..13e0913ea 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ -31,7 +31,7 @@ def _deps_arg(): def _compiler_flags_arg(): return { - "compiler_flags": attrs.list(attrs.string(), default = [], doc = """ + "compiler_flags": attrs.list(attrs.arg(), default = [], doc = """ Flags to pass to the Haskell compiler when compiling this rule's sources. """), } From 1741158e797ef7b4a32a88ba26715d4e6a14198f Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 11 Sep 2024 11:07:27 -0700 Subject: [PATCH 1089/1133] compiler_flags : list[ArgLike], not list[str] in _common_compile_module_args (#42) * compiler_flags : list, not list[str] after attrs.string() -> attrs.arg(). * use list[ArgLike] instead of list. --- haskell/compile.bzl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 8fdf4041a..e563ddcc1 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -5,6 +5,8 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//utils:arglike.bzl", "ArgLike") + load( "@prelude//cxx:preprocessor.bzl", "cxx_inherited_preprocessor_infos", @@ -404,7 +406,7 @@ CommonCompileModuleArgs = record( def _common_compile_module_args( actions: AnalysisActions, *, - compiler_flags: list[str], + compiler_flags: list[ArgLike], ghc_wrapper: RunInfo, haskell_toolchain: HaskellToolchainInfo, resolved: dict[DynamicValue, ResolvedDynamicValue], From 04a733984fabb79c4a1fea2b51d2b33eb1d4faa8 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 11 Sep 2024 14:18:04 -0700 Subject: [PATCH 1090/1133] Revive accidentally deleted srcs_envs after buck2 upgrade (#43) When upgrading buck2-prelude to the upstream (#36), srcs_envs part was accidentally deleted, so revived it. The type of srcs_envs is changed to dict[str, dict[str, arg]]. * revive accidentally deleted srcs_envs * use dict(str, arg) instead of list(tuple(str, arg)) --- decls/haskell_common.bzl | 2 +- haskell/compile.bzl | 13 +++++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index 13e0913ea..64bb7d8a6 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ -64,7 +64,7 @@ def _external_tools_arg(): def _srcs_envs_arg(): return { - "srcs_envs": attrs.dict(attrs.source(), attrs.list(attrs.tuple(attrs.string(), attrs.arg())), default = {}, doc = """ + "srcs_envs": attrs.dict(attrs.source(), attrs.dict(attrs.string(), attrs.arg()), default = {}, doc = """ Individual run-time env for each source compilation. 
"""), } diff --git a/haskell/compile.bzl b/haskell/compile.bzl index e563ddcc1..e63e8d83e 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -561,6 +561,7 @@ def _compile_module( direct_deps_by_name: dict[str, typing.Any], toolchain_deps_by_name: dict[str, None], aux_deps: None | list[Artifact], + src_envs: None | dict[str, ArgLike], source_prefixes: list[str], ) -> CompiledModuleTSet: # These compiler arguments can be passed in a response file. @@ -642,6 +643,16 @@ def _compile_module( abi_tag.tag_artifacts(dependency_modules.project_as_args("interfaces")), dependency_modules.project_as_args("abi"), ] + if src_envs: + for k, v in src_envs.items(): + compile_args_for_file.add(cmd_args( + k, + format="--extra-env-key={}", + )) + compile_args_for_file.add(cmd_args( + v, + format="--extra-env-value={}", + )) if haskell_toolchain.use_argsfile: argsfile = actions.declare_output( "haskell_compile_" + artifact_suffix + ".argsfile", @@ -743,6 +754,7 @@ def _dynamic_do_compile_impl(actions, artifacts, dynamic_values, outputs, arg): module_tsets[module_name] = _compile_module( actions, aux_deps = arg.sources_deps.get(module.source), + src_envs = arg.srcs_envs.get(module.source), common_args = common_args, link_style = arg.link_style, enable_profiling = arg.enable_profiling, @@ -832,6 +844,7 @@ def compile( pkgname = pkgname, sources = ctx.attrs.srcs, sources_deps = ctx.attrs.srcs_deps, + srcs_envs = ctx.attrs.srcs_envs, toolchain_deps_by_name = toolchain_deps_by_name, ), )) From ff191d441489c5b2432cb907e9edd8f56a73bd19 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Thu, 12 Sep 2024 04:26:11 -0700 Subject: [PATCH 1091/1133] use_argsfile_at_link to use response file for a large number of objects. 
(#44) * add use_argsfile_at_link * implementing argsfile in linking * remove debug print --- decls/haskell_common.bzl | 8 ++++++++ decls/haskell_rules.bzl | 2 ++ haskell/haskell.bzl | 39 ++++++++++++++++++++++++++++----------- 3 files changed, 38 insertions(+), 11 deletions(-) diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index 64bb7d8a6..0f704f71d 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ -69,6 +69,13 @@ def _srcs_envs_arg(): """), } +def _use_argsfile_at_link_arg(): + return { + "use_argsfile_at_link": attrs.bool(default = False, doc = """ + Use response file at linking. +"""), + } + haskell_common = struct( srcs_arg = _srcs_arg, deps_arg = _deps_arg, @@ -77,4 +84,5 @@ haskell_common = struct( scripts_arg = _scripts_arg, external_tools_arg = _external_tools_arg, srcs_envs_arg = _srcs_envs_arg, + use_argsfile_at_link_arg = _use_argsfile_at_link_arg, ) diff --git a/decls/haskell_rules.bzl b/decls/haskell_rules.bzl index 36f406748..61d2107ca 100644 --- a/decls/haskell_rules.bzl +++ b/decls/haskell_rules.bzl @@ -48,6 +48,7 @@ haskell_binary = prelude_rule( haskell_common.srcs_arg() | haskell_common.external_tools_arg() | haskell_common.srcs_envs_arg () | + haskell_common.use_argsfile_at_link_arg () | haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | haskell_common.scripts_arg() | @@ -168,6 +169,7 @@ haskell_library = prelude_rule( haskell_common.srcs_arg() | haskell_common.external_tools_arg() | haskell_common.srcs_envs_arg() | + haskell_common.use_argsfile_at_link_arg() | haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | haskell_common.scripts_arg() | diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index f0e451c40..5294a3909 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -551,14 +551,16 @@ def _dynamic_link_shared_impl(actions, artifacts, dynamic_values, outputs, arg): children = [package_db[name] for name in arg.toolchain_libs if name in package_db] ) - link = 
cmd_args(arg.haskell_toolchain.linker) - link.add(arg.haskell_toolchain.linker_flags) - link.add(arg.linker_flags) - link.add("-hide-all-packages") - link.add(cmd_args(arg.toolchain_libs, prepend = "-package")) - link.add(cmd_args(package_db_tset.project_as_args("package_db"), prepend="-package-db")) - link.add("-o", outputs[arg.lib].as_output()) - link.add( + link_args = cmd_args() + link_cmd_args = [cmd_args(arg.haskell_toolchain.linker)] + link_cmd_hidden = [] + + link_args.add(arg.haskell_toolchain.linker_flags) + link_args.add(arg.linker_flags) + link_args.add("-hide-all-packages") + link_args.add(cmd_args(arg.toolchain_libs, prepend = "-package")) + link_args.add(cmd_args(package_db_tset.project_as_args("package_db"), prepend="-package-db")) + link_args.add( get_shared_library_flags(arg.linker_info.type), "-dynamic", cmd_args( @@ -567,12 +569,26 @@ def _dynamic_link_shared_impl(actions, artifacts, dynamic_values, outputs, arg): ), ) - link.add(arg.objects) + link_args.add(arg.objects) - link.add(cmd_args(unpack_link_args(arg.infos), prepend = "-optl")) + link_args.add(cmd_args(unpack_link_args(arg.infos), prepend = "-optl")) + + + if arg.use_argsfile_at_link: + argsfile = actions.declare_output( + "haskell_link_" + arg.artifact_suffix.replace("-", "_") + ".argsfile", + ) + actions.write(argsfile.as_output(), link_args, allow_args = True) + link_cmd_args.append(cmd_args(argsfile, format = "@{}")) + link_cmd_hidden.append(link_args) + else: + link_cmd_args.append(link_args) + + link_cmd = cmd_args(link_cmd_args, hidden = link_cmd_hidden) + link_cmd.add("-o", outputs[arg.lib].as_output()) actions.run( - link, + link_cmd, category = "haskell_link" + arg.artifact_suffix.replace("-", "_"), ) @@ -653,6 +669,7 @@ def _build_haskell_lib( linker_info = linker_info, objects = objects, toolchain_libs = toolchain_libs, + use_argsfile_at_link = ctx.attrs.use_argsfile_at_link, ), )) From de7f2c44275d88e1de528f0923e6140c0bfceaee Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim 
Date: Wed, 2 Oct 2024 06:56:50 -0700 Subject: [PATCH 1092/1133] Generating dependency graph should include boot files (#38) .hs-boot, .hs, .hsc, all such files should be included in the dependency graph generation. * Generating dependency graph should include boot files * handle haskell_boot_sources separately * boot_apparent_name from boot_sources, not from sources. this gives correct behavior with generated hs files (from hsc2hs for example) * -boot module keys should not be added as exposed modules in conf * undo unnecessary change * no need for special hs-boot treatment --- haskell/tools/generate_target_metadata.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 4913342c1..9e67a8899 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -142,7 +142,7 @@ def determine_module_mapping(ghc_depends, source_prefix): if len(boot_sources) != 1: raise RuntimeError(f"Expected at most one Haskell boot file for module '{modname}' but got '{boot_sources}'.") - boot_apparent_name = src_to_module_name(strip_prefix_(source_prefix, sources[0]).lstrip("/")) + "-boot" + boot_apparent_name = src_to_module_name(strip_prefix_(source_prefix, boot_sources[0]).lstrip("/")) + "-boot" if boot_apparent_name != boot_modname: result[boot_apparent_name] = boot_modname @@ -190,6 +190,7 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): json_fname = os.path.join(dname, "depends.json") make_fname = os.path.join(dname, "depends.make") haskell_sources = list(filter(is_haskell_src, sources)) + haskell_boot_sources = list(filter (is_haskell_boot, sources)) args = [ ghc, "-M", "-include-pkg-deps", # Note: `-outputdir '.'` removes the prefix of all targets: @@ -197,7 +198,7 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): "-outputdir", ".", "-dep-json", json_fname, "-dep-makefile", make_fname, - ] + ghc_args + 
haskell_sources + ] + ghc_args + haskell_sources + haskell_boot_sources env = os.environ.copy() path = env.get("PATH", "") From bb6b4b5fa91bdf979f7283d0d41b2e692b2bfdb5 Mon Sep 17 00:00:00 2001 From: Neil Mitchell Date: Thu, 18 Apr 2024 13:43:51 -0700 Subject: [PATCH 1093/1133] Dedupe attribute lists of deps Summary: If we duplicate a library twice then the GHC package file becomes an error. We can use tsets to dedupe between instances, but within a list of children, add dedupe here to remove the duplicates. Reviewed By: shayne-fletcher Differential Revision: D56241182 fbshipit-source-id: 0d5ae8f2f48eeb7945986a19d1778a4f2d217b21 --- haskell/util.bzl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/haskell/util.bzl b/haskell/util.bzl index 6d43b09b8..37c261cea 100644 --- a/haskell/util.bzl +++ b/haskell/util.bzl @@ -78,13 +78,13 @@ def attr_deps(ctx: AnalysisContext) -> list[Dependency]: return ctx.attrs.deps + _by_platform(ctx, ctx.attrs.platform_deps) def attr_deps_haskell_link_infos(ctx: AnalysisContext) -> list[HaskellLinkInfo]: - return filter( + return dedupe(filter( None, [ d.get(HaskellLinkInfo) for d in attr_deps(ctx) + ctx.attrs.template_deps ], - ) + )) def attr_deps_haskell_toolchain_libraries(ctx: AnalysisContext) -> list[HaskellToolchainLibrary]: return filter( @@ -97,13 +97,13 @@ def attr_deps_haskell_toolchain_libraries(ctx: AnalysisContext) -> list[HaskellT # DONT CALL THIS FUNCTION, you want attr_deps_haskell_link_infos instead def attr_deps_haskell_link_infos_sans_template_deps(ctx: AnalysisContext) -> list[HaskellLinkInfo]: - return filter( + return dedupe(filter( None, [ d.get(HaskellLinkInfo) for d in attr_deps(ctx) ], - ) + )) def attr_deps_haskell_lib_infos( ctx: AnalysisContext, @@ -120,13 +120,13 @@ def attr_deps_haskell_lib_infos( ] def attr_deps_merged_link_infos(ctx: AnalysisContext) -> list[MergedLinkInfo]: - return filter( + return dedupe(filter( None, [ d.get(MergedLinkInfo) for d in attr_deps(ctx) ], - ) 
+ )) def attr_deps_profiling_link_infos(ctx: AnalysisContext) -> list[MergedLinkInfo]: return filter( From 9c7e0a5cad513b9f5c6216ad172bf46080e38a96 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 16 Sep 2024 14:43:42 +0200 Subject: [PATCH 1094/1133] Use `at_argfile` helper --- haskell/compile.bzl | 13 +++++++------ haskell/haskell.bzl | 14 +++++++------- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index e63e8d83e..adfcdece8 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -50,6 +50,7 @@ load( "@prelude//linking:link_info.bzl", "LinkStyle", ) +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//:paths.bzl", "paths") load("@prelude//utils:graph_utils.bzl", "post_order_traversal") load("@prelude//utils:strings.bzl", "strip_prefix") @@ -654,12 +655,12 @@ def _compile_module( format="--extra-env-value={}", )) if haskell_toolchain.use_argsfile: - argsfile = actions.declare_output( - "haskell_compile_" + artifact_suffix + ".argsfile", - ) - actions.write(argsfile.as_output(), compile_args_for_file, allow_args = True) - compile_cmd_args.append(cmd_args(argsfile, format = "@{}")) - compile_cmd_hidden.append(compile_args_for_file) + compile_cmd_args.append(at_argfile( + actions = actions, + name = "haskell_compile_" + artifact_suffix + ".argsfile", + args = compile_args_for_file, + allow_args = True, + )) else: compile_cmd_args.append(compile_args_for_file) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 5294a3909..0c814217a 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -146,6 +146,7 @@ load( "@prelude//python:python.bzl", "PythonLibraryInfo", ) +load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:set.bzl", "set") load("@prelude//utils:utils.bzl", "filter_and_map_idx", "flatten") @@ -573,14 +574,13 @@ def _dynamic_link_shared_impl(actions, artifacts, dynamic_values, outputs, arg): link_args.add(cmd_args(unpack_link_args(arg.infos), 
prepend = "-optl")) - if arg.use_argsfile_at_link: - argsfile = actions.declare_output( - "haskell_link_" + arg.artifact_suffix.replace("-", "_") + ".argsfile", - ) - actions.write(argsfile.as_output(), link_args, allow_args = True) - link_cmd_args.append(cmd_args(argsfile, format = "@{}")) - link_cmd_hidden.append(link_args) + link_cmd_args.append(at_argfile( + actions = actions, + name = "haskell_link_" + arg.artifact_suffix.replace("-", "_") + ".argsfile", + args = link_args, + allow_args = True, + )) else: link_cmd_args.append(link_args) From f6af740a882d1f81150ec31cd9cd56d0f40cb145 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Mon, 16 Sep 2024 16:31:09 +0200 Subject: [PATCH 1095/1133] Remove left-over comment --- haskell/compile.bzl | 2 -- 1 file changed, 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index adfcdece8..d1e8cdcf3 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -422,8 +422,6 @@ def _common_compile_module_args( direct_deps_info: list[HaskellLibraryInfoTSet], pkgname: str | None = None, ) -> CommonCompileModuleArgs: - #haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - command = cmd_args(ghc_wrapper) command.add("--ghc", haskell_toolchain.compiler) From feb843faf5d74a6132cd685500dffa12b3d2f45d Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 17 Sep 2024 08:42:40 +0200 Subject: [PATCH 1096/1133] Add performance optimisations for handling package DBs See commit 997574cb277b58f32c7b8e91a1333d49ee017d73: multiple packages might exist within the same package_db. Before, since we deduplicated everything by value, you might have 100 packages and 20 package_dbs. If you give GHC 100 package dbs (even if there are only 20 unique ones) the performance crashes dramatically. That's especially true if you have a package_db representing all of stackage, which both occurs a lot and is slow to load. The two places are: * When we construct GHC_PACKAGE_PATH env var. 
These are fairly trivially deduped with a dict construction * When we do packagedb_args.add. We traverse the TSet in the loop above, so dedupe to a dict then too. --- haskell/compile.bzl | 26 ++++++++++++++++++++++---- haskell/haskell.bzl | 6 +++++- 2 files changed, 27 insertions(+), 5 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index d1e8cdcf3..b4ddb4814 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -344,10 +344,8 @@ def get_packages_info2( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) - hidden_args = [l for lib in libs.traverse() for l in lib.libs] - exposed_package_libs = cmd_args() - exposed_package_args = cmd_args([package_flag, "base"], hidden = hidden_args) + exposed_package_args = cmd_args([package_flag, "base"]) if for_deps: package_db_projection = "deps_package_db" @@ -356,7 +354,23 @@ def get_packages_info2( else: package_db_projection = "package_db" - packagedb_args = cmd_args(libs.project_as_args(package_db_projection)) + packagedb_args = cmd_args() + packagedb_set = {} + + for lib in libs.traverse(): + if for_deps: + db = lib.deps_db + elif use_empty_lib: + db = lib.empty_db + else: + db = lib.db + packagedb_set[db] = None + hidden_args = cmd_args(hidden = [ + lib.import_dirs.values(), + lib.stub_dirs, + lib.libs, + ]) + exposed_package_args.add(hidden_args) if resolved: pkg_deps = resolved[haskell_toolchain.packages.dynamic] @@ -377,6 +391,10 @@ def get_packages_info2( children = [package_db[name] for name in toolchain_libs if name in package_db] ) + # These we need to add for all the packages/dependencies, i.e. + # direct and transitive (e.g. 
`fbcode-common-hs-util-hs-array`) + packagedb_args.add(packagedb_set.keys()) + packagedb_args.add(package_db_tset.project_as_args("package_db")) direct_package_paths = [package_db[name].value.path for name in direct_toolchain_libs if name in package_db] diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 0c814217a..84b972b77 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -638,7 +638,9 @@ def _build_haskell_lib( linfos = [x.prof_info if enable_profiling else x.info for x in hlis] + # only gather direct dependencies uniq_infos = [x[link_style].value for x in linfos] + toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] if link_style == LinkStyle("shared"): @@ -1088,7 +1090,9 @@ def _make_link_package( pkg_conf = ctx.actions.write("pkg-" + artifact_suffix + "_link.conf", conf) db = ctx.actions.declare_output("db-" + artifact_suffix + "_link", dir = True) - db_deps = [x.db for x in hlis] + # While the list of hlis is unique, there may be multiple packages in the same db. + # Cutting down the GHC_PACKAGE_PATH significantly speeds up GHC. + db_deps = {x.db: None for x in hlis}.keys() # So that ghc-pkg can find the DBs for the dependencies. We might # be able to use flags for this instead, but this works. 
From 569d53c05c9e45f493ecb9475bd2b1a87f8a084f Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 17 Sep 2024 08:46:01 +0200 Subject: [PATCH 1097/1133] Move package db selection before the loop --- haskell/compile.bzl | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index b4ddb4814..3b7ceea77 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -348,23 +348,17 @@ def get_packages_info2( exposed_package_args = cmd_args([package_flag, "base"]) if for_deps: - package_db_projection = "deps_package_db" + get_db = lambda l: l.deps_db elif use_empty_lib: - package_db_projection = "empty_package_db" + get_db = lambda l: l.empty_db else: - package_db_projection = "package_db" + get_db = lambda l: l.db packagedb_args = cmd_args() packagedb_set = {} for lib in libs.traverse(): - if for_deps: - db = lib.deps_db - elif use_empty_lib: - db = lib.empty_db - else: - db = lib.db - packagedb_set[db] = None + packagedb_set[get_db(lib)] = None hidden_args = cmd_args(hidden = [ lib.import_dirs.values(), lib.stub_dirs, From edfa2d1f9ca733c1b34508650e3e38d275a58a12 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 17 Sep 2024 09:12:57 +0200 Subject: [PATCH 1098/1133] Use attr_deps_haskell_toolchain_libraries --- haskell/compile.bzl | 6 +----- haskell/haskell.bzl | 3 ++- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 3b7ceea77..fcf6a7404 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -229,11 +229,7 @@ def target_metadata( md_gen = ctx.attrs._generate_target_metadata[RunInfo] haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] - toolchain_libs = [ - dep[HaskellToolchainLibrary].name - for dep in ctx.attrs.deps - if HaskellToolchainLibrary in dep - ] + toolchain_libs = [dep.name for dep in attr_deps_haskell_toolchain_libraries(ctx)] haskell_direct_deps_lib_infos = attr_deps_haskell_lib_infos( ctx, diff 
--git a/haskell/haskell.bzl b/haskell/haskell.bzl index 84b972b77..0f7184c0d 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -87,6 +87,7 @@ load( "attr_deps_haskell_link_infos_sans_template_deps", "attr_deps_haskell_lib_infos", "attr_deps_haskell_link_infos", + "attr_deps_haskell_toolchain_libraries", "attr_deps_merged_link_infos", "attr_deps_profiling_link_infos", "attr_deps_shared_library_infos", @@ -641,7 +642,7 @@ def _build_haskell_lib( # only gather direct dependencies uniq_infos = [x[link_style].value for x in linfos] - toolchain_libs = [dep[HaskellToolchainLibrary].name for dep in ctx.attrs.deps if HaskellToolchainLibrary in dep] + toolchain_libs = [dep.name for dep in attr_deps_haskell_toolchain_libraries(ctx)] if link_style == LinkStyle("shared"): lib = ctx.actions.declare_output(lib_short_path) From 5494f65cc661a876bdd5b5783278a6c453930418 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 17 Sep 2024 10:21:36 +0200 Subject: [PATCH 1099/1133] Remove `exposed_package_libs` They are always empty. 
--- haskell/compile.bzl | 3 --- haskell/haskell.bzl | 2 -- 2 files changed, 5 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index fcf6a7404..5f6b19253 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -101,7 +101,6 @@ CompileResultInfo = record( ) PackagesInfo = record( - exposed_package_libs = cmd_args, exposed_package_args = cmd_args, packagedb_args = cmd_args, transitive_deps = field(HaskellLibraryInfoTSet), @@ -340,7 +339,6 @@ def get_packages_info2( # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) - exposed_package_libs = cmd_args() exposed_package_args = cmd_args([package_flag, "base"]) if for_deps: @@ -399,7 +397,6 @@ def get_packages_info2( exposed_package_args.add(package_flag, pkg_name) return PackagesInfo( - exposed_package_libs = exposed_package_libs, exposed_package_args = exposed_package_args, packagedb_args = packagedb_args, transitive_deps = libs, diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 0f7184c0d..08a5d1261 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -1144,8 +1144,6 @@ def _dynamic_link_binary_impl(actions, artifacts, dynamic_values, outputs, arg): link_cmd.add(arg.haskell_toolchain.linker_flags) link_cmd.add(arg.linker_flags) - link_cmd.add(cmd_args(hidden = packages_info.exposed_package_libs)) - link_cmd.add("-o", outputs[arg.output].as_output()) actions.run(link_cmd, category = "haskell_link") From dd7fce1fcd6fef589bbcd636db3fd0da75fb35b0 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Tue, 17 Sep 2024 10:22:42 +0200 Subject: [PATCH 1100/1133] Reformat --- haskell/compile.bzl | 11 +++++++---- haskell/haskell.bzl | 18 ++++++++++-------- 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 5f6b19253..da8cf1783 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -331,10 +331,13 @@ def get_packages_info2( # Collect library dependencies. 
Note that these don't need to be in a # particular order. - libs = actions.tset(HaskellLibraryInfoTSet, children = [ - lib.prof_info[link_style] if enable_profiling else lib.info[link_style] - for lib in direct_deps_link_info - ]) + libs = actions.tset( + HaskellLibraryInfoTSet, + children = [ + lib.prof_info[link_style] if enable_profiling else lib.info[link_style] + for lib in direct_deps_link_info + ], + ) # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 08a5d1261..1f0c1059b 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -285,15 +285,17 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: ] hlibinfos[link_style] = hlibinfo - hlinkinfos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = hlibinfo, children = [ - lib.info[link_style] - for lib in haskell_infos - ]) + hlinkinfos[link_style] = ctx.actions.tset( + HaskellLibraryInfoTSet, + value = hlibinfo, + children = [lib.info[link_style] for lib in haskell_infos], + ) prof_hlibinfos[link_style] = prof_hlibinfo - prof_hlinkinfos[link_style] = ctx.actions.tset(HaskellLibraryInfoTSet, value = prof_hlibinfo, children = [ - lib.prof_info[link_style] - for lib in haskell_infos - ]) + prof_hlinkinfos[link_style] = ctx.actions.tset( + HaskellLibraryInfoTSet, + value = prof_hlibinfo, + children = [lib.prof_info[link_style] for lib in haskell_infos], + ) link_infos[link_style] = LinkInfos( default = LinkInfo( pre_flags = ctx.attrs.exported_linker_flags, From 23539aa8cd73492a40d4008a5195a3fdb57d6965 Mon Sep 17 00:00:00 2001 From: David Tolnay Date: Mon, 9 Sep 2024 11:36:12 -0700 Subject: [PATCH 1101/1133] Convert cxx toolchain LinkerType to enum Summary: Because I am interested in using this as a dict key in the next diff. 
`dict[LinkerType, RunInfo]` Reviewed By: capickett Differential Revision: D62385685 fbshipit-source-id: 2e47ece5cf5ca8180effa12e83021cd0ba73012a (cherry picked from commit 446425115ec180ea6f7ee638e081dd968ac88090) --- cxx/anon_link.bzl | 18 +++++---- cxx/archive.bzl | 6 +-- cxx/cxx_library.bzl | 11 +++-- cxx/cxx_library_utility.bzl | 8 +++- cxx/cxx_link_utility.bzl | 18 +++++---- cxx/cxx_toolchain.bzl | 31 ++++++++++++--- cxx/cxx_toolchain_types.bzl | 4 +- cxx/headers.bzl | 3 +- cxx/link.bzl | 12 ++++-- cxx/linker.bzl | 62 ++++++++++++++--------------- cxx/omnibus.bzl | 8 +++- cxx/prebuilt_cxx_library_group.bzl | 8 +++- cxx/symbols.bzl | 10 +++-- cxx/user/cxx_toolchain_override.bzl | 24 +++++++++-- haskell/haskell.bzl | 11 +++-- linking/link_info.bzl | 16 +++++--- linking/lto.bzl | 10 +++-- linking/strip.bzl | 8 +++- rust/build_params.bzl | 23 ++++++----- toolchains/cxx.bzl | 9 +++-- toolchains/cxx/clang/tools.bzl | 3 +- toolchains/msvc/tools.bzl | 3 +- 22 files changed, 198 insertions(+), 108 deletions(-) diff --git a/cxx/anon_link.bzl b/cxx/anon_link.bzl index 456dc02c6..143f892ea 100644 --- a/cxx/anon_link.bzl +++ b/cxx/anon_link.bzl @@ -10,7 +10,11 @@ load( "ArtifactInfo", "make_artifact_tset", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", + "LinkerType", +) load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//linking:execution_preference.bzl", "LinkExecutionPreference") load( @@ -34,7 +38,7 @@ def _serialize_linkable(linkable): return ("archive", ( (linkable.archive.artifact, linkable.archive.external_objects), linkable.link_whole, - linkable.linker_type, + linkable.linker_type.value, linkable.supports_lto, )) @@ -42,7 +46,7 @@ def _serialize_linkable(linkable): return ("objects", ( linkable.objects, linkable.link_whole, - linkable.linker_type, + linkable.linker_type.value, )) if isinstance(linkable, SharedLibLinkable): @@ -107,7 
+111,7 @@ def _deserialize_linkable(linkable: (str, typing.Any)) -> typing.Any: external_objects = external_objects, ), link_whole = link_whole, - linker_type = linker_type, + linker_type = LinkerType(linker_type), supports_lto = supports_lto, ) @@ -116,7 +120,7 @@ def _deserialize_linkable(linkable: (str, typing.Any)) -> typing.Any: return ObjectsLinkable( objects = objects, link_whole = link_whole, - linker_type = linker_type, + linker_type = LinkerType(linker_type), ) if typ == "shared": @@ -207,7 +211,7 @@ ANON_ATTRS = { # ObjectsLinkable attrs.list(attrs.source()), # objects attrs.bool(), # link_whole - attrs.string(), # linker_type + attrs.enum(LinkerType.values()), # linker_type ), attrs.tuple( # ArchiveLinkable @@ -217,7 +221,7 @@ ANON_ATTRS = { attrs.list(attrs.source()), # external_objects ), attrs.bool(), # link_whole - attrs.string(), # linker_type + attrs.enum(LinkerType.values()), # linker_type attrs.bool(), # supports_lto ), attrs.tuple( diff --git a/cxx/archive.bzl b/cxx/archive.bzl index b54b99ea3..f3149df7c 100644 --- a/cxx/archive.bzl +++ b/cxx/archive.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerInfo") +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerInfo", "LinkerType") load("@prelude//linking:link_info.bzl", "Archive") load("@prelude//utils:argfile.bzl", "at_argfile") load("@prelude//utils:utils.bzl", "value_or") @@ -13,7 +13,7 @@ load(":cxx_context.bzl", "get_cxx_toolchain_info") def _archive_flags( archiver_type: str, - linker_type: str, + linker_type: LinkerType, use_archiver_flags: bool, symbol_table: bool, thin: bool) -> list[str]: @@ -43,7 +43,7 @@ def _archive_flags( flags += "T" # GNU archivers support generating deterministic archives. 
- if linker_type == "gnu": + if linker_type == LinkerType("gnu"): flags += "D" return [flags] diff --git a/cxx/cxx_library.bzl b/cxx/cxx_library.bzl index 3f03fbb39..56796d9cf 100644 --- a/cxx/cxx_library.bzl +++ b/cxx/cxx_library.bzl @@ -150,7 +150,12 @@ load( "cxx_use_shlib_intfs", "cxx_use_shlib_intfs_mode", ) -load(":cxx_toolchain_types.bzl", "ShlibInterfacesMode", "is_bitcode_format") +load( + ":cxx_toolchain_types.bzl", + "LinkerType", + "ShlibInterfacesMode", + "is_bitcode_format", +) load( ":cxx_types.bzl", "CxxRuleConstructorParams", # @unused Used as a type @@ -1173,7 +1178,7 @@ def _strip_objects(ctx: AnalysisContext, objects: list[Artifact]) -> list[Artifa # Stripping is not supported on Windows linker_type = cxx_toolchain_info.linker_info.type - if linker_type == "windows": + if linker_type == LinkerType("windows"): return objects # Disable stripping if no `strip` binary was provided by the toolchain. @@ -1373,7 +1378,7 @@ def _static_library( # On darwin, the linked output references the archive that contains the # object files instead of the originating objects. 
object_external_debug_info = [] - if linker_type == "darwin": + if linker_type == LinkerType("darwin"): object_external_debug_info.append(archive.artifact) object_external_debug_info.extend(archive.external_objects) elif objects_have_external_debug_info: diff --git a/cxx/cxx_library_utility.bzl b/cxx/cxx_library_utility.bzl index a84a5615a..5b1e255e0 100644 --- a/cxx/cxx_library_utility.bzl +++ b/cxx/cxx_library_utility.bzl @@ -25,7 +25,11 @@ load( "from_named_set", ) load(":cxx_context.bzl", "get_cxx_platform_info", "get_cxx_toolchain_info") -load(":cxx_toolchain_types.bzl", "ShlibInterfacesMode") +load( + ":cxx_toolchain_types.bzl", + "LinkerType", + "ShlibInterfacesMode", +) load( ":headers.bzl", "cxx_attr_header_namespace", @@ -143,7 +147,7 @@ def cxx_attr_resources(ctx: AnalysisContext) -> dict[str, ArtifactOutputs]: return resources def cxx_is_gnu(ctx: AnalysisContext) -> bool: - return get_cxx_toolchain_info(ctx).linker_info.type == "gnu" + return get_cxx_toolchain_info(ctx).linker_info.type == LinkerType("gnu") def cxx_use_shlib_intfs(ctx: AnalysisContext) -> bool: """ diff --git a/cxx/cxx_link_utility.bzl b/cxx/cxx_link_utility.bzl index 3d2f89a2a..bf5c8ae31 100644 --- a/cxx/cxx_link_utility.bzl +++ b/cxx/cxx_link_utility.bzl @@ -7,7 +7,11 @@ load("@prelude//:artifact_tset.bzl", "project_artifacts") load("@prelude//:paths.bzl", "paths") -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", + "LinkerType", +) load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:linker.bzl", "get_rpath_origin") load("@prelude//cxx:target_sdk_version.bzl", "get_target_sdk_version_linker_flags") @@ -42,14 +46,14 @@ def generates_split_debug(toolchain: CxxToolchainInfo): def linker_map_args(toolchain: CxxToolchainInfo, linker_map) -> LinkArgs: linker_type = toolchain.linker_info.type - if linker_type == "darwin": + if linker_type == LinkerType("darwin"): flags = [ "-Xlinker", 
"-map", "-Xlinker", linker_map, ] - elif linker_type == "gnu": + elif linker_type == LinkerType("gnu"): flags = [ "-Xlinker", "-Map", @@ -98,7 +102,7 @@ def make_link_args( linker_info = cxx_toolchain_info.linker_info linker_type = linker_info.type - if linker_type == "darwin": + if linker_type == LinkerType("darwin"): # Darwin requires a target triple specified to # control the deployment target being linked for. args.add(get_target_sdk_version_linker_flags(ctx)) @@ -132,7 +136,7 @@ def make_link_args( hidden.append(pdb_artifact.as_output()) filelists = None - if linker_type == "darwin": + if linker_type == LinkerType("darwin"): filelists = filter(None, [unpack_link_args_filelist(link) for link in links]) hidden.extend(filelists) @@ -196,7 +200,7 @@ def cxx_sanitizer_runtime_arguments( if not linker_info.sanitizer_runtime_files: fail("C++ sanitizer runtime enabled but there are no runtime files") - if linker_info.type == "darwin": + if linker_info.type == LinkerType("darwin"): # ignore_artifacts as the runtime directory is not required at _link_ time runtime_rpath = cmd_args(ignore_artifacts = True) runtime_files = linker_info.sanitizer_runtime_files @@ -247,7 +251,7 @@ def executable_shared_lib_arguments( linker_type = cxx_toolchain.linker_info.type if len(shared_libs) > 0: - if linker_type == "windows": + if linker_type == LinkerType("windows"): shared_libs_symlink_tree = [ctx.actions.symlink_file( shlib.lib.output.basename, shlib.lib.output, diff --git a/cxx/cxx_toolchain.bzl b/cxx/cxx_toolchain.bzl index d69de97a9..002049dcf 100644 --- a/cxx/cxx_toolchain.bzl +++ b/cxx/cxx_toolchain.bzl @@ -6,7 +6,27 @@ # of this source tree. 
load("@prelude//:is_full_meta_repo.bzl", "is_full_meta_repo") -load("@prelude//cxx:cxx_toolchain_types.bzl", "AsCompilerInfo", "AsmCompilerInfo", "BinaryUtilitiesInfo", "CCompilerInfo", "CudaCompilerInfo", "CvtresCompilerInfo", "CxxCompilerInfo", "CxxObjectFormat", "DepTrackingMode", "DistLtoToolsInfo", "HipCompilerInfo", "LinkerInfo", "PicBehavior", "RcCompilerInfo", "ShlibInterfacesMode", "StripFlagsInfo", "cxx_toolchain_infos") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "AsCompilerInfo", + "AsmCompilerInfo", + "BinaryUtilitiesInfo", + "CCompilerInfo", + "CudaCompilerInfo", + "CvtresCompilerInfo", + "CxxCompilerInfo", + "CxxObjectFormat", + "DepTrackingMode", + "DistLtoToolsInfo", + "HipCompilerInfo", + "LinkerInfo", + "LinkerType", + "PicBehavior", + "RcCompilerInfo", + "ShlibInterfacesMode", + "StripFlagsInfo", + "cxx_toolchain_infos", +) load("@prelude//cxx:cxx_utility.bzl", "cxx_toolchain_allow_cache_upload_args") load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:headers.bzl", "HeaderMode", "HeadersAsRawHeadersMode") @@ -87,6 +107,7 @@ def cxx_toolchain_impl(ctx): preprocessor_flags = cmd_args(ctx.attrs.rc_preprocessor_flags), ) if ctx.attrs.rc_compiler else None + linker_type = LinkerType(ctx.attrs.linker_type) linker_info = LinkerInfo( archiver = ctx.attrs.archiver[RunInfo], archiver_flags = cmd_args(ctx.attrs.archiver_flags), @@ -98,7 +119,7 @@ def cxx_toolchain_impl(ctx): archive_symbol_table = ctx.attrs.archive_symbol_table, binary_extension = value_or(ctx.attrs.binary_extension, ""), generate_linker_maps = ctx.attrs.generate_linker_maps, - is_pdb_generated = is_pdb_generated(ctx.attrs.linker_type, ctx.attrs.linker_flags), + is_pdb_generated = is_pdb_generated(linker_type, ctx.attrs.linker_flags), link_binaries_locally = not value_or(ctx.attrs.cache_links, True), link_libraries_locally = False, link_style = LinkStyle(ctx.attrs.link_style), @@ -125,7 +146,7 @@ def cxx_toolchain_impl(ctx): static_dep_runtime_ld_flags = 
ctx.attrs.static_dep_runtime_ld_flags, static_library_extension = ctx.attrs.static_library_extension or "a", static_pic_dep_runtime_ld_flags = ctx.attrs.static_pic_dep_runtime_ld_flags, - type = ctx.attrs.linker_type, + type = linker_type, use_archiver_flags = ctx.attrs.use_archiver_flags, ) @@ -307,14 +328,14 @@ def _get_shared_library_name_default_prefix(ctx: AnalysisContext) -> str: return "" if extension == "dll" else "lib" def _get_shared_library_name_format(ctx: AnalysisContext) -> str: - linker_type = ctx.attrs.linker_type + linker_type = LinkerType(ctx.attrs.linker_type) extension = ctx.attrs.shared_library_extension if extension == "": extension = LINKERS[linker_type].default_shared_library_extension return "{}." + extension def _get_shared_library_versioned_name_format(ctx: AnalysisContext) -> str: - linker_type = ctx.attrs.linker_type + linker_type = LinkerType(ctx.attrs.linker_type) extension_format = ctx.attrs.shared_library_versioned_extension_format.replace("%s", "{}") if extension_format == "": extension_format = LINKERS[linker_type].default_shared_library_versioned_extension_format diff --git a/cxx/cxx_toolchain_types.bzl b/cxx/cxx_toolchain_types.bzl index 464c5743b..b73e6c723 100644 --- a/cxx/cxx_toolchain_types.bzl +++ b/cxx/cxx_toolchain_types.bzl @@ -7,7 +7,7 @@ load("@prelude//cxx:debug.bzl", "SplitDebugMode") -LinkerType = ["gnu", "darwin", "windows", "wasm"] +LinkerType = enum("gnu", "darwin", "windows", "wasm") ShlibInterfacesMode = enum("disabled", "enabled", "defined_only", "stub_from_library", "stub_from_headers") @@ -64,7 +64,7 @@ LinkerInfo = provider( "requires_objects": provider_field(typing.Any, default = None), "supports_distributed_thinlto": provider_field(typing.Any, default = None), "independent_shlib_interface_linker_flags": provider_field(typing.Any, default = None), - "type": provider_field(typing.Any, default = None), # of "LinkerType" type + "type": LinkerType, "use_archiver_flags": provider_field(typing.Any, default = 
None), "force_full_hybrid_if_capable": provider_field(typing.Any, default = None), "is_pdb_generated": provider_field(typing.Any, default = None), # bool diff --git a/cxx/headers.bzl b/cxx/headers.bzl index daf60e342..0e4f81917 100644 --- a/cxx/headers.bzl +++ b/cxx/headers.bzl @@ -6,6 +6,7 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerType") load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//utils:expect.bzl", "expect") load("@prelude//utils:lazy.bzl", "lazy") @@ -334,7 +335,7 @@ def _get_dict_header_namespace(namespace: str, naming: CxxHeadersNaming) -> str: def _get_debug_prefix_args(ctx: AnalysisContext, header_dir: Artifact) -> [cmd_args, None]: # NOTE(@christylee): Do we need to enable debug-prefix-map for darwin and windows? - if get_cxx_toolchain_info(ctx).linker_info.type != "gnu": + if get_cxx_toolchain_info(ctx).linker_info.type != LinkerType("gnu"): return None fmt = "-fdebug-prefix-map={}=" + value_or(header_dir.owner.cell, ".") diff --git a/cxx/link.bzl b/cxx/link.bzl index d08d697a7..8657aec74 100644 --- a/cxx/link.bzl +++ b/cxx/link.bzl @@ -16,7 +16,11 @@ load( "bolt", "cxx_use_bolt", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", + "LinkerType", +) load( "@prelude//cxx/dist_lto:darwin_dist_lto.bzl", "cxx_darwin_dist_link", @@ -146,7 +150,7 @@ def cxx_link_into( fail("Cannot use distributed thinlto with sanitizer runtime") linker_type = linker_info.type - if linker_type == "darwin": + if linker_type == LinkerType("darwin"): exe = cxx_darwin_dist_link( ctx, output, @@ -155,7 +159,7 @@ def cxx_link_into( should_generate_dwp, is_result_executable, ) - elif linker_type == "gnu": + elif linker_type == LinkerType("gnu"): exe = cxx_gnu_dist_link( ctx, output, @@ -246,7 +250,7 @@ def cxx_link_into( all_link_args.add(link_cmd_parts.post_linker_flags) - if 
linker_info.type == "windows": + if linker_info.type == LinkerType("windows"): shell_quoted_args = cmd_args(all_link_args) else: shell_quoted_args = cmd_args(all_link_args, quote = "shell") diff --git a/cxx/linker.bzl b/cxx/linker.bzl index cee29d070..09ec0eea0 100644 --- a/cxx/linker.bzl +++ b/cxx/linker.bzl @@ -5,7 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerInfo") +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerInfo", "LinkerType") load("@prelude//utils:arglike.bzl", "ArgLike") load("@prelude//utils:expect.bzl", "expect") @@ -34,19 +34,19 @@ SharedLibraryFlagOverrides = record( ) LINKERS = { - "darwin": Linker( + LinkerType("darwin"): Linker( default_shared_library_extension = "dylib", default_shared_library_versioned_extension_format = "{}.dylib", shared_library_name_linker_flags_format = ["-install_name", "@rpath/{}"], shared_library_flags = ["-shared"], ), - "gnu": Linker( + LinkerType("gnu"): Linker( default_shared_library_extension = "so", default_shared_library_versioned_extension_format = "so.{}", shared_library_name_linker_flags_format = ["-Wl,-soname,{}"], shared_library_flags = ["-shared"], ), - "wasm": Linker( + LinkerType("wasm"): Linker( default_shared_library_extension = "wasm", default_shared_library_versioned_extension_format = "{}.wasm", shared_library_name_linker_flags_format = [], @@ -54,7 +54,7 @@ LINKERS = { # See https://github.com/WebAssembly/tool-conventions/blob/main/DynamicLinking.md#llvm-implementation shared_library_flags = ["-shared"], ), - "windows": Linker( + LinkerType("windows"): Linker( default_shared_library_extension = "dll", default_shared_library_versioned_extension_format = "dll", # NOTE(agallagher): I *think* windows doesn't support a flag to set the @@ -138,7 +138,7 @@ def get_default_shared_library_name(linker_info: LinkerInfo, label: Label): short_name = "{}_{}".format(_sanitize(label.package), 
_sanitize(label.name)) return get_shared_library_name(linker_info, short_name, apply_default_prefix = True) -def get_shared_library_name_linker_flags(linker_type: str, soname: str, flag_overrides: [SharedLibraryFlagOverrides, None] = None) -> list[str]: +def get_shared_library_name_linker_flags(linker_type: LinkerType, soname: str, flag_overrides: [SharedLibraryFlagOverrides, None] = None) -> list[str]: """ Arguments to pass to the linker to set the given soname. """ @@ -152,7 +152,7 @@ def get_shared_library_name_linker_flags(linker_type: str, soname: str, flag_ove for f in shared_library_name_linker_flags_format ] -def get_shared_library_flags(linker_type: str, flag_overrides: [SharedLibraryFlagOverrides, None] = None) -> list[ArgLike]: +def get_shared_library_flags(linker_type: LinkerType, flag_overrides: [SharedLibraryFlagOverrides, None] = None) -> list[ArgLike]: """ Arguments to pass to the linker to link a shared library. """ @@ -161,24 +161,24 @@ def get_shared_library_flags(linker_type: str, flag_overrides: [SharedLibraryFla return LINKERS[linker_type].shared_library_flags -def get_link_whole_args(linker_type: str, inputs: list[Artifact]) -> list[typing.Any]: +def get_link_whole_args(linker_type: LinkerType, inputs: list[Artifact]) -> list[typing.Any]: """ Return linker args used to always link all the given inputs. 
""" args = [] - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): args.append("-Wl,--whole-archive") args.extend(inputs) args.append("-Wl,--no-whole-archive") - elif linker_type == "darwin": + elif linker_type == LinkerType("darwin"): for inp in inputs: args.append("-Xlinker") args.append("-force_load") args.append("-Xlinker") args.append(inp) - elif linker_type == "windows": + elif linker_type == LinkerType("windows"): for inp in inputs: args.append(inp) args.append("/WHOLEARCHIVE:" + inp.basename) @@ -187,42 +187,42 @@ def get_link_whole_args(linker_type: str, inputs: list[Artifact]) -> list[typing return args -def get_objects_as_library_args(linker_type: str, objects: list[Artifact]) -> list[typing.Any]: +def get_objects_as_library_args(linker_type: LinkerType, objects: list[Artifact]) -> list[typing.Any]: """ Return linker args used to link the given objects as a library. """ args = [] - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): args.append("-Wl,--start-lib") args.extend(objects) args.append("-Wl,--end-lib") - elif linker_type == "darwin" or linker_type == "windows": + elif linker_type == LinkerType("darwin") or linker_type == LinkerType("windows"): args.extend(objects) else: fail("Linker type {} not supported".format(linker_type)) return args -def get_ignore_undefined_symbols_flags(linker_type: str) -> list[str]: +def get_ignore_undefined_symbols_flags(linker_type: LinkerType) -> list[str]: """ Return linker args used to suppress undefined symbol errors. 
""" args = [] - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): args.append("-Wl,--allow-shlib-undefined") args.append("-Wl,--unresolved-symbols=ignore-all") - elif linker_type == "darwin": + elif linker_type == LinkerType("darwin"): args.append("-Wl,-undefined,dynamic_lookup") else: fail("Linker type {} not supported".format(linker_type)) return args -def get_no_as_needed_shared_libs_flags(linker_type: str) -> list[str]: +def get_no_as_needed_shared_libs_flags(linker_type: LinkerType) -> list[str]: """ Return linker args used to prevent linkers from dropping unused shared library dependencies from the e.g. DT_NEEDED tags of the link. @@ -230,26 +230,26 @@ def get_no_as_needed_shared_libs_flags(linker_type: str) -> list[str]: args = [] - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): args.append("-Wl,--no-as-needed") - elif linker_type == "darwin": + elif linker_type == LinkerType("darwin"): pass else: fail("Linker type {} not supported".format(linker_type)) return args -def get_output_flags(linker_type: str, output: Artifact) -> list[ArgLike]: - if linker_type == "windows": +def get_output_flags(linker_type: LinkerType, output: Artifact) -> list[ArgLike]: + if linker_type == LinkerType("windows"): return ["/Brepro", cmd_args(output.as_output(), format = "/OUT:{}")] else: return ["-o", output.as_output()] def get_import_library( ctx: AnalysisContext, - linker_type: str, + linker_type: LinkerType, output_short_path: str) -> (Artifact | None, list[ArgLike]): - if linker_type == "windows": + if linker_type == LinkerType("windows"): import_library = ctx.actions.declare_output(output_short_path + ".imp.lib") return import_library, [cmd_args(import_library.as_output(), format = "/IMPLIB:{}")] else: @@ -257,8 +257,8 @@ def get_import_library( def get_deffile_flags( ctx: AnalysisContext, - linker_type: str) -> list[ArgLike]: - if linker_type == "windows" and ctx.attrs.deffile != None: + linker_type: LinkerType) -> list[ArgLike]: + if 
linker_type == LinkerType("windows") and ctx.attrs.deffile != None: return [ cmd_args(ctx.attrs.deffile, format = "/DEF:{}"), ] @@ -266,23 +266,23 @@ def get_deffile_flags( return [] def get_rpath_origin( - linker_type: str) -> str: + linker_type: LinkerType) -> str: """ Return the macro that runtime loaders resolve to the main executable at runtime. """ - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): return "$ORIGIN" - if linker_type == "darwin": + if linker_type == LinkerType("darwin"): return "@loader_path" fail("Linker type {} not supported".format(linker_type)) def is_pdb_generated( - linker_type: str, + linker_type: LinkerType, linker_flags: list[[str, ResolvedStringWithMacros]]) -> bool: - if linker_type != "windows": + if linker_type != LinkerType("windows"): return False for flag in reversed(linker_flags): flag = str(flag).upper() diff --git a/cxx/omnibus.bzl b/cxx/omnibus.bzl index 12f4f696a..ad5a0f089 100644 --- a/cxx/omnibus.bzl +++ b/cxx/omnibus.bzl @@ -6,7 +6,11 @@ # of this source tree. load("@prelude//:local_only.bzl", "get_resolved_cxx_binary_link_execution_preference") -load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "LinkerType", + "PicBehavior", +) load( "@prelude//cxx:link.bzl", "CxxLinkResult", # @unused Used as a type @@ -513,7 +517,7 @@ def _create_omnibus( # Add global symbols version script. # FIXME(agallagher): Support global symbols for darwin. - if linker_info.type != "darwin": + if linker_info.type != LinkerType("darwin"): global_sym_vers = _create_global_symbols_version_script( ctx, # Extract symbols from roots... diff --git a/cxx/prebuilt_cxx_library_group.bzl b/cxx/prebuilt_cxx_library_group.bzl index eb3ad453f..6370818ec 100644 --- a/cxx/prebuilt_cxx_library_group.bzl +++ b/cxx/prebuilt_cxx_library_group.bzl @@ -5,7 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "LinkerType", + "PicBehavior", +) load( "@prelude//cxx:preprocessor.bzl", "CPreprocessor", @@ -113,7 +117,7 @@ def _parse_macro(arg: str) -> [(str, str), None]: def _get_static_link_infos( ctx: AnalysisContext, - linker_type: str, + linker_type: LinkerType, libs: list[Artifact], args: list[str]) -> LinkInfos: """ diff --git a/cxx/symbols.bzl b/cxx/symbols.bzl index 4bebd9ff9..dbd8ca84c 100644 --- a/cxx/symbols.bzl +++ b/cxx/symbols.bzl @@ -6,7 +6,11 @@ # of this source tree. load("@prelude//:paths.bzl", "paths") -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", + "LinkerType", +) load("@prelude//cxx:cxx_utility.bzl", "cxx_attrs_get_allow_cache_upload") load("@prelude//os_lookup:defs.bzl", "OsLookup") @@ -48,7 +52,7 @@ def _extract_symbol_names( nm_flags += "u" # darwin objects don't have dynamic symbol tables. - if dynamic and cxx_toolchain.linker_info.type != "darwin": + if dynamic and cxx_toolchain.linker_info.type != LinkerType("darwin"): nm_flags += "D" # llvm-nm supports -U for this but gnu nm doesn't. @@ -314,7 +318,7 @@ def get_undefined_symbols_args( category: [str, None] = None, identifier: [str, None] = None, prefer_local: bool = False) -> cmd_args: - if cxx_toolchain.linker_info.type == "gnu": + if cxx_toolchain.linker_info.type == LinkerType("gnu"): # linker script is only supported in gnu linkers linker_script = create_undefined_symbols_linker_script( ctx.actions, diff --git a/cxx/user/cxx_toolchain_override.bzl b/cxx/user/cxx_toolchain_override.bzl index a9041253f..320264dbe 100644 --- a/cxx/user/cxx_toolchain_override.bzl +++ b/cxx/user/cxx_toolchain_override.bzl @@ -5,7 +5,23 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
-load("@prelude//cxx:cxx_toolchain_types.bzl", "AsCompilerInfo", "AsmCompilerInfo", "BinaryUtilitiesInfo", "CCompilerInfo", "CxxCompilerInfo", "CxxObjectFormat", "CxxPlatformInfo", "CxxToolchainInfo", "LinkerInfo", "LinkerType", "PicBehavior", "ShlibInterfacesMode", "StripFlagsInfo", "cxx_toolchain_infos") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "AsCompilerInfo", + "AsmCompilerInfo", + "BinaryUtilitiesInfo", + "CCompilerInfo", + "CxxCompilerInfo", + "CxxObjectFormat", + "CxxPlatformInfo", + "CxxToolchainInfo", + "LinkerInfo", + "LinkerType", + "PicBehavior", + "ShlibInterfacesMode", + "StripFlagsInfo", + "cxx_toolchain_infos", +) load("@prelude//cxx:cxx_utility.bzl", "cxx_toolchain_allow_cache_upload_args") load("@prelude//cxx:debug.bzl", "SplitDebugMode") load("@prelude//cxx:headers.bzl", "HeaderMode") @@ -68,7 +84,7 @@ def _cxx_toolchain_override(ctx): allow_cache_upload = _pick_raw(ctx.attrs.cxx_compiler_allow_cache_upload, base_cxx_info.allow_cache_upload), ) base_linker_info = base_toolchain.linker_info - linker_type = ctx.attrs.linker_type if ctx.attrs.linker_type != None else base_linker_info.type + linker_type = LinkerType(ctx.attrs.linker_type) if ctx.attrs.linker_type != None else base_linker_info.type pdb_expected = is_pdb_generated(linker_type, ctx.attrs.linker_flags) if ctx.attrs.linker_flags != None else base_linker_info.is_pdb_generated # This handles case when linker type is overridden to non-windows from @@ -77,7 +93,7 @@ def _cxx_toolchain_override(ctx): # we can't inspect base linker flags and disable PDB subtargets. # This shouldn't be a problem because to use windows linker after non-windows # linker flags should be changed as well. 
- pdb_expected = linker_type == "windows" and pdb_expected + pdb_expected = linker_type == LinkerType("windows") and pdb_expected shlib_interfaces = ShlibInterfacesMode(ctx.attrs.shared_library_interface_mode) if ctx.attrs.shared_library_interface_mode else None sanitizer_runtime_files = flatten([runtime_file[DefaultInfo].default_outputs for runtime_file in ctx.attrs.sanitizer_runtime_files]) if ctx.attrs.sanitizer_runtime_files != None else None linker_info = LinkerInfo( @@ -206,7 +222,7 @@ cxx_toolchain_override_registration_spec = RuleRegistrationSpec( "link_weight": attrs.option(attrs.int(), default = None), "linker": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "linker_flags": attrs.option(attrs.list(attrs.arg()), default = None), - "linker_type": attrs.option(attrs.enum(LinkerType), default = None), + "linker_type": attrs.option(attrs.enum(LinkerType.values()), default = None), "llvm_link": attrs.option(attrs.exec_dep(providers = [RunInfo]), default = None), "lto_mode": attrs.option(attrs.enum(LtoMode.values()), default = None), "min_sdk_version": attrs.option(attrs.string(), default = None), diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 1f0c1059b..1825df3ac 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -21,6 +21,7 @@ load( load( "@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo", + "LinkerType", "PicBehavior", ) load("@prelude//cxx:groups.bzl", "get_dedupped_roots_from_groups") @@ -267,7 +268,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: def archive_linkable(lib): return ArchiveLinkable( archive = Archive(artifact = lib), - linker_type = "gnu", + linker_type = LinkerType("gnu"), ) def shared_linkable(lib): @@ -535,10 +536,12 @@ HaskellLibBuildOutput = record( libs = list[Artifact], ) -def _get_haskell_shared_library_name_linker_flags(linker_type: str, soname: str) -> list[str]: - if linker_type == "gnu": +def _get_haskell_shared_library_name_linker_flags( + 
linker_type: LinkerType, + soname: str) -> list[str]: + if linker_type == LinkerType("gnu"): return ["-Wl,-soname,{}".format(soname)] - elif linker_type == "darwin": + elif linker_type == LinkerType("darwin"): # Passing `-install_name @rpath/...` or # `-Xlinker -install_name -Xlinker @rpath/...` instead causes # ghc-9.6.3: panic! (the 'impossible' happened) diff --git a/linking/link_info.bzl b/linking/link_info.bzl index e1e09ae9a..3b4725f53 100644 --- a/linking/link_info.bzl +++ b/linking/link_info.bzl @@ -10,7 +10,11 @@ load( "ArtifactTSet", "make_artifact_tset", ) -load("@prelude//cxx:cxx_toolchain_types.bzl", "PicBehavior") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "LinkerType", + "PicBehavior", +) load( "@prelude//cxx:linker.bzl", "get_link_whole_args", @@ -77,7 +81,7 @@ ArchiveLinkable = record( archive = field(Archive), # If a bitcode bundle was created for this artifact it will be present here bitcode_bundle = field(Artifact | None, None), - linker_type = field(str), + linker_type = field(LinkerType), link_whole = field(bool, False), # Indicates if this archive may contain LTO bit code. Can be set to `False` # to e.g. tell dist LTO handling that a potentially expensive archive doesn't @@ -96,7 +100,7 @@ ObjectsLinkable = record( objects = field([list[Artifact], None], None), # Any of the objects that are in bitcode format bitcode_bundle = field(Artifact | None, None), - linker_type = field(str), + linker_type = field(LinkerType), link_whole = field(bool, False), ) @@ -900,13 +904,13 @@ def merge_swiftmodule_linkables(ctx: AnalysisContext, linkables: list[[Swiftmodu ], )) -def wrap_with_no_as_needed_shared_libs_flags(linker_type: str, link_info: LinkInfo) -> LinkInfo: +def wrap_with_no_as_needed_shared_libs_flags(linker_type: LinkerType, link_info: LinkInfo) -> LinkInfo: """ Wrap link info in args used to prevent linkers from dropping unused shared library dependencies from the e.g. DT_NEEDED tags of the link. 
""" - if linker_type == "gnu": + if linker_type == LinkerType("gnu"): return wrap_link_info( inner = link_info, pre_flags = ( @@ -916,7 +920,7 @@ def wrap_with_no_as_needed_shared_libs_flags(linker_type: str, link_info: LinkIn post_flags = ["-Wl,--pop-state"], ) - if linker_type == "darwin": + if linker_type == LinkerType("darwin"): return link_info fail("Linker type {} not supported".format(linker_type)) diff --git a/linking/lto.bzl b/linking/lto.bzl index f275d0059..fab91ec6d 100644 --- a/linking/lto.bzl +++ b/linking/lto.bzl @@ -5,7 +5,11 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", + "LinkerType", +) load("@prelude//cxx:debug.bzl", "SplitDebugMode") # Styles of LTO. @@ -50,7 +54,7 @@ def get_split_debug_lto_info(actions: AnalysisActions, cxx_toolchain: CxxToolcha # TODO: It might be nice to generalize a but more and move the darwin v. gnu # differences into toolchain settings (e.g. `split_debug_lto_flags_fmt`). - if linker_info.type == "darwin": + if linker_info.type == LinkerType("darwin"): # https://releases.llvm.org/14.0.0/tools/clang/docs/CommandGuide/clang.html#cmdoption-flto # We need to pass -object_path_lto to keep the temporary LTO object files around to use # for dSYM generation. @@ -74,7 +78,7 @@ def get_split_debug_lto_info(actions: AnalysisActions, cxx_toolchain: CxxToolcha linker_flags = linker_args, ) - if linker_info.type == "gnu": + if linker_info.type == LinkerType("gnu"): dwo_dir = actions.declare_output(name + ".dwo.d", dir = True) linker_flags = cmd_args([ diff --git a/linking/strip.bzl b/linking/strip.bzl index baf413680..9bcd22207 100644 --- a/linking/strip.bzl +++ b/linking/strip.bzl @@ -6,7 +6,11 @@ # of this source tree. 
load("@prelude//cxx:cxx_context.bzl", "get_cxx_toolchain_info") -load("@prelude//cxx:cxx_toolchain_types.bzl", "CxxToolchainInfo") +load( + "@prelude//cxx:cxx_toolchain_types.bzl", + "CxxToolchainInfo", + "LinkerType", +) def _strip_debug_info(ctx: AnalysisContext, out: str, obj: Artifact) -> Artifact: """ @@ -15,7 +19,7 @@ def _strip_debug_info(ctx: AnalysisContext, out: str, obj: Artifact) -> Artifact cxx_toolchain = get_cxx_toolchain_info(ctx) strip = cxx_toolchain.binary_utilities_info.strip output = ctx.actions.declare_output("__stripped__", out) - if cxx_toolchain.linker_info.type == "gnu": + if cxx_toolchain.linker_info.type == LinkerType("gnu"): cmd = cmd_args([strip, "--strip-debug", "--strip-unneeded", "-o", output.as_output(), obj]) else: cmd = cmd_args([strip, "-S", "-o", output.as_output(), obj]) diff --git a/rust/build_params.bzl b/rust/build_params.bzl index ec0b1df3d..f21c280c6 100644 --- a/rust/build_params.bzl +++ b/rust/build_params.bzl @@ -7,6 +7,7 @@ # Rules for mapping requirements to options +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerType") load( "@prelude//linking:link_info.bzl", "LibOutputStyle", @@ -186,20 +187,20 @@ _RUST_STATIC_NON_PIC_LIBRARY = 7 _NATIVE_LINKABLE_STATIC_PIC = 8 _NATIVE_LINKABLE_STATIC_NON_PIC = 9 -def _executable_prefix_suffix(linker_type: str, target_os_type: OsLookup) -> (str, str): +def _executable_prefix_suffix(linker_type: LinkerType, target_os_type: OsLookup) -> (str, str): return { - "darwin": ("", ""), - "gnu": ("", ".exe") if target_os_type.platform == "windows" else ("", ""), - "wasm": ("", ".wasm"), - "windows": ("", ".exe"), + LinkerType("darwin"): ("", ""), + LinkerType("gnu"): ("", ".exe") if target_os_type.platform == "windows" else ("", ""), + LinkerType("wasm"): ("", ".wasm"), + LinkerType("windows"): ("", ".exe"), }[linker_type] -def _library_prefix_suffix(linker_type: str, target_os_type: OsLookup) -> (str, str): +def _library_prefix_suffix(linker_type: LinkerType, target_os_type: OsLookup) 
-> (str, str): return { - "darwin": ("lib", ".dylib"), - "gnu": ("", ".dll") if target_os_type.platform == "windows" else ("lib", ".so"), - "wasm": ("", ".wasm"), - "windows": ("", ".dll"), + LinkerType("darwin"): ("lib", ".dylib"), + LinkerType("gnu"): ("", ".dll") if target_os_type.platform == "windows" else ("lib", ".so"), + LinkerType("wasm"): ("", ".wasm"), + LinkerType("windows"): ("", ".dll"), }[linker_type] _BUILD_PARAMS = { @@ -338,7 +339,7 @@ def build_params( link_strategy: LinkStrategy | None, lib_output_style: LibOutputStyle | None, lang: LinkageLang, - linker_type: str, + linker_type: LinkerType, target_os_type: OsLookup) -> BuildParams: if rule == RuleType("binary"): expect(link_strategy != None) diff --git a/toolchains/cxx.bzl b/toolchains/cxx.bzl index 57da1f97d..3212e4696 100644 --- a/toolchains/cxx.bzl +++ b/toolchains/cxx.bzl @@ -14,6 +14,7 @@ load( "CxxPlatformInfo", "CxxToolchainInfo", "LinkerInfo", + "LinkerType", "PicBehavior", "RcCompilerInfo", "ShlibInterfacesMode", @@ -36,7 +37,7 @@ CxxToolsInfo = provider( "cvtres_compiler": provider_field(typing.Any, default = None), "cxx_compiler": provider_field(typing.Any, default = None), "linker": provider_field(typing.Any, default = None), - "linker_type": provider_field(typing.Any, default = None), + "linker_type": LinkerType, "rc_compiler": provider_field(typing.Any, default = None), }, ) @@ -90,7 +91,7 @@ def _cxx_toolchain_from_cxx_tools_info(ctx: AnalysisContext, cxx_tools_info: Cxx additional_linker_flags = ["-fuse-ld=lld"] if os == "linux" and cxx_tools_info.linker != "g++" and cxx_tools_info.cxx_compiler != "g++" else [] if os == "windows": - linker_type = "windows" + linker_type = LinkerType("windows") binary_extension = "exe" object_file_extension = "obj" static_library_extension = "lib" @@ -107,10 +108,10 @@ def _cxx_toolchain_from_cxx_tools_info(ctx: AnalysisContext, cxx_tools_info: Cxx shared_library_versioned_name_format = "{}.so.{}" if os == "macos": - linker_type = "darwin" + 
linker_type = LinkerType("darwin") pic_behavior = PicBehavior("always_enabled") else: - linker_type = "gnu" + linker_type = LinkerType("gnu") pic_behavior = PicBehavior("supported") if cxx_tools_info.compiler_type == "clang": diff --git a/toolchains/cxx/clang/tools.bzl b/toolchains/cxx/clang/tools.bzl index ac85a344b..05f4a6a1b 100644 --- a/toolchains/cxx/clang/tools.bzl +++ b/toolchains/cxx/clang/tools.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. +load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerType") load("@prelude//toolchains:cxx.bzl", "CxxToolsInfo") def _path_clang_tools_impl(_ctx) -> list[Provider]: @@ -21,7 +22,7 @@ def _path_clang_tools_impl(_ctx) -> list[Provider]: archiver = "ar", archiver_type = "gnu", linker = "clang++", - linker_type = "gnu", + linker_type = LinkerType("gnu"), ), ] diff --git a/toolchains/msvc/tools.bzl b/toolchains/msvc/tools.bzl index e5b058194..93ffa771d 100644 --- a/toolchains/msvc/tools.bzl +++ b/toolchains/msvc/tools.bzl @@ -5,6 +5,7 @@ # License, Version 2.0 found in the LICENSE-APACHE file in the root directory # of this source tree. 
+load("@prelude//cxx:cxx_toolchain_types.bzl", "LinkerType") load("@prelude//toolchains:cxx.bzl", "CxxToolsInfo") load("@prelude//utils:cmd_script.bzl", "ScriptOs", "cmd_script") @@ -133,7 +134,7 @@ def _find_msvc_tools_impl(ctx: AnalysisContext) -> list[Provider]: archiver = lib_exe_script, archiver_type = "windows", linker = _windows_linker_wrapper(ctx, link_exe_script), - linker_type = "windows", + linker_type = LinkerType("windows"), ), ] From faa08fd66e2feb17a034064ccbb957398f0e3aa2 Mon Sep 17 00:00:00 2001 From: Simon Marlow Date: Thu, 22 Aug 2024 00:21:09 -0700 Subject: [PATCH 1102/1133] Fix haskell_ghci() for multi-version GHC Summary: Including tp2_build of ghc/8.8.3/platform010 and ghc/9.2.4/platform010 allow-large-files Reviewed By: pepeiborra Differential Revision: D61471626 fbshipit-source-id: fb211301cb0d29787e27bf4348e520916ddc9f1e (cherry picked from commit 1848949d99189e417fbb64caa3009c6f7651797e) --- haskell/haskell_ghci.bzl | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/haskell/haskell_ghci.bzl b/haskell/haskell_ghci.bzl index d9005c3f0..b7867f967 100644 --- a/haskell/haskell_ghci.bzl +++ b/haskell/haskell_ghci.bzl @@ -43,6 +43,7 @@ load( ) load( "@prelude//linking:linkable_graph.bzl", + "LinkableGraph", "LinkableRootInfo", "create_linkable_graph", "get_deps_for_link", @@ -183,7 +184,11 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: for nlabel, n in graph_nodes.items() } - all_direct_deps = [dep.label for dep in all_deps] + all_direct_deps = [] + for dep in all_deps: + graph = dep.get(LinkableGraph) + if graph: + all_direct_deps.append(graph.label) dep_graph[ctx.label] = all_direct_deps # Need to exclude all transitive deps of excluded deps @@ -346,16 +351,16 @@ def _replace_macros_in_script_template( # Optional string args srcs: [str, None] = None, output_name: [str, None] = None, - ghci_iserv_path: [str, None] = None, + ghci_iserv_path: [Artifact, None] = None, preload_libs: 
[str, None] = None) -> Artifact: toolchain_paths = { BINUTILS_PATH: haskell_toolchain.ghci_binutils_path, - GHCI_LIB_PATH: haskell_toolchain.ghci_lib_path, + GHCI_LIB_PATH: haskell_toolchain.ghci_lib_path.get(DefaultInfo).default_outputs[0], CC_PATH: haskell_toolchain.ghci_cc_path, CPP_PATH: haskell_toolchain.ghci_cpp_path, CXX_PATH: haskell_toolchain.ghci_cxx_path, - GHCI_PACKAGER: haskell_toolchain.ghci_packager, - GHCI_GHC_PATH: haskell_toolchain.ghci_ghc_path, + GHCI_PACKAGER: haskell_toolchain.ghci_packager.get(DefaultInfo).default_outputs[0], + GHCI_GHC_PATH: haskell_toolchain.ghci_ghc_path.get(DefaultInfo).default_outputs[0], } if ghci_bin != None: @@ -370,7 +375,7 @@ def _replace_macros_in_script_template( replace_cmd.add(cmd_args(script_template, format = "--script_template={}")) for name, path in toolchain_paths.items(): if path: - replace_cmd.add(cmd_args("--{}={}".format(name, path))) + replace_cmd.add(cmd_args(path, format = "--{}={{}}".format(name))) replace_cmd.add(cmd_args( final_script.as_output(), @@ -467,7 +472,7 @@ def _write_iserv_script( script_template = ghci_iserv_template, output_name = iserv_script_name, haskell_toolchain = haskell_toolchain, - ghci_iserv_path = ghci_iserv_path, + ghci_iserv_path = ghci_iserv_path.get(DefaultInfo).default_outputs[0], preload_libs = preload_libs, ) return iserv_script From c02667a3c1e76eb8b84fcd108f2802703b83b78c Mon Sep 17 00:00:00 2001 From: Chatura Atapattu Date: Thu, 2 May 2024 07:26:06 -0700 Subject: [PATCH 1103/1133] Rename relative args -> args Summary: Because we had absolute args before, we needed to differentiate. We no longer need to do this. By default we assume relative args. 
Reviewed By: lmvasquezg Differential Revision: D56731576 fbshipit-source-id: 90bbc0021171d8640a5da900b78315a6f8271542 (cherry picked from commit f955fc2cf8e72b148d771f73e54a64cf6969e926) --- haskell/haskell.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 1825df3ac..6e7a2f5ce 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -366,7 +366,7 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: inherited_pp_info = cxx_inherited_preprocessor_infos(ctx.attrs.deps) own_pp_info = CPreprocessor( - relative_args = CPreprocessorArgs(args = flatten([["-isystem", d] for d in ctx.attrs.cxx_header_dirs])), + args = CPreprocessorArgs(args = flatten([["-isystem", d] for d in ctx.attrs.cxx_header_dirs])), ) return [ From b65c1ac128d91413e55d7dff757d08ae5f02f673 Mon Sep 17 00:00:00 2001 From: Neil Mitchell Date: Fri, 26 Apr 2024 13:22:38 -0700 Subject: [PATCH 1104/1133] Optimise generating ghci targets Summary: D56378115 optimised 2 places we generate .db files. There was one remaining one which I missed - now caught. 
Reviewed By: helfper Differential Revision: D56642470 fbshipit-source-id: 83979f9ebdce0877929cb15b42bf39b0acd2de49 --- haskell/haskell_ghci.bzl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/haskell/haskell_ghci.bzl b/haskell/haskell_ghci.bzl index b7867f967..cf86a0f18 100644 --- a/haskell/haskell_ghci.bzl +++ b/haskell/haskell_ghci.bzl @@ -647,11 +647,11 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: package_symlinks_root = ctx.label.name + ".packages" packagedb_args = cmd_args(delimiter = " ") - prebuilt_packagedb_args = cmd_args(delimiter = " ") + prebuilt_packagedb_args_set = {} for lib in packages_info.transitive_deps.traverse(): if lib.is_prebuilt: - prebuilt_packagedb_args.add(lib.db) + prebuilt_packagedb_args_set[lib.db] = None else: lib_symlinks_root = paths.join( package_symlinks_root, @@ -682,6 +682,7 @@ def haskell_ghci_impl(ctx: AnalysisContext) -> list[Provider]: "packagedb", ), ) + prebuilt_packagedb_args = cmd_args(prebuilt_packagedb_args_set.keys(), delimiter = " ") script_templates = [] for script_template in ctx.attrs.extra_script_templates: From 62a4e334931acf283403c598d2b47239b438e6af Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 18 Sep 2024 09:15:33 +0200 Subject: [PATCH 1105/1133] Gracefully handle `None` values for ghci --- haskell/haskell_ghci.bzl | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/haskell/haskell_ghci.bzl b/haskell/haskell_ghci.bzl index cf86a0f18..817dc8163 100644 --- a/haskell/haskell_ghci.bzl +++ b/haskell/haskell_ghci.bzl @@ -332,6 +332,11 @@ def _build_haskell_omnibus_so(ctx: AnalysisContext) -> HaskellOmnibusData: so_symlinks_root = so_symlinks_root, ) +def _get_default_output(dependency: Dependency | None) -> Artifact | None: + if dependency == None: + return None + return dependency.get(DefaultInfo).default_outputs[0] + # Use the script_template_processor.py script to generate a script from a # script template. 
def _replace_macros_in_script_template( @@ -355,12 +360,12 @@ def _replace_macros_in_script_template( preload_libs: [str, None] = None) -> Artifact: toolchain_paths = { BINUTILS_PATH: haskell_toolchain.ghci_binutils_path, - GHCI_LIB_PATH: haskell_toolchain.ghci_lib_path.get(DefaultInfo).default_outputs[0], + GHCI_LIB_PATH: _get_default_output(haskell_toolchain.ghci_lib_path), CC_PATH: haskell_toolchain.ghci_cc_path, CPP_PATH: haskell_toolchain.ghci_cpp_path, CXX_PATH: haskell_toolchain.ghci_cxx_path, - GHCI_PACKAGER: haskell_toolchain.ghci_packager.get(DefaultInfo).default_outputs[0], - GHCI_GHC_PATH: haskell_toolchain.ghci_ghc_path.get(DefaultInfo).default_outputs[0], + GHCI_PACKAGER: _get_default_output(haskell_toolchain.ghci_packager), + GHCI_GHC_PATH: _get_default_output(haskell_toolchain.ghci_ghc_path), } if ghci_bin != None: @@ -472,7 +477,7 @@ def _write_iserv_script( script_template = ghci_iserv_template, output_name = iserv_script_name, haskell_toolchain = haskell_toolchain, - ghci_iserv_path = ghci_iserv_path.get(DefaultInfo).default_outputs[0], + ghci_iserv_path = _get_default_output(ghci_iserv_path), preload_libs = preload_libs, ) return iserv_script From 801a782396cb2dbb799096ea86c21e998dc89eeb Mon Sep 17 00:00:00 2001 From: Stiopa Koltsov Date: Wed, 25 Sep 2024 17:49:03 -0700 Subject: [PATCH 1106/1133] Avoid list["xxx"] in prelude Summary: Because string literals in types are deprecated and need to be removed. 
Reviewed By: JakobDegen Differential Revision: D63408317 fbshipit-source-id: 11552d44893aaef23e097dd9fda84fdaf35eb0cc (cherry picked from commit 9297f2c298c1f0cd8fbbbb9c9fff31930881db90) --- decls/common.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/decls/common.bzl b/decls/common.bzl index 2f153480d..f662459f9 100644 --- a/decls/common.bzl +++ b/decls/common.bzl @@ -20,7 +20,7 @@ prelude_rule = record( further = field([str, None], None), attrs = field(dict[str, Attr]), impl = field([typing.Callable, None], None), - uses_plugins = field([list["PluginKind"], None], None), + uses_plugins = field([list[plugins.PluginKind], None], None), ) AbiGenerationMode = ["unknown", "class", "source", "migrating_to_source_only", "source_only", "unrecognized"] From f66d6e90bd90ff33b14dd7d3dddb87154185e310 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Thu, 24 Oct 2024 09:21:21 +0200 Subject: [PATCH 1107/1133] Remove boot files from ghc command line This fixes a regression introduced in commit de7f2c44275d88e1de528f0923e6140c0bfceaee which broke compilation of sources with cyclic module dependencies. 
--- haskell/tools/generate_target_metadata.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 9e67a8899..9a60b913e 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -190,7 +190,7 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): json_fname = os.path.join(dname, "depends.json") make_fname = os.path.join(dname, "depends.make") haskell_sources = list(filter(is_haskell_src, sources)) - haskell_boot_sources = list(filter (is_haskell_boot, sources)) + args = [ ghc, "-M", "-include-pkg-deps", # Note: `-outputdir '.'` removes the prefix of all targets: @@ -198,7 +198,7 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): "-outputdir", ".", "-dep-json", json_fname, "-dep-makefile", make_fname, - ] + ghc_args + haskell_sources + haskell_boot_sources + ] + ghc_args + haskell_sources env = os.environ.copy() path = env.get("PATH", "") From e588c995d02d98654248ba273cdbc0025caa1024 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 30 Oct 2024 07:06:47 -0700 Subject: [PATCH 1108/1133] pkgname adjustment avoiding --. (#51) with --, ghc-pkg will complain. That happens if the package is located at the top-level directory in the previous name generation, so it is fixed. --- haskell/haskell.bzl | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 6e7a2f5ce..f574b0a73 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -818,7 +818,13 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: indexing_tsets = {} sub_targets = {} - libname = repr(ctx.label.path).replace("//", "_").replace("/", "_") + "_" + ctx.label.name + libprefix = repr(ctx.label.path).replace("//", "_").replace("/", "_") + + # avoid consecutive "--" in package name, which is not allowed by ghc-pkg. 
+ if libprefix[-1] == '_': + libname = libprefix + ctx.label.name + else: + libname = libprefix + "_" + ctx.label.name pkgname = libname.replace("_", "-") md_file = target_metadata( From 1bc53ba4ac0ad902dd6b47223c2aee15b071a9f9 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 30 Oct 2024 07:12:24 -0700 Subject: [PATCH 1109/1133] Base doesn't have to be special (#40) --- haskell/compile.bzl | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index da8cf1783..3600e79a0 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -190,7 +190,6 @@ def _dynamic_target_metadata_impl(actions, artifacts, dynamic_values, outputs, a package_flag = _package_flag(arg.haskell_toolchain) ghc_args = cmd_args() ghc_args.add("-hide-all-packages") - ghc_args.add(package_flag, "base") ghc_args.add(cmd_args(arg.toolchain_libs, prepend=package_flag)) ghc_args.add(cmd_args(packages_info.exposed_package_args)) @@ -339,10 +338,10 @@ def get_packages_info2( ], ) - # base is special and gets exposed by default package_flag = _package_flag(haskell_toolchain) - - exposed_package_args = cmd_args([package_flag, "base"]) + hidden_args = [l for lib in libs.traverse() for l in lib.libs] + exposed_package_libs = cmd_args() + exposed_package_args = cmd_args() if for_deps: get_db = lambda l: l.deps_db From 4541453acbb82de4b5ac9b800108e8ba6a5b2763 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 30 Oct 2024 16:32:54 +0100 Subject: [PATCH 1110/1133] Adapt to dynamic output API changes --- haskell/compile.bzl | 91 +++++++++++++++++++------------------ haskell/haskell.bzl | 48 +++++++++++++------ haskell/haskell_haddock.bzl | 47 +++++++++---------- haskell/library_info.bzl | 6 +-- 4 files changed, 104 insertions(+), 88 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index da8cf1783..c9cd9da99 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -170,7 +170,7 @@ def _modules_by_name(ctx: 
AnalysisContext, *, sources: list[Artifact], link_styl return modules -def _dynamic_target_metadata_impl(actions, artifacts, dynamic_values, outputs, arg): +def _dynamic_target_metadata_impl(actions, output, arg, pkg_deps) -> list[Provider]: # Add -package-db and -package/-expose-package flags for each Haskell # library dependency. @@ -185,7 +185,7 @@ def _dynamic_target_metadata_impl(actions, artifacts, dynamic_values, outputs, a enable_profiling = False, use_empty_lib = True, for_deps = True, - resolved = dynamic_values, + pkg_deps = pkg_deps, ) package_flag = _package_flag(arg.haskell_toolchain) ghc_args = cmd_args() @@ -210,13 +210,20 @@ def _dynamic_target_metadata_impl(actions, artifacts, dynamic_values, outputs, a md_args.add( arg.lib_package_name_and_prefix, ) - md_args.add("--output", outputs[arg.md_file].as_output()) + md_args.add("--output", output) actions.run(md_args, category = "haskell_metadata", identifier = arg.suffix if arg.suffix else None) return [] -_dynamic_target_metadata = dynamic_actions(impl = _dynamic_target_metadata_impl) +_dynamic_target_metadata = dynamic_actions( + impl = _dynamic_target_metadata_impl, + attrs = { + "output": dynattrs.output(), + "arg": dynattrs.value(typing.Any), + "pkg_deps": dynattrs.option(dynattrs.dynamic_value()), + }, +) def target_metadata( ctx: AnalysisContext, @@ -245,9 +252,8 @@ def target_metadata( # (module X.Y.Z must be defined in a file at X/Y/Z.hs) ctx.actions.dynamic_output_new(_dynamic_target_metadata( - dynamic = [], - dynamic_values = [haskell_toolchain.packages.dynamic] if haskell_toolchain.packages else [], - outputs = [md_file.as_output()], + pkg_deps = haskell_toolchain.packages.dynamic if haskell_toolchain.packages else None, + output = md_file.as_output(), arg = struct( compiler_flags = ctx.attrs.compiler_flags, deps = ctx.attrs.deps, @@ -255,7 +261,6 @@ def target_metadata( haskell_direct_deps_lib_infos = haskell_direct_deps_lib_infos, haskell_toolchain = haskell_toolchain, 
lib_package_name_and_prefix =_attr_deps_haskell_lib_package_name_and_prefix(ctx), - md_file = md_file, md_gen = md_gen, sources = sources, strip_prefix = _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)), @@ -313,7 +318,7 @@ def get_packages_info( specify_pkg_version = specify_pkg_version, enable_profiling = enable_profiling, use_empty_lib = use_empty_lib, - resolved = {}, + pkg_deps = None, ) def get_packages_info2( @@ -326,7 +331,7 @@ def get_packages_info2( specify_pkg_version: bool, enable_profiling: bool, use_empty_lib: bool, - resolved: dict[DynamicValue, ResolvedDynamicValue], + pkg_deps: ResolvedDynamicValue | None, for_deps: bool = False) -> PackagesInfo: # Collect library dependencies. Note that these don't need to be in a @@ -363,8 +368,7 @@ def get_packages_info2( ]) exposed_package_args.add(hidden_args) - if resolved: - pkg_deps = resolved[haskell_toolchain.packages.dynamic] + if pkg_deps: package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages else: package_db = {} @@ -418,7 +422,7 @@ def _common_compile_module_args( compiler_flags: list[ArgLike], ghc_wrapper: RunInfo, haskell_toolchain: HaskellToolchainInfo, - resolved: dict[DynamicValue, ResolvedDynamicValue], + pkg_deps: ResolvedDynamicValue | None, enable_haddock: bool, enable_profiling: bool, link_style: LinkStyle, @@ -493,7 +497,6 @@ def _common_compile_module_args( toolchain_libs = direct_toolchain_libs + libs.reduce("packages") if haskell_toolchain.packages: - pkg_deps = resolved[haskell_toolchain.packages.dynamic] package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages else: package_db = [] @@ -563,7 +566,7 @@ def _compile_module( md_file: Artifact, graph: dict[str, list[str]], package_deps: dict[str, list[str]], - outputs: dict[Artifact, Artifact], + outputs: dict[Artifact, OutputArtifact], artifact_suffix: str, direct_deps_by_name: dict[str, typing.Any], toolchain_deps_by_name: dict[str, None], @@ -590,8 +593,8 @@ def _compile_module( objects = 
[outputs[obj] for obj in module.objects] his = [outputs[hi] for hi in module.interfaces] - compile_args_for_file.add("-o", objects[0].as_output()) - compile_args_for_file.add("-ohi", his[0].as_output()) + compile_args_for_file.add("-o", objects[0]) + compile_args_for_file.add("-ohi", his[0]) # Set the output directories. We do not use the -outputdir flag, but set the directories individually. # Note, the -outputdir option is shorthand for the combination of -odir, -hidir, -hiedir, -stubdir and -dumpdir. @@ -603,12 +606,12 @@ def _compile_module( ) if module.stub_dir != None: stubs = outputs[module.stub_dir] - compile_args_for_file.add("-stubdir", stubs.as_output()) + compile_args_for_file.add("-stubdir", stubs) if link_style in [LinkStyle("static_pic"), LinkStyle("static")]: compile_args_for_file.add("-dynamic-too") - compile_args_for_file.add("-dyno", objects[1].as_output()) - compile_args_for_file.add("-dynohi", his[1].as_output()) + compile_args_for_file.add("-dyno", objects[1]) + compile_args_for_file.add("-dynohi", his[1]) compile_args_for_file.add(module.source) @@ -623,9 +626,9 @@ def _compile_module( toolchain_deps.append(dep_pkgname) elif dep_pkgname in direct_deps_by_name: library_deps.append(dep_pkgname) - exposed_package_dbs.append(direct_deps_by_name[dep_pkgname].package_db) + exposed_package_dbs.append(direct_deps_by_name[dep_pkgname][0]) for dep_modname in dep_modules: - exposed_package_modules.append(direct_deps_by_name[dep_pkgname].modules[dep_modname]) + exposed_package_modules.append(direct_deps_by_name[dep_pkgname][1].providers[DynamicCompileResultInfo].modules[dep_modname]) else: fail("Unknown library dependency '{}'. 
Add the library to the `deps` attribute".format(dep_pkgname)) @@ -698,7 +701,7 @@ def _compile_module( tagged_dep_file = abi_tag.tag_artifacts(dep_file) compile_cmd.add("--buck2-dep", tagged_dep_file) - compile_cmd.add("--abi-out", outputs[module.hash].as_output()) + compile_cmd.add("--abi-out", outputs[module.hash]) actions.run( compile_cmd, category = "haskell_compile_" + artifact_suffix.replace("-", "_"), identifier = module_name, @@ -720,14 +723,7 @@ def _compile_module( return module_tset -def _dynamic_do_compile_impl(actions, artifacts, dynamic_values, outputs, arg): - direct_deps_by_name = { - info.value.name: struct( - package_db = info.value.empty_db, - modules = dynamic_values[info.value.dynamic[arg.enable_profiling]].providers[DynamicCompileResultInfo].modules, - ) - for info in arg.direct_deps_info - } +def _dynamic_do_compile_impl(actions, md_file, pkg_deps, arg, direct_deps_by_name, outputs): common_args = _common_compile_module_args( actions, compiler_flags = arg.compiler_flags, @@ -737,7 +733,7 @@ def _dynamic_do_compile_impl(actions, artifacts, dynamic_values, outputs, arg): haskell_toolchain = arg.haskell_toolchain, label = arg.label, main = arg.main, - resolved = dynamic_values, + pkg_deps = pkg_deps, sources = arg.sources, enable_haddock = arg.enable_haddock, enable_profiling = arg.enable_profiling, @@ -746,7 +742,7 @@ def _dynamic_do_compile_impl(actions, artifacts, dynamic_values, outputs, arg): pkgname = arg.pkgname, ) - md = artifacts[arg.md_file].read_json() + md = md_file.read_json() th_modules = md["th_modules"] module_map = md["module_mapping"] graph = md["module_graph"] @@ -785,7 +781,16 @@ def _dynamic_do_compile_impl(actions, artifacts, dynamic_values, outputs, arg): -_dynamic_do_compile = dynamic_actions(impl = _dynamic_do_compile_impl) +_dynamic_do_compile = dynamic_actions( + impl = _dynamic_do_compile_impl, + attrs = { + "md_file" : dynattrs.artifact_value(), + "arg" : dynattrs.value(typing.Any), + "pkg_deps": 
dynattrs.option(dynattrs.dynamic_value()), + "outputs": dynattrs.dict(Artifact, dynattrs.output()), + "direct_deps_by_name": dynattrs.dict(str, dynattrs.tuple(dynattrs.value(Artifact), dynattrs.dynamic_value())), + }, +) # Compile all the context's sources. def compile( @@ -822,17 +827,13 @@ def compile( ] dyn_module_tsets = ctx.actions.dynamic_output_new(_dynamic_do_compile( - dynamic = [md_file], - dynamic_values = [ - info.value.dynamic[enable_profiling] - for lib in attr_deps_haskell_link_infos(ctx) - for info in [ - lib.prof_info[link_style] - if enable_profiling else - lib.info[link_style] - ] - ] + ([ haskell_toolchain.packages.dynamic ] if haskell_toolchain.packages else [ ]), - outputs = [o.as_output() for o in interfaces + objects + stub_dirs + abi_hashes], + md_file = md_file, + pkg_deps = haskell_toolchain.packages.dynamic if haskell_toolchain.packages else None, + outputs = {o: o.as_output() for o in interfaces + objects + stub_dirs + abi_hashes}, + direct_deps_by_name = { + info.value.name: (info.value.empty_db, info.value.dynamic[enable_profiling]) + for info in direct_deps_info + }, arg = struct( artifact_suffix = artifact_suffix, compiler_flags = ctx.attrs.compiler_flags, diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 6e7a2f5ce..6b8cc47c8 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -243,6 +243,10 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: hlibinfo = HaskellLibraryInfo( name = ctx.attrs.name, db = ctx.attrs.db, + empty_db = None, + deps_db = None, + objects = {}, + dependencies = [], import_dirs = {}, stub_dirs = [], id = ctx.attrs.id, @@ -255,6 +259,10 @@ def haskell_prebuilt_library_impl(ctx: AnalysisContext) -> list[Provider]: prof_hlibinfo = HaskellLibraryInfo( name = ctx.attrs.name, db = ctx.attrs.db, + empty_db = None, + deps_db = None, + objects = {}, + dependencies = [], import_dirs = {}, stub_dirs = [], id = ctx.attrs.id, @@ -549,8 +557,7 @@ def 
_get_haskell_shared_library_name_linker_flags( else: fail("Unknown linker type '{}'.".format(linker_type)) -def _dynamic_link_shared_impl(actions, artifacts, dynamic_values, outputs, arg): - pkg_deps = dynamic_values[arg.haskell_toolchain.packages.dynamic] +def _dynamic_link_shared_impl(actions, pkg_deps, lib, arg): package_db = pkg_deps.providers[DynamicHaskellPackageDbInfo].packages package_db_tset = actions.tset( @@ -591,7 +598,7 @@ def _dynamic_link_shared_impl(actions, artifacts, dynamic_values, outputs, arg): link_cmd_args.append(link_args) link_cmd = cmd_args(link_cmd_args, hidden = link_cmd_hidden) - link_cmd.add("-o", outputs[arg.lib].as_output()) + link_cmd.add("-o", lib) actions.run( link_cmd, @@ -600,7 +607,14 @@ def _dynamic_link_shared_impl(actions, artifacts, dynamic_values, outputs, arg): return [] -_dynamic_link_shared = dynamic_actions(impl = _dynamic_link_shared_impl) +_dynamic_link_shared = dynamic_actions( + impl = _dynamic_link_shared_impl, + attrs = { + "arg": dynattrs.value(typing.Any), + "lib": dynattrs.output(), + "pkg_deps": dynattrs.dynamic_value(), + }, +) def _build_haskell_lib( ctx, @@ -664,9 +678,8 @@ def _build_haskell_lib( ) ctx.actions.dynamic_output_new(_dynamic_link_shared( - dynamic = [], - dynamic_values = [haskell_toolchain.packages.dynamic], - outputs = [lib.as_output()], + pkg_deps = haskell_toolchain.packages.dynamic, + lib = lib.as_output(), arg = struct( artifact_suffix = artifact_suffix, haskell_toolchain = haskell_toolchain, @@ -1124,7 +1137,7 @@ def _make_link_package( return db -def _dynamic_link_binary_impl(actions, artifacts, dynamic_values, outputs, arg): +def _dynamic_link_binary_impl(actions, pkg_deps, output, arg): link_cmd = arg.link.copy() # link is already frozen, make a copy # Add -package-db and -package/-expose-package flags for each Haskell @@ -1136,7 +1149,7 @@ def _dynamic_link_binary_impl(actions, artifacts, dynamic_values, outputs, arg): haskell_toolchain = arg.haskell_toolchain, 
haskell_direct_deps_lib_infos = arg.haskell_direct_deps_lib_infos, link_style = arg.link_style, - resolved = dynamic_values, + pkg_deps = pkg_deps, specify_pkg_version = False, enable_profiling = arg.enable_profiling, use_empty_lib = False, @@ -1149,13 +1162,20 @@ def _dynamic_link_binary_impl(actions, artifacts, dynamic_values, outputs, arg): link_cmd.add(arg.haskell_toolchain.linker_flags) link_cmd.add(arg.linker_flags) - link_cmd.add("-o", outputs[arg.output].as_output()) + link_cmd.add("-o", output) actions.run(link_cmd, category = "haskell_link") return [] -_dynamic_link_binary = dynamic_actions(impl = _dynamic_link_binary_impl) +_dynamic_link_binary = dynamic_actions( + impl = _dynamic_link_binary_impl, + attrs = { + "arg": dynattrs.value(typing.Any), + "pkg_deps": dynattrs.option(dynattrs.dynamic_value()), + "output": dynattrs.output(), + }, +) def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: enable_profiling = ctx.attrs.enable_profiling @@ -1377,9 +1397,8 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: ) ctx.actions.dynamic_output_new(_dynamic_link_binary( - dynamic = [], - dynamic_values = [haskell_toolchain.packages.dynamic] if haskell_toolchain.packages else [ ], - outputs = [output.as_output()], + pkg_deps = haskell_toolchain.packages.dynamic if haskell_toolchain.packages else None, + output = output.as_output(), arg = struct( deps = ctx.attrs.deps, direct_deps_link_info = attr_deps_haskell_link_infos(ctx), @@ -1389,7 +1408,6 @@ def haskell_binary_impl(ctx: AnalysisContext) -> list[Provider]: link = link, link_style = link_style, linker_flags = ctx.attrs.linker_flags, - output = output, toolchain_libs = toolchain_libs, ), )) diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index ec31f17f5..178ae2a1e 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -54,7 +54,7 @@ def _haddock_dump_interface( haddock_info: _HaddockInfo, module_deps: list[CompiledModuleTSet], graph: 
dict[str, list[str]], - outputs: dict[Artifact, Artifact]) -> _HaddockInfoTSet: + outputs: dict[Artifact, OutputArtifact]) -> _HaddockInfoTSet: # Transitive module dependencies from other packages. cross_package_modules = actions.tset( @@ -69,7 +69,7 @@ def _haddock_dump_interface( for dep_name in graph[module_name] ] - expected_html = outputs[haddock_info.html] + expected_html = haddock_info.html module_html = _haddock_module_to_html(module_name) if paths.basename(expected_html.short_path) != module_html: @@ -82,7 +82,7 @@ def _haddock_dump_interface( actions.run( cmd.copy().add( "--odir", cmd_args(html_output.as_output(), parent = 1), - "--dump-interface", outputs[haddock_info.haddock].as_output(), + "--dump-interface", outputs[haddock_info.haddock], "--html", "--hoogle", cmd_args( @@ -102,36 +102,26 @@ def _haddock_dump_interface( if make_copy: # XXX might as well use `symlink_file`` but that does not work with buck2 RE # (see https://github.com/facebook/buck2/issues/222) - actions.copy_file(expected_html.as_output(), html_output) + actions.copy_file(outputs[expected_html], html_output) return actions.tset( _HaddockInfoTSet, - value = _HaddockInfo(interface = haddock_info.interface, haddock = outputs[haddock_info.haddock], html = outputs[haddock_info.html]), + value = _HaddockInfo(interface = haddock_info.interface, haddock = haddock_info.haddock, html = haddock_info.html), children = this_package_modules, ) -def _dynamic_haddock_dump_interfaces_impl(actions, artifacts, dynamic_values, outputs, arg): - md = artifacts[arg.md_file].read_json() +def _dynamic_haddock_dump_interfaces_impl(actions, md_file, dynamic_info_lib, outputs, arg): + md = md_file.read_json() module_map = md["module_mapping"] graph = md["module_graph"] package_deps = md["package_deps"] - dynamic_info_lib = {} - - for lib in arg.direct_deps_link_info: - info = lib.info[arg.link_style] - direct = info.value - dynamic = direct.dynamic[False] - dynamic_info = 
dynamic_values[dynamic].providers[DynamicCompileResultInfo] - - dynamic_info_lib[direct.name] = dynamic_info - haddock_infos = { module_map.get(k, k): v for k, v in arg.haddock_infos.items() } module_tsets = {} for module_name in post_order_traversal(graph): module_deps = [ - info.modules[mod] + info.providers[DynamicCompileResultInfo].modules[mod] for lib, info in dynamic_info_lib.items() for mod in package_deps.get(module_name, {}).get(lib, []) ] @@ -149,7 +139,15 @@ def _dynamic_haddock_dump_interfaces_impl(actions, artifacts, dynamic_values, ou return [] -_dynamic_haddock_dump_interfaces = dynamic_actions(impl = _dynamic_haddock_dump_interfaces_impl) +_dynamic_haddock_dump_interfaces = dynamic_actions( + impl = _dynamic_haddock_dump_interfaces_impl, + attrs = { + "md_file": dynattrs.artifact_value(), + "arg": dynattrs.value(typing.Any), + "dynamic_info_lib": dynattrs.dict(str, dynattrs.dynamic_value()), + "outputs": dynattrs.dict(Artifact, dynattrs.output()), + }, +) def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileResultInfo, md_file: Artifact) -> HaskellHaddockInfo: haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] @@ -188,19 +186,18 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes direct_deps_link_info = attr_deps_haskell_link_infos(ctx) ctx.actions.dynamic_output_new(_dynamic_haddock_dump_interfaces( - dynamic = [md_file], - dynamic_values = [ - info.value.dynamic[False] + md_file = md_file, + dynamic_info_lib = { + info.value.name: info.value.dynamic[False] for lib in direct_deps_link_info for info in [ #lib.prof_info[link_style] #if enable_profiling else lib.info[link_style], ] - ], - outputs = [output.as_output() for info in haddock_infos.values() for output in [info.haddock, info.html]], + }, + outputs = {output: output.as_output() for info in haddock_infos.values() for output in [info.haddock, info.html]}, arg = struct( - direct_deps_link_info = direct_deps_link_info, 
dyn_cmd = cmd.copy(), haddock_infos = haddock_infos, link_style = link_style, diff --git a/haskell/library_info.bzl b/haskell/library_info.bzl index 5eb033a84..dbd80cbea 100644 --- a/haskell/library_info.bzl +++ b/haskell/library_info.bzl @@ -26,9 +26,9 @@ HaskellLibraryInfo = record( # package config database: e.g. platform009/build/ghc/lib/package.conf.d db = Artifact, # package config database, referring to the empty lib which is only used for compilation - empty_db = Artifact, - # pacakge config database, used for ghc -M - deps_db = Artifact, + empty_db = Artifact | None, + # package config database, used for ghc -M + deps_db = Artifact | None, # e.g. "base-4.13.0.0" id = str, # dynamic dependency information From bc753a1c67447a441eb03ee15694137efc370965 Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Fri, 1 Nov 2024 09:27:40 +0100 Subject: [PATCH 1111/1133] Prevent loading of default package environment for haddock actions --- haskell/haskell.bzl | 1 + haskell/haskell_haddock.bzl | 2 ++ 2 files changed, 3 insertions(+) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 6b8cc47c8..a209e5ad0 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -992,6 +992,7 @@ def haskell_library_impl(ctx: AnalysisContext) -> list[Provider]: cmd_args( haskell_toolchain.haddock, "--gen-index", + "--optghc=-package-env=-", "-o", cmd_args(styles[0].as_output(), parent=1), hidden=[file.as_output() for file in styles] ), diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index 178ae2a1e..66687e6ec 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -163,6 +163,7 @@ def haskell_haddock_lib(ctx: AnalysisContext, pkgname: str, compiled: CompileRes "index.html", "--no-tmp-comp-dir", "--no-warnings", + "--optghc=-package-env=-", "--package-name", pkgname, ) @@ -218,6 +219,7 @@ def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: cmd = cmd_args(haskell_toolchain.haddock) cmd.add( + "--optghc=-package-env=-", 
"--gen-index", "--gen-contents", "-o", From 8ab5fd2aa84aafcb4ef6e2fb2d838cbce3ea4f36 Mon Sep 17 00:00:00 2001 From: Torsten Schmits Date: Mon, 4 Nov 2024 15:12:08 +0100 Subject: [PATCH 1112/1133] use -fwrite-if-simplified-core to avoid unused bytecode generation --- haskell/compile.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 3600e79a0..344578a8e 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -686,7 +686,7 @@ def _compile_module( compile_cmd.add(cmd_args(library_deps, prepend = "-package")) compile_cmd.add(cmd_args(toolchain_deps, prepend = "-package")) - compile_cmd.add("-fbyte-code-and-object-code") + compile_cmd.add("-fwrite-if-simplified-core") if enable_th: compile_cmd.add("-fprefer-byte-code") From 05ee50bb3aab47505a9433d140d9a97ec293733e Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 6 Nov 2024 07:51:50 -0800 Subject: [PATCH 1113/1133] Parallel downsweep (#46) use -j in haskell_metadata action * parallel downsweep * weight = 8 for haskell_metadata run --- haskell/compile.bzl | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 1b6ee3335..e1c0d9a27 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -189,6 +189,7 @@ def _dynamic_target_metadata_impl(actions, output, arg, pkg_deps) -> list[Provid ) package_flag = _package_flag(arg.haskell_toolchain) ghc_args = cmd_args() + ghc_args.add("-j") ghc_args.add("-hide-all-packages") ghc_args.add(cmd_args(arg.toolchain_libs, prepend=package_flag)) @@ -211,7 +212,12 @@ def _dynamic_target_metadata_impl(actions, output, arg, pkg_deps) -> list[Provid ) md_args.add("--output", output) - actions.run(md_args, category = "haskell_metadata", identifier = arg.suffix if arg.suffix else None) + actions.run( + md_args, + category = "haskell_metadata", + identifier = arg.suffix if arg.suffix else None, + weight = 8, + ) return [] From 
913d64f2e774d59fc66ba727c0f2a404f8b4cc99 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 6 Nov 2024 07:57:39 -0800 Subject: [PATCH 1114/1133] Use argfile for stub dir copy if use_argfile_at_link is on (#52) The arg list is too long, so it needs to be wrapped in argfile. * Use argfile for stub dir copy if use_argfile_at_link is on. * at argfile doesn't work on linux shell. so in more portable way. --- haskell/compile.bzl | 32 ++++++++++++++++++++++++++------ 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index e1c0d9a27..148d60aef 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -50,7 +50,7 @@ load( "@prelude//linking:link_info.bzl", "LinkStyle", ) -load("@prelude//utils:argfile.bzl", "at_argfile") +load("@prelude//utils:argfile.bzl", "argfile", "at_argfile") load("@prelude//:paths.bzl", "paths") load("@prelude//utils:graph_utils.bzl", "post_order_traversal") load("@prelude//utils:strings.bzl", "strip_prefix") @@ -865,8 +865,25 @@ def compile( stubs_dir = ctx.actions.declare_output("stubs-" + artifact_suffix, dir=True) # collect the stubs from all modules into the stubs_dir - ctx.actions.run( - cmd_args([ + if ctx.attrs.use_argsfile_at_link: + stub_copy_cmd = cmd_args([ + "bash", "-exuc", + """\ + mkdir -p \"$0\" + cat $1 | while read stub; do + find \"$stub\" -mindepth 1 -maxdepth 1 -exec cp -r -t \"$0\" '{}' ';' + done + """, + ]) + stub_copy_cmd.add(stubs_dir.as_output()) + stub_copy_cmd.add(argfile( + actions = ctx.actions, + name = "haskell_stubs_" + artifact_suffix + ".argsfile", + args = stub_dirs, + allow_args = True, + )) + else: + stub_copy_cmd = cmd_args([ "bash", "-exuc", """\ mkdir -p \"$0\" @@ -874,9 +891,12 @@ def compile( find \"$stub\" -mindepth 1 -maxdepth 1 -exec cp -r -t \"$0\" '{}' ';' done """, - stubs_dir.as_output(), - stub_dirs - ]), + ]) + stub_copy_cmd.add(stubs_dir.as_output()) + stub_copy_cmd.add(stub_dirs) + + ctx.actions.run( + stub_copy_cmd, category = 
"haskell_stubs", identifier = artifact_suffix, local_only = True, From 18b32bcec1d8e2cff01b4139f807fbf942aa4d2b Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 6 Nov 2024 08:00:58 -0800 Subject: [PATCH 1115/1133] Allow cache upload for local action (#55) Local actions can upload to cache with allow_cache_upload on. * allow_cache_upload for compile action * link also allows cache upload. --- haskell/compile.bzl | 4 +++- haskell/haskell.bzl | 11 +++++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 148d60aef..509696993 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -713,7 +713,9 @@ def _compile_module( dep_files = { "abi": abi_tag, "packagedb": packagedb_tag, - } + }, + # explicit turn this on for local_only actions to upload their results. + allow_cache_upload = True, ) module_tset = actions.tset( diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index dbff3674a..a13a66dcc 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -603,6 +603,8 @@ def _dynamic_link_shared_impl(actions, pkg_deps, lib, arg): actions.run( link_cmd, category = "haskell_link" + arg.artifact_suffix.replace("-", "_"), + # explicit turn this on for local_only actions to upload their results. 
+ allow_cache_upload = True, ) return [] @@ -661,7 +663,7 @@ def _build_haskell_lib( # only gather direct dependencies uniq_infos = [x[link_style].value for x in linfos] - toolchain_libs = [dep.name for dep in attr_deps_haskell_toolchain_libraries(ctx)] + toolchain_libs = [dep.name for dep in attr_deps_haskell_toolchain_libraries(ctx)] if link_style == LinkStyle("shared"): lib = ctx.actions.declare_output(lib_short_path) @@ -1171,7 +1173,12 @@ def _dynamic_link_binary_impl(actions, pkg_deps, output, arg): link_cmd.add("-o", output) - actions.run(link_cmd, category = "haskell_link") + actions.run( + link_cmd, + category = "haskell_link", + # explicit turn this on for local_only actions to upload their results. + allow_cache_upload = True, + ) return [] From 1f0991a869e7f109fab70c16fa360471e124c071 Mon Sep 17 00:00:00 2001 From: Torsten Schmits Date: Thu, 7 Nov 2024 16:14:43 +0100 Subject: [PATCH 1116/1133] Remove -x from stub collection action script --- haskell/compile.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 509696993..8a2109dca 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -869,7 +869,7 @@ def compile( # collect the stubs from all modules into the stubs_dir if ctx.attrs.use_argsfile_at_link: stub_copy_cmd = cmd_args([ - "bash", "-exuc", + "bash", "-euc", """\ mkdir -p \"$0\" cat $1 | while read stub; do @@ -886,7 +886,7 @@ def compile( )) else: stub_copy_cmd = cmd_args([ - "bash", "-exuc", + "bash", "-euc", """\ mkdir -p \"$0\" for stub; do From 0fec69d7a00943ca7deb8f8a097c98dd526749ce Mon Sep 17 00:00:00 2001 From: Torsten Schmits Date: Fri, 8 Nov 2024 16:09:20 +0100 Subject: [PATCH 1117/1133] Pass -v0 to ghc-pkg register to reduce warning spam --- haskell/haskell.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index a13a66dcc..0d7b0d5c3 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ 
-406,7 +406,7 @@ GHC_PKG=$1 DB=$2 PKGCONF=$3 "$GHC_PKG" init "$DB" -"$GHC_PKG" register --package-conf "$DB" --no-expand-pkgroot "$PKGCONF" --force +"$GHC_PKG" register --package-conf "$DB" --no-expand-pkgroot "$PKGCONF" --force -v0 """ # Create a package From ece0c27ebd3ca1559c9c978ec3b9203c843955ac Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 13 Nov 2024 18:08:02 +0100 Subject: [PATCH 1118/1133] Do not use `--reflink=auto` for `cp` command This is not working for remote actions on RE. --- haskell/haskell_haddock.bzl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index 66687e6ec..1746d117e 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -243,7 +243,7 @@ def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: cmd_args(cmd, delimiter = " ", quote = "shell"), [ cmd_args( - ["cp", "-f", "--reflink=auto", html, out.as_output()], + ["cp", "-f", html, out.as_output()], delimiter = " ", ) for html in dep_htmls ], From 9997b1ac4fb452e2ffdd8829ad55d0f1ba8a804b Mon Sep 17 00:00:00 2001 From: Claudio Bley Date: Wed, 13 Nov 2024 18:10:45 +0100 Subject: [PATCH 1119/1133] Refactor: add output artifact to haskell_haddock action --- haskell/haskell_haddock.bzl | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/haskell/haskell_haddock.bzl b/haskell/haskell_haddock.bzl index 1746d117e..78190ab7c 100644 --- a/haskell/haskell_haddock.bzl +++ b/haskell/haskell_haddock.bzl @@ -236,14 +236,13 @@ def haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: cmd.add(ctx.attrs.haddock_flags) - script = ctx.actions.declare_output("haddock-script") script_args = cmd_args([ "#!/bin/sh", cmd_args( cmd_args(cmd, delimiter = " ", quote = "shell"), [ cmd_args( - ["cp", "-f", html, out.as_output()], + ["cp", "-f", html, cmd_args(out, ignore_artifacts = True)], delimiter = " ", ) for html in dep_htmls ], @@ -251,15 +250,15 @@ def 
haskell_haddock_impl(ctx: AnalysisContext) -> list[Provider]: ) ]) - ctx.actions.write( - script, + script = ctx.actions.write( + "haddock-script", script_args, is_executable = True, - allow_args = True, + with_inputs = True, ) ctx.actions.run( - cmd_args(script, hidden = script_args), + cmd_args(script, hidden=out.as_output()), category = "haskell_haddock", no_outputs_cleanup = True, ) From 12393f270a857169e6a98880670d9558427cf56f Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 6 Nov 2024 08:03:50 -0800 Subject: [PATCH 1120/1133] extra_libraries. native C/C++ libraries can be provided. --- decls/haskell_common.bzl | 8 ++++++++ decls/haskell_rules.bzl | 2 ++ haskell/compile.bzl | 16 ++++++++++++++++ haskell/toolchain.bzl | 7 +++++++ 4 files changed, 33 insertions(+) diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index 0f704f71d..711a3c2ad 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ -76,6 +76,13 @@ def _use_argsfile_at_link_arg(): """), } +def _extra_libraries_arg(): + return { + "extra_libraries": attrs.list(attrs.dep(), default = [], doc = """ + Non-Haskell deps (C/C++ libraries) +"""), + } + haskell_common = struct( srcs_arg = _srcs_arg, deps_arg = _deps_arg, @@ -85,4 +92,5 @@ haskell_common = struct( external_tools_arg = _external_tools_arg, srcs_envs_arg = _srcs_envs_arg, use_argsfile_at_link_arg = _use_argsfile_at_link_arg, + extra_libraries_arg = _extra_libraries_arg, ) diff --git a/decls/haskell_rules.bzl b/decls/haskell_rules.bzl index 61d2107ca..17923f930 100644 --- a/decls/haskell_rules.bzl +++ b/decls/haskell_rules.bzl @@ -49,6 +49,7 @@ haskell_binary = prelude_rule( haskell_common.external_tools_arg() | haskell_common.srcs_envs_arg () | haskell_common.use_argsfile_at_link_arg () | + haskell_common.extra_libraries_arg () | haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | haskell_common.scripts_arg() | @@ -170,6 +171,7 @@ haskell_library = prelude_rule( 
haskell_common.external_tools_arg() | haskell_common.srcs_envs_arg() | haskell_common.use_argsfile_at_link_arg() | + haskell_common.extra_libraries_arg() | haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | haskell_common.scripts_arg() | diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 8a2109dca..f31ae1ab3 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -31,6 +31,7 @@ load( "HaskellToolchainLibrary", "DynamicHaskellPackageDbInfo", "HaskellPackageDbTSet", + "NativeToolchainLibrary", ) load( "@prelude//haskell:util.bzl", @@ -435,6 +436,7 @@ def _common_compile_module_args( label: Label, deps: list[Dependency], external_tool_paths: list[RunInfo], + extra_libraries: list[Dependency], sources: list[Artifact], direct_deps_info: list[HaskellLibraryInfoTSet], pkgname: str | None = None, @@ -578,6 +580,7 @@ def _compile_module( aux_deps: None | list[Artifact], src_envs: None | dict[str, ArgLike], source_prefixes: list[str], + extra_libraries: list[Dependency], ) -> CompiledModuleTSet: # These compiler arguments can be passed in a response file. 
compile_args_for_file = cmd_args(common_args.args_for_file, hidden = aux_deps or []) @@ -695,6 +698,16 @@ def _compile_module( compile_cmd.add(cmd_args(library_deps, prepend = "-package")) compile_cmd.add(cmd_args(toolchain_deps, prepend = "-package")) + # extra-libraries + extra_libs = [ + lib[NativeToolchainLibrary] + for lib in extra_libraries + if NativeToolchainLibrary in lib + ] + for l in extra_libs: + compile_cmd.add(l.lib_path) + compile_cmd.add("-l{}".format(l.name)) + compile_cmd.add("-fwrite-if-simplified-core") if enable_th: compile_cmd.add("-fprefer-byte-code") @@ -736,6 +749,7 @@ def _dynamic_do_compile_impl(actions, md_file, pkg_deps, arg, direct_deps_by_nam compiler_flags = arg.compiler_flags, deps = arg.deps, external_tool_paths = arg.external_tool_paths, + extra_libraries = arg.extra_libraries, ghc_wrapper = arg.ghc_wrapper, haskell_toolchain = arg.haskell_toolchain, label = arg.label, @@ -782,6 +796,7 @@ def _dynamic_do_compile_impl(actions, md_file, pkg_deps, arg, direct_deps_by_nam direct_deps_by_name = direct_deps_by_name, toolchain_deps_by_name = arg.toolchain_deps_by_name, source_prefixes = source_prefixes, + extra_libraries = arg.extra_libraries, ) return [DynamicCompileResultInfo(modules = module_tsets)] @@ -861,6 +876,7 @@ def compile( sources_deps = ctx.attrs.srcs_deps, srcs_envs = ctx.attrs.srcs_envs, toolchain_deps_by_name = toolchain_deps_by_name, + extra_libraries = ctx.attrs.extra_libraries, ), )) diff --git a/haskell/toolchain.bzl b/haskell/toolchain.bzl index f57d6d161..380e724e6 100644 --- a/haskell/toolchain.bzl +++ b/haskell/toolchain.bzl @@ -70,3 +70,10 @@ HaskellPackageDbTSet = transitive_set( DynamicHaskellPackageDbInfo = provider(fields = { "packages": dict[str, HaskellPackageDbTSet], }) + +NativeToolchainLibrary = provider( + fields = { + "name": provider_field(str), + "lib_path": provider_field(typing.Any, default = None), + }, +) From 80f08b54e184b1be7f4eceb5b4d3fea23498e635 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim 
Date: Wed, 9 Oct 2024 15:12:53 -0700 Subject: [PATCH 1121/1133] pass worker-id --- haskell/haskell.bzl | 1 + haskell/tools/generate_target_metadata.py | 5 +++-- haskell/tools/ghc_wrapper.py | 7 +++++-- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 0d7b0d5c3..ca2052862 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -569,6 +569,7 @@ def _dynamic_link_shared_impl(actions, pkg_deps, lib, arg): link_cmd_args = [cmd_args(arg.haskell_toolchain.linker)] link_cmd_hidden = [] + link_args.add("--worker-id=ABCDE") link_args.add(arg.haskell_toolchain.linker_flags) link_args.add(arg.linker_flags) link_args.add("-hide-all-packages") diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 9a60b913e..06f2d673f 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -190,7 +190,8 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): json_fname = os.path.join(dname, "depends.json") make_fname = os.path.join(dname, "depends.make") haskell_sources = list(filter(is_haskell_src, sources)) - + haskell_boot_sources = list(filter (is_haskell_boot, sources)) + worker_args = ["--worker-id=ABCDE"] args = [ ghc, "-M", "-include-pkg-deps", # Note: `-outputdir '.'` removes the prefix of all targets: @@ -198,7 +199,7 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths): "-outputdir", ".", "-dep-json", json_fname, "-dep-makefile", make_fname, - ] + ghc_args + haskell_sources + ] + worker_args + ghc_args + haskell_sources + haskell_boot_sources env = os.environ.copy() path = env.get("PATH", "") diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 0a1c20f2d..b0ca652b0 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -75,8 +75,10 @@ def main(): ) args, ghc_args = parser.parse_known_args() + worker_args = ["--worker-id=ABCDE"] - cmd = [args.ghc] + ghc_args + 
print("WOOROROR", file=sys.stderr) + cmd = [args.ghc] + worker_args + ghc_args aux_paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] + [str(os.path.dirname(binexepath)) for binexepath in args.bin_exe] env = os.environ.copy() @@ -130,8 +132,9 @@ def main(): def recompute_abi_hash(ghc, abi_out): """Call ghc on the hi file and write the ABI hash to abi_out.""" hi_file = abi_out.with_suffix("") + worker_args = ["--worker-id=ABCDE"] - cmd = [ghc, "-v0", "-package-env=-", "--show-iface-abi-hash", hi_file] + cmd = [ghc, "-v0", "-package-env=-", "--show-iface-abi-hash", hi_file] + worker_args hash = subprocess.check_output(cmd, text=True).split(maxsplit=1)[0] From ea497ae420d6f3c145b6bd7395d92be4a0a6cd4f Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 9 Oct 2024 16:01:26 -0700 Subject: [PATCH 1122/1133] hand pkgname as --worker-id --- haskell/compile.bzl | 8 +++++++- haskell/haskell.bzl | 3 ++- haskell/tools/generate_target_metadata.py | 11 ++++++++--- haskell/tools/ghc_wrapper.py | 11 +++++++---- 4 files changed, 24 insertions(+), 9 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index f31ae1ab3..27acc6c51 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -201,6 +201,7 @@ def _dynamic_target_metadata_impl(actions, output, arg, pkg_deps) -> list[Provid md_args = cmd_args(arg.md_gen) md_args.add(packages_info.bin_paths) md_args.add("--ghc", arg.haskell_toolchain.compiler) + md_args.add("--worker-id", arg.worker_id) md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) md_args.add( "--source-prefix", @@ -240,6 +241,9 @@ def target_metadata( md_file = ctx.actions.declare_output(ctx.attrs.name + suffix + ".md.json") md_gen = ctx.attrs._generate_target_metadata[RunInfo] + libname = repr(ctx.label.path).replace("//", "_").replace("/", "_") + "_" + ctx.label.name + pkgname = libname.replace("_", "-") + haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] toolchain_libs = [dep.name for dep in 
attr_deps_haskell_toolchain_libraries(ctx)] @@ -272,6 +276,7 @@ def target_metadata( strip_prefix = _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)), suffix = suffix, toolchain_libs = toolchain_libs, + worker_id = pkgname, ), )) @@ -443,7 +448,8 @@ def _common_compile_module_args( ) -> CommonCompileModuleArgs: command = cmd_args(ghc_wrapper) command.add("--ghc", haskell_toolchain.compiler) - + worker_id = pkgname + command.add("--worker-id", worker_id) # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't # be parsed when inside an argsfile. command.add(haskell_toolchain.compiler_flags) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index ca2052862..d739e6657 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -569,7 +569,7 @@ def _dynamic_link_shared_impl(actions, pkg_deps, lib, arg): link_cmd_args = [cmd_args(arg.haskell_toolchain.linker)] link_cmd_hidden = [] - link_args.add("--worker-id=ABCDE") + link_args.add("--worker-id={}".format(arg.worker_id)) link_args.add(arg.haskell_toolchain.linker_flags) link_args.add(arg.linker_flags) link_args.add("-hide-all-packages") @@ -694,6 +694,7 @@ def _build_haskell_lib( objects = objects, toolchain_libs = toolchain_libs, use_argsfile_at_link = ctx.attrs.use_argsfile_at_link, + worker_id = pkgname, ), )) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 06f2d673f..902dd31f0 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -35,6 +35,11 @@ def main(): required=True, type=argparse.FileType("w"), help="Write package metadata to this file in JSON format.") + parser.add_argument( + "--worker-id", + required=True, + type=str, + help="Worker id") parser.add_argument( "--ghc", required=True, @@ -86,7 +91,7 @@ def json_default_handler(o): def obtain_target_metadata(args): paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] - ghc_depends = 
run_ghc_depends(args.ghc, args.ghc_arg, args.source, paths) + ghc_depends = run_ghc_depends(args.ghc, args.ghc_arg, args.source, paths, args.worker_id) th_modules = determine_th_modules(ghc_depends) module_mapping = determine_module_mapping(ghc_depends, args.source_prefix) module_graph = determine_module_graph(ghc_depends) @@ -185,13 +190,13 @@ def determine_package_deps(ghc_depends): return package_deps -def run_ghc_depends(ghc, ghc_args, sources, aux_paths): +def run_ghc_depends(ghc, ghc_args, sources, aux_paths, worker_id): with tempfile.TemporaryDirectory() as dname: json_fname = os.path.join(dname, "depends.json") make_fname = os.path.join(dname, "depends.make") haskell_sources = list(filter(is_haskell_src, sources)) haskell_boot_sources = list(filter (is_haskell_boot, sources)) - worker_args = ["--worker-id=ABCDE"] + worker_args = ["--worker-id={}".format(worker_id)] args = [ ghc, "-M", "-include-pkg-deps", # Note: `-outputdir '.'` removes the prefix of all targets: diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index b0ca652b0..96e2ce397 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -36,6 +36,9 @@ def main(): default=[], help="Path to a package db that is used during the module compilation", ) + parser.add_argument( + "--worker-id", required=True, type=str, help="worker id", + ) parser.add_argument( "--ghc", required=True, type=str, help="Path to the Haskell compiler GHC." 
) @@ -75,7 +78,7 @@ def main(): ) args, ghc_args = parser.parse_known_args() - worker_args = ["--worker-id=ABCDE"] + worker_args = ["--worker-id={}".format(args.worker_id)] print("WOOROROR", file=sys.stderr) cmd = [args.ghc] + worker_args + ghc_args @@ -101,7 +104,7 @@ def main(): if returncode != 0: return returncode - recompute_abi_hash(args.ghc, args.abi_out) + recompute_abi_hash(args.ghc, args.abi_out, args.worker_id) # write an empty dep file, to signal that all tagged files are unused try: @@ -129,10 +132,10 @@ def main(): return 0 -def recompute_abi_hash(ghc, abi_out): +def recompute_abi_hash(ghc, abi_out, worker_id): """Call ghc on the hi file and write the ABI hash to abi_out.""" hi_file = abi_out.with_suffix("") - worker_args = ["--worker-id=ABCDE"] + worker_args = ["--worker-id={}".format(worker_id)] cmd = [ghc, "-v0", "-package-env=-", "--show-iface-abi-hash", hi_file] + worker_args From 640e7bdf46e621d63b4041bdc7f44158cea4f6c1 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Thu, 10 Oct 2024 14:34:40 -0700 Subject: [PATCH 1123/1133] --worker-close --- haskell/haskell.bzl | 1 + haskell/tools/ghc_wrapper.py | 6 ++++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index d739e6657..f34844218 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -570,6 +570,7 @@ def _dynamic_link_shared_impl(actions, pkg_deps, lib, arg): link_cmd_hidden = [] link_args.add("--worker-id={}".format(arg.worker_id)) + link_args.add("--worker-close") link_args.add(arg.haskell_toolchain.linker_flags) link_args.add(arg.linker_flags) link_args.add("-hide-all-packages") diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 96e2ce397..6f2d2ea31 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -39,6 +39,9 @@ def main(): parser.add_argument( "--worker-id", required=True, type=str, help="worker id", ) + parser.add_argument( + "--worker-close", required=False, 
type=bool, default=False, help="worker close", + ) parser.add_argument( "--ghc", required=True, type=str, help="Path to the Haskell compiler GHC." ) @@ -78,9 +81,8 @@ def main(): ) args, ghc_args = parser.parse_known_args() - worker_args = ["--worker-id={}".format(args.worker_id)] + worker_args = ["--worker-id={}".format(args.worker_id)] + ["--worker-close"] if args.worker_close else [] - print("WOOROROR", file=sys.stderr) cmd = [args.ghc] + worker_args + ghc_args aux_paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] + [str(os.path.dirname(binexepath)) for binexepath in args.bin_exe] From 0625492ae2fe7a820e7ab0f2a2634ca38e29da74 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Thu, 10 Oct 2024 16:34:19 -0700 Subject: [PATCH 1124/1133] bug fix --- haskell/tools/ghc_wrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 6f2d2ea31..8f2247afa 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -81,7 +81,7 @@ def main(): ) args, ghc_args = parser.parse_known_args() - worker_args = ["--worker-id={}".format(args.worker_id)] + ["--worker-close"] if args.worker_close else [] + worker_args = ["--worker-id={}".format(args.worker_id)] + (["--worker-close"] if args.worker_close else []) cmd = [args.ghc] + worker_args + ghc_args From 7f25e72ccaad71aecf393cc95688626d4a6df9d2 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Thu, 10 Oct 2024 23:40:52 -0700 Subject: [PATCH 1125/1133] fix incompatibility with argsfile --- haskell/haskell.bzl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index f34844218..7e723bbc8 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -569,8 +569,6 @@ def _dynamic_link_shared_impl(actions, pkg_deps, lib, arg): link_cmd_args = [cmd_args(arg.haskell_toolchain.linker)] link_cmd_hidden = [] - link_args.add("--worker-id={}".format(arg.worker_id)) - 
link_args.add("--worker-close") link_args.add(arg.haskell_toolchain.linker_flags) link_args.add(arg.linker_flags) link_args.add("-hide-all-packages") @@ -601,6 +599,8 @@ def _dynamic_link_shared_impl(actions, pkg_deps, lib, arg): link_cmd = cmd_args(link_cmd_args, hidden = link_cmd_hidden) link_cmd.add("-o", lib) + link_cmd.add("--worker-id={}".format(arg.worker_id)) + link_cmd.add("--worker-close") actions.run( link_cmd, From e958a143cec9359e17cf4e6978b79fe4c90e36c4 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Fri, 11 Oct 2024 06:42:25 -0700 Subject: [PATCH 1126/1133] not pass worker-* if use_persistent_workers=False --- haskell/compile.bzl | 8 +++++--- haskell/haskell.bzl | 6 ++++-- haskell/toolchain.bzl | 1 + haskell/tools/generate_target_metadata.py | 7 +++++-- haskell/tools/ghc_wrapper.py | 13 +++++++++---- 5 files changed, 24 insertions(+), 11 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 27acc6c51..2422efdd7 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -201,7 +201,8 @@ def _dynamic_target_metadata_impl(actions, output, arg, pkg_deps) -> list[Provid md_args = cmd_args(arg.md_gen) md_args.add(packages_info.bin_paths) md_args.add("--ghc", arg.haskell_toolchain.compiler) - md_args.add("--worker-id", arg.worker_id) + if arg.haskell_toolchain.use_persistent_workers: + md_args.add("--worker-id", arg.worker_id) md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) md_args.add( "--source-prefix", @@ -448,8 +449,9 @@ def _common_compile_module_args( ) -> CommonCompileModuleArgs: command = cmd_args(ghc_wrapper) command.add("--ghc", haskell_toolchain.compiler) - worker_id = pkgname - command.add("--worker-id", worker_id) + if haskell_toolchain.use_persistent_workers: + worker_id = pkgname + command.add("--worker-id", worker_id) # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't # be parsed when inside an argsfile. 
command.add(haskell_toolchain.compiler_flags) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 7e723bbc8..065cd3363 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -599,8 +599,10 @@ def _dynamic_link_shared_impl(actions, pkg_deps, lib, arg): link_cmd = cmd_args(link_cmd_args, hidden = link_cmd_hidden) link_cmd.add("-o", lib) - link_cmd.add("--worker-id={}".format(arg.worker_id)) - link_cmd.add("--worker-close") + + if arg.haskell_toolchain.use_persistent_workers: + link_cmd.add("--worker-id={}".format(arg.worker_id)) + link_cmd.add("--worker-close") actions.run( link_cmd, diff --git a/haskell/toolchain.bzl b/haskell/toolchain.bzl index 380e724e6..b685df9ea 100644 --- a/haskell/toolchain.bzl +++ b/haskell/toolchain.bzl @@ -40,6 +40,7 @@ HaskellToolchainInfo = provider( "cache_links": provider_field(typing.Any, default = None), "script_template_processor": provider_field(typing.Any, default = None), "packages": provider_field(typing.Any, default = None), + "use_persistent_workers": provider_field(typing.Any, default = None), }, ) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 902dd31f0..2e3962e77 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -37,7 +37,7 @@ def main(): help="Write package metadata to this file in JSON format.") parser.add_argument( "--worker-id", - required=True, + required=False, type=str, help="Worker id") parser.add_argument( @@ -196,7 +196,10 @@ def run_ghc_depends(ghc, ghc_args, sources, aux_paths, worker_id): make_fname = os.path.join(dname, "depends.make") haskell_sources = list(filter(is_haskell_src, sources)) haskell_boot_sources = list(filter (is_haskell_boot, sources)) - worker_args = ["--worker-id={}".format(worker_id)] + if worker_id: + worker_args = ["--worker-id={}".format(worker_id)] + else: + worker_args = [] args = [ ghc, "-M", "-include-pkg-deps", # Note: `-outputdir '.'` removes the prefix 
of all targets: diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 8f2247afa..9a5862041 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -37,7 +37,7 @@ def main(): help="Path to a package db that is used during the module compilation", ) parser.add_argument( - "--worker-id", required=True, type=str, help="worker id", + "--worker-id", required=False, type=str, help="worker id", ) parser.add_argument( "--worker-close", required=False, type=bool, default=False, help="worker close", @@ -81,8 +81,10 @@ def main(): ) args, ghc_args = parser.parse_known_args() - worker_args = ["--worker-id={}".format(args.worker_id)] + (["--worker-close"] if args.worker_close else []) - + if args.worker_id: + worker_args = ["--worker-id={}".format(args.worker_id)] + (["--worker-close"] if args.worker_close else []) + else: + worker_args = [] cmd = [args.ghc] + worker_args + ghc_args aux_paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] + [str(os.path.dirname(binexepath)) for binexepath in args.bin_exe] @@ -137,7 +139,10 @@ def main(): def recompute_abi_hash(ghc, abi_out, worker_id): """Call ghc on the hi file and write the ABI hash to abi_out.""" hi_file = abi_out.with_suffix("") - worker_args = ["--worker-id={}".format(worker_id)] + if worker_id: + worker_args = ["--worker-id={}".format(worker_id)] + else: + worker_args = [] cmd = [ghc, "-v0", "-package-env=-", "--show-iface-abi-hash", hi_file] + worker_args From a786686f28c1e3453e6403645710471055dd22e1 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Tue, 15 Oct 2024 05:54:45 -0700 Subject: [PATCH 1127/1133] worker-id -> worker-target-id --- haskell/compile.bzl | 8 ++++---- haskell/haskell.bzl | 4 ++-- haskell/tools/generate_target_metadata.py | 10 +++++----- haskell/tools/ghc_wrapper.py | 14 +++++++------- 4 files changed, 18 insertions(+), 18 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 2422efdd7..c4ce2f8cd 100644 --- 
a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -202,7 +202,7 @@ def _dynamic_target_metadata_impl(actions, output, arg, pkg_deps) -> list[Provid md_args.add(packages_info.bin_paths) md_args.add("--ghc", arg.haskell_toolchain.compiler) if arg.haskell_toolchain.use_persistent_workers: - md_args.add("--worker-id", arg.worker_id) + md_args.add("--worker-target-id", arg.worker_target_id) md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) md_args.add( "--source-prefix", @@ -277,7 +277,7 @@ def target_metadata( strip_prefix = _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)), suffix = suffix, toolchain_libs = toolchain_libs, - worker_id = pkgname, + worker_target_id = pkgname, ), )) @@ -450,8 +450,8 @@ def _common_compile_module_args( command = cmd_args(ghc_wrapper) command.add("--ghc", haskell_toolchain.compiler) if haskell_toolchain.use_persistent_workers: - worker_id = pkgname - command.add("--worker-id", worker_id) + worker_target_id = pkgname + command.add("--worker-target-id", worker_target_id) # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't # be parsed when inside an argsfile. 
command.add(haskell_toolchain.compiler_flags) diff --git a/haskell/haskell.bzl b/haskell/haskell.bzl index 065cd3363..5e3de9344 100644 --- a/haskell/haskell.bzl +++ b/haskell/haskell.bzl @@ -601,7 +601,7 @@ def _dynamic_link_shared_impl(actions, pkg_deps, lib, arg): link_cmd.add("-o", lib) if arg.haskell_toolchain.use_persistent_workers: - link_cmd.add("--worker-id={}".format(arg.worker_id)) + link_cmd.add("--worker-target-id={}".format(arg.worker_target_id)) link_cmd.add("--worker-close") actions.run( @@ -697,7 +697,7 @@ def _build_haskell_lib( objects = objects, toolchain_libs = toolchain_libs, use_argsfile_at_link = ctx.attrs.use_argsfile_at_link, - worker_id = pkgname, + worker_target_id = pkgname, ), )) diff --git a/haskell/tools/generate_target_metadata.py b/haskell/tools/generate_target_metadata.py index 2e3962e77..749e6d351 100755 --- a/haskell/tools/generate_target_metadata.py +++ b/haskell/tools/generate_target_metadata.py @@ -36,7 +36,7 @@ def main(): type=argparse.FileType("w"), help="Write package metadata to this file in JSON format.") parser.add_argument( - "--worker-id", + "--worker-target-id", required=False, type=str, help="Worker id") @@ -91,7 +91,7 @@ def json_default_handler(o): def obtain_target_metadata(args): paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] - ghc_depends = run_ghc_depends(args.ghc, args.ghc_arg, args.source, paths, args.worker_id) + ghc_depends = run_ghc_depends(args.ghc, args.ghc_arg, args.source, paths, args.worker_target_id) th_modules = determine_th_modules(ghc_depends) module_mapping = determine_module_mapping(ghc_depends, args.source_prefix) module_graph = determine_module_graph(ghc_depends) @@ -190,14 +190,14 @@ def determine_package_deps(ghc_depends): return package_deps -def run_ghc_depends(ghc, ghc_args, sources, aux_paths, worker_id): +def run_ghc_depends(ghc, ghc_args, sources, aux_paths, worker_target_id): with tempfile.TemporaryDirectory() as dname: json_fname = os.path.join(dname, 
"depends.json") make_fname = os.path.join(dname, "depends.make") haskell_sources = list(filter(is_haskell_src, sources)) haskell_boot_sources = list(filter (is_haskell_boot, sources)) - if worker_id: - worker_args = ["--worker-id={}".format(worker_id)] + if worker_target_id: + worker_args = ["--worker-target-id={}".format(worker_target_id)] else: worker_args = [] args = [ diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 9a5862041..8af6fbe5a 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -37,7 +37,7 @@ def main(): help="Path to a package db that is used during the module compilation", ) parser.add_argument( - "--worker-id", required=False, type=str, help="worker id", + "--worker-target-id", required=False, type=str, help="worker target id", ) parser.add_argument( "--worker-close", required=False, type=bool, default=False, help="worker close", @@ -81,8 +81,8 @@ def main(): ) args, ghc_args = parser.parse_known_args() - if args.worker_id: - worker_args = ["--worker-id={}".format(args.worker_id)] + (["--worker-close"] if args.worker_close else []) + if args.worker_target_id: + worker_args = ["--worker-target-id={}".format(args.worker_target_id)] + (["--worker-close"] if args.worker_close else []) else: worker_args = [] cmd = [args.ghc] + worker_args + ghc_args @@ -108,7 +108,7 @@ def main(): if returncode != 0: return returncode - recompute_abi_hash(args.ghc, args.abi_out, args.worker_id) + recompute_abi_hash(args.ghc, args.abi_out, args.worker_target_id) # write an empty dep file, to signal that all tagged files are unused try: @@ -136,11 +136,11 @@ def main(): return 0 -def recompute_abi_hash(ghc, abi_out, worker_id): +def recompute_abi_hash(ghc, abi_out, worker_target_id): """Call ghc on the hi file and write the ABI hash to abi_out.""" hi_file = abi_out.with_suffix("") - if worker_id: - worker_args = ["--worker-id={}".format(worker_id)] + if worker_target_id: + worker_args = 
["--worker-target-id={}".format(worker_target_id)] else: worker_args = [] From 77611a557c98530c00a3ad5dc5d0650452c4ca87 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Thu, 17 Oct 2024 06:24:31 -0700 Subject: [PATCH 1128/1133] for now, show-iface-abi-hash has its own target-id. --- haskell/tools/ghc_wrapper.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 8af6fbe5a..5ddaeb8e2 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -108,7 +108,7 @@ def main(): if returncode != 0: return returncode - recompute_abi_hash(args.ghc, args.abi_out, args.worker_target_id) + recompute_abi_hash(args.ghc, args.abi_out) # write an empty dep file, to signal that all tagged files are unused try: @@ -136,13 +136,13 @@ def main(): return 0 -def recompute_abi_hash(ghc, abi_out, worker_target_id): +def recompute_abi_hash(ghc, abi_out): # worker_target_id """Call ghc on the hi file and write the ABI hash to abi_out.""" hi_file = abi_out.with_suffix("") - if worker_target_id: - worker_args = ["--worker-target-id={}".format(worker_target_id)] - else: - worker_args = [] + #if worker_target_id: + worker_args = ["--worker-target-id=show-iface-abi-hash"] # format(worker_target_id) + #else: + # worker_args = [] cmd = [ghc, "-v0", "-package-env=-", "--show-iface-abi-hash", hi_file] + worker_args From e4aa376cc82a5f9b4a84b4bbbf82ad1c0622ef71 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Mon, 28 Oct 2024 07:04:54 -0700 Subject: [PATCH 1129/1133] haskell_metadata as a worker-target-id --- haskell/compile.bzl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index c4ce2f8cd..a83164df2 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -202,7 +202,7 @@ def _dynamic_target_metadata_impl(actions, output, arg, pkg_deps) -> list[Provid md_args.add(packages_info.bin_paths) md_args.add("--ghc", 
arg.haskell_toolchain.compiler) if arg.haskell_toolchain.use_persistent_workers: - md_args.add("--worker-target-id", arg.worker_target_id) + md_args.add("--worker-target-id", "haskell_metadata") md_args.add(cmd_args(ghc_args, format="--ghc-arg={}")) md_args.add( "--source-prefix", @@ -277,7 +277,6 @@ def target_metadata( strip_prefix = _strip_prefix(str(ctx.label.cell_root), str(ctx.label.path)), suffix = suffix, toolchain_libs = toolchain_libs, - worker_target_id = pkgname, ), )) From ee578bc1139378259b43a83ba29acf143915ea05 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Mon, 28 Oct 2024 14:42:51 -0700 Subject: [PATCH 1130/1133] show-iface-abi-hash worker only when persistent workers are used. --- haskell/tools/ghc_wrapper.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/haskell/tools/ghc_wrapper.py b/haskell/tools/ghc_wrapper.py index 5ddaeb8e2..ccd4bee13 100755 --- a/haskell/tools/ghc_wrapper.py +++ b/haskell/tools/ghc_wrapper.py @@ -83,8 +83,10 @@ def main(): args, ghc_args = parser.parse_known_args() if args.worker_target_id: worker_args = ["--worker-target-id={}".format(args.worker_target_id)] + (["--worker-close"] if args.worker_close else []) + use_persistent_workers = True else: worker_args = [] + use_persistent_workers = False cmd = [args.ghc] + worker_args + ghc_args aux_paths = [str(binpath) for binpath in args.bin_path if binpath.is_dir()] + [str(os.path.dirname(binexepath)) for binexepath in args.bin_exe] @@ -108,7 +110,7 @@ def main(): if returncode != 0: return returncode - recompute_abi_hash(args.ghc, args.abi_out) + recompute_abi_hash(args.ghc, args.abi_out, use_persistent_workers) # write an empty dep file, to signal that all tagged files are unused try: @@ -136,13 +138,13 @@ def main(): return 0 -def recompute_abi_hash(ghc, abi_out): # worker_target_id +def recompute_abi_hash(ghc, abi_out, use_persistent_workers): """Call ghc on the hi file and write the ABI hash to abi_out.""" hi_file = 
abi_out.with_suffix("") - #if worker_target_id: - worker_args = ["--worker-target-id=show-iface-abi-hash"] # format(worker_target_id) - #else: - # worker_args = [] + if use_persistent_workers: + worker_args = ["--worker-target-id=show-iface-abi-hash"] + else: + worker_args = [] cmd = [ghc, "-v0", "-package-env=-", "--show-iface-abi-hash", hi_file] + worker_args From 068a6b20ed0a2335b9994e3289de2fdb3f6a9fd4 Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Wed, 30 Oct 2024 05:46:42 -0700 Subject: [PATCH 1131/1133] handle pkgname = None case --- haskell/compile.bzl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index a83164df2..7c85c0d50 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -446,9 +446,10 @@ def _common_compile_module_args( direct_deps_info: list[HaskellLibraryInfoTSet], pkgname: str | None = None, ) -> CommonCompileModuleArgs: + command = cmd_args(ghc_wrapper) command.add("--ghc", haskell_toolchain.compiler) - if haskell_toolchain.use_persistent_workers: + if haskell_toolchain.use_persistent_workers and pkgname: worker_target_id = pkgname command.add("--worker-target-id", worker_target_id) # Some rules pass in RTS (e.g. `+RTS ... -RTS`) options for GHC, which can't From 5adae0478f1e022d1f4fd67e9490e599d870955d Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Thu, 19 Dec 2024 19:34:36 -0800 Subject: [PATCH 1132/1133] introduce module_prefix. 
--- decls/haskell_common.bzl | 8 ++++++++ decls/haskell_rules.bzl | 2 ++ haskell/compile.bzl | 9 ++++++--- 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/decls/haskell_common.bzl b/decls/haskell_common.bzl index 711a3c2ad..0a9eaa729 100644 --- a/decls/haskell_common.bzl +++ b/decls/haskell_common.bzl @@ -83,6 +83,13 @@ def _extra_libraries_arg(): """), } +def _module_prefix_arg(): + return { + "module_prefix": attrs.option(attrs.string(), default = None, doc = """ + Module prefix if needed +"""), + } + haskell_common = struct( srcs_arg = _srcs_arg, deps_arg = _deps_arg, @@ -93,4 +100,5 @@ haskell_common = struct( srcs_envs_arg = _srcs_envs_arg, use_argsfile_at_link_arg = _use_argsfile_at_link_arg, extra_libraries_arg = _extra_libraries_arg, + module_prefix_arg = _module_prefix_arg, ) diff --git a/decls/haskell_rules.bzl b/decls/haskell_rules.bzl index 17923f930..f20cc98c1 100644 --- a/decls/haskell_rules.bzl +++ b/decls/haskell_rules.bzl @@ -53,6 +53,7 @@ haskell_binary = prelude_rule( haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | haskell_common.scripts_arg() | + haskell_common.module_prefix_arg() | buck.platform_deps_arg() | { "contacts": attrs.list(attrs.string(), default = []), @@ -175,6 +176,7 @@ haskell_library = prelude_rule( haskell_common.compiler_flags_arg() | haskell_common.deps_arg() | haskell_common.scripts_arg() | + haskell_common.module_prefix_arg() | buck.platform_deps_arg() | native_common.link_whole(link_whole_type = attrs.bool(default = False)) | native_common.preferred_linkage(preferred_linkage_type = attrs.enum(Linkage.values())) | diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 7c85c0d50..3efa4d480 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -124,7 +124,7 @@ def _strip_prefix(prefix, s): return stripped if stripped != None else s -def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_style: LinkStyle, enable_profiling: bool, suffix: str) -> dict[str, 
_Module]: +def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_style: LinkStyle, enable_profiling: bool, suffix: str, module_prefix: str | None) -> dict[str, _Module]: modules = {} osuf, hisuf = output_extensions(link_style, enable_profiling) @@ -137,7 +137,10 @@ def _modules_by_name(ctx: AnalysisContext, *, sources: list[Artifact], link_styl continue module_name = src_to_module_name(src.short_path) + bootsuf - interface_path = paths.replace_extension(src.short_path, "." + hisuf + bootsuf) + if module_prefix: + interface_path = paths.replace_extension(module_prefix.replace(".", "/") + "/" + src.short_path, "." + hisuf + bootsuf) + else: + interface_path = paths.replace_extension(src.short_path, "." + hisuf + bootsuf) interface = ctx.actions.declare_output("mod-" + suffix, interface_path) interfaces = [interface] object_path = paths.replace_extension(src.short_path, "." + osuf + bootsuf) @@ -832,7 +835,7 @@ def compile( pkgname: str | None = None) -> CompileResultInfo: artifact_suffix = get_artifact_suffix(link_style, enable_profiling) - modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix) + modules = _modules_by_name(ctx, sources = ctx.attrs.srcs, link_style = link_style, enable_profiling = enable_profiling, suffix = artifact_suffix, module_prefix = ctx.attrs.module_prefix) haskell_toolchain = ctx.attrs._haskell_toolchain[HaskellToolchainInfo] From e14e57bb0e8a2bd8897ebc25df960235e2e7209d Mon Sep 17 00:00:00 2001 From: Ian-Woo Kim Date: Tue, 24 Dec 2024 09:14:23 -0800 Subject: [PATCH 1133/1133] -fpackage-db-byte-code --- haskell/compile.bzl | 1 + 1 file changed, 1 insertion(+) diff --git a/haskell/compile.bzl b/haskell/compile.bzl index 3efa4d480..87a33b5ba 100644 --- a/haskell/compile.bzl +++ b/haskell/compile.bzl @@ -720,6 +720,7 @@ def _compile_module( compile_cmd.add("-l{}".format(l.name)) compile_cmd.add("-fwrite-if-simplified-core") + 
compile_cmd.add("-fpackage-db-byte-code") if enable_th: compile_cmd.add("-fprefer-byte-code")