From 54583b075f32eefb3e5cbad73509026062b9613b Mon Sep 17 00:00:00 2001 From: Tom Switzer Date: Wed, 14 May 2025 17:02:14 -0400 Subject: [PATCH 1/2] Get bazel tests working on Spark 3.2. --- .bazelversion | 1 + .ijwb/.bazelproject | 17 +- .ijwb/aspects/BUILD.bazel | 16 + .ijwb/aspects/artifacts.bzl | 70 + .ijwb/aspects/code_generator_info.bzl | 19 + .ijwb/aspects/fast_build_info_bundled.bzl | 149 ++ .ijwb/aspects/flag_hack.bzl | 32 + .ijwb/aspects/intellij_info_bundled.bzl | 100 ++ .ijwb/aspects/intellij_info_impl_bundled.bzl | 1391 +++++++++++++++++ .ijwb/aspects/java_classpath.bzl | 37 + .ijwb/aspects/java_info.bzl | 23 + .ijwb/aspects/make_variables.bzl | 216 +++ .ijwb/aspects/python_info.bzl | 24 + .ijwb/aspects/xcode_query.bzl | 22 + WORKSPACE | 6 + aggregator/BUILD.bazel | 4 +- .../aggregator/test/ApproxDistinctTest.scala | 6 + .../aggregator/test/ApproxHistogramTest.scala | 10 + .../test/ApproxPercentilesTest.scala | 5 + .../aggregator/test/FrequentItemsTest.scala | 8 + .../chronon/aggregator/test/MinHeapTest.scala | 3 + .../chronon/aggregator/test/MomentTest.scala | 6 + .../aggregator/test/RowAggregatorTest.scala | 2 + .../test/SawtoothAggregatorTest.scala | 3 + .../test/SawtoothOnlineAggregatorTest.scala | 2 + .../test/TwoStackLiteAggregatorTest.scala | 3 + .../aggregator/test/VarianceTest.scala | 2 + api/BUILD.bazel | 7 +- flink/BUILD.bazel | 5 +- jvm/spark_repos.bzl | 16 +- online/BUILD.bazel | 24 +- .../online/test/DataStreamBuilderTest.scala | 7 +- service/BUILD.bazel | 8 +- spark/BUILD.bazel | 23 +- .../chronon/spark/SparkSessionBuilder.scala | 9 +- spark/src/test/resources/BUILD.bazel | 5 + .../spark/test/ChainingFetcherTest.scala | 5 +- .../chronon/spark/test/ExampleDataUtils.scala | 17 + .../chronon/spark/test/FetchStatsTest.scala | 2 + .../ai/chronon/spark/test/FetcherTest.scala | 113 +- .../chronon/spark/test/FetcherTestUtil.scala | 112 ++ .../spark/test/MetadataExporterTest.scala | 8 +- .../spark/test/MetadataStoreTest.scala | 11 +- .../spark/test/SchemaEvolutionTest.scala | 29 +- .../ai/chronon/spark/test/StreamingTest.scala | 7 +- .../ai/chronon/spark/test/TestUtils.scala | 27 +- third_party/java/spark/BUILD.bazel | 18 + tools/build_rules/prelude_bazel | 3 +- tools/build_rules/testing.bzl | 49 + .../intellij/default_view.bazelproject | 16 + tools/policies/BUILD.bazel | 1 + tools/policies/derby.policy | 9 + 52 files changed, 2528 insertions(+), 180 deletions(-) create mode 100644 .bazelversion create mode 100644 .ijwb/aspects/BUILD.bazel create mode 100644 .ijwb/aspects/artifacts.bzl create mode 100644 .ijwb/aspects/code_generator_info.bzl create mode 100644 .ijwb/aspects/fast_build_info_bundled.bzl create mode 100644 .ijwb/aspects/flag_hack.bzl create mode 100644 .ijwb/aspects/intellij_info_bundled.bzl create mode 100644 .ijwb/aspects/intellij_info_impl_bundled.bzl create mode 100644 .ijwb/aspects/java_classpath.bzl create mode 100644 .ijwb/aspects/java_info.bzl create mode 100644 .ijwb/aspects/make_variables.bzl create mode 100644 .ijwb/aspects/python_info.bzl create mode 100644 .ijwb/aspects/xcode_query.bzl create mode 100644 spark/src/test/resources/BUILD.bazel create mode 100644 spark/src/test/scala/ai/chronon/spark/test/ExampleDataUtils.scala create mode 100644 spark/src/test/scala/ai/chronon/spark/test/FetcherTestUtil.scala create mode 100644 tools/build_rules/testing.bzl create mode 100644 tools/ide_support/intellij/default_view.bazelproject create mode 100644 tools/policies/BUILD.bazel create mode 100644 tools/policies/derby.policy diff --git a/.bazelversion 
b/.bazelversion new file mode 100644 index 0000000000..19b860c187 --- /dev/null +++ b/.bazelversion @@ -0,0 +1 @@ +6.4.0 diff --git a/.ijwb/.bazelproject b/.ijwb/.bazelproject index d87cdc2369..0145258a47 100644 --- a/.ijwb/.bazelproject +++ b/.ijwb/.bazelproject @@ -1,16 +1 @@ -directories: - # Add the directories you want added as source here - # By default, we've added your entire workspace ('.') - . - -# Automatically includes all relevant targets under the 'directories' above -derive_targets_from_directories: true - -targets: - # If source code isn't resolving, add additional targets that compile it here - -additional_languages: - # Uncomment any additional languages you want supported - python - scala - java +import tools/ide_support/intellij/default_view.bazelproject diff --git a/.ijwb/aspects/BUILD.bazel b/.ijwb/aspects/BUILD.bazel new file mode 100644 index 0000000000..1797ffe5e7 --- /dev/null +++ b/.ijwb/aspects/BUILD.bazel @@ -0,0 +1,16 @@ +# +# Description: +# The final form of the BUILD file accessed at runtime as an external WORKSPACE. +# + +licenses(["notice"]) # Apache 2.0 + +load(":flag_hack.bzl", "define_flag_hack") + +exports_files([ + "tools/PackageParser_deploy.jar", + "tools/CreateAar_deploy.jar", + "tools/JarFilter_deploy.jar", +]) + +define_flag_hack() diff --git a/.ijwb/aspects/artifacts.bzl b/.ijwb/aspects/artifacts.bzl new file mode 100644 index 0000000000..bc7b403656 --- /dev/null +++ b/.ijwb/aspects/artifacts.bzl @@ -0,0 +1,70 @@ +"""Utility methods for working with ArtifactLocation types.""" + +def struct_omit_none(**kwargs): + """A replacement for standard `struct` function that omits the fields with None value.""" + d = {name: kwargs[name] for name in kwargs if kwargs[name] != None} + return struct(**d) + +def sources_from_target(ctx): + """Get the list of sources from a target as artifact locations.""" + return artifacts_from_target_list_attr(ctx, "srcs") + +def artifacts_from_target_list_attr(ctx, attr_name): + """Converts a list of targets to a list of artifact locations.""" + return [ + artifact_location(f) + for target in getattr(ctx.rule.attr, attr_name, []) + for f in target.files.to_list() + ] + +def artifact_location(f): + """Creates an ArtifactLocation proto from a File.""" + if f == None: + return None + + return to_artifact_location( + f.path, + f.root.path if not f.is_source else "", + f.is_source, + is_external_artifact(f.owner), + ) + +def to_artifact_location(exec_path, root_exec_path_fragment, is_source, is_external): + """Derives workspace path from other path fragments, and creates an ArtifactLocation proto.""" + + # directory structure: + # exec_path = (../repo_name)? + (root_fragment)? + relative_path + relative_path = _strip_external_workspace_prefix(exec_path) + relative_path = _strip_root_exec_path_fragment(relative_path, root_exec_path_fragment) + + root_exec_path_fragment = exec_path[:-(len("/" + relative_path))] + + return struct_omit_none( + relative_path = relative_path, + is_source = is_source, + is_external = is_external, + root_execution_path_fragment = root_exec_path_fragment + ) + +def is_external_artifact(label): + """Determines whether a label corresponds to an external artifact.""" + + # Label.EXTERNAL_PATH_PREFIX is due to change from 'external' to '..' in Bazel 0.4.5. + # This code is for forwards and backwards compatibility. + # Remove the 'external' check when Bazel 0.4.4 and earlier no longer need to be supported. 
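+    # Illustrative (repo names assumed): a label from an external repo such as
+    # @maven//:guava has workspace_root "external/maven" (or "../maven" in the
+    # newer layout), while a main-repo label like //spark:spark has "".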
+ return label.workspace_root.startswith("external") or label.workspace_root.startswith("..") + +def _strip_root_exec_path_fragment(path, root_fragment): + if root_fragment and path.startswith(root_fragment + "/"): + return path[len(root_fragment + "/"):] + return path + +def _strip_external_workspace_prefix(path): + """Either 'external/workspace_name/' or '../workspace_name/'.""" + + # Label.EXTERNAL_PATH_PREFIX is due to change from 'external' to '..' in Bazel 0.4.5. + # This code is for forwards and backwards compatibility. + # Remove the 'external/' check when Bazel 0.4.4 and earlier no longer need to be supported. + if path.startswith("../") or path.startswith("external/"): + return "/".join(path.split("/")[2:]) + return path diff --git a/.ijwb/aspects/code_generator_info.bzl b/.ijwb/aspects/code_generator_info.bzl new file mode 100644 index 0000000000..7b5e598e13 --- /dev/null +++ b/.ijwb/aspects/code_generator_info.bzl @@ -0,0 +1,19 @@ +"""Data required for the code-generator system""" + +# The following is a list of the languages to the set of Rule names +# which can be considered code-generators for that language. Look +# for the `get_code_generator_rule_names` function in the aspect +# logic that integrates with this constant. + +CODE_GENERATOR_RULE_NAMES = struct( +# TEMPLATE-INCLUDE-BEGIN + generic = [ + ], + java = [ + ], + python = [ + ], + scala = [ + ], +# TEMPLATE-INCLUDE-END +) diff --git a/.ijwb/aspects/fast_build_info_bundled.bzl b/.ijwb/aspects/fast_build_info_bundled.bzl new file mode 100644 index 0000000000..bb9326c24c --- /dev/null +++ b/.ijwb/aspects/fast_build_info_bundled.bzl @@ -0,0 +1,149 @@ +"""An aspect to gather info needed by the FastBuildService.""" + +load( + ":artifacts.bzl", + "artifact_location", + "sources_from_target", + "struct_omit_none", +) +load( + ":intellij_info_impl_bundled.bzl", + "stringify_label", +) +load(":java_info.bzl", "get_java_info") + +_DEP_ATTRS = ["deps", "exports", "runtime_deps", "_java_toolchain"] + +def _get_android_ide_info(target): + if hasattr(android_common, "AndroidIdeInfo") and android_common.AndroidIdeInfo in target: + return target[android_common.AndroidIdeInfo] + if hasattr(target, "android"): + return target.android + return None + +def _fast_build_info_impl(target, ctx): + dep_targets = _get_all_dep_targets(target, ctx) + dep_outputs = _get_all_dep_outputs(dep_targets) + + output_files = [] + + info = { + "workspace_name": ctx.workspace_name, + "label": stringify_label(target.label), + "dependencies": [stringify_label(t.label) for t in dep_targets], + } + + write_output = False + if hasattr(ctx.rule.attr, "data") and ctx.rule.attr.data: + # The data attribute can reference artifacts directly (like deploy jars) that the aspect + # will skip. So we need to gather them up here, in the referencing target. 
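+        # For example (hypothetical label), data = [":parser_deploy.jar"] is not
+        # itself visited by the aspect, so its files are recorded here instead.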
+ write_output = True + info["data"] = [ + struct( + label = stringify_label(datadep.label), + artifacts = [artifact_location(file) for file in datadep.files.to_list()], + ) + for datadep in ctx.rule.attr.data + ] + + if hasattr(target, "java_toolchain"): + toolchain = target.java_toolchain + elif java_common.JavaToolchainInfo != platform_common.ToolchainInfo and \ + java_common.JavaToolchainInfo in target: + toolchain = target[java_common.JavaToolchainInfo] + else: + toolchain = None + if toolchain: + write_output = True + javac_jars = [] + if hasattr(toolchain, "tools"): + javac_jars = [artifact_location(f) for f in toolchain.tools.to_list()] + bootclasspath_jars = [] + if hasattr(toolchain, "bootclasspath"): + bootclasspath_jars = [artifact_location(f) for f in toolchain.bootclasspath.to_list()] + info["java_toolchain_info"] = struct_omit_none( + javac_jars = javac_jars, + bootclasspath_jars = bootclasspath_jars, + source_version = toolchain.source_version, + target_version = toolchain.target_version, + ) + java_info = get_java_info(target) + if java_info: + write_output = True + launcher = None + if hasattr(ctx.rule.attr, "use_launcher") and not ctx.rule.attr.use_launcher: + launcher = None + elif hasattr(ctx.rule.attr, "launcher") and ctx.rule.attr.launcher: + launcher = stringify_label(ctx.rule.attr.launcher.label) + elif hasattr(ctx.rule.attr, "_java_launcher") and ctx.rule.attr._java_launcher: + # TODO: b/295221112 - remove _java_launcher when it's removed from Java rules + launcher = stringify_label(ctx.rule.attr._java_launcher.label) + elif hasattr(ctx.rule.attr, "_javabase") and ctx.rule.attr._javabase: + launcher = stringify_label(ctx.rule.attr._javabase.label) + java_info = { + "sources": sources_from_target(ctx), + "test_class": getattr(ctx.rule.attr, "test_class", None), + "test_size": getattr(ctx.rule.attr, "size", None), + "launcher": launcher, + "swigdeps": getattr(ctx.rule.attr, "swigdeps", True), + "jvm_flags": getattr(ctx.rule.attr, "jvm_flags", []), + "main_class": getattr(ctx.rule.attr, "main_class", None), + } + annotation_processing = target[JavaInfo].annotation_processing + if annotation_processing: + java_info["annotation_processor_class_names"] = annotation_processing.processor_classnames + java_info["annotation_processor_classpath"] = [ + artifact_location(t) + for t in annotation_processing.processor_classpath.to_list() + ] + info["java_info"] = struct_omit_none(**java_info) + + android_ide_info = _get_android_ide_info(target) + if android_ide_info: + write_output = True + android_info = struct_omit_none( + aar = artifact_location(android_ide_info.aar), + merged_manifest = artifact_location( + getattr(android_ide_info, "generated_manifest", None) or + getattr(android_ide_info, "merged_manifest", None), + ), + ) + info["android_info"] = android_info + + if write_output: + output_file = ctx.actions.declare_file(target.label.name + ".ide-fast-build-info.txt") + ctx.actions.write(output_file, proto.encode_text(struct_omit_none(**info))) + output_files.append(output_file) + + output_groups = depset(output_files, transitive = dep_outputs) + return [OutputGroupInfo(**{"ide-fast-build": output_groups})] + +def _get_all_dep_outputs(dep_targets): + """Get the ide-fast-build output files for all dependencies""" + return [ + dep_target[OutputGroupInfo]["ide-fast-build"] + for dep_target in dep_targets + if _has_ide_fast_build(dep_target) + ] + +def _get_all_dep_targets(target, ctx): + """Get all the targets mentioned in one of the _DEP_ATTRS attributes of the target""" + 
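+    # Note: an attribute may hold a single Target (e.g. "_java_toolchain") or a
+    # list of Targets (e.g. "deps"); both shapes are handled by the type checks below.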
targets = [] + for attr_name in _DEP_ATTRS: + attr_val = getattr(ctx.rule.attr, attr_name, None) + if not attr_val: + continue + attr_type = type(attr_val) + if attr_type == type(target): + targets.append(attr_val) + elif attr_type == type([]): + targets += [list_val for list_val in attr_val if type(list_val) == type(target)] + return targets + +def _has_ide_fast_build(target): + return OutputGroupInfo in target and "ide-fast-build" in target[OutputGroupInfo] + +fast_build_info_aspect = aspect( + attr_aspects = _DEP_ATTRS, + implementation = _fast_build_info_impl, +) diff --git a/.ijwb/aspects/flag_hack.bzl b/.ijwb/aspects/flag_hack.bzl new file mode 100644 index 0000000000..8d4494f63e --- /dev/null +++ b/.ijwb/aspects/flag_hack.bzl @@ -0,0 +1,32 @@ +##### Begin bazel-flag-hack +# The flag hack stuff below is a way to detect flags that bazel has been invoked with from the +# aspect. Once PY3-as-default is stable, it can be removed. When removing, also remove the +# define_flag_hack() call in BUILD and the "_flag_hack" attr on the aspect below. See +# "PY3-as-default" in: +# https://github.com/bazelbuild/bazel/blob/master/src/main/java/com/google/devtools/build/lib/rules/python/PythonConfiguration.java + +FlagHackInfo = provider(fields = ["incompatible_py2_outputs_are_suffixed"]) + +def _flag_hack_impl(ctx): + return [FlagHackInfo(incompatible_py2_outputs_are_suffixed = ctx.attr.incompatible_py2_outputs_are_suffixed)] + +_flag_hack_rule = rule( + attrs = {"incompatible_py2_outputs_are_suffixed": attr.bool()}, + implementation = _flag_hack_impl, +) + +def define_flag_hack(): + native.config_setting( + name = "incompatible_py2_outputs_are_suffixed_setting", + values = {"incompatible_py2_outputs_are_suffixed": "true"}, + ) + _flag_hack_rule( + name = "flag_hack", + incompatible_py2_outputs_are_suffixed = select({ + ":incompatible_py2_outputs_are_suffixed_setting": True, + "//conditions:default": False, + }), + visibility = ["//visibility:public"], + ) + +##### End bazel-flag-hack diff --git a/.ijwb/aspects/intellij_info_bundled.bzl b/.ijwb/aspects/intellij_info_bundled.bzl new file mode 100644 index 0000000000..70ae96e453 --- /dev/null +++ b/.ijwb/aspects/intellij_info_bundled.bzl @@ -0,0 +1,100 @@ +"""Bazel-specific intellij aspect.""" + +load( + ":intellij_info_impl_bundled.bzl", + "intellij_info_aspect_impl", + "make_intellij_info_aspect", + "is_valid_aspect_target", +) + +EXTRA_DEPS = [ + "embed", # From go rules (bazel only) + "_cc_toolchain", # From rules_cc (bazel only) + "_kt_toolchain", # From rules_kotlin (bazel only) +] + +TOOLCHAIN_TYPE_DEPS = [ + "@@bazel_tools//tools/cpp:toolchain_type", # For rules_cc +] + +def tool_label(tool_name): + """Returns a label that points to a tool target in the bundled aspect workspace.""" + return Label("tools/" + tool_name) + +def get_go_import_path(ctx): + """Returns the import path for a go target.""" + import_path = getattr(ctx.rule.attr, "importpath", None) + if import_path: + return import_path + prefix = None + if hasattr(ctx.rule.attr, "_go_prefix"): + prefix = ctx.rule.attr._go_prefix.go_prefix + if not prefix: + return None + import_path = prefix + if ctx.label.package: + import_path += "/" + ctx.label.package + if ctx.label.name != "go_default_library": + import_path += "/" + ctx.label.name + return import_path + +def is_go_proto_library(target, _ctx): + return hasattr(target[OutputGroupInfo], "go_generated_srcs") + +def get_go_proto_library_generated_srcs(target): + files = target[OutputGroupInfo].go_generated_srcs.to_list() + return [f 
for f in files if f.basename.endswith(".go")] + +def get_py_launcher(target, ctx): + """Returns the python launcher for a given rule.""" + + # Used by other implementations of get_launcher + _ = target # @unused + attr = ctx.rule.attr + if hasattr(attr, "_launcher") and attr._launcher != None: + return str(attr._launcher.label) + return None + +def _collect_targets_from_toolchains(ctx, toolchain_types): + """Returns a list of targets for the given toolchain types.""" + result = [] + + for toolchain_type in toolchain_types: + # toolchains attribute only available in Bazel 8+ + toolchains = getattr(ctx.rule, "toolchains", []) + + if toolchain_type in toolchains: + if is_valid_aspect_target(toolchains[toolchain_type]): + result.append(toolchains[toolchain_type]) + + return result + +semantics = struct( + tool_label = tool_label, + toolchains_propagation = struct( + toolchain_types = TOOLCHAIN_TYPE_DEPS, + collect_toolchain_deps = _collect_targets_from_toolchains, + ), + extra_deps = EXTRA_DEPS, + extra_required_aspect_providers = [], + go = struct( + get_import_path = get_go_import_path, + is_proto_library = is_go_proto_library, + get_proto_library_generated_srcs = get_go_proto_library_generated_srcs, + ), + py = struct( + get_launcher = get_py_launcher, + ), + flag_hack_label = ":flag_hack", +) + +def _aspect_impl(target, ctx): + return intellij_info_aspect_impl(target, ctx, semantics) + +# TEMPLATE-INCLUDE-BEGIN +intellij_info_aspect = make_intellij_info_aspect( + _aspect_impl, + semantics, +) +# TEMPLATE-INCLUDE-END + diff --git a/.ijwb/aspects/intellij_info_impl_bundled.bzl b/.ijwb/aspects/intellij_info_impl_bundled.bzl new file mode 100644 index 0000000000..ac09bb48ca --- /dev/null +++ b/.ijwb/aspects/intellij_info_impl_bundled.bzl @@ -0,0 +1,1391 @@ +"""Implementation of IntelliJ-specific information collecting aspect.""" + +load( + "@bazel_tools//tools/build_defs/cc:action_names.bzl", + "ACTION_NAMES", +) +load( + ":artifacts.bzl", + "artifact_location", + "artifacts_from_target_list_attr", + "is_external_artifact", + "sources_from_target", + "struct_omit_none", + "to_artifact_location", +) +load(":flag_hack.bzl", "FlagHackInfo") + +load(":java_info.bzl", "get_java_info", "java_info_in_target", "java_info_reference") + +load(":python_info.bzl", "get_py_info", "py_info_in_target") + +load(":code_generator_info.bzl", "CODE_GENERATOR_RULE_NAMES") + +load( + ":make_variables.bzl", + "expand_make_variables", +) + +IntelliJInfo = provider( + doc = "Collected information about the targets visited by the aspect.", + fields = [ + "export_deps", + "kind", + "output_groups", + "target_key", + ], +) + +# Defensive list of features that can appear in the C++ toolchain, but which we +# definitely don't want to enable (when enabled, they'd contribute command line +# flags that don't make sense in the context of intellij info). +UNSUPPORTED_FEATURES = [ + "thin_lto", + "module_maps", + "use_header_modules", + "fdo_instrument", + "fdo_optimize", +] + +# Compile-time dependency attributes, grouped by type. 
+DEPS = [ + "_stl", # From cc rules + "malloc", # From cc_binary rules + "implementation_deps", # From cc_library rules + "_java_toolchain", # From java rules + "deps", + "jars", # from java_import rules + "exports", + "java_lib", # From old proto_library rules + "_android_sdk", # from android rules + "aidl_lib", # from android_sdk + "_scala_toolchain", # From scala rules + "test_app", # android_instrumentation_test + "instruments", # android_instrumentation_test + "tests", # From test_suite + "compilers", # From go_proto_library + "associates", # From kotlin rules +] + +# Run-time dependency attributes, grouped by type. +RUNTIME_DEPS = [ + "runtime_deps", +] + +PREREQUISITE_DEPS = [] + +# Dependency type enum +COMPILE_TIME = 0 + +RUNTIME = 1 + +# PythonVersion enum; must match PyIdeInfo.PythonVersion +PY2 = 1 + +PY3 = 2 + +# PythonCompatVersion enum; must match PyIdeInfo.PythonSrcsVersion +SRC_PY2 = 1 + +SRC_PY3 = 2 + +SRC_PY2AND3 = 3 + +SRC_PY2ONLY = 4 + +SRC_PY3ONLY = 5 + +##### Helpers + +def run_jar(ctx, jar, **kwargs): + """Runs a jar using the current java runtime used to run this bazel instance. + + Finds the current java runtime and uses the java executable to run the provided jar. The jar + file should be a self contained _deploy jar. + """ + + host_java = ctx.attr._java_runtime[java_common.JavaRuntimeInfo] + + return ctx.actions.run_shell( + tools = depset([jar], transitive = [host_java.files]), + command = "%s -jar %s $@" % (host_java.java_executable_exec_path, jar.path), + **kwargs, + ) + +def get_code_generator_rule_names(ctx, language_name): + """Supplies a list of Rule names for code generation for the language specified + + For some languages, it is possible to specify Rules' names that are interpreted as + code-generators for the language. These Rules' names are specified as attrs and are provided to + the aspect using the `AspectStrategy#AspectParameter` in the plugin logic. + """ + + if not language_name: + fail("the `language_name` must be provided") + + if hasattr(CODE_GENERATOR_RULE_NAMES, language_name): + return getattr(CODE_GENERATOR_RULE_NAMES, language_name) + + return [] + +def get_registry_flag(ctx, name): + """Registry flags are passed to aspects using defines. See CppAspectArgsProvider.""" + + return ctx.var.get(name) == "true" + +def source_directory_tuple(resource_file): + """Creates a tuple of (exec_path, root_exec_path_fragment, is_source, is_external).""" + relative_path = str(android_common.resource_source_directory(resource_file)) + root_exec_path_fragment = resource_file.root.path if not resource_file.is_source else None + return ( + relative_path if resource_file.is_source else root_exec_path_fragment + "/" + relative_path, + root_exec_path_fragment, + resource_file.is_source, + is_external_artifact(resource_file.owner), + ) + +def get_res_artifacts(resources): + """Get a map from the res folder to the set of resource files within that folder. 
+ + Args: + resources: all resources of a target + + Returns: + a map from the res folder to the set of resource files within that folder (as a tuple of path segments) + """ + res_artifacts = dict() + for resource in resources: + for file in resource.files.to_list(): + res_folder = source_directory_tuple(file) + res_artifacts.setdefault(res_folder, []).append(file) + return res_artifacts + +def build_file_artifact_location(ctx): + """Creates an ArtifactLocation proto representing a location of a given BUILD file.""" + return to_artifact_location( + ctx.label.package + "/BUILD", + ctx.label.package + "/BUILD", + True, + is_external_artifact(ctx.label), + ) + +# https://github.com/bazelbuild/bazel/issues/18966 +def _list_or_depset_to_list(list_or_depset): + if hasattr(list_or_depset, "to_list"): + return list_or_depset.to_list() + return list_or_depset + +def get_source_jars(output): + if hasattr(output, "source_jars"): + return _list_or_depset_to_list(output.source_jars) + if hasattr(output, "source_jar"): + return [output.source_jar] + return [] + +def library_artifact(java_output, rule_kind = None): + """Creates a LibraryArtifact representing a given java_output.""" + if java_output == None or java_output.class_jar == None: + return None + src_jars = get_source_jars(java_output) + + if rule_kind != None and rule_kind.startswith("scala"): + interface_jar = None + else: + interface_jar = artifact_location(java_output.ijar) + + return struct_omit_none( + interface_jar = interface_jar, + jar = artifact_location(java_output.class_jar), + source_jar = artifact_location(src_jars[0]) if src_jars else None, + source_jars = [artifact_location(f) for f in src_jars], + ) + +def annotation_processing_jars(generated_class_jar, generated_source_jar): + """Creates a LibraryArtifact representing Java annotation processing jars.""" + src_jar = generated_source_jar + return struct_omit_none( + jar = artifact_location(generated_class_jar), + source_jar = artifact_location(src_jar), + source_jars = [artifact_location(src_jar)] if src_jar else None, + ) + +def jars_from_output(output): + """Collect jars for intellij-resolve-files from Java output.""" + if output == None: + return [] + source_jars = get_source_jars(output) + return [ + jar + for jar in ([output.ijar if len(source_jars) > 0 and output.ijar else output.class_jar] + source_jars) + if jar != None and not jar.is_source + ] + +def _collect_target_from_attr(rule_attrs, attr_name, result): + """Collects the targets from the given attr into the result.""" + if not hasattr(rule_attrs, attr_name): + return + attr_value = getattr(rule_attrs, attr_name) + type_name = type(attr_value) + if type_name == "Target": + result.append(attr_value) + elif type_name == "list": + result.extend(attr_value) + +def collect_targets_from_attrs(rule_attrs, attrs): + """Returns a list of targets from the given attributes.""" + result = [] + for attr_name in attrs: + _collect_target_from_attr(rule_attrs, attr_name, result) + return [target for target in result if is_valid_aspect_target(target)] + +def targets_to_labels(targets): + """Returns a set of label strings for the given targets.""" + return depset([str(target.label) for target in targets]) + +def list_omit_none(value): + """Returns a list of the value, or the empty list if None.""" + return [value] if value else [] + +def is_valid_aspect_target(target): + """Returns whether the target has had the aspect run on it.""" + return IntelliJInfo in target + +def get_aspect_ids(ctx): + """Returns the all aspect ids, filtering 
out self.""" + aspect_ids = None + if hasattr(ctx, "aspect_ids"): + aspect_ids = ctx.aspect_ids + else: + return None + return [aspect_id for aspect_id in aspect_ids if "intellij_info_aspect" not in aspect_id] + +def _is_language_specific_proto_library(ctx, target, semantics): + """Returns True if the target is a proto library with attached language-specific aspect.""" + if ctx.rule.kind != "proto_library": + return False + if java_info_in_target(target): + return True + if CcInfo in target: + return True + if semantics.go.is_proto_library(target, ctx): + return True + return False + +def stringify_label(label): + """Stringifies a label, making sure any leading '@'s are stripped from main repo labels.""" + s = str(label) + + # If the label is in the main repo, make sure any leading '@'s are stripped so that tests are + # okay with the fixture setups. + return s.lstrip("@") if s.startswith("@@//") or s.startswith("@//") else s + +def make_target_key(label, aspect_ids): + """Returns a TargetKey proto struct from a target.""" + return struct_omit_none( + aspect_ids = tuple(aspect_ids) if aspect_ids else None, + label = stringify_label(label), + ) + +def make_dep(dep, dependency_type): + """Returns a Dependency proto struct.""" + return struct( + dependency_type = dependency_type, + target = dep[IntelliJInfo].target_key, + ) + +def make_deps(deps, dependency_type): + """Returns a list of Dependency proto structs.""" + return [make_dep(dep, dependency_type) for dep in deps] + +def make_dep_from_label(label, dependency_type): + """Returns a Dependency proto struct from a label.""" + return struct( + dependency_type = dependency_type, + target = struct(label = stringify_label(label)), + ) + +def update_sync_output_groups(groups_dict, key, new_set): + """Updates all sync-relevant output groups associated with 'key'. + + This is currently the [key] output group itself, together with [key]-outputs + and [key]-direct-deps. + + Args: + groups_dict: the output groups dict, from group name to artifact depset. + key: the base output group name. + new_set: a depset of artifacts to add to the output groups. 
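+
+    Example (illustrative): update_sync_output_groups(groups, "intellij-info-java",
+    depset([f])) merges f into "intellij-info-java", "intellij-info-java-outputs"
+    and "intellij-info-java-direct-deps".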
+ """ + update_set_in_dict(groups_dict, key, new_set) + update_set_in_dict(groups_dict, key + "-outputs", new_set) + update_set_in_dict(groups_dict, key + "-direct-deps", new_set) + +def update_set_in_dict(input_dict, key, other_set): + """Updates depset in dict, merging it with another depset.""" + input_dict[key] = depset(transitive = [input_dict.get(key, depset()), other_set]) + +def _get_output_mnemonic(ctx): + """Gives the output directory mnemonic for some target context.""" + return ctx.bin_dir.path.split("/")[1] + +def _get_python_version(ctx): + if ctx.attr._flag_hack[FlagHackInfo].incompatible_py2_outputs_are_suffixed: + if _get_output_mnemonic(ctx).find("-py2-") != -1: + return PY2 + return PY3 + else: + if _get_output_mnemonic(ctx).find("-py3-") != -1: + return PY3 + return PY2 + +_SRCS_VERSION_MAPPING = { + "PY2": SRC_PY2, + "PY3": SRC_PY3, + "PY2AND3": SRC_PY2AND3, + "PY2ONLY": SRC_PY2ONLY, + "PY3ONLY": SRC_PY3ONLY, +} + +def _get_python_srcs_version(ctx): + srcs_version = getattr(ctx.rule.attr, "srcs_version", "PY2AND3") + return _SRCS_VERSION_MAPPING.get(srcs_version, default = SRC_PY2AND3) + +def _do_starlark_string_expansion(ctx, name, strings, extra_targets = [], tokenization = True): + # first, expand all starlark predefined paths: + # location, locations, rootpath, rootpaths, execpath, execpaths + strings = [ctx.expand_location(value, targets = extra_targets) for value in strings] + + # then expand any regular GNU make style variables + return expand_make_variables(ctx, tokenization, strings) + +##### Builders for individual parts of the aspect output + +def collect_py_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): + """Updates Python-specific output groups, returns false if not a Python target.""" + if not py_info_in_target(target) or _is_language_specific_proto_library(ctx, target, semantics): + return False + + py_semantics = getattr(semantics, "py", None) + if py_semantics: + py_launcher = py_semantics.get_launcher(target, ctx) + else: + py_launcher = None + + sources = sources_from_target(ctx) + to_build = get_py_info(target).transitive_sources + args = getattr(ctx.rule.attr, "args", []) + data_deps = getattr(ctx.rule.attr, "data", []) + args = _do_starlark_string_expansion(ctx, "args", args, data_deps, tokenization = False) + imports = getattr(ctx.rule.attr, "imports", []) + is_code_generator = False + + # If there are apparently no sources found from `srcs` and the target has a rule name which is + # one of the ones pre-specified to the aspect as being a code-generator for Python then + # interpret the outputs of the target specified in the PyInfo as being sources. + + if 0 == len(sources) and ctx.rule.kind in get_code_generator_rule_names(ctx, "python"): + def provider_import_to_attr_import(provider_import): + """\ + Remaps the imports from PyInfo + + The imports that are supplied on the `PyInfo` are relative to the runfiles and so are + not the same as those which might be supplied on an attribute of `py_library`. This + function will remap those back so they look as if they were `imports` attributes on + the rule. The form of the runfiles import is `//`. + The actual `workspace_name` is not interesting such that the first part can be simply + stripped. Next the package to the Label is stripped leaving a path that would have been + supplied on an `imports` attribute to a Rule. + """ + + # Other code in this file appears to assume *NIX path component separators? 
+ + provider_import_parts = [p for p in provider_import.split("/")] + package_parts = [p for p in ctx.label.package.split("/")] + + if 0 == len(provider_import_parts): + return None + + scratch_parts = provider_import_parts[1:] # remove the workspace name or _main + + for p in package_parts: + if 0 != len(provider_import_parts) and scratch_parts[0] == p: + scratch_parts = scratch_parts[1:] + else: + return None + + return "/".join(scratch_parts) + + def provider_imports_to_attr_imports(): + result = [] + + for provider_import in get_py_info(target).imports.to_list(): + attr_import = provider_import_to_attr_import(provider_import) + if attr_import: + result.append(attr_import) + + return result + + if get_py_info(target).imports: + imports.extend(provider_imports_to_attr_imports()) + + runfiles = target[DefaultInfo].default_runfiles + + if runfiles and runfiles.files: + sources.extend([artifact_location(f) for f in runfiles.files.to_list()]) + + is_code_generator = True + + ide_info["py_ide_info"] = struct_omit_none( + launcher = py_launcher, + python_version = _get_python_version(ctx), + sources = sources, + srcs_version = _get_python_srcs_version(ctx), + args = args, + imports = imports, + is_code_generator = is_code_generator, + ) + + update_sync_output_groups(output_groups, "intellij-info-py", depset([ide_info_file])) + update_sync_output_groups(output_groups, "intellij-compile-py", to_build) + update_sync_output_groups(output_groups, "intellij-resolve-py", to_build) + return True + +def _collect_generated_go_sources(target, ctx, semantics): + """Returns a depset of go source files generated by this target.""" + if semantics.go.is_proto_library(target, ctx): + return semantics.go.get_proto_library_generated_srcs(target) + else: + return None + +def collect_go_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): + """Updates Go-specific output groups, returns false if not a recognized Go target.""" + sources = [] + generated = [] + cgo = False + + # currently there's no Go Skylark API, with the only exception being proto_library targets + if ctx.rule.kind in [ + "go_binary", + "go_library", + "go_test", + "go_source", + "go_appengine_binary", + "go_appengine_library", + "go_appengine_test", + ]: + sources = [f for src in getattr(ctx.rule.attr, "srcs", []) for f in src.files.to_list()] + generated = [f for f in sources if not f.is_source] + cgo = getattr(ctx.rule.attr, "cgo", False) + elif ctx.rule.kind == "go_wrap_cc": + genfiles = target.files.to_list() + go_genfiles = [f for f in genfiles if f.basename.endswith(".go")] + if go_genfiles: + sources = go_genfiles + generated = go_genfiles + else: + # if the .go file isn't in 'files', build the .a and .x files instead + generated = genfiles + else: + generated_sources = _collect_generated_go_sources(target, ctx, semantics) + if not generated_sources: + return False + sources = generated_sources + generated = generated_sources + + import_path = None + go_semantics = getattr(semantics, "go", None) + if go_semantics: + import_path = go_semantics.get_import_path(ctx) + + library_labels = [] + if ctx.rule.kind in ("go_test", "go_library", "go_appengine_test"): + if getattr(ctx.rule.attr, "library", None) != None: + library_labels = [stringify_label(ctx.rule.attr.library.label)] + elif getattr(ctx.rule.attr, "embed", None) != None: + for library in ctx.rule.attr.embed: + if library[IntelliJInfo].kind == "go_source" or library[IntelliJInfo].kind == "go_proto_library": + l = 
library[IntelliJInfo].output_groups["intellij-sources-go-outputs"].to_list() + sources += l + generated += [f for f in l if not f.is_source] + else: + library_labels.append(stringify_label(library.label)) + + ide_info["go_ide_info"] = struct_omit_none( + import_path = import_path, + library_labels = library_labels, + sources = [artifact_location(f) for f in sources], + cgo = cgo, + ) + + compile_files = target[OutputGroupInfo].compilation_outputs if hasattr(target[OutputGroupInfo], "compilation_outputs") else depset([]) + compile_files = depset(generated, transitive = [compile_files]) + + update_sync_output_groups(output_groups, "intellij-info-go", depset([ide_info_file])) + update_sync_output_groups(output_groups, "intellij-compile-go", compile_files) + update_sync_output_groups(output_groups, "intellij-resolve-go", depset(generated)) + update_sync_output_groups(output_groups, "intellij-sources-go", depset(sources)) + return True + +def collect_cpp_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): + """Updates C++-specific output groups, returns false if not a C++ target.""" + + if CcInfo not in target: + return False + + # ignore cc_proto_library, attach to proto_library with aspect attached instead + if ctx.rule.kind == "cc_proto_library": + return False + + # Go targets always provide CcInfo. Usually it's empty, but even if it isn't we don't handle it + if ctx.rule.kind.startswith("go_"): + return False + + sources = artifacts_from_target_list_attr(ctx, "srcs") + headers = artifacts_from_target_list_attr(ctx, "hdrs") + textual_headers = artifacts_from_target_list_attr(ctx, "textual_hdrs") + + target_copts = [] + if hasattr(ctx.rule.attr, "copts"): + target_copts += ctx.rule.attr.copts + extra_targets = [] + if hasattr(ctx.rule.attr, "additional_compiler_inputs"): + extra_targets += ctx.rule.attr.additional_compiler_inputs + if hasattr(semantics, "cc") and hasattr(semantics.cc, "get_default_copts"): + target_copts += semantics.cc.get_default_copts(ctx) + + target_copts = _do_starlark_string_expansion(ctx, "copt", target_copts, extra_targets) + + compilation_context = target[CcInfo].compilation_context + + # Merge current compilation context with context of implementation dependencies. 
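+    # implementation_deps are not propagated to dependents via CcInfo, so their
+    # headers are merged in here; otherwise the IDE could not resolve them.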
+    if hasattr(ctx.rule.attr, "implementation_deps"):
+        implementation_deps = ctx.rule.attr.implementation_deps
+        compilation_context = cc_common.merge_compilation_contexts(
+            compilation_contexts =
+                [compilation_context] + [impl[CcInfo].compilation_context for impl in implementation_deps],
+        )
+
+    # external_includes available since Bazel 7
+    external_includes = getattr(compilation_context, "external_includes", depset()).to_list()
+
+    c_info = struct_omit_none(
+        header = headers,
+        source = sources,
+        target_copt = target_copts,
+        textual_header = textual_headers,
+        transitive_define = compilation_context.defines.to_list(),
+        transitive_include_directory = compilation_context.includes.to_list(),
+        transitive_quote_include_directory = compilation_context.quote_includes.to_list(),
+        # both system and external includes are added using `-isystem`
+        transitive_system_include_directory = compilation_context.system_includes.to_list() + external_includes,
+        include_prefix = getattr(ctx.rule.attr, "include_prefix", None),
+        strip_include_prefix = getattr(ctx.rule.attr, "strip_include_prefix", None),
+    )
+    ide_info["c_ide_info"] = c_info
+    resolve_files = compilation_context.headers
+
+    # TODO(brendandouglas): target to cpp files only
+    compile_files = target[OutputGroupInfo].compilation_outputs if hasattr(target[OutputGroupInfo], "compilation_outputs") else depset([])
+
+    update_sync_output_groups(output_groups, "intellij-info-cpp", depset([ide_info_file]))
+    update_sync_output_groups(output_groups, "intellij-compile-cpp", compile_files)
+    update_sync_output_groups(output_groups, "intellij-resolve-cpp", resolve_files)
+    return True
+
+def collect_c_toolchain_info(target, ctx, semantics, ide_info, ide_info_file, output_groups):
+    """Updates cc_toolchain-relevant output groups, returns false if not a cc_toolchain target."""
+
+    # The other toolchains like the JDK might also have ToolchainInfo but it's not a C++ toolchain,
+    # so check kind as well.
+    # TODO(jvoung): We are temporarily getting info from cc_toolchain_suite
+    # https://github.com/bazelbuild/bazel/commit/3aedb2f6de80630f88ffb6b60795c44e351a5810
+    # but will switch back to cc_toolchain providing CcToolchainProvider once we migrate C++ rules
+    # to generic platforms and toolchains.
+ if ctx.rule.kind != "cc_toolchain" and ctx.rule.kind != "cc_toolchain_suite" and ctx.rule.kind != "cc_toolchain_alias": + return False + if cc_common.CcToolchainInfo not in target: + return False + + # cc toolchain to access compiler flags + cpp_toolchain = target[cc_common.CcToolchainInfo] + + # cpp fragment to access bazel options + cpp_fragment = ctx.fragments.cpp + + copts = cpp_fragment.copts + cxxopts = cpp_fragment.cxxopts + conlyopts = cpp_fragment.conlyopts + + feature_configuration = cc_common.configure_features( + ctx = ctx, + cc_toolchain = cpp_toolchain, + requested_features = ctx.features, + unsupported_features = ctx.disabled_features + UNSUPPORTED_FEATURES, + ) + c_variables = cc_common.create_compile_variables( + feature_configuration = feature_configuration, + cc_toolchain = cpp_toolchain, + user_compile_flags = copts + conlyopts, + ) + cpp_variables = cc_common.create_compile_variables( + feature_configuration = feature_configuration, + cc_toolchain = cpp_toolchain, + user_compile_flags = copts + cxxopts, + ) + c_options = cc_common.get_memory_inefficient_command_line( + feature_configuration = feature_configuration, + action_name = ACTION_NAMES.c_compile, + variables = c_variables, + ) + cpp_options = cc_common.get_memory_inefficient_command_line( + feature_configuration = feature_configuration, + action_name = ACTION_NAMES.cpp_compile, + variables = cpp_variables, + ) + + if (get_registry_flag(ctx, "_cpp_use_get_tool_for_action")): + c_compiler = cc_common.get_tool_for_action( + feature_configuration = feature_configuration, + action_name = ACTION_NAMES.c_compile, + ) + cpp_compiler = cc_common.get_tool_for_action( + feature_configuration = feature_configuration, + action_name = ACTION_NAMES.cpp_compile, + ) + else: + c_compiler = str(cpp_toolchain.compiler_executable) + cpp_compiler = str(cpp_toolchain.compiler_executable) + + c_toolchain_info = struct_omit_none( + built_in_include_directory = [str(d) for d in cpp_toolchain.built_in_include_directories], + c_option = c_options, + cpp_option = cpp_options, + c_compiler = c_compiler, + cpp_compiler = cpp_compiler, + target_name = cpp_toolchain.target_gnu_system_name, + ) + ide_info["c_toolchain_ide_info"] = c_toolchain_info + update_sync_output_groups(output_groups, "intellij-info-cpp", depset([ide_info_file])) + return True + +def get_java_provider(target): + """Find a provider exposing java compilation/outputs data.""" + + # Check for kt providers before JavaInfo. e.g. kt targets have + # JavaInfo, but their data lives in the "kt" provider and not JavaInfo. + # See https://github.com/bazelbuild/intellij/pull/1202 + if hasattr(target, "kt") and hasattr(target.kt, "outputs"): + return target.kt + java_info = get_java_info(target) + if java_info: + return java_info + if hasattr(java_common, "JavaPluginInfo") and java_common.JavaPluginInfo in target: + return target[java_common.JavaPluginInfo] + return None + +def _collect_generated_files(java): + """Collects generated files from a Java target""" + if hasattr(java, "java_outputs"): + return [ + (outputs.generated_class_jar, outputs.generated_source_jar) + for outputs in java.java_outputs + if outputs.generated_class_jar != None + ] + + # Handles Bazel versions before 5.0.0. 
+ if (hasattr(java, "annotation_processing") and java.annotation_processing and java.annotation_processing.enabled): + return [(java.annotation_processing.class_jar, java.annotation_processing.source_jar)] + return [] + +def collect_java_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): + """Updates Java-specific output groups, returns false if not a Java target.""" + java = get_java_provider(target) + if not java: + return False + if hasattr(java, "java_outputs") and java.java_outputs: + java_outputs = java.java_outputs + elif hasattr(java, "outputs") and java.outputs: + java_outputs = java.outputs.jars + else: + return False + + java_semantics = semantics.java if hasattr(semantics, "java") else None + if java_semantics and java_semantics.skip_target(target, ctx): + return False + + ide_info_files = [] + sources = sources_from_target(ctx) + jars = [library_artifact(output, ctx.rule.kind) for output in java_outputs] + class_jars = [output.class_jar for output in java_outputs if output and output.class_jar] + output_jars = [jar for output in java_outputs for jar in jars_from_output(output)] + resolve_files = output_jars + compile_files = class_jars + + gen_jars = [] + for generated_class_jar, generated_source_jar in _collect_generated_files(java): + gen_jars.append(annotation_processing_jars(generated_class_jar, generated_source_jar)) + resolve_files += [ + jar + for jar in [ + generated_class_jar, + generated_source_jar, + ] + if jar != None and not jar.is_source + ] + compile_files += [ + jar + for jar in [generated_class_jar] + if jar != None and not jar.is_source + ] + + jdeps = None + jdeps_file = None + if java_semantics and hasattr(java_semantics, "get_filtered_jdeps"): + jdeps_file = java_semantics.get_filtered_jdeps(target) + if jdeps_file == None and hasattr(java, "outputs") and hasattr(java.outputs, "jdeps") and java.outputs.jdeps: + jdeps_file = java.outputs.jdeps + if jdeps_file: + jdeps = artifact_location(jdeps_file) + resolve_files.append(jdeps_file) + + java_sources, gen_java_sources, srcjars = divide_java_sources(ctx) + + if java_semantics: + srcjars = java_semantics.filter_source_jars(target, ctx, srcjars) + + package_manifest = None + if java_sources: + package_manifest = build_java_package_manifest(ctx, target, java_sources, ".java-manifest") + ide_info_files.append(package_manifest) + + filtered_gen_jar = None + if java_sources and (gen_java_sources or srcjars): + filtered_gen_jar, filtered_gen_resolve_files = _build_filtered_gen_jar( + ctx, + target, + java_outputs, + gen_java_sources, + srcjars, + ) + resolve_files += filtered_gen_resolve_files + + # Custom lint checks are incorporated as java plugins. We collect them here and register them with the IDE so that the IDE can also run the same checks. 
+ plugin_processor_jar_files = [] + if hasattr(ctx.rule.attr, "_android_lint_plugins"): + plugin_processor_jar_files += [ + jar + for p in getattr(ctx.rule.attr, "_android_lint_plugins", []) + for jar in _android_lint_plugin_jars(p) + ] + + if hasattr(java, "annotation_processing") and java.annotation_processing and hasattr(java.annotation_processing, "processor_classpath"): + plugin_processor_jar_files += java.annotation_processing.processor_classpath.to_list() + resolve_files += plugin_processor_jar_files + plugin_processor_jars = [annotation_processing_jars(jar, None) for jar in depset(plugin_processor_jar_files).to_list()] + + java_info = struct_omit_none( + filtered_gen_jar = filtered_gen_jar, + generated_jars = gen_jars, + jars = jars, + jdeps = jdeps, + main_class = getattr(ctx.rule.attr, "main_class", None), + package_manifest = artifact_location(package_manifest), + sources = sources, + test_class = getattr(ctx.rule.attr, "test_class", None), + plugin_processor_jars = plugin_processor_jars, + ) + + ide_info["java_ide_info"] = java_info + ide_info_files.append(ide_info_file) + update_sync_output_groups(output_groups, "intellij-info-java", depset(ide_info_files)) + update_sync_output_groups(output_groups, "intellij-compile-java", depset(compile_files)) + update_sync_output_groups(output_groups, "intellij-resolve-java", depset(resolve_files)) + + # also add transitive hjars + src jars, to catch implicit deps + if hasattr(java, "transitive_compile_time_jars"): + update_set_in_dict(output_groups, "intellij-resolve-java-direct-deps", java.transitive_compile_time_jars) + update_set_in_dict(output_groups, "intellij-resolve-java-direct-deps", java.transitive_source_jars) + return True + +def _android_lint_plugin_jars(target): + java_info = get_java_info(target) + if java_info: + return java_info.transitive_runtime_jars.to_list() + else: + return [] + +def _package_manifest_file_argument(f): + artifact = artifact_location(f) + is_external = "1" if is_external_artifact(f.owner) else "0" + return artifact.root_execution_path_fragment + "," + artifact.relative_path + "," + is_external + +def build_java_package_manifest(ctx, target, source_files, suffix): + """Builds the java package manifest for the given source files.""" + output = ctx.actions.declare_file(target.label.name + suffix) + + args = ctx.actions.args() + args.add("--output_manifest") + args.add(output.path) + args.add_joined( + "--sources", + source_files, + join_with = ":", + map_each = _package_manifest_file_argument, + ) + + # Bazel has an option to put your command line args in a file, and then pass the name of that file as the only + # argument to your executable. The PackageParser supports taking args in this way, we can pass in an args file + # as "@filename". + # Bazel Persistent Workers take their input as a file that contains the argument that will be parsed and turned + # into a WorkRequest proto and read on stdin. It also wants an argument of the form "@filename". We can use the + # params file as an arg file. + # Thus if we always use a params file, we can support both persistent worker mode and local mode (regular) mode. 
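+    # Concretely (illustrative path), the spawned command looks like
+    #   java -jar PackageParser_deploy.jar @bazel-out/.../foo.java-manifest-0.params
+    # with one argument per line in the params file ("multiline" format below).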
+ args.use_param_file("@%s", use_always = True) + args.set_param_file_format("multiline") + + run_jar( + ctx = ctx, + jar = ctx.file._package_parser, + inputs = source_files, + outputs = [output], + arguments = [args], + mnemonic = "JavaPackageManifest", + progress_message = "Parsing java package strings for " + str(target.label), + execution_requirements = { + "supports-workers": "1", + "requires-worker-protocol": "proto", + }, + ) + return output + +def _build_filtered_gen_jar(ctx, target, java_outputs, gen_java_sources, srcjars): + """Filters the passed jar to contain only classes from the given manifest.""" + jar_artifacts = [] + source_jar_artifacts = [] + for jar in java_outputs: + if jar.ijar: + jar_artifacts.append(jar.ijar) + elif jar.class_jar: + jar_artifacts.append(jar.class_jar) + if hasattr(jar, "source_jars") and jar.source_jars: + source_jar_artifacts.extend(_list_or_depset_to_list(jar.source_jars)) + elif hasattr(jar, "source_jar") and jar.source_jar: + source_jar_artifacts.append(jar.source_jar) + + if len(source_jar_artifacts) == 0 or len(jar_artifacts) == 0: + jar_artifacts.extend([jar.class_jar for jar in java_outputs if jar.class_jar]) + + filtered_jar = ctx.actions.declare_file(target.label.name + "-filtered-gen.jar") + filtered_source_jar = ctx.actions.declare_file(target.label.name + "-filtered-gen-src.jar") + args = [] + for jar in jar_artifacts: + args += ["--filter_jar", jar.path] + for jar in source_jar_artifacts: + args += ["--filter_source_jar", jar.path] + args += ["--filtered_jar", filtered_jar.path] + args += ["--filtered_source_jar", filtered_source_jar.path] + if gen_java_sources: + for java_file in gen_java_sources: + args += ["--keep_java_file", java_file.path] + if srcjars: + for source_jar in srcjars: + args += ["--keep_source_jar", source_jar.path] + run_jar( + ctx = ctx, + jar = ctx.file._jar_filter, + inputs = jar_artifacts + source_jar_artifacts + gen_java_sources + srcjars, + outputs = [filtered_jar, filtered_source_jar], + arguments = args, + mnemonic = "JarFilter", + progress_message = "Filtering generated code for " + str(target.label), + ) + output_jar = struct( + jar = artifact_location(filtered_jar), + source_jar = artifact_location(filtered_source_jar), + ) + intellij_resolve_files = [filtered_jar, filtered_source_jar] + return output_jar, intellij_resolve_files + +def divide_java_sources(ctx): + """Divide sources into plain java, generated java, and srcjars.""" + + java_sources = [] + gen_java_sources = [] + srcjars = [] + if hasattr(ctx.rule.attr, "srcs"): + srcs = ctx.rule.attr.srcs + for src in srcs: + for f in src.files.to_list(): + if f.basename.endswith(".java"): + if f.is_source: + java_sources.append(f) + else: + gen_java_sources.append(f) + elif f.basename.endswith(".srcjar"): + srcjars.append(f) + + return java_sources, gen_java_sources, srcjars + +def collect_android_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): + """Updates Android-specific output groups, returns true if any android specific info was collected.""" + handled = False + handled = _collect_android_ide_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled + handled = _collect_android_instrumentation_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled + handled = _collect_aar_import_info(ctx, ide_info, ide_info_file, output_groups) or handled + handled = _collect_android_sdk_info(ctx, ide_info, ide_info_file, output_groups) or handled + + if handled: + # do this once do avoid adding 
unnecessary nesting to the depset + # (https://docs.bazel.build/versions/master/skylark/performance.html#reduce-the-number-of-calls-to-depset) + update_sync_output_groups(output_groups, "intellij-info-android", depset([ide_info_file])) + return handled + +def _collect_android_ide_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): + """Populates ide_info proto and intellij_resolve_android output group + + Updates ide_info proto with android_ide_info, and intellij_resolve_android with android + resolve files. It returns false on android_library and android_binary targets, as this preserves + consistent functionality with the previous condition of the presence of the .android legacy + provider. + """ + if ctx.rule.kind not in ["android_library", "android_binary", "kt_android_library"]: + return False + + android_semantics = semantics.android if hasattr(semantics, "android") else None + extra_ide_info = android_semantics.extra_ide_info(target, ctx) if android_semantics else {} + + if hasattr(android_common, "AndroidIdeInfo"): + android = target[android_common.AndroidIdeInfo] + else: + # Backwards compatibility: supports android struct provider + legacy_android = getattr(target, "android") + + # Transform into AndroidIdeInfo form + android = struct( + java_package = legacy_android.java_package, + manifest = legacy_android.manifest, + idl_source_jar = getattr(legacy_android.idl.output, "source_jar", None), + idl_class_jar = getattr(legacy_android.idl.output, "class_jar", None), + defines_android_resources = legacy_android.defines_resources, + idl_import_root = getattr(legacy_android.idl, "import_root", None), + resource_jar = legacy_android.resource_jar, + signed_apk = legacy_android.apk, + apks_under_test = legacy_android.apks_under_test, + ) + + output_jar = struct( + class_jar = android.idl_class_jar, + ijar = None, + source_jar = android.idl_source_jar, + ) if android.idl_class_jar else None + + resources = [] + res_folders = [] + resolve_files = jars_from_output(output_jar) + if hasattr(ctx.rule.attr, "resource_files"): + for artifact_path_fragments, res_files in get_res_artifacts(ctx.rule.attr.resource_files).items(): + # Generate unique ArtifactLocation for resource directories. + root = to_artifact_location(*artifact_path_fragments) + resources.append(root) + + # Generate aar + aar_file_name = target.label.name.replace("/", "-") + aar_file_name += "-" + str(hash(root.root_execution_path_fragment + root.relative_path + aar_file_name)) + + aar = ctx.actions.declare_file(aar_file_name + ".aar") + args = ctx.actions.args() + + # using param file to get around argument length limitation + # the name of param file (%s) is automatically filled in by blaze + args.use_param_file("@%s") + args.set_param_file_format("multiline") + + args.add("--aar", aar) + args.add("--manifest_file", android.manifest) + args.add_joined("--resources", res_files, join_with = ",") + args.add("--resource_root", root.relative_path if root.is_source else root.root_execution_path_fragment + "/" + root.relative_path) + + run_jar( + ctx = ctx, + jar = ctx.file._create_aar, + outputs = [aar], + inputs = [android.manifest] + res_files, + arguments = [args], + mnemonic = "CreateAar", + progress_message = "Generating " + aar_file_name + ".aar for target " + str(target.label), + ) + resolve_files.append(aar) + + # Generate unique ResFolderLocation for resource files. 
+ res_folders.append(struct_omit_none(aar = artifact_location(aar), root = root)) + + instruments = None + if hasattr(ctx.rule.attr, "instruments") and ctx.rule.attr.instruments: + instruments = stringify_label(ctx.rule.attr.instruments.label) + + render_resolve_jar = None + if android_semantics and hasattr(android_semantics, "build_render_resolve_jar"): + render_resolve_jar = android_semantics.build_render_resolve_jar(target, ctx) + + if render_resolve_jar: + update_sync_output_groups(output_groups, "intellij-render-resolve-android", depset([render_resolve_jar])) + + android_info = struct_omit_none( + java_package = android.java_package, + idl_import_root = getattr(android, "idl_import_root", None), + manifest = artifact_location(android.manifest), + manifest_values = [struct_omit_none(key = key, value = value) for key, value in ctx.rule.attr.manifest_values.items()] if hasattr(ctx.rule.attr, "manifest_values") else None, + apk = artifact_location(android.signed_apk), + dependency_apk = [artifact_location(apk) for apk in android.apks_under_test], + has_idl_sources = android.idl_class_jar != None, + idl_jar = library_artifact(output_jar), + generate_resource_class = android.defines_android_resources, + resources = resources, + res_folders = res_folders, + resource_jar = library_artifact(android.resource_jar), + instruments = instruments, + render_resolve_jar = artifact_location(render_resolve_jar) if render_resolve_jar else None, + **extra_ide_info + ) + + if android.manifest and not android.manifest.is_source: + resolve_files.append(android.manifest) + + # b/176209293: expose resource jar to make sure empty library + # knows they are remote output artifact + if android.resource_jar: + resolve_files += [jar for jar in jars_from_output(android.resource_jar)] + + ide_info["android_ide_info"] = android_info + update_sync_output_groups(output_groups, "intellij-resolve-android", depset(resolve_files)) + return True + +def _collect_android_instrumentation_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): + """Updates ide_info proto with android_instrumentation_info, returns false if not an android_instrumentation_test target.""" + if not ctx.rule.kind == "android_instrumentation_test": + return False + + android_instrumentation_info = struct_omit_none( + test_app = stringify_label(ctx.rule.attr.test_app.label), + target_device = str(ctx.rule.attr.target_device.label), + ) + ide_info["android_instrumentation_info"] = android_instrumentation_info + return True + +def _collect_android_sdk_info(ctx, ide_info, ide_info_file, output_groups): + """Updates android_sdk-relevant groups, returns false if not an android_sdk target.""" + if ctx.rule.kind != "android_sdk": + return False + android_jar_file = ctx.rule.attr.android_jar.files.to_list()[0] + ide_info["android_sdk_ide_info"] = struct( + android_jar = artifact_location(android_jar_file), + ) + return True + +def _collect_aar_import_info(ctx, ide_info, ide_info_file, output_groups): + """Updates ide_info proto with aar_import-relevant groups, returns false if not an aar_import target.""" + if ctx.rule.kind != "aar_import": + return False + if not hasattr(ctx.rule.attr, "aar"): + return False + aar_file = ctx.rule.attr.aar.files.to_list()[0] + ide_info["android_aar_ide_info"] = struct_omit_none( + aar = artifact_location(aar_file), + java_package = getattr(ctx.rule.attr, "package", None), + ) + update_sync_output_groups(output_groups, "intellij-resolve-android", depset([aar_file])) + return True + +def build_test_info(ctx): + 
"""Build TestInfo.""" + if not is_test_rule(ctx): + return None + return struct_omit_none( + size = ctx.rule.attr.size, + ) + +def is_test_rule(ctx): + kind_string = ctx.rule.kind + return kind_string.endswith("_test") + +def collect_java_toolchain_info(target, ide_info, ide_info_file, output_groups): + """Updates java_toolchain-relevant output groups, returns false if not a java_toolchain target.""" + if hasattr(target, "java_toolchain"): + toolchain = target.java_toolchain + elif java_common.JavaToolchainInfo != platform_common.ToolchainInfo and \ + java_common.JavaToolchainInfo in target: + toolchain = target[java_common.JavaToolchainInfo] + else: + return False + javac_jars = [] + if hasattr(toolchain, "tools"): + javac_jars = [ + artifact_location(f) + for f in toolchain.tools.to_list() + if f.basename.endswith(".jar") + ] + ide_info["java_toolchain_ide_info"] = struct_omit_none( + javac_jars = javac_jars, + source_version = toolchain.source_version, + target_version = toolchain.target_version, + ) + update_sync_output_groups(output_groups, "intellij-info-java", depset([ide_info_file])) + return True + +def artifact_to_path(artifact): + return artifact.root_execution_path_fragment + "/" + artifact.relative_path + +def collect_kotlin_toolchain_info(target, ctx, ide_info, ide_info_file, output_groups): + """Updates kotlin_toolchain-relevant output groups, returns false if not a kotlin_toolchain target.""" + if ctx.rule.kind == "_kt_toolchain" and platform_common.ToolchainInfo in target: + kt = target[platform_common.ToolchainInfo] + elif hasattr(target, "kt") and hasattr(target.kt, "language_version"): + kt = target.kt # Legacy struct provider mechanism + else: + return False + + if not hasattr(kt, "language_version"): + return False + ide_info["kt_toolchain_ide_info"] = struct( + language_version = kt.language_version, + ) + update_sync_output_groups(output_groups, "intellij-info-kt", depset([ide_info_file])) + return True + +def _is_proto_library_wrapper(target, ctx): + """Returns True if the target is an empty shim around a proto library.""" + if not ctx.rule.kind.endswith("proto_library") or ctx.rule.kind == "proto_library": + return False + + # treat any *proto_library rule with a single proto_library dep as a shim + deps = collect_targets_from_attrs(ctx.rule.attr, ["deps"]) + return len(deps) == 1 and IntelliJInfo in deps[0] and deps[0][IntelliJInfo].kind == "proto_library" + +def _get_forwarded_deps(target, ctx): + """Returns the list of deps of this target to forward. + + Used to handle wrapper/shim targets which are really just pointers to a + different target (for example, java_proto_library) + """ + if _is_proto_library_wrapper(target, ctx): + return collect_targets_from_attrs(ctx.rule.attr, ["deps"]) + return [] + +def _is_analysis_test(target): + """Returns if the target is an analysis test. + + Rules created with analysis_test=True cannot create write actions, so the + aspect should skip them. 
+ """ + return AnalysisTestResultInfo in target + +##### Main aspect function + +def intellij_info_aspect_impl(target, ctx, semantics): + """Aspect implementation function.""" + + tags = ctx.rule.attr.tags + if "no-ide" in tags: + return [] + + if _is_analysis_test(target): + return [] + + rule_attrs = ctx.rule.attr + + # Collect direct dependencies + direct_dep_targets = collect_targets_from_attrs( + rule_attrs, + semantics_extra_deps(DEPS, semantics, "extra_deps"), + ) + + # Collect direct toolchain type-based dependencies + if hasattr(semantics, "toolchains_propagation"): + direct_dep_targets.extend( + semantics.toolchains_propagation.collect_toolchain_deps( + ctx, + semantics.toolchains_propagation.toolchain_types, + ), + ) + + direct_deps = make_deps(direct_dep_targets, COMPILE_TIME) + + # Add exports from direct dependencies + exported_deps_from_deps = [] + for dep in direct_dep_targets: + exported_deps_from_deps = exported_deps_from_deps + dep[IntelliJInfo].export_deps + + # Combine into all compile time deps + compiletime_deps = direct_deps + exported_deps_from_deps + + # Propagate my own exports + export_deps = [] + direct_exports = [] + if java_info_in_target(target): + direct_exports = collect_targets_from_attrs(rule_attrs, ["exports"]) + export_deps.extend(make_deps(direct_exports, COMPILE_TIME)) + + # Collect transitive exports + for export in direct_exports: + export_deps.extend(export[IntelliJInfo].export_deps) + + if ctx.rule.kind == "android_library" or ctx.rule.kind == "kt_android_library": + # Empty android libraries export all their dependencies. + if not hasattr(rule_attrs, "srcs") or not ctx.rule.attr.srcs: + export_deps.extend(compiletime_deps) + + # Deduplicate the entries + export_deps = depset(export_deps).to_list() + + # runtime_deps + runtime_dep_targets = collect_targets_from_attrs( + rule_attrs, + RUNTIME_DEPS, + ) + runtime_deps = make_deps(runtime_dep_targets, RUNTIME) + all_deps = depset(compiletime_deps + runtime_deps).to_list() + + # extra prerequisites + extra_prerequisite_targets = collect_targets_from_attrs( + rule_attrs, + semantics_extra_deps(PREREQUISITE_DEPS, semantics, "extra_prerequisites"), + ) + + forwarded_deps = _get_forwarded_deps(target, ctx) + direct_exports + + # Roll up output files from my prerequisites + prerequisites = direct_dep_targets + runtime_dep_targets + extra_prerequisite_targets + direct_exports + output_groups = dict() + for dep in prerequisites: + for k, v in dep[IntelliJInfo].output_groups.items(): + if dep in forwarded_deps: + # unconditionally roll up deps for these targets + output_groups[k] = output_groups[k] + [v] if k in output_groups else [v] + continue + + # roll up outputs of direct deps into '-direct-deps' output group + if k.endswith("-direct-deps"): + continue + if k.endswith("-outputs"): + directs = k[:-len("outputs")] + "direct-deps" + output_groups[directs] = output_groups[directs] + [v] if directs in output_groups else [v] + continue + + # everything else gets rolled up transitively + output_groups[k] = output_groups[k] + [v] if k in output_groups else [v] + + # Convert output_groups from lists to depsets after the lists are finalized. This avoids + # creating and growing depsets gradually, as that results in depsets many levels deep: + # a construct which would give the build system some trouble. 
+ for k, v in output_groups.items(): + output_groups[k] = depset(transitive = output_groups[k]) + + # Initialize the ide info dict, and corresponding output file + # This will be passed to each language-specific handler to fill in as required + file_name = target.label.name + + # bazel allows target names differing only by case, so append a hash to support + # case-insensitive file systems + file_name = file_name + "-" + str(hash(file_name)) + aspect_ids = get_aspect_ids(ctx) + if aspect_ids: + aspect_hash = hash(".".join(aspect_ids)) + file_name = file_name + "-" + str(aspect_hash) + file_name = file_name + ".intellij-info.txt" + ide_info_file = ctx.actions.declare_file(file_name) + + target_key = make_target_key(target.label, aspect_ids) + ide_info = dict( + build_file_artifact_location = build_file_artifact_location(ctx), + features = ctx.features, + key = target_key, + kind_string = ctx.rule.kind, + tags = tags, + deps = list(all_deps), + ) + + # Collect test info + ide_info["test_info"] = build_test_info(ctx) + + handled = False + handled = collect_py_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled + handled = collect_cpp_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled + handled = collect_c_toolchain_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled + handled = collect_go_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled + handled = collect_java_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled + handled = collect_java_toolchain_info(target, ide_info, ide_info_file, output_groups) or handled + handled = collect_android_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled + handled = collect_kotlin_toolchain_info(target, ctx, ide_info, ide_info_file, output_groups) or handled + + # Any extra ide info + if hasattr(semantics, "extra_ide_info"): + handled = semantics.extra_ide_info(target, ctx, ide_info, ide_info_file, output_groups) or handled + + # Add to generic output group if it's not handled by a language-specific handler + if not handled: + update_sync_output_groups(output_groups, "intellij-info-generic", depset([ide_info_file])) + + # Output the ide information file. + info = struct_omit_none(**ide_info) + ctx.actions.write(ide_info_file, proto.encode_text(info)) + + # Return providers. 
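+    # For reference (illustrative): the aspect instance on a depending target reads
+    # these providers back in the roll-up loop above, e.g.
+    #     dep[IntelliJInfo].output_groups["intellij-info-java-outputs"]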
+ return [ + IntelliJInfo( + export_deps = export_deps, + kind = ctx.rule.kind, + output_groups = output_groups, + target_key = target_key, + ), + OutputGroupInfo(**output_groups), + ] + +def semantics_extra_deps(base, semantics, name): + if not hasattr(semantics, name): + return base + extra_deps = getattr(semantics, name) + return base + extra_deps + +def make_intellij_info_aspect(aspect_impl, semantics, **kwargs): + """Creates the aspect given the semantics.""" + tool_label = semantics.tool_label + flag_hack_label = semantics.flag_hack_label + deps = semantics_extra_deps(DEPS, semantics, "extra_deps") + runtime_deps = RUNTIME_DEPS + prerequisite_deps = semantics_extra_deps(PREREQUISITE_DEPS, semantics, "extra_prerequisites") + + attr_aspects = deps + runtime_deps + prerequisite_deps + + attrs = { + "_package_parser": attr.label( + default = tool_label("PackageParser_deploy.jar"), + allow_single_file = True, + ), + "_jar_filter": attr.label( + default = tool_label("JarFilter_deploy.jar"), + allow_single_file = True, + ), + "_flag_hack": attr.label( + default = flag_hack_label, + ), + "_create_aar": attr.label( + default = tool_label("CreateAar_deploy.jar"), + allow_single_file = True, + ), + "_java_runtime": attr.label( + default = "@bazel_tools//tools/jdk:current_java_runtime", + cfg = "exec", + ), + } + + # add attrs required by semantics + if hasattr(semantics, "attrs"): + attrs.update(semantics.attrs) + + return aspect( + attr_aspects = attr_aspects, + attrs = attrs, + fragments = ["cpp"], + required_aspect_providers = [java_info_reference(), [CcInfo]] + semantics.extra_required_aspect_providers, + implementation = aspect_impl, + **kwargs + ) diff --git a/.ijwb/aspects/java_classpath.bzl b/.ijwb/aspects/java_classpath.bzl new file mode 100644 index 0000000000..7ad254f751 --- /dev/null +++ b/.ijwb/aspects/java_classpath.bzl @@ -0,0 +1,37 @@ +"""An aspect which extracts the runtime classpath from a java target.""" + +load(":java_info.bzl", "get_java_info", "java_info_in_target") + +def _runtime_classpath_impl(target, ctx): + """The top level aspect implementation function. + + Args: + target: Essentially a struct representing a BUILD target. + + ctx: The context object that can be used to access attributes and generate + outputs and actions. + + Returns: + A struct with only the output_groups provider. 
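+
+      For example, the aspect can be applied ad hoc from the command line
+      (an illustrative invocation; the aspect label depends on where this
+      file lives in the workspace):
+
+        bazel build //some/java:target \
+          --aspects=//.ijwb/aspects:java_classpath.bzl%java_classpath_aspect \
+          --output_groups=runtime_classpath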
+ """ + ctx = ctx # unused argument + return struct(output_groups = { + "runtime_classpath": _get_runtime_jars(target), + }) + +def _get_runtime_jars(target): + java_info = get_java_info(target) + if not java_info: + return depset() + if java_info.compilation_info: + return java_info.compilation_info.runtime_classpath + + # JavaInfo constructor doesn't fill in compilation info, so just return the + # full transitive set of runtime jars + # https://github.com/bazelbuild/bazel/issues/10170 + return java_info.transitive_runtime_jars + +def _aspect_def(impl): + return aspect(implementation = impl) + +java_classpath_aspect = _aspect_def(_runtime_classpath_impl) diff --git a/.ijwb/aspects/java_info.bzl b/.ijwb/aspects/java_info.bzl new file mode 100644 index 0000000000..9ee0d32ea5 --- /dev/null +++ b/.ijwb/aspects/java_info.bzl @@ -0,0 +1,23 @@ +# TEMPLATE-INCLUDE-BEGIN +# TEMPLATE-INCLUDE-END + +def java_info_in_target(target): + +# TEMPLATE-INCLUDE-BEGIN + return JavaInfo in target +# TEMPLATE-INCLUDE-END + +def get_java_info(target): + +# TEMPLATE-INCLUDE-BEGIN + if JavaInfo in target: + return target[JavaInfo] + else: + return None +# TEMPLATE-INCLUDE-END + +def java_info_reference(): + +# TEMPLATE-INCLUDE-BEGIN + return [JavaInfo] +# TEMPLATE-INCLUDE-END diff --git a/.ijwb/aspects/make_variables.bzl b/.ijwb/aspects/make_variables.bzl new file mode 100644 index 0000000000..0ffd697921 --- /dev/null +++ b/.ijwb/aspects/make_variables.bzl @@ -0,0 +1,216 @@ +# Copyright 2020 The Bazel Authors. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Copied from: https://github.com/bazelbuild/bazel/blob/6f7faa659e5eb3e56c8a6274ebcb86884703d603/src/main/starlark/builtins_bzl/common/cc/cc_helper.bzl + +"""Utility functions to expand make variables. Implementation taken from cc_helper. """ + +def expand_make_variables(ctx, tokenization, unexpanded_tokens, additional_make_variable_substitutions = {}): + tokens = [] + targets = [] + for additional_compiler_input in getattr(ctx.attr, "additional_compiler_inputs", []): + targets.append(additional_compiler_input) + for token in unexpanded_tokens: + if tokenization: + expanded_token = _expand(ctx, token, additional_make_variable_substitutions, targets = targets) + _tokenize(tokens, expanded_token) + else: + exp = _expand_single_make_variable(ctx, token, additional_make_variable_substitutions) + if exp != None: + _tokenize(tokens, exp) + else: + tokens.append(_expand(ctx, token, additional_make_variable_substitutions, targets = targets)) + return tokens + +# Tries to expand a single make variable from token. +# If token has additional characters other than ones +# corresponding to make variable returns None. 
+def _expand_single_make_variable(ctx, token, additional_make_variable_substitutions = {}):
+    if len(token) < 3:
+        return None
+    if token[0] != "$" or token[1] != "(" or token[len(token) - 1] != ")":
+        return None
+    unexpanded_var = token[2:len(token) - 1]
+    expanded_var = _expand_nested_variable(ctx, additional_make_variable_substitutions, unexpanded_var)
+    return expanded_var
+
+
+def _expand_nested_variable(ctx, additional_vars, exp, execpath = True, targets = []):
+    # If the make variable is a predefined path variable (like $(location ...)),
+    # we will expand it first.
+    if exp.find(" ") != -1:
+        if not execpath:
+            if exp.startswith("location"):
+                exp = exp.replace("location", "rootpath", 1)
+        data_targets = []
+        if ctx.attr.data != None:
+            data_targets = ctx.attr.data
+
+        # Make sure we do not duplicate targets.
+        unified_targets_set = {}
+        for data_target in data_targets:
+            unified_targets_set[data_target] = True
+        for target in targets:
+            unified_targets_set[target] = True
+        return ctx.expand_location("$({})".format(exp), targets = unified_targets_set.keys())
+
+    # Recursively expand nested make variables, but since there is no recursion
+    # in Starlark we will do it via for loop.
+    unbounded_recursion = True
+
+    # The only way to check if the unbounded recursion is happening or not
+    # is to have a look at the depth of the recursion.
+    # 10 seems to be a reasonable number, since it is highly unexpected
+    # to have nested make variables which are expanding more than 10 times.
+    for _ in range(10):
+        exp = _lookup_var(ctx, additional_vars, exp)
+        if len(exp) >= 3 and exp[0] == "$" and exp[1] == "(" and exp[len(exp) - 1] == ")":
+            # Try to expand once more.
+            exp = exp[2:len(exp) - 1]
+            continue
+        unbounded_recursion = False
+        break
+
+    if unbounded_recursion:
+        fail("potentially unbounded recursion during expansion of {}".format(exp))
+    return exp
+
+def _lookup_var(ctx, additional_vars, var):
+    expanded_make_var = additional_vars.get(var)
+    if expanded_make_var != None:
+        return expanded_make_var
+
+    expanded_make_var = ctx.var.get(var)
+    if expanded_make_var != None:
+        return expanded_make_var
+
+    # ctx.rule.var is only available in Bazel 9+
+    expanded_make_var = getattr(ctx.rule, "var", {}).get(var)
+    if expanded_make_var != None:
+        return expanded_make_var
+
+    fail("{}: {} not defined".format(ctx.label, "$(" + var + ")"))
+
+def _expand(ctx, expression, additional_make_variable_substitutions, execpath = True, targets = []):
+    idx = 0
+    last_make_var_end = 0
+    result = []
+    n = len(expression)
+    for _ in range(n):
+        if idx >= n:
+            break
+        if expression[idx] != "$":
+            idx += 1
+            continue
+
+        idx += 1
+
+        # We've met $$ pattern, so $ is escaped.
+        if idx < n and expression[idx] == "$":
+            idx += 1
+            result.append(expression[last_make_var_end:idx - 1])
+            last_make_var_end = idx
+            # We might have found a potential start for Make Variable.
+
+        elif idx < n and expression[idx] == "(":
+            # Try to find the closing parentheses.
+            make_var_start = idx
+            make_var_end = make_var_start
+            for j in range(idx + 1, n):
+                if expression[j] == ")":
+                    make_var_end = j
+                    break
+
+            # Note we cannot go out of string's bounds here,
+            # because of this check.
+            # If the start of the variable is different from the end,
+            # we found a make variable.
+            if make_var_start != make_var_end:
+                # Some clarifications:
+                # *****$(MAKE_VAR_1)*******$(MAKE_VAR_2)*****
+                #    ^                ^              ^
+                #    |                |              |
+                # last_make_var_end   make_var_start make_var_end
+                result.append(expression[last_make_var_end:make_var_start - 1])
+                make_var = expression[make_var_start + 1:make_var_end]
+                exp = _expand_nested_variable(ctx, additional_make_variable_substitutions, make_var, execpath, targets)
+                result.append(exp)
+
+                # Update indexes.
+                idx = make_var_end + 1
+                last_make_var_end = idx
+
+    # Add the last substring which would be skipped by for loop.
+    if last_make_var_end < n:
+        result.append(expression[last_make_var_end:n])
+
+    return "".join(result)
+
+def _tokenize(options, options_string):
+    token = []
+    force_token = False
+    quotation = "\0"
+    length = len(options_string)
+
+    # Since it is impossible to modify a loop variable inside a loop
+    # in Starlark, and there is no while loop, we have to emulate
+    # one by advancing the index manually.
+    i = -1
+    for _ in range(length):
+        i += 1
+        if i >= length:
+            break
+        c = options_string[i]
+        if quotation != "\0":
+            # In quotation.
+            if c == quotation:
+                # End quotation.
+                quotation = "\0"
+            elif c == "\\" and quotation == "\"":
+                i += 1
+                if i == length:
+                    fail("backslash at the end of the string: {}".format(options_string))
+                c = options_string[i]
+                if c != "\\" and c != "\"":
+                    token.append("\\")
+                token.append(c)
+            else:
+                # Regular char, in quotation.
+                token.append(c)
+        else:
+            # Not in quotation.
+            if c == "'" or c == "\"":
+                # Begin single or double quotation.
+                quotation = c
+                force_token = True
+            elif c == " " or c == "\t":
+                # Space not quoted.
+                if force_token or len(token) > 0:
+                    options.append("".join(token))
+                    token = []
+                    force_token = False
+            elif c == "\\":
+                # Backslash not quoted.
+                i += 1
+                if i == length:
+                    fail("backslash at the end of the string: {}".format(options_string))
+                token.append(options_string[i])
+            else:
+                # Regular char, not quoted.
+ token.append(c) + if quotation != "\0": + fail("unterminated quotation at the end of the string: {}".format(options_string)) + + if force_token or len(token) > 0: + options.append("".join(token)) diff --git a/.ijwb/aspects/python_info.bzl b/.ijwb/aspects/python_info.bzl new file mode 100644 index 0000000000..bcf6c76d63 --- /dev/null +++ b/.ijwb/aspects/python_info.bzl @@ -0,0 +1,24 @@ +# TEMPLATE-INCLUDE-BEGIN +load("@rules_python//python:defs.bzl", RulesPyInfo = "PyInfo") +# TEMPLATE-INCLUDE-END + +def py_info_in_target(target): + +# TEMPLATE-INCLUDE-BEGIN + if RulesPyInfo in target: + return True + if PyInfo in target: + return True + return False +# TEMPLATE-INCLUDE-END + +def get_py_info(target): + +# TEMPLATE-INCLUDE-BEGIN + if RulesPyInfo in target: + return target[RulesPyInfo] + if PyInfo in target: + return target[PyInfo] + return None +# TEMPLATE-INCLUDE-END + diff --git a/.ijwb/aspects/xcode_query.bzl b/.ijwb/aspects/xcode_query.bzl new file mode 100644 index 0000000000..60193b82d2 --- /dev/null +++ b/.ijwb/aspects/xcode_query.bzl @@ -0,0 +1,22 @@ +provider_attrs = ["xcode_version", "default_macos_sdk_version"] + +def all_items_are_true(items): + for item in items: + if item == False: + return False + + return True + +def hasattrs(obj, attrs): + return all_items_are_true([hasattr(obj, attr) for attr in attrs]) + +def format(target): + all_providers = providers(target) + for key in all_providers: + provider = all_providers[key] + + if hasattrs(provider, provider_attrs): + attrs = [getattr(provider, attr) for attr in provider_attrs] + return "{} {}".format(attrs[0], attrs[1]) + + return "" diff --git a/WORKSPACE b/WORKSPACE index b0d62a697a..be4e7b70b4 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -123,6 +123,12 @@ load("@io_bazel_rules_scala//scala:toolchains.bzl", "scala_register_toolchains") scala_register_toolchains() +load("@io_bazel_rules_scala//testing:junit.bzl", "junit_repositories", "junit_toolchain") + +junit_repositories() + +junit_toolchain() + load("@io_bazel_rules_scala//testing:scalatest.bzl", "scalatest_repositories", "scalatest_toolchain") scalatest_repositories() diff --git a/aggregator/BUILD.bazel b/aggregator/BUILD.bazel index b7458a1f53..87d12b232e 100644 --- a/aggregator/BUILD.bazel +++ b/aggregator/BUILD.bazel @@ -57,13 +57,13 @@ scala_library( ]), ) -scala_test_suite( +scala_junit_test_suite( name = "test", srcs = glob(["src/test/scala/ai/chronon/aggregator/test/*.scala"]), + suffixes = ["Test"], visibility = ["//visibility:public"], deps = [ ":aggregator", - ":test-lib", "//api:api-lib", "//api:api-models", maven_artifact("junit:junit"), diff --git a/aggregator/src/test/scala/ai/chronon/aggregator/test/ApproxDistinctTest.scala b/aggregator/src/test/scala/ai/chronon/aggregator/test/ApproxDistinctTest.scala index 2416a894f5..fb127c6c2c 100644 --- a/aggregator/src/test/scala/ai/chronon/aggregator/test/ApproxDistinctTest.scala +++ b/aggregator/src/test/scala/ai/chronon/aggregator/test/ApproxDistinctTest.scala @@ -19,8 +19,11 @@ package ai.chronon.aggregator.test import ai.chronon.aggregator.base.ApproxDistinctCount import junit.framework.TestCase import org.junit.Assert._ +import org.junit.Test class ApproxDistinctTest extends TestCase { + + @Test def testErrorBound(uniques: Int, errorBound: Int, lgK: Int): Unit = { val uniqueElems = 1 to uniques val duplicates = uniqueElems ++ uniqueElems ++ uniqueElems @@ -32,6 +35,7 @@ class ApproxDistinctTest extends TestCase { assertTrue(Math.abs(estimated - uniques) < errorBound) } + @Test def 
testMergingErrorBound(uniques: Int, errorBound: Int, lgK: Int, merges: Int): Unit = { val chunkSize = uniques / merges assert(chunkSize > 0) @@ -50,12 +54,14 @@ class ApproxDistinctTest extends TestCase { assertTrue(Math.abs(estimated - uniques) < errorBound) } + @Test def testErrorBounds(): Unit = { testErrorBound(uniques = 100, errorBound = 1, lgK = 10) testErrorBound(uniques = 1000, errorBound = 20, lgK = 10) testErrorBound(uniques = 10000, errorBound = 300, lgK = 10) } + @Test def testMergingErrorBounds(): Unit = { testMergingErrorBound(uniques = 100, errorBound = 1, lgK = 10, merges = 10) testMergingErrorBound(uniques = 1000, errorBound = 20, lgK = 10, merges = 4) diff --git a/aggregator/src/test/scala/ai/chronon/aggregator/test/ApproxHistogramTest.scala b/aggregator/src/test/scala/ai/chronon/aggregator/test/ApproxHistogramTest.scala index eb1512cb5f..177c04d373 100644 --- a/aggregator/src/test/scala/ai/chronon/aggregator/test/ApproxHistogramTest.scala +++ b/aggregator/src/test/scala/ai/chronon/aggregator/test/ApproxHistogramTest.scala @@ -3,11 +3,14 @@ package ai.chronon.aggregator.test import ai.chronon.aggregator.base.{ApproxHistogram, ApproxHistogramIr} import junit.framework.TestCase import org.junit.Assert._ +import org.junit.Test import java.util import scala.jdk.CollectionConverters._ class ApproxHistogramTest extends TestCase { + + @Test def testHistogram(): Unit = { val approxHistogram = new ApproxHistogram[String](3) val counts = (1L to 3).map(i => i.toString -> i).toMap @@ -18,6 +21,7 @@ class ApproxHistogramTest extends TestCase { assertEquals(toHashMap(counts), approxHistogram.finalize(ir)) } + @Test def testSketch(): Unit = { val approxHistogram = new ApproxHistogram[String](3) val counts = (1L to 4).map(i => i.toString -> i).toMap @@ -29,6 +33,7 @@ class ApproxHistogramTest extends TestCase { assertEquals(toHashMap(expected), approxHistogram.finalize(ir)) } + @Test def testMergeSketches(): Unit = { val approxHistogram = new ApproxHistogram[String](3) val counts1: Map[String, Long] = Map("5" -> 5L, "4" -> 4, "2" -> 2, "1" -> 1) @@ -51,6 +56,7 @@ class ApproxHistogramTest extends TestCase { assertTrue(ir.histogram.isEmpty) } + @Test def testMergeHistograms(): Unit = { val approxHistogram = new ApproxHistogram[String](3) val counts1: Map[String, Long] = Map("4" -> 4L, "2" -> 2) @@ -73,6 +79,7 @@ class ApproxHistogramTest extends TestCase { assertTrue(ir.sketch.isEmpty) } + @Test def testMergeHistogramsToSketch(): Unit = { val approxHistogram = new ApproxHistogram[String](3) val counts1: Map[String, Long] = Map("4" -> 4L, "3" -> 3) @@ -96,6 +103,7 @@ class ApproxHistogramTest extends TestCase { assertTrue(ir.histogram.isEmpty) } + @Test def testMergeSketchAndHistogram(): Unit = { val approxHistogram = new ApproxHistogram[String](3) val counts1: Map[String, Long] = Map("5" -> 5L, "3" -> 3, "2" -> 2, "1" -> 1) @@ -118,6 +126,7 @@ class ApproxHistogramTest extends TestCase { assert(ir.histogram.isEmpty) } + @Test def testNormalizeHistogram(): Unit = { val approxHistogram = new ApproxHistogram[String](3) val counts = (1L to 3).map(i => i.toString -> i).toMap @@ -128,6 +137,7 @@ class ApproxHistogramTest extends TestCase { assertEquals(ir, normalized) } + @Test def testNormalizeSketch(): Unit = { val approxHistogram = new ApproxHistogram[String](3) val counts = (1L to 4).map(i => i.toString -> i).toMap diff --git a/aggregator/src/test/scala/ai/chronon/aggregator/test/ApproxPercentilesTest.scala b/aggregator/src/test/scala/ai/chronon/aggregator/test/ApproxPercentilesTest.scala 
index 3eb8ff5647..2e4b5c4fa4 100644 --- a/aggregator/src/test/scala/ai/chronon/aggregator/test/ApproxPercentilesTest.scala +++ b/aggregator/src/test/scala/ai/chronon/aggregator/test/ApproxPercentilesTest.scala @@ -22,11 +22,14 @@ import ai.chronon.aggregator.row.StatsGenerator import com.yahoo.sketches.kll.KllFloatsSketch import junit.framework.TestCase import org.junit.Assert._ +import org.junit.Test import scala.util.Random class ApproxPercentilesTest extends TestCase { @transient lazy val logger = LoggerFactory.getLogger(getClass) + + @Test def testBasicImpl(nums: Int, slide: Int, k: Int, percentiles: Array[Double], errorPercent: Float): Unit = { val sorted = (0 to nums).map(_.toFloat) val elems = Random.shuffle(sorted.toList).toArray @@ -54,6 +57,7 @@ class ApproxPercentilesTest extends TestCase { diffs.foreach(diff => assertTrue(diff < errorMargin)) } + @Test def testBasicPercentiles: Unit = { val percentiles_tested: Int = 31 val percentiles: Array[Double] = (0 to percentiles_tested).toArray.map(i => i * 1.0 / percentiles_tested) @@ -72,6 +76,7 @@ class ApproxPercentilesTest extends TestCase { drift } + @Test def testPSIDrifts(): Unit = { assertTrue( getPSIDrift( diff --git a/aggregator/src/test/scala/ai/chronon/aggregator/test/FrequentItemsTest.scala b/aggregator/src/test/scala/ai/chronon/aggregator/test/FrequentItemsTest.scala index fac5349ae5..6274c3175d 100644 --- a/aggregator/src/test/scala/ai/chronon/aggregator/test/FrequentItemsTest.scala +++ b/aggregator/src/test/scala/ai/chronon/aggregator/test/FrequentItemsTest.scala @@ -3,11 +3,14 @@ package ai.chronon.aggregator.test import ai.chronon.aggregator.base.{FrequentItemType, FrequentItems, FrequentItemsFriendly, ItemsSketchIR} import junit.framework.TestCase import org.junit.Assert._ +import org.junit.Test import java.util import scala.jdk.CollectionConverters._ class FrequentItemsTest extends TestCase { + + @Test def testNonPowerOfTwoAndTruncate(): Unit = { val size = 3 val items = new FrequentItems[String](size) @@ -31,6 +34,7 @@ class FrequentItemsTest extends TestCase { result) } + @Test def testLessItemsThanSize(): Unit = { val size = 10 val items = new FrequentItems[java.lang.Long](size) @@ -53,6 +57,7 @@ class FrequentItemsTest extends TestCase { result) } + @Test def testZeroSize(): Unit = { val size = 0 val items = new FrequentItems[java.lang.Double](size) @@ -69,6 +74,7 @@ class FrequentItemsTest extends TestCase { assertEquals(new util.HashMap[String, Double](), result) } + @Test def testSketchSizes(): Unit = { val expectedSketchSizes = Map( @@ -88,6 +94,7 @@ class FrequentItemsTest extends TestCase { assertEquals(expectedSketchSizes, actualSketchSizes) } + @Test def testNormalization(): Unit = { val testValues = (1 to 4) .map(i => i -> i) @@ -119,6 +126,7 @@ class FrequentItemsTest extends TestCase { assertEquals(expectedStringValues, actualStringValues) } + @Test def testBulkMerge(): Unit = { val sketch = new FrequentItems[String](3) diff --git a/aggregator/src/test/scala/ai/chronon/aggregator/test/MinHeapTest.scala b/aggregator/src/test/scala/ai/chronon/aggregator/test/MinHeapTest.scala index 5cf5dda1a5..14e7a21c07 100644 --- a/aggregator/src/test/scala/ai/chronon/aggregator/test/MinHeapTest.scala +++ b/aggregator/src/test/scala/ai/chronon/aggregator/test/MinHeapTest.scala @@ -19,11 +19,14 @@ package ai.chronon.aggregator.test import ai.chronon.aggregator.base.MinHeap import junit.framework.TestCase import org.junit.Assert._ +import org.junit.Test import java.util import scala.collection.JavaConverters._ class 
MinHeapTest extends TestCase { + + @Test def testInserts(): Unit = { val mh = new MinHeap[Int](maxSize = 4, Ordering.Int) diff --git a/aggregator/src/test/scala/ai/chronon/aggregator/test/MomentTest.scala b/aggregator/src/test/scala/ai/chronon/aggregator/test/MomentTest.scala index 4c45eafa35..3997e39284 100644 --- a/aggregator/src/test/scala/ai/chronon/aggregator/test/MomentTest.scala +++ b/aggregator/src/test/scala/ai/chronon/aggregator/test/MomentTest.scala @@ -4,6 +4,7 @@ import ai.chronon.aggregator.base._ import junit.framework.TestCase import org.apache.commons.math3.stat.descriptive.moment.{Kurtosis => ApacheKurtosis, Skewness => ApacheSkew} import org.junit.Assert._ +import org.junit.Test class MomentTest extends TestCase { def makeAgg(aggregator: MomentAggregator, values: Seq[Double]): (MomentAggregator, MomentsIR) = { @@ -35,24 +36,28 @@ class MomentTest extends TestCase { assertEquals(expected(v1 ++ v2), agg.finalize(ir), 0.1) } + @Test def testUpdate(): Unit = { val values = Seq(1.1, 2.2, 3.3, 4.4, 5.5) assertUpdate(new Skew(), values, expectedSkew) assertUpdate(new Kurtosis(), values, expectedKurtosis) } + @Test def testInsufficientSizes(): Unit = { val values = Seq(1.1, 2.2, 3.3, 4.4) assertUpdate(new Skew(), values.take(2), _ => Double.NaN) assertUpdate(new Kurtosis(), values.take(3), _ => Double.NaN) } + @Test def testNoVariance(): Unit = { val values = Seq(1.0, 1.0, 1.0, 1.0) assertUpdate(new Skew(), values, _ => Double.NaN) assertUpdate(new Kurtosis(), values, _ => Double.NaN) } + @Test def testMerge(): Unit = { val values1 = Seq(1.1, 2.2, 3.3) val values2 = Seq(4.4, 5.5) @@ -60,6 +65,7 @@ class MomentTest extends TestCase { assertMerge(new Skew(), values1, values2, expectedSkew) } + @Test def testNormalize(): Unit = { val values = Seq(1.0, 2.0, 3.0, 4.0, 5.0) val (agg, ir) = makeAgg(new Kurtosis, values) diff --git a/aggregator/src/test/scala/ai/chronon/aggregator/test/RowAggregatorTest.scala b/aggregator/src/test/scala/ai/chronon/aggregator/test/RowAggregatorTest.scala index 58c25ce6a9..4e2b2a3751 100644 --- a/aggregator/src/test/scala/ai/chronon/aggregator/test/RowAggregatorTest.scala +++ b/aggregator/src/test/scala/ai/chronon/aggregator/test/RowAggregatorTest.scala @@ -20,6 +20,7 @@ import ai.chronon.aggregator.row.RowAggregator import ai.chronon.api._ import junit.framework.TestCase import org.junit.Assert._ +import org.junit.Test import java.util import scala.collection.JavaConverters._ @@ -49,6 +50,7 @@ object TestRow { } class RowAggregatorTest extends TestCase { + @Test def testUpdate(): Unit = { val rows = List( TestRow(1L, 4, 5.0f, "A", Seq(5, 3, 4), Seq("D", "A", "B", "A"), Map("A" -> 1, "B" -> 2)), diff --git a/aggregator/src/test/scala/ai/chronon/aggregator/test/SawtoothAggregatorTest.scala b/aggregator/src/test/scala/ai/chronon/aggregator/test/SawtoothAggregatorTest.scala index 60bb5fc2c3..cef363a711 100644 --- a/aggregator/src/test/scala/ai/chronon/aggregator/test/SawtoothAggregatorTest.scala +++ b/aggregator/src/test/scala/ai/chronon/aggregator/test/SawtoothAggregatorTest.scala @@ -25,6 +25,7 @@ import ai.chronon.api._ import com.google.gson.Gson import junit.framework.TestCase import org.junit.Assert._ +import org.junit.Test import java.util import scala.collection.mutable @@ -47,6 +48,7 @@ class Timer { class SawtoothAggregatorTest extends TestCase { + @Test def testTailAccuracy(): Unit = { val timer = new Timer val queries = CStream.genTimestamps(new Window(30, TimeUnit.DAYS), 10000, 5 * 60 * 1000) @@ -118,6 +120,7 @@ class SawtoothAggregatorTest 
extends TestCase { } } + @Test def testRealTimeAccuracy(): Unit = { val timer = new Timer val queries = CStream.genTimestamps(new Window(1, TimeUnit.DAYS), 1000) diff --git a/aggregator/src/test/scala/ai/chronon/aggregator/test/SawtoothOnlineAggregatorTest.scala b/aggregator/src/test/scala/ai/chronon/aggregator/test/SawtoothOnlineAggregatorTest.scala index 95ac9d37c6..30d2f0aa2d 100644 --- a/aggregator/src/test/scala/ai/chronon/aggregator/test/SawtoothOnlineAggregatorTest.scala +++ b/aggregator/src/test/scala/ai/chronon/aggregator/test/SawtoothOnlineAggregatorTest.scala @@ -23,6 +23,7 @@ import ai.chronon.api._ import com.google.gson.Gson import junit.framework.TestCase import org.junit.Assert.assertEquals +import org.junit.Test import java.time.{Instant, ZoneOffset} import java.time.format.DateTimeFormatter @@ -30,6 +31,7 @@ import java.util.Locale class SawtoothOnlineAggregatorTest extends TestCase { + @Test def testConsistency(): Unit = { val queryEndTs = TsUtils.round(System.currentTimeMillis(), WindowUtils.Day.millis) val batchEndTs = queryEndTs - WindowUtils.Day.millis diff --git a/aggregator/src/test/scala/ai/chronon/aggregator/test/TwoStackLiteAggregatorTest.scala b/aggregator/src/test/scala/ai/chronon/aggregator/test/TwoStackLiteAggregatorTest.scala index 4225842e1b..45895b7c88 100644 --- a/aggregator/src/test/scala/ai/chronon/aggregator/test/TwoStackLiteAggregatorTest.scala +++ b/aggregator/src/test/scala/ai/chronon/aggregator/test/TwoStackLiteAggregatorTest.scala @@ -22,12 +22,14 @@ import ai.chronon.aggregator.windowing.{TwoStackLiteAggregator, TwoStackLiteAggr import ai.chronon.api.{Aggregation, Builders, IntType, LongType, Operation, StructField, StructType, TimeUnit, Window} import junit.framework.TestCase import org.junit.Assert._ +import org.junit.Test import ai.chronon.api.Extensions.AggregationOps import com.google.gson.Gson import scala.collection.Seq class TwoStackLiteAggregatorTest extends TestCase{ + @Test def testBufferWithTopK(): Unit = { val topK = new TopK[Integer](IntType, 2) val bankersBuffer = new TwoStackLiteAggregationBuffer(topK, 5) @@ -53,6 +55,7 @@ class TwoStackLiteAggregatorTest extends TestCase{ assertBufferEquals(Seq(10), bankersBuffer.query) } + @Test def testAgainstSawtooth(): Unit = { val timer = new Timer val queries = CStream.genTimestamps(new Window(30, TimeUnit.DAYS), 100000, 5 * 60 * 1000) diff --git a/aggregator/src/test/scala/ai/chronon/aggregator/test/VarianceTest.scala b/aggregator/src/test/scala/ai/chronon/aggregator/test/VarianceTest.scala index 21f7b8a553..9c207b8a4a 100644 --- a/aggregator/src/test/scala/ai/chronon/aggregator/test/VarianceTest.scala +++ b/aggregator/src/test/scala/ai/chronon/aggregator/test/VarianceTest.scala @@ -20,6 +20,7 @@ import org.slf4j.LoggerFactory import ai.chronon.aggregator.base.Variance import junit.framework.TestCase import org.junit.Assert._ +import org.junit.Test class VarianceTest extends TestCase { @transient lazy val logger = LoggerFactory.getLogger(getClass) @@ -59,6 +60,7 @@ class VarianceTest extends TestCase { assertTrue((naiveResult - welfordResult) / naiveResult < 0.0000001) } + @Test def testVariance: Unit = { compare(1000000) compare(1000000, min = 100000, max = 100001) diff --git a/api/BUILD.bazel b/api/BUILD.bazel index 3a5baf4f03..ef55c23483 100644 --- a/api/BUILD.bazel +++ b/api/BUILD.bazel @@ -34,13 +34,14 @@ scala_library( ), ) -scala_test_suite( +scala_junit_test_suite( name = "test", srcs = glob(["src/test/scala/ai/chronon/api/test/*.scala"]), - visibility = ["//visibility:public"], + 
strip_prefix = "src/test/scala/ai/chronon/api/test", + visibility = ["//visibility:private"], deps = [ - ":api-models", ":api-lib", + ":api-models", "//third_party/java/spark:spark-libs", maven_artifact("com.fasterxml.jackson.core:jackson-core"), maven_artifact("com.fasterxml.jackson.core:jackson-databind"), diff --git a/flink/BUILD.bazel b/flink/BUILD.bazel index 333fbbc70a..bca5b96131 100644 --- a/flink/BUILD.bazel +++ b/flink/BUILD.bazel @@ -41,10 +41,11 @@ scala_library( ], ) -scala_test_suite( +scala_junit_test_suite( name = "test", srcs = glob(["src/test/scala/ai/chronon/flink/**/*.scala"]), - visibility = ["//visibility:public"], + strip_prefix = "src/test/scala/ai/chronon/flink", + visibility = ["//visibility:private"], deps = [ ":flink", ":test-lib", diff --git a/jvm/spark_repos.bzl b/jvm/spark_repos.bzl index 53da4ae798..d0484cf35a 100644 --- a/jvm/spark_repos.bzl +++ b/jvm/spark_repos.bzl @@ -6,6 +6,7 @@ spark_2_4_repo = repo(name = "spark_2_4", artifacts = [ "org.apache.curator:apache-curator:2.11.0", "org.apache.datasketches:datasketches-java:2.0.0", "org.apache.datasketches:datasketches-memory:1.3.0", + "org.apache.derby:derby:10.12.1.1", "org.apache.hive:hive-exec:1.2.1", versioned_artifacts("2.4.0", [ "org.apache.spark:spark-streaming_2.11", @@ -28,7 +29,6 @@ spark_3_1_repo = repo(name = "spark_3_1", artifacts = [ "org.apache.curator:apache-curator:2.12.0", "org.apache.datasketches:datasketches-java:2.0.0", "org.apache.datasketches:datasketches-memory:1.3.0", - "org.apache.hive:hive-exec:3.1.2", "org.apache.kafka:kafka_2.12:2.6.3", versioned_artifacts("3.1.1", [ "org.apache.spark:spark-streaming_2.12", @@ -43,9 +43,11 @@ spark_3_1_repo = repo(name = "spark_3_1", artifacts = [ "org.json4s:json4s-core_2.12", "org.json4s:json4s-jackson_2.12", ]), + "org.apache.derby:derby:10.12.1.1", "org.apache.hive:hive-metastore:2.3.9", + "org.apache.hive:hive-exec:2.3.9", "io.delta:delta-core_2.12:2.0.2", -], excluded_artifacts = ["org.slf4j:slf4j-log4j12"]) +], excluded_artifacts = ["org.slf4j:slf4j-log4j12", "org.pentaho:pentaho-aggdesigner-algorithm"]) spark_3_2_repo = repo( name = "spark_3_2", @@ -80,11 +82,14 @@ spark_3_2_repo = repo( "org.apache.avro:avro:1.8.2", "org.apache.avro:avro-mapred:1.8.2", "org.apache.hive:hive-metastore:2.3.9", - "org.apache.hive:hive-exec:3.1.2", + "org.apache.hive:hive-exec:2.3.9", # Monitoring "io.prometheus.jmx:jmx_prometheus_javaagent:0.20.0", "io.delta:delta-core_2.12:2.0.2", + + # Test + "org.apache.derby:derby:10.14.2.0", ], excluded_artifacts = [ "org.pentaho:pentaho-aggdesigner-algorithm", @@ -124,12 +129,15 @@ spark_3_5_repo = repo( "org.apache.avro:avro:1.8.2", "org.apache.avro:avro-mapred:1.8.2", "org.apache.hive:hive-metastore:2.3.9", - "org.apache.hive:hive-exec:3.1.2", + "org.apache.hive:hive-exec:2.3.9", # Monitoring "io.prometheus.jmx:jmx_prometheus_javaagent:0.20.0", "io.delta:delta-core_2.12:2.0.2", "io.delta:delta-core_2.13:2.0.2", + + # Test + "org.apache.derby:derby:10.14.2.0", ], excluded_artifacts = [ "org.pentaho:pentaho-aggdesigner-algorithm", diff --git a/online/BUILD.bazel b/online/BUILD.bazel index 19c606e1e7..9fe8addfd4 100644 --- a/online/BUILD.bazel +++ b/online/BUILD.bazel @@ -24,10 +24,10 @@ scala_library( ]), visibility = ["//visibility:public"], deps = [ - "//api:api-models", + "//aggregator", "//api:api-lib", + "//api:api-models", "//third_party/java/spark:spark-libs", - "//aggregator", maven_artifact("com.esotericsoftware:kryo"), scala_artifact("org.json4s:json4s-core"), scala_artifact("org.json4s:json4s-jackson"), @@ 
-61,10 +61,10 @@ scala_library( srcs = glob(["src/test/scala/ai/chronon/online/**/*.scala"]), visibility = ["//visibility:public"], deps = [ - "//api:api-models", - "//api:api-lib", ":online", "//aggregator", + "//api:api-lib", + "//api:api-models", "//third_party/java/spark:spark-libs", maven_artifact("com.esotericsoftware:kryo"), scala_artifact("org.json4s:json4s-core"), @@ -102,10 +102,11 @@ scala_library( ), ) -scala_test_suite( +scala_junit_test_suite( name = "test", srcs = glob(["src/test/scala/ai/chronon/online/**/*.scala"]), - visibility = ["//visibility:public"], + strip_prefix = "src/test/scala/ai/chronon/online", + visibility = ["//visibility:private"], deps = [ ":online", ":test-lib", @@ -113,6 +114,13 @@ scala_test_suite( "//api:api-lib", "//api:api-models", "//third_party/java/spark:spark-libs", + maven_artifact("com.fasterxml.jackson.core:jackson-core"), + maven_artifact("com.fasterxml.jackson.core:jackson-databind"), + scala_artifact("org.scalactic:scalactic"), + scala_artifact("org.scalatest:scalatest-matchers-core"), + scala_artifact("org.scalatest:scalatest-core"), + maven_artifact("org.scalatest:scalatest-compatible"), + scala_artifact("org.scalatest:scalatest-shouldmatchers"), scala_artifact("org.scalatestplus:mockito-3-4"), maven_artifact("org.mockito:mockito-core"), maven_artifact("org.apache.thrift:libthrift"), @@ -123,7 +131,9 @@ scala_test_suite( maven_artifact("com.github.ben-manes.caffeine:caffeine"), maven_artifact("junit:junit"), maven_artifact("com.novocode:junit-interface"), - ], + ] + select_for_scala_version(before_2_13 = [ + maven_artifact("com.fasterxml.jackson.module:jackson-module-scala_2.12"), + ]), ) genrule( diff --git a/online/src/test/scala/ai/chronon/online/test/DataStreamBuilderTest.scala b/online/src/test/scala/ai/chronon/online/test/DataStreamBuilderTest.scala index 74ec3a2113..4fd9638be0 100644 --- a/online/src/test/scala/ai/chronon/online/test/DataStreamBuilderTest.scala +++ b/online/src/test/scala/ai/chronon/online/test/DataStreamBuilderTest.scala @@ -29,7 +29,12 @@ import scala.util.ScalaJavaConversions.JListOps class DataStreamBuilderTest { @transient lazy val logger = LoggerFactory.getLogger(getClass) lazy val spark: SparkSession = { - System.setSecurityManager(null) + try { + System.setSecurityManager(null) + } catch { + case (t: java.lang.SecurityException) if t.getMessage.contains("GoogleTestSecurityManager") => + // Running on Bazel, allow it. 
+ } val spark = SparkSession .builder() .appName("DataStreamBuilderTest") diff --git a/service/BUILD.bazel b/service/BUILD.bazel index a41b4ba3b1..af9fcffca5 100644 --- a/service/BUILD.bazel +++ b/service/BUILD.bazel @@ -24,10 +24,13 @@ java_library( ], ) -scala_test_suite( +scala_junit_test_suite( name = "test", srcs = glob(["src/test/java/ai/chronon/service/handlers/*.java"]), - visibility = ["//visibility:public"], + strip_prefix = "src/test/java/ai/chronon/service/handlers", + # prefixes = ["ai.chronon.service.handlers"], + # suffixes = ["Test"], + visibility = ["//visibility:private"], deps = [ ":service", "//online", @@ -38,5 +41,6 @@ scala_test_suite( maven_artifact("io.vertx:vertx-unit"), maven_artifact("io.vertx:vertx-core"), maven_artifact("io.vertx:vertx-web"), + maven_artifact("io.vertx:vertx-codegen"), ], ) diff --git a/spark/BUILD.bazel b/spark/BUILD.bazel index b5edfb36b6..a81a050910 100644 --- a/spark/BUILD.bazel +++ b/spark/BUILD.bazel @@ -57,6 +57,7 @@ scala_library( "//api:api-models", "//online", "//third_party/java/spark:spark-libs", + "@bazel_tools//tools/java/runfiles", maven_artifact("com.google.code.gson:gson"), maven_artifact("org.apache.thrift:libthrift"), maven_artifact("com.google.guava:guava"), @@ -79,19 +80,30 @@ scala_library( ], ) -scala_test_suite( +scala_junit_test_suite( name = "test", + timeout = "eternal", srcs = glob(["src/test/scala/ai/chronon/spark/test/**/*.scala"]), - visibility = ["//visibility:public"], + data = [ + "//spark/src/test/resources", + "//tools/policies:derby.policy", + ], + jvm_flags = [ + "-Djava.security.policy=$(location //tools/policies:derby.policy)", + ], + resources = ["//spark/src/test/resources"], + strip_prefix = "src/test/scala/ai/chronon/spark/test", + visibility = ["//visibility:private"], deps = [ ":spark", - ":test-lib", "//aggregator", "//aggregator:test-lib", "//api:api-lib", "//api:api-models", "//online", "//third_party/java/spark:spark-libs", + "//third_party/java/spark:spark-test-libs", + "@bazel_tools//tools/java/runfiles", scala_artifact("org.scala-lang.modules:scala-java8-compat"), maven_artifact("junit:junit"), maven_artifact("com.novocode:junit-interface"), @@ -100,6 +112,11 @@ scala_test_suite( scala_artifact("com.fasterxml.jackson.module:jackson-module-scala"), maven_artifact("com.google.code.gson:gson"), scala_artifact("org.rogach:scallop"), + scala_artifact("org.scalactic:scalactic"), + scala_artifact("org.scalatest:scalatest-matchers-core"), + scala_artifact("org.scalatest:scalatest-core"), + maven_artifact("org.scalatest:scalatest-compatible"), + scala_artifact("org.scalatest:scalatest-shouldmatchers"), scala_artifact("org.scalatestplus:mockito-3-4"), maven_artifact("org.mockito:mockito-core"), maven_artifact("org.slf4j:slf4j-api"), diff --git a/spark/src/main/scala/ai/chronon/spark/SparkSessionBuilder.scala b/spark/src/main/scala/ai/chronon/spark/SparkSessionBuilder.scala index ebe6f6b1ad..285ee43b38 100644 --- a/spark/src/main/scala/ai/chronon/spark/SparkSessionBuilder.scala +++ b/spark/src/main/scala/ai/chronon/spark/SparkSessionBuilder.scala @@ -58,7 +58,12 @@ object SparkSessionBuilder { if (local) { //required to run spark locally with hive support enabled - for sbt test - System.setSecurityManager(null) + try { + System.setSecurityManager(null) + } catch { + case (t: java.lang.SecurityException) if t.getMessage.contains("GoogleTestSecurityManager") => + // Running on Bazel, allow it. 
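+          // (Context, as observed with Bazel's Java test runner: a
+          // GoogleTestSecurityManager is installed and replacing it is
+          // disallowed; clearing the manager is only needed for sbt-based
+          // runs, so swallowing the exception here is safe.)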
+ } } val userName = Properties.userName val warehouseDir = localWarehouseLocation.map(expandUser).getOrElse(DefaultWarehouseDir.getAbsolutePath) @@ -102,6 +107,7 @@ object SparkSessionBuilder { .config("spark.sql.warehouse.dir", s"$warehouseDir/data") .config("spark.hadoop.javax.jdo.option.ConnectionURL", metastoreDb) .config("spark.driver.bindAddress", "127.0.0.1") + .config("spark.ui.enabled", "false") } else { // hive jars need to be available on classpath - no needed for local testing baseBuilder @@ -131,6 +137,7 @@ object SparkSessionBuilder { .master("local[*]") .config("spark.local.dir", s"/tmp/$userName/chronon-spark-streaming") .config("spark.kryo.registrationRequired", "true") + .config("spark.ui.enabled", "false") } else { baseBuilder } diff --git a/spark/src/test/resources/BUILD.bazel b/spark/src/test/resources/BUILD.bazel new file mode 100644 index 0000000000..1f185ecf19 --- /dev/null +++ b/spark/src/test/resources/BUILD.bazel @@ -0,0 +1,5 @@ +filegroup( + name = "resources", + srcs = glob(["**"]), + visibility = ["//visibility:public"], +) diff --git a/spark/src/test/scala/ai/chronon/spark/test/ChainingFetcherTest.scala b/spark/src/test/scala/ai/chronon/spark/test/ChainingFetcherTest.scala index 61575dfe7f..f30db29a10 100644 --- a/spark/src/test/scala/ai/chronon/spark/test/ChainingFetcherTest.scala +++ b/spark/src/test/scala/ai/chronon/spark/test/ChainingFetcherTest.scala @@ -22,7 +22,7 @@ import ai.chronon.api import ai.chronon.api.Constants.ChrononMetadataKey import ai.chronon.api.Extensions.{JoinOps, MetadataOps} import ai.chronon.api._ -import ai.chronon.online.Fetcher.{Request} +import ai.chronon.online.Fetcher.Request import ai.chronon.online.{MetadataStore, SparkConversions} import ai.chronon.spark.Extensions._ import ai.chronon.spark.{Join => _, _} @@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.expressions.GenericRow import org.apache.spark.sql.functions.lit import org.apache.spark.sql.{DataFrame, Row, SparkSession} import org.junit.Assert.{assertEquals, assertTrue} +import org.junit.Test import java.lang import java.util.TimeZone @@ -311,6 +312,7 @@ class ChainingFetcherTest extends TestCase { assertEquals(0, diff.count()) } + @Test def testFetchParentJoin(): Unit = { val namespace = "parent_join_fetch" val joinConf = generateMutationData(namespace, Accuracy.TEMPORAL) @@ -318,6 +320,7 @@ class ChainingFetcherTest extends TestCase { compareTemporalFetch(joinConf, "2021-04-15", expected, fetcherResponse, "user") } + @Test def testFetchChainingDeterministic(): Unit = { val namespace = "chaining_fetch" val chainingJoinConf = generateChainingJoinData(namespace, Accuracy.TEMPORAL) diff --git a/spark/src/test/scala/ai/chronon/spark/test/ExampleDataUtils.scala b/spark/src/test/scala/ai/chronon/spark/test/ExampleDataUtils.scala new file mode 100644 index 0000000000..237da41895 --- /dev/null +++ b/spark/src/test/scala/ai/chronon/spark/test/ExampleDataUtils.scala @@ -0,0 +1,17 @@ +package ai.chronon.spark.test + +import com.google.devtools.build.runfiles.Runfiles +import java.io.File + +object ExampleDataUtils { + lazy val runfiles = Runfiles.create() + + def getExampleDataDirectory(): String = { + val confResource = getClass.getResource("/") + if (confResource != null) confResource.getPath + else runfiles.rlocation("chronon/spark/src/test/resources") + } + + def getExampleData(path: String): String = + new File(getExampleDataDirectory(), path).getPath +} diff --git a/spark/src/test/scala/ai/chronon/spark/test/FetchStatsTest.scala 
b/spark/src/test/scala/ai/chronon/spark/test/FetchStatsTest.scala index c6b3c1b258..f10fd848aa 100644 --- a/spark/src/test/scala/ai/chronon/spark/test/FetchStatsTest.scala +++ b/spark/src/test/scala/ai/chronon/spark/test/FetchStatsTest.scala @@ -32,6 +32,7 @@ import ai.chronon.spark.{Analyzer, Join, SparkSessionBuilder, TableUtils} import com.google.gson.GsonBuilder import junit.framework.TestCase import org.apache.spark.sql.SparkSession +import org.junit.Test import java.util.TimeZone import java.util.concurrent.Executors @@ -56,6 +57,7 @@ class FetchStatsTest extends TestCase { private val today = tableUtils.partitionSpec.at(System.currentTimeMillis()) private val yesterday = tableUtils.partitionSpec.before(today) + @Test def testFetchStats(): Unit = { // Part 1: Build the assets. Join definition, compute and serve stats. tableUtils.createDatabase(namespace) diff --git a/spark/src/test/scala/ai/chronon/spark/test/FetcherTest.scala b/spark/src/test/scala/ai/chronon/spark/test/FetcherTest.scala index 051ec1be73..25214a8fa6 100644 --- a/spark/src/test/scala/ai/chronon/spark/test/FetcherTest.scala +++ b/spark/src/test/scala/ai/chronon/spark/test/FetcherTest.scala @@ -33,6 +33,7 @@ import org.apache.spark.sql.catalyst.expressions.GenericRow import org.apache.spark.sql.functions.{avg, col, lit} import org.apache.spark.sql.{DataFrame, Row, SparkSession} import org.junit.Assert.{assertEquals, assertTrue} +import org.junit.Test import org.mockito.ArgumentMatchers.{any, anyString} import org.mockito.Mockito.{reset, spy, when} import org.slf4j.LoggerFactory @@ -655,12 +656,14 @@ class FetcherTest extends TestCase { assertEquals(0, diff.count()) } + @Test def testTemporalFetchJoinDeterministic(): Unit = { val namespace = "deterministic_fetch" val joinConf = generateMutationData(namespace) compareTemporalFetch(joinConf, "2021-04-10", namespace, consistencyCheck = false, dropDsOnWrite = true) } + @Test def testTemporalFetchJoinDerivation(): Unit = { val namespace = "derivation_fetch" val joinConf = generateMutationData(namespace) @@ -675,6 +678,7 @@ class FetcherTest extends TestCase { compareTemporalFetch(joinConf, "2021-04-10", namespace, consistencyCheck = false, dropDsOnWrite = true) } + @Test def testTemporalFetchJoinDerivationRenameOnly(): Unit = { val namespace = "derivation_fetch_rename_only" val joinConf = generateMutationData(namespace) @@ -685,6 +689,7 @@ class FetcherTest extends TestCase { compareTemporalFetch(joinConf, "2021-04-10", namespace, consistencyCheck = false, dropDsOnWrite = true) } + @Test def testTemporalFetchJoinGenerated(): Unit = { val namespace = "generated_fetch" val joinConf = generateRandomData(namespace) @@ -695,6 +700,7 @@ class FetcherTest extends TestCase { dropDsOnWrite = false) } + @Test def testTemporalTiledFetchJoinDeterministic(): Unit = { val namespace = "deterministic_tiled_fetch" val joinConf = generateEventOnlyData(namespace, groupByCustomJson = Some("{\"enable_tiling\": true}")) @@ -702,6 +708,7 @@ class FetcherTest extends TestCase { } // test soft-fail on missing keys + @Test def testEmptyRequest(): Unit = { val spark: SparkSession = createSparkSession() val namespace = "empty_request" @@ -727,6 +734,7 @@ class FetcherTest extends TestCase { assertTrue(responseMap.keys.forall(_.endsWith("_exception"))) } + @Test def testTemporalFetchGroupByNonExistKey(): Unit = { val namespace = "non_exist_key_group_by_fetch" val spark: SparkSession = createSparkSession() @@ -752,6 +760,7 @@ class FetcherTest extends TestCase { assertEquals(expected, 
result.head.values.get) } + @Test def testKVStorePartialFailure(): Unit = { val spark: SparkSession = createSparkSession() @@ -781,6 +790,7 @@ class FetcherTest extends TestCase { exceptionKeys.foreach(k => assertTrue(responseMap.contains(k))) } + @Test def testGroupByServingInfoTtlCacheRefresh(): Unit = { val namespace = "test_group_by_serving_info_ttl_cache_refresh" val spark: SparkSession = createSparkSession() @@ -817,6 +827,7 @@ class FetcherTest extends TestCase { assertTrue(response3.values.isSuccess) } + @Test def testJoinConfTtlCacheRefresh(): Unit = { val namespace = "test_join_conf_ttl_cache_refresh" val spark: SparkSession = createSparkSession() @@ -850,104 +861,4 @@ class FetcherTest extends TestCase { val response2 = fetch() assertTrue(response2.isSuccess) } -} - -object FetcherTestUtil { - @transient lazy val logger = LoggerFactory.getLogger(getClass) - def joinResponses(spark: SparkSession, - requests: Array[Request], - mockApi: MockApi, - useJavaFetcher: Boolean = false, - runCount: Int = 1, - samplePercent: Double = -1, - logToHive: Boolean = false, - debug: Boolean = false)(implicit ec: ExecutionContext): (List[Response], DataFrame) = { - val chunkSize = 100 - @transient lazy val fetcher = mockApi.buildFetcher(debug) - @transient lazy val javaFetcher = mockApi.buildJavaFetcher() - - def fetchOnce = { - var latencySum: Long = 0 - var latencyCount = 0 - val blockStart = System.currentTimeMillis() - val result = requests.iterator - .grouped(chunkSize) - .map { oldReqs => - // deliberately mis-type a few keys - val r = oldReqs - .map(r => - r.copy(keys = r.keys.mapValues { v => - if (v.isInstanceOf[java.lang.Long]) v.toString else v - }.toMap)) - val responses = if (useJavaFetcher) { - // Converting to java request and using the toScalaRequest functionality to test conversion - val convertedJavaRequests = r.map(new JavaRequest(_)).toJava - val javaResponse = javaFetcher.fetchJoin(convertedJavaRequests) - FutureConverters - .toScala(javaResponse) - .map( - _.toScala.map(jres => - Response( - Request(jres.request.name, jres.request.keys.toScala.toMap, Option(jres.request.atMillis)), - jres.values.toScala.map(_.toScala) - ))) - } else { - fetcher.fetchJoin(r) - } - - // fix mis-typed keys in the request - val fixedResponses = - responses.map(resps => resps.zip(oldReqs).map { case (resp, req) => resp.copy(request = req) }) - System.currentTimeMillis() -> fixedResponses - } - .flatMap { - case (start, future) => - val result = Await.result(future, Duration(10000, SECONDS)) // todo: change back to millis - val latency = System.currentTimeMillis() - start - latencySum += latency - latencyCount += 1 - result - } - .toList - val latencyMillis = latencySum.toFloat / latencyCount.toFloat - val qps = (requests.length * 1000.0) / (System.currentTimeMillis() - blockStart).toFloat - (latencyMillis, qps, result) - } - - // to overwhelm the profiler with fetching code path - // so as to make it prominent in the flamegraph & collect enough stats - - var latencySum = 0.0 - var qpsSum = 0.0 - var loggedValues: Seq[LoggableResponseBase64] = null - var result: List[Response] = null - (0 until runCount).foreach { _ => - val (latency, qps, resultVal) = fetchOnce - result = resultVal - loggedValues = mockApi.flushLoggedValues - latencySum += latency - qpsSum += qps - } - val fetcherNameString = if (useJavaFetcher) "Java" else "Scala" - - logger.info(s""" - |Averaging fetching stats for $fetcherNameString Fetcher over ${requests.length} requests $runCount times - |with batch size: $chunkSize - 
|average qps: ${qpsSum / runCount} - |average latency: ${latencySum / runCount} - |""".stripMargin) - val loggedDf = mockApi.loggedValuesToDf(loggedValues, spark) - if (logToHive) { - TableUtils(spark).insertPartitions( - loggedDf, - mockApi.logTable, - partitionColumns = Seq("ds", "name") - ) - } - if (samplePercent > 0) { - logger.info(s"logged count: ${loggedDf.count()}") - loggedDf.show() - } - result -> loggedDf - } -} +} \ No newline at end of file diff --git a/spark/src/test/scala/ai/chronon/spark/test/FetcherTestUtil.scala b/spark/src/test/scala/ai/chronon/spark/test/FetcherTestUtil.scala new file mode 100644 index 0000000000..92ca07b9e3 --- /dev/null +++ b/spark/src/test/scala/ai/chronon/spark/test/FetcherTestUtil.scala @@ -0,0 +1,112 @@ +package ai.chronon.spark.test + +import ai.chronon.online.Fetcher.{Request, Response} +import ai.chronon.online.{JavaRequest, LoggableResponseBase64} +import ai.chronon.spark.TableUtils +import org.apache.spark.sql.{DataFrame, SparkSession} +import org.slf4j.LoggerFactory + +import scala.compat.java8.FutureConverters +import scala.concurrent.{Await, ExecutionContext} +import scala.concurrent.duration.{Duration, SECONDS} +import scala.util.ScalaJavaConversions._ + +object FetcherTestUtil { + @transient lazy val logger = LoggerFactory.getLogger(getClass) + def joinResponses(spark: SparkSession, + requests: Array[Request], + mockApi: MockApi, + useJavaFetcher: Boolean = false, + runCount: Int = 1, + samplePercent: Double = -1, + logToHive: Boolean = false, + debug: Boolean = false)(implicit ec: ExecutionContext): (List[Response], DataFrame) = { + val chunkSize = 100 + @transient lazy val fetcher = mockApi.buildFetcher(debug) + @transient lazy val javaFetcher = mockApi.buildJavaFetcher() + + def fetchOnce = { + var latencySum: Long = 0 + var latencyCount = 0 + val blockStart = System.currentTimeMillis() + val result = requests.iterator + .grouped(chunkSize) + .map { oldReqs => + // deliberately mis-type a few keys + val r = oldReqs + .map(r => + r.copy(keys = r.keys.mapValues { v => + if (v.isInstanceOf[java.lang.Long]) v.toString else v + }.toMap)) + val responses = if (useJavaFetcher) { + // Converting to java request and using the toScalaRequest functionality to test conversion + val convertedJavaRequests = r.map(new JavaRequest(_)).toJava + val javaResponse = javaFetcher.fetchJoin(convertedJavaRequests) + FutureConverters + .toScala(javaResponse) + .map( + _.toScala.map(jres => + Response( + Request(jres.request.name, jres.request.keys.toScala.toMap, Option(jres.request.atMillis)), + jres.values.toScala.map(_.toScala) + ))) + } else { + fetcher.fetchJoin(r) + } + + // fix mis-typed keys in the request + val fixedResponses = + responses.map(resps => resps.zip(oldReqs).map { case (resp, req) => resp.copy(request = req) }) + System.currentTimeMillis() -> fixedResponses + } + .flatMap { + case (start, future) => + val result = Await.result(future, Duration(10000, SECONDS)) // todo: change back to millis + val latency = System.currentTimeMillis() - start + latencySum += latency + latencyCount += 1 + result + } + .toList + val latencyMillis = latencySum.toFloat / latencyCount.toFloat + val qps = (requests.length * 1000.0) / (System.currentTimeMillis() - blockStart).toFloat + (latencyMillis, qps, result) + } + + // to overwhelm the profiler with fetching code path + // so as to make it prominent in the flamegraph & collect enough stats + + var latencySum = 0.0 + var qpsSum = 0.0 + var loggedValues: Seq[LoggableResponseBase64] = null + var result: 
List[Response] = null + (0 until runCount).foreach { _ => + val (latency, qps, resultVal) = fetchOnce + result = resultVal + loggedValues = mockApi.flushLoggedValues + latencySum += latency + qpsSum += qps + } + val fetcherNameString = if (useJavaFetcher) "Java" else "Scala" + + logger.info(s""" + |Averaging fetching stats for $fetcherNameString Fetcher over ${requests.length} requests $runCount times + |with batch size: $chunkSize + |average qps: ${qpsSum / runCount} + |average latency: ${latencySum / runCount} + |""".stripMargin) + val loggedDf = mockApi.loggedValuesToDf(loggedValues, spark) + if (logToHive) { + TableUtils(spark).insertPartitions( + loggedDf, + mockApi.logTable, + partitionColumns = Seq("ds", "name") + ) + } + if (samplePercent > 0) { + logger.info(s"logged count: ${loggedDf.count()}") + loggedDf.show() + } + result -> loggedDf + } +} diff --git a/spark/src/test/scala/ai/chronon/spark/test/MetadataExporterTest.scala b/spark/src/test/scala/ai/chronon/spark/test/MetadataExporterTest.scala index 9fbfde24c3..5fe00489e3 100644 --- a/spark/src/test/scala/ai/chronon/spark/test/MetadataExporterTest.scala +++ b/spark/src/test/scala/ai/chronon/spark/test/MetadataExporterTest.scala @@ -27,6 +27,7 @@ import org.apache.spark.sql.SparkSession import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.module.scala.DefaultScalaModule import org.junit.Assert.assertEquals +import org.junit.Test import scala.io.Source import java.io.File @@ -62,6 +63,7 @@ class MetadataExporterTest extends TestCase { } } + @Test def testMetadataExport(): Unit = { // Create the tables. val namespace = "example_namespace" @@ -78,10 +80,10 @@ class MetadataExporterTest extends TestCase { val sampleDf = DataFrameGen .events(spark, sampleData, 10000, partitions = 30) sampleDf.save(sampleTable) - val confResource = getClass.getResource("/") + val confResourcePath = ExampleDataUtils.getExampleDataDirectory() val tmpDir: File = Files.createTempDir() - MetadataExporter.run(confResource.getPath, tmpDir.getAbsolutePath) - printFilesInDirectory(s"${confResource.getPath}/joins/team") + MetadataExporter.run(confResourcePath, tmpDir.getAbsolutePath) + printFilesInDirectory(s"${confResourcePath}/joins/team") printFilesInDirectory(s"${tmpDir.getAbsolutePath}/joins") // Read the files. 
val file = Source.fromFile(s"${tmpDir.getAbsolutePath}/joins/example_join.v1") diff --git a/spark/src/test/scala/ai/chronon/spark/test/MetadataStoreTest.scala b/spark/src/test/scala/ai/chronon/spark/test/MetadataStoreTest.scala index 8cfd0b5526..5d5378be5a 100644 --- a/spark/src/test/scala/ai/chronon/spark/test/MetadataStoreTest.scala +++ b/spark/src/test/scala/ai/chronon/spark/test/MetadataStoreTest.scala @@ -6,6 +6,7 @@ import ai.chronon.online.MetadataEndPoint.NameByTeamEndPointName import ai.chronon.online.{MetadataDirWalker, MetadataEndPoint, MetadataStore} import junit.framework.TestCase import org.junit.Assert.{assertEquals, assertFalse, assertTrue} +import org.junit.Test import scala.concurrent.duration.Duration import scala.concurrent.{Await, Future} @@ -13,8 +14,8 @@ import scala.io.Source class MetadataStoreTest extends TestCase { val joinPath = "joins/team/example_join.v1" - val confResource = getClass.getResource(s"/$joinPath") - val src = Source.fromFile(confResource.getPath) + val confResourcePath = ExampleDataUtils.getExampleData(joinPath) + val src = Source.fromFile(confResourcePath) val expected = { try src.mkString @@ -23,6 +24,7 @@ class MetadataStoreTest extends TestCase { val acceptedEndPoints = List(MetadataEndPoint.ConfByKeyEndPointName, MetadataEndPoint.NameByTeamEndPointName) + @Test def testMetadataStoreSingleFile(): Unit = { val inMemoryKvStore = OnlineUtils.buildInMemoryKVStore("FetcherTest") val singleFileDataSet = ChrononMetadataKey @@ -31,7 +33,7 @@ class MetadataStoreTest extends TestCase { inMemoryKvStore.create(singleFileDataSet) inMemoryKvStore.create(NameByTeamEndPointName) // set the working directory to /chronon instead of $MODULE_DIR in configuration if Intellij fails testing - val singleFileDirWalker = new MetadataDirWalker(confResource.getPath, acceptedEndPoints) + val singleFileDirWalker = new MetadataDirWalker(confResourcePath, acceptedEndPoints) val singleFileKvMap = singleFileDirWalker.run val singleFilePut: Seq[Future[scala.collection.Seq[Boolean]]] = singleFileKvMap.toSeq.map { case (endPoint, kvMap) => singleFileMetadataStore.put(kvMap, endPoint) @@ -56,6 +58,7 @@ class MetadataStoreTest extends TestCase { assertFalse(emptyRes.latest.isSuccess) } + @Test def testMetadataStoreDirectory(): Unit = { val inMemoryKvStore = OnlineUtils.buildInMemoryKVStore("FetcherTest") val directoryDataSetDataSet = ChrononMetadataKey @@ -63,7 +66,7 @@ class MetadataStoreTest extends TestCase { val directoryMetadataStore = new MetadataStore(inMemoryKvStore, directoryDataSetDataSet, timeoutMillis = 10000) inMemoryKvStore.create(directoryDataSetDataSet) inMemoryKvStore.create(directoryMetadataDataSet) - val directoryDataDirWalker = new MetadataDirWalker(confResource.getPath.replace(s"/$joinPath", ""), acceptedEndPoints) + val directoryDataDirWalker = new MetadataDirWalker(ExampleDataUtils.getExampleDataDirectory(), acceptedEndPoints) val directoryDataKvMap = directoryDataDirWalker.run val directoryPut = directoryDataKvMap.toSeq.map { case (endPoint, kvMap) => directoryMetadataStore.put(kvMap, endPoint) diff --git a/spark/src/test/scala/ai/chronon/spark/test/SchemaEvolutionTest.scala b/spark/src/test/scala/ai/chronon/spark/test/SchemaEvolutionTest.scala index 8bb7a132dc..1b8ab86120 100644 --- a/spark/src/test/scala/ai/chronon/spark/test/SchemaEvolutionTest.scala +++ b/spark/src/test/scala/ai/chronon/spark/test/SchemaEvolutionTest.scala @@ -27,6 +27,7 @@ import junit.framework.TestCase import org.apache.spark.sql.functions.{col, lit} import 
org.apache.spark.sql.{DataFrame, Row, SparkSession}
 import org.junit.Assert.{assertEquals, assertFalse, assertNotEquals, assertTrue}
+import org.junit.Test
 import java.nio.charset.StandardCharsets
 import java.util.{Base64, TimeZone}
@@ -35,31 +36,6 @@ import scala.concurrent.Await
 import scala.concurrent.duration.{Duration, SECONDS}
 import scala.util.ScalaJavaConversions.{JListOps, ListOps}
-case class GroupByTestSuite(
-    name: String,
-    groupByConf: GroupBy,
-    groupByData: DataFrame
-)
-
-case class JoinTestSuite(
-    joinConf: Join,
-    groupBys: Seq[GroupByTestSuite],
-    fetchExpectations: (Map[String, AnyRef], Map[String, AnyRef])
-)
-
-object JoinTestSuite {
-
-  def apply(joinConf: Join, groupBys: Seq[GroupByTestSuite]): JoinTestSuite = {
-    val suite = JoinTestSuite(joinConf, groupBys)
-    assert(
-      groupBys.map(_.groupByConf.metaData.name) ==
-        joinConf.joinParts.toScala
-          .map(_.groupBy.metaData.name)
-    )
-    suite
-  }
-}
-
 class SchemaEvolutionTest extends TestCase {
 val spark: SparkSession = SparkSessionBuilder.build("SchemaEvolutionTest", local = true)
@@ -428,11 +404,13 @@
 assertTrue(removedFeatures.forall(flattenedDf34.schema.fieldNames.contains(_)))
 }
+  @Test
 def testAddFeatures(): Unit = {
 val namespace = "add_features"
 testSchemaEvolution(namespace, createV1Join(namespace), createV2Join(namespace))
 }
+  @Test
 def testRemoveFeatures(): Unit = {
 val namespace = "remove_features"
 testSchemaEvolution(namespace, createV2Join(namespace), createV1Join(namespace))
diff --git a/spark/src/test/scala/ai/chronon/spark/test/StreamingTest.scala b/spark/src/test/scala/ai/chronon/spark/test/StreamingTest.scala
index a8a48c1b33..56cd4d0774 100644
--- a/spark/src/test/scala/ai/chronon/spark/test/StreamingTest.scala
+++ b/spark/src/test/scala/ai/chronon/spark/test/StreamingTest.scala
@@ -22,14 +22,14 @@ import ai.chronon.api.{Accuracy, Builders, Constants, Operation, TimeUnit, Windo
 import ai.chronon.api.Constants.ChrononMetadataKey
 import ai.chronon.api.Extensions._
 import ai.chronon.spark.test.StreamingTest.buildInMemoryKvStore
-import ai.chronon.online.{MetadataStore}
+import ai.chronon.online.MetadataStore
 import ai.chronon.spark.Extensions._
 import ai.chronon.spark.{Join => _, _}
 import junit.framework.TestCase
-import org.apache.spark.sql.{SparkSession}
+import org.apache.spark.sql.SparkSession
+import org.junit.Test
 import java.util.TimeZone
-
 import scala.collection.JavaConverters.{asScalaBufferConverter, _}
 object StreamingTest {
@@ -49,6 +49,7 @@ class StreamingTest extends TestCase {
 private val yesterday = tableUtils.partitionSpec.before(today)
 private val yearAgo = tableUtils.partitionSpec.minus(today, new Window(365, TimeUnit.DAYS))
+  @Test
 def testStructInStreaming(): Unit = {
 tableUtils.createDatabase(namespace)
 val topicName = "fake_topic"
diff --git a/spark/src/test/scala/ai/chronon/spark/test/TestUtils.scala b/spark/src/test/scala/ai/chronon/spark/test/TestUtils.scala
index 8fe03d6563..a2fe851d72 100644
--- a/spark/src/test/scala/ai/chronon/spark/test/TestUtils.scala
+++ b/spark/src/test/scala/ai/chronon/spark/test/TestUtils.scala
@@ -25,7 +25,32 @@ import ai.chronon.spark.TableUtils
import org.apache.spark.sql.functions.col
 import org.apache.spark.sql.{DataFrame, Row, SparkSession}
-import scala.util.ScalaJavaConversions.JListOps
+import scala.util.ScalaJavaConversions.{JListOps, ListOps}
+
+case class GroupByTestSuite(
+    name: String,
+    groupByConf: GroupBy,
+    groupByData: DataFrame
+)
+
+case class JoinTestSuite(
+    joinConf: Join,
+    groupBys: Seq[GroupByTestSuite],
+    fetchExpectations: (Map[String, AnyRef], Map[String, AnyRef])
+)
+
+object JoinTestSuite {
+
+  def apply(joinConf: Join, groupBys: Seq[GroupByTestSuite]): JoinTestSuite = {
+    val suite = JoinTestSuite(joinConf, groupBys)
+    assert(
+      groupBys.map(_.groupByConf.metaData.name) ==
+        joinConf.joinParts.toScala
+          .map(_.groupBy.metaData.name)
+    )
+    suite
+  }
+}
 object TestUtils {
 def createViewsGroupBy(namespace: String,
diff --git a/third_party/java/spark/BUILD.bazel b/third_party/java/spark/BUILD.bazel
index 692b462f59..c2f653cb02 100644
--- a/third_party/java/spark/BUILD.bazel
+++ b/third_party/java/spark/BUILD.bazel
@@ -96,6 +96,13 @@ SPARK_3_5_LIBS_PROVIDED = SPARK_LIBS_PROVIDED + [
 ),
 ]
+SPARK_LIBS_TEST = [
+    jar(
+        name = "derby",
+        org = "org.apache.derby",
+    ),
+]
+
 java_library(
 name = "spark-libs",
 visibility = ["//visibility:public"],
@@ -107,6 +114,17 @@ java_library(
 }),
 )
+java_library(
+    name = "spark-test-libs",
+    visibility = ["//visibility:public"],
+    exports = select({
+        "//conditions:default": get_jars_for_repo("spark_3_2", SPARK_LIBS_TEST),
+        "//tools/flags/spark:spark_3_1": get_jars_for_repo("spark_3_1", SPARK_LIBS_TEST),
+        "//tools/flags/spark:spark_3_2": get_jars_for_repo("spark_3_2", SPARK_LIBS_TEST),
+        "//tools/flags/spark:spark_3_5": get_jars_for_repo("spark_3_5", SPARK_LIBS_TEST),
+    }),
+)
+
 # Usually spark provided jars are subset of all jars
 java_library(
 name = "spark-provided-libs",
diff --git a/tools/build_rules/prelude_bazel b/tools/build_rules/prelude_bazel
index eb693d0d15..10481f56f0 100644
--- a/tools/build_rules/prelude_bazel
+++ b/tools/build_rules/prelude_bazel
@@ -8,6 +8,7 @@ load(
 "scala_jar")
 load("@rules_java//java:defs.bzl", "java_library","java_binary")
-load("@io_bazel_rules_scala//scala:scala.bzl", "scala_library", "scala_binary","scala_test_suite")
+load("@io_bazel_rules_scala//scala:scala.bzl", "scala_library", "scala_binary", "scala_junit_test", "scala_test_suite")
 load("//tools/build_rules:maven_artifact.bzl", "maven_artifact", "scala_artifact")
 load("//tools/build_rules:jvm_binary.bzl", "jvm_binary")
+load("//tools/build_rules:testing.bzl", "scala_junit_test_suite")
diff --git a/tools/build_rules/testing.bzl b/tools/build_rules/testing.bzl
new file mode 100644
index 0000000000..61007332f8
--- /dev/null
+++ b/tools/build_rules/testing.bzl
@@ -0,0 +1,50 @@
+load("@io_bazel_rules_scala//scala:scala.bzl", "scala_junit_test", "scala_library")
+
+def is_junit_test(path):
+    return path.endswith("Test.scala") or path.endswith("Test.java")
+
+def make_short_name(path, strip_prefix):
+    if path.startswith(strip_prefix):
+        short_name = path[len(strip_prefix):]
+    else:
+        short_name = path
+    if short_name.startswith("/"):
+        short_name = short_name[1:]
+    return short_name.replace("/", "_").replace(".scala", "").replace(".java", "")
+
+def scala_junit_test_suite(name, srcs, strip_prefix, **kwargs):
+    test_deps = kwargs.pop("deps", [])
+    jvm_flags = kwargs.pop("jvm_flags", [])
+    timeout = kwargs.pop("timeout", "moderate")
+
+    util_srcs = [src for src in srcs if not is_junit_test(src)]
+    if len(util_srcs) > 0:
+        test_utils = "{}_utils".format(name)
+        scala_library(
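+            # Shared non-test helper sources are compiled once into this library,
+            # which every generated per-file test target then depends on.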
name = test_utils,
+            srcs = util_srcs,
+            deps = test_deps,
+            **kwargs
+        )
+        test_deps = test_deps + [":{}".format(test_utils)]  # concatenate; the caller's deps list may be frozen
+
+    tests = []
+    for src in srcs:
+        if is_junit_test(src):
+            test_name = "{}_{}".format(name, make_short_name(src, strip_prefix))
+            tests.append(test_name)
+            scala_junit_test(
+                name = test_name,
+                srcs = [src],
+                suffixes = ["Test"],
+                timeout = timeout,
+                deps = test_deps,
+                jvm_flags = jvm_flags,
+                **kwargs
+            )
+
+    native.test_suite(
+        name = name,
+        tests = tests,
+    )
diff --git a/tools/ide_support/intellij/default_view.bazelproject b/tools/ide_support/intellij/default_view.bazelproject
new file mode 100644
index 0000000000..d87cdc2369
--- /dev/null
+++ b/tools/ide_support/intellij/default_view.bazelproject
@@ -0,0 +1,16 @@
+directories:
+  # Add the directories you want added as source here
+  # By default, we've added your entire workspace ('.')
+  .
+
+# Automatically includes all relevant targets under the 'directories' above
+derive_targets_from_directories: true
+
+targets:
+  # If source code isn't resolving, add additional targets that compile it here
+
+additional_languages:
+  # Uncomment any additional languages you want supported
+  python
+  scala
+  java
diff --git a/tools/policies/BUILD.bazel b/tools/policies/BUILD.bazel
new file mode 100644
index 0000000000..b8ede830b0
--- /dev/null
+++ b/tools/policies/BUILD.bazel
@@ -0,0 +1 @@
+exports_files(["derby.policy"])
diff --git a/tools/policies/derby.policy b/tools/policies/derby.policy
new file mode 100644
index 0000000000..7d7fddbc7f
--- /dev/null
+++ b/tools/policies/derby.policy
@@ -0,0 +1,9 @@
+// This file ensures that tests running under Bazel will have the correct permissions
+// to use Derby as a backend for the Hive metastore.
+//
+// See: https://db.apache.org/derby/docs/10.13/security/csecembeddedperms.html
+grant {
+  permission java.lang.RuntimePermission "createClassLoader";
+  permission org.apache.derby.security.SystemPermission "engine", "usederbyinternals";
+  permission java.util.PropertyPermission "derby.*", "read";
+};

From 5cc1c5ef52dc4ddc33675de78f0e3780dbe341f1 Mon Sep 17 00:00:00 2001
From: Tom Switzer
Date: Wed, 14 May 2025 17:05:16 -0400
Subject: [PATCH 2/2] Whoops.
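Remove the bundled .ijwb/aspects files that were unintentionally checked in
with the previous commit; they are machine-generated by the IntelliJ Bazel
plugin and don't need to be tracked in the repo.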
--- .ijwb/aspects/BUILD.bazel | 16 - .ijwb/aspects/artifacts.bzl | 70 - .ijwb/aspects/code_generator_info.bzl | 19 - .ijwb/aspects/fast_build_info_bundled.bzl | 149 -- .ijwb/aspects/flag_hack.bzl | 32 - .ijwb/aspects/intellij_info_bundled.bzl | 100 -- .ijwb/aspects/intellij_info_impl_bundled.bzl | 1391 ------------------ .ijwb/aspects/java_classpath.bzl | 37 - .ijwb/aspects/java_info.bzl | 23 - .ijwb/aspects/make_variables.bzl | 216 --- .ijwb/aspects/python_info.bzl | 24 - .ijwb/aspects/xcode_query.bzl | 22 - 12 files changed, 2099 deletions(-) delete mode 100644 .ijwb/aspects/BUILD.bazel delete mode 100644 .ijwb/aspects/artifacts.bzl delete mode 100644 .ijwb/aspects/code_generator_info.bzl delete mode 100644 .ijwb/aspects/fast_build_info_bundled.bzl delete mode 100644 .ijwb/aspects/flag_hack.bzl delete mode 100644 .ijwb/aspects/intellij_info_bundled.bzl delete mode 100644 .ijwb/aspects/intellij_info_impl_bundled.bzl delete mode 100644 .ijwb/aspects/java_classpath.bzl delete mode 100644 .ijwb/aspects/java_info.bzl delete mode 100644 .ijwb/aspects/make_variables.bzl delete mode 100644 .ijwb/aspects/python_info.bzl delete mode 100644 .ijwb/aspects/xcode_query.bzl diff --git a/.ijwb/aspects/BUILD.bazel b/.ijwb/aspects/BUILD.bazel deleted file mode 100644 index 1797ffe5e7..0000000000 --- a/.ijwb/aspects/BUILD.bazel +++ /dev/null @@ -1,16 +0,0 @@ -# -# Description: -# The final form of the BUILD file accessed at runtime as an external WORKSPACE. -# - -licenses(["notice"]) # Apache 2.0 - -load(":flag_hack.bzl", "define_flag_hack") - -exports_files([ - "tools/PackageParser_deploy.jar", - "tools/CreateAar_deploy.jar", - "tools/JarFilter_deploy.jar", -]) - -define_flag_hack() diff --git a/.ijwb/aspects/artifacts.bzl b/.ijwb/aspects/artifacts.bzl deleted file mode 100644 index bc7b403656..0000000000 --- a/.ijwb/aspects/artifacts.bzl +++ /dev/null @@ -1,70 +0,0 @@ -"""Utility methods for working with ArtifactLocation types.""" - -def struct_omit_none(**kwargs): - """A replacement for standard `struct` function that omits the fields with None value.""" - d = {name: kwargs[name] for name in kwargs if kwargs[name] != None} - return struct(**d) - -def sources_from_target(ctx): - """Get the list of sources from a target as artifact locations.""" - return artifacts_from_target_list_attr(ctx, "srcs") - -def artifacts_from_target_list_attr(ctx, attr_name): - """Converts a list of targets to a list of artifact locations.""" - return [ - artifact_location(f) - for target in getattr(ctx.rule.attr, attr_name, []) - for f in target.files.to_list() - ] - -def artifact_location(f): - """Creates an ArtifactLocation proto from a File.""" - if f == None: - return None - - return to_artifact_location( - f.path, - f.root.path if not f.is_source else "", - f.is_source, - is_external_artifact(f.owner), - ) - -def to_artifact_location(exec_path, root_exec_path_fragment, is_source, is_external): - """Derives workspace path from other path fragments, and creates an ArtifactLocation proto.""" - - # directory structure: - # exec_path = (../repo_name)? + (root_fragment)? 
+ relative_path - relative_path = _strip_external_workspace_prefix(exec_path) - relative_path = _strip_root_exec_path_fragment(relative_path, root_exec_path_fragment) - - root_exec_path_fragment = exec_path[:-(len("/" + relative_path))] - - return struct_omit_none( - relative_path = relative_path, - is_source = is_source, - is_external = is_external, - root_execution_path_fragment = root_exec_path_fragment - ) - -def is_external_artifact(label): - """Determines whether a label corresponds to an external artifact.""" - - # Label.EXTERNAL_PATH_PREFIX is due to change from 'external' to '..' in Bazel 0.4.5. - # This code is for forwards and backwards compatibility. - # Remove the 'external' check when Bazel 0.4.4 and earlier no longer need to be supported. - return label.workspace_root.startswith("external") or label.workspace_root.startswith("..") - -def _strip_root_exec_path_fragment(path, root_fragment): - if root_fragment and path.startswith(root_fragment + "/"): - return path[len(root_fragment + "/"):] - return path - -def _strip_external_workspace_prefix(path): - """Either 'external/workspace_name/' or '../workspace_name/'.""" - - # Label.EXTERNAL_PATH_PREFIX is due to change from 'external' to '..' in Bazel 0.4.5. - # This code is for forwards and backwards compatibility. - # Remove the 'external/' check when Bazel 0.4.4 and earlier no longer need to be supported. - if path.startswith("../") or path.startswith("external/"): - return "/".join(path.split("/")[2:]) - return path diff --git a/.ijwb/aspects/code_generator_info.bzl b/.ijwb/aspects/code_generator_info.bzl deleted file mode 100644 index 7b5e598e13..0000000000 --- a/.ijwb/aspects/code_generator_info.bzl +++ /dev/null @@ -1,19 +0,0 @@ -"""Data required for the code-generator system""" - -# The following is a list of the languages to the set of Rule names -# which can be considered code-generators for that language. Look -# for the `get_code_generator_rule_names` function in the aspect -# logic that integrates with this constant. - -CODE_GENERATOR_RULE_NAMES = struct( -# TEMPLATE-INCLUDE-BEGIN - generic = [ - ], - java = [ - ], - python = [ - ], - scala = [ - ], -# TEMPLATE-INCLUDE-END -) diff --git a/.ijwb/aspects/fast_build_info_bundled.bzl b/.ijwb/aspects/fast_build_info_bundled.bzl deleted file mode 100644 index bb9326c24c..0000000000 --- a/.ijwb/aspects/fast_build_info_bundled.bzl +++ /dev/null @@ -1,149 +0,0 @@ -"""An aspect to gather info needed by the FastBuildService.""" - -load( - ":artifacts.bzl", - "artifact_location", - "sources_from_target", - "struct_omit_none", -) -load( - ":intellij_info_impl_bundled.bzl", - "stringify_label", -) -load(":java_info.bzl", "get_java_info") - -_DEP_ATTRS = ["deps", "exports", "runtime_deps", "_java_toolchain"] - -def _get_android_ide_info(target): - if hasattr(android_common, "AndroidIdeInfo") and android_common.AndroidIdeInfo in target: - return target[android_common.AndroidIdeInfo] - if hasattr(target, "android"): - return target.android - return None - -def _fast_build_info_impl(target, ctx): - dep_targets = _get_all_dep_targets(target, ctx) - dep_outputs = _get_all_dep_outputs(dep_targets) - - output_files = [] - - info = { - "workspace_name": ctx.workspace_name, - "label": stringify_label(target.label), - "dependencies": [stringify_label(t.label) for t in dep_targets], - } - - write_output = False - if hasattr(ctx.rule.attr, "data") and ctx.rule.attr.data: - # The data attribute can reference artifacts directly (like deploy jars) that the aspect - # will skip. 
So we need to gather them up here, in the referencing target. - write_output = True - info["data"] = [ - struct( - label = stringify_label(datadep.label), - artifacts = [artifact_location(file) for file in datadep.files.to_list()], - ) - for datadep in ctx.rule.attr.data - ] - - if hasattr(target, "java_toolchain"): - toolchain = target.java_toolchain - elif java_common.JavaToolchainInfo != platform_common.ToolchainInfo and \ - java_common.JavaToolchainInfo in target: - toolchain = target[java_common.JavaToolchainInfo] - else: - toolchain = None - if toolchain: - write_output = True - javac_jars = [] - if hasattr(toolchain, "tools"): - javac_jars = [artifact_location(f) for f in toolchain.tools.to_list()] - bootclasspath_jars = [] - if hasattr(toolchain, "bootclasspath"): - bootclasspath_jars = [artifact_location(f) for f in toolchain.bootclasspath.to_list()] - info["java_toolchain_info"] = struct_omit_none( - javac_jars = javac_jars, - bootclasspath_jars = bootclasspath_jars, - source_version = toolchain.source_version, - target_version = toolchain.target_version, - ) - java_info = get_java_info(target) - if java_info: - write_output = True - launcher = None - if hasattr(ctx.rule.attr, "use_launcher") and not ctx.rule.attr.use_launcher: - launcher = None - elif hasattr(ctx.rule.attr, "launcher") and ctx.rule.attr.launcher: - launcher = stringify_label(ctx.rule.attr.launcher.label) - elif hasattr(ctx.rule.attr, "_java_launcher") and ctx.rule.attr._java_launcher: - # TODO: b/295221112 - remove _java_launcher when it's removed from Java rules - launcher = stringify_label(ctx.rule.attr._java_launcher.label) - elif hasattr(ctx.rule.attr, "_javabase") and ctx.rule.attr._javabase: - launcher = stringify_label(ctx.rule.attr._javabase.label) - java_info = { - "sources": sources_from_target(ctx), - "test_class": getattr(ctx.rule.attr, "test_class", None), - "test_size": getattr(ctx.rule.attr, "size", None), - "launcher": launcher, - "swigdeps": getattr(ctx.rule.attr, "swigdeps", True), - "jvm_flags": getattr(ctx.rule.attr, "jvm_flags", []), - "main_class": getattr(ctx.rule.attr, "main_class", None), - } - annotation_processing = target[JavaInfo].annotation_processing - if annotation_processing: - java_info["annotation_processor_class_names"] = annotation_processing.processor_classnames - java_info["annotation_processor_classpath"] = [ - artifact_location(t) - for t in annotation_processing.processor_classpath.to_list() - ] - info["java_info"] = struct_omit_none(**java_info) - - android_ide_info = _get_android_ide_info(target) - if android_ide_info: - write_output = True - android_info = struct_omit_none( - aar = artifact_location(android_ide_info.aar), - merged_manifest = artifact_location( - getattr(android_ide_info, "generated_manifest", None) or - getattr(android_ide_info, "merged_manifest", None), - ), - ) - info["android_info"] = android_info - - if write_output: - output_file = ctx.actions.declare_file(target.label.name + ".ide-fast-build-info.txt") - ctx.actions.write(output_file, proto.encode_text(struct_omit_none(**info))) - output_files.append(output_file) - - output_groups = depset(output_files, transitive = dep_outputs) - return [OutputGroupInfo(**{"ide-fast-build": output_groups})] - -def _get_all_dep_outputs(dep_targets): - """Get the ide-fast-build output files for all dependencies""" - return [ - dep_target[OutputGroupInfo]["ide-fast-build"] - for dep_target in dep_targets - if _has_ide_fast_build(dep_target) - ] - -def _get_all_dep_targets(target, ctx): - """Get all the targets 
mentioned in one of the _DEP_ATTRS attributes of the target""" - targets = [] - for attr_name in _DEP_ATTRS: - attr_val = getattr(ctx.rule.attr, attr_name, None) - if not attr_val: - continue - attr_type = type(attr_val) - if attr_type == type(target): - targets.append(attr_val) - elif attr_type == type([]): - targets += [list_val for list_val in attr_val if type(list_val) == type(target)] - return targets - -def _has_ide_fast_build(target): - return OutputGroupInfo in target and "ide-fast-build" in target[OutputGroupInfo] - -fast_build_info_aspect = aspect( - attr_aspects = _DEP_ATTRS, - implementation = _fast_build_info_impl, -) diff --git a/.ijwb/aspects/flag_hack.bzl b/.ijwb/aspects/flag_hack.bzl deleted file mode 100644 index 8d4494f63e..0000000000 --- a/.ijwb/aspects/flag_hack.bzl +++ /dev/null @@ -1,32 +0,0 @@ -##### Begin bazel-flag-hack -# The flag hack stuff below is a way to detect flags that bazel has been invoked with from the -# aspect. Once PY3-as-default is stable, it can be removed. When removing, also remove the -# define_flag_hack() call in BUILD and the "_flag_hack" attr on the aspect below. See -# "PY3-as-default" in: -# https://github.com/bazelbuild/bazel/blob/master/src/main/java/com/google/devtools/build/lib/rules/python/PythonConfiguration.java - -FlagHackInfo = provider(fields = ["incompatible_py2_outputs_are_suffixed"]) - -def _flag_hack_impl(ctx): - return [FlagHackInfo(incompatible_py2_outputs_are_suffixed = ctx.attr.incompatible_py2_outputs_are_suffixed)] - -_flag_hack_rule = rule( - attrs = {"incompatible_py2_outputs_are_suffixed": attr.bool()}, - implementation = _flag_hack_impl, -) - -def define_flag_hack(): - native.config_setting( - name = "incompatible_py2_outputs_are_suffixed_setting", - values = {"incompatible_py2_outputs_are_suffixed": "true"}, - ) - _flag_hack_rule( - name = "flag_hack", - incompatible_py2_outputs_are_suffixed = select({ - ":incompatible_py2_outputs_are_suffixed_setting": True, - "//conditions:default": False, - }), - visibility = ["//visibility:public"], - ) - -##### End bazel-flag-hack diff --git a/.ijwb/aspects/intellij_info_bundled.bzl b/.ijwb/aspects/intellij_info_bundled.bzl deleted file mode 100644 index 70ae96e453..0000000000 --- a/.ijwb/aspects/intellij_info_bundled.bzl +++ /dev/null @@ -1,100 +0,0 @@ -"""Bazel-specific intellij aspect.""" - -load( - ":intellij_info_impl_bundled.bzl", - "intellij_info_aspect_impl", - "make_intellij_info_aspect", - "is_valid_aspect_target", -) - -EXTRA_DEPS = [ - "embed", # From go rules (bazel only) - "_cc_toolchain", # From rules_cc (bazel only) - "_kt_toolchain", # From rules_kotlin (bazel only) -] - -TOOLCHAIN_TYPE_DEPS = [ - "@@bazel_tools//tools/cpp:toolchain_type", # For rules_cc -] - -def tool_label(tool_name): - """Returns a label that points to a tool target in the bundled aspect workspace.""" - return Label("tools/" + tool_name) - -def get_go_import_path(ctx): - """Returns the import path for a go target.""" - import_path = getattr(ctx.rule.attr, "importpath", None) - if import_path: - return import_path - prefix = None - if hasattr(ctx.rule.attr, "_go_prefix"): - prefix = ctx.rule.attr._go_prefix.go_prefix - if not prefix: - return None - import_path = prefix - if ctx.label.package: - import_path += "/" + ctx.label.package - if ctx.label.name != "go_default_library": - import_path += "/" + ctx.label.name - return import_path - -def is_go_proto_library(target, _ctx): - return hasattr(target[OutputGroupInfo], "go_generated_srcs") - -def get_go_proto_library_generated_srcs(target): - 
files = target[OutputGroupInfo].go_generated_srcs.to_list() - return [f for f in files if f.basename.endswith(".go")] - -def get_py_launcher(target, ctx): - """Returns the python launcher for a given rule.""" - - # Used by other implementations of get_launcher - _ = target # @unused - attr = ctx.rule.attr - if hasattr(attr, "_launcher") and attr._launcher != None: - return str(attr._launcher.label) - return None - -def _collect_targets_from_toolchains(ctx, toolchain_types): - """Returns a list of targets for the given toolchain types.""" - result = [] - - for toolchain_type in toolchain_types: - # toolchains attribute only available in Bazel 8+ - toolchains = getattr(ctx.rule, "toolchains", []) - - if toolchain_type in toolchains: - if is_valid_aspect_target(toolchains[toolchain_type]): - result.append(toolchains[toolchain_type]) - - return result - -semantics = struct( - tool_label = tool_label, - toolchains_propagation = struct( - toolchain_types = TOOLCHAIN_TYPE_DEPS, - collect_toolchain_deps = _collect_targets_from_toolchains, - ), - extra_deps = EXTRA_DEPS, - extra_required_aspect_providers = [], - go = struct( - get_import_path = get_go_import_path, - is_proto_library = is_go_proto_library, - get_proto_library_generated_srcs = get_go_proto_library_generated_srcs, - ), - py = struct( - get_launcher = get_py_launcher, - ), - flag_hack_label = ":flag_hack", -) - -def _aspect_impl(target, ctx): - return intellij_info_aspect_impl(target, ctx, semantics) - -# TEMPLATE-INCLUDE-BEGIN -intellij_info_aspect = make_intellij_info_aspect( - _aspect_impl, - semantics, -) -# TEMPLATE-INCLUDE-END - diff --git a/.ijwb/aspects/intellij_info_impl_bundled.bzl b/.ijwb/aspects/intellij_info_impl_bundled.bzl deleted file mode 100644 index ac09bb48ca..0000000000 --- a/.ijwb/aspects/intellij_info_impl_bundled.bzl +++ /dev/null @@ -1,1391 +0,0 @@ -"""Implementation of IntelliJ-specific information collecting aspect.""" - -load( - "@bazel_tools//tools/build_defs/cc:action_names.bzl", - "ACTION_NAMES", -) -load( - ":artifacts.bzl", - "artifact_location", - "artifacts_from_target_list_attr", - "is_external_artifact", - "sources_from_target", - "struct_omit_none", - "to_artifact_location", -) -load(":flag_hack.bzl", "FlagHackInfo") - -load(":java_info.bzl", "get_java_info", "java_info_in_target", "java_info_reference") - -load(":python_info.bzl", "get_py_info", "py_info_in_target") - -load(":code_generator_info.bzl", "CODE_GENERATOR_RULE_NAMES") - -load( - ":make_variables.bzl", - "expand_make_variables", -) - -IntelliJInfo = provider( - doc = "Collected information about the targets visited by the aspect.", - fields = [ - "export_deps", - "kind", - "output_groups", - "target_key", - ], -) - -# Defensive list of features that can appear in the C++ toolchain, but which we -# definitely don't want to enable (when enabled, they'd contribute command line -# flags that don't make sense in the context of intellij info). -UNSUPPORTED_FEATURES = [ - "thin_lto", - "module_maps", - "use_header_modules", - "fdo_instrument", - "fdo_optimize", -] - -# Compile-time dependency attributes, grouped by type. 
-DEPS = [ - "_stl", # From cc rules - "malloc", # From cc_binary rules - "implementation_deps", # From cc_library rules - "_java_toolchain", # From java rules - "deps", - "jars", # from java_import rules - "exports", - "java_lib", # From old proto_library rules - "_android_sdk", # from android rules - "aidl_lib", # from android_sdk - "_scala_toolchain", # From scala rules - "test_app", # android_instrumentation_test - "instruments", # android_instrumentation_test - "tests", # From test_suite - "compilers", # From go_proto_library - "associates", # From kotlin rules -] - -# Run-time dependency attributes, grouped by type. -RUNTIME_DEPS = [ - "runtime_deps", -] - -PREREQUISITE_DEPS = [] - -# Dependency type enum -COMPILE_TIME = 0 - -RUNTIME = 1 - -# PythonVersion enum; must match PyIdeInfo.PythonVersion -PY2 = 1 - -PY3 = 2 - -# PythonCompatVersion enum; must match PyIdeInfo.PythonSrcsVersion -SRC_PY2 = 1 - -SRC_PY3 = 2 - -SRC_PY2AND3 = 3 - -SRC_PY2ONLY = 4 - -SRC_PY3ONLY = 5 - -##### Helpers - -def run_jar(ctx, jar, **kwargs): - """Runs a jar using the current java runtime used to run this bazel instance. - - Finds the current java runtime and uses the java executable to run the provided jar. The jar - file should be a self contained _deploy jar. - """ - - host_java = ctx.attr._java_runtime[java_common.JavaRuntimeInfo] - - return ctx.actions.run_shell( - tools = depset([jar], transitive = [host_java.files]), - command = "%s -jar %s $@" % (host_java.java_executable_exec_path, jar.path), - **kwargs, - ) - -def get_code_generator_rule_names(ctx, language_name): - """Supplies a list of Rule names for code generation for the language specified - - For some languages, it is possible to specify Rules' names that are interpreted as - code-generators for the language. These Rules' names are specified as attrs and are provided to - the aspect using the `AspectStrategy#AspectParameter` in the plugin logic. - """ - - if not language_name: - fail("the `language_name` must be provided") - - if hasattr(CODE_GENERATOR_RULE_NAMES, language_name): - return getattr(CODE_GENERATOR_RULE_NAMES, language_name) - - return [] - -def get_registry_flag(ctx, name): - """Registry flags are passed to aspects using defines. See CppAspectArgsProvider.""" - - return ctx.var.get(name) == "true" - -def source_directory_tuple(resource_file): - """Creates a tuple of (exec_path, root_exec_path_fragment, is_source, is_external).""" - relative_path = str(android_common.resource_source_directory(resource_file)) - root_exec_path_fragment = resource_file.root.path if not resource_file.is_source else None - return ( - relative_path if resource_file.is_source else root_exec_path_fragment + "/" + relative_path, - root_exec_path_fragment, - resource_file.is_source, - is_external_artifact(resource_file.owner), - ) - -def get_res_artifacts(resources): - """Get a map from the res folder to the set of resource files within that folder. 
- - Args: - resources: all resources of a target - - Returns: - a map from the res folder to the set of resource files within that folder (as a tuple of path segments) - """ - res_artifacts = dict() - for resource in resources: - for file in resource.files.to_list(): - res_folder = source_directory_tuple(file) - res_artifacts.setdefault(res_folder, []).append(file) - return res_artifacts - -def build_file_artifact_location(ctx): - """Creates an ArtifactLocation proto representing a location of a given BUILD file.""" - return to_artifact_location( - ctx.label.package + "/BUILD", - ctx.label.package + "/BUILD", - True, - is_external_artifact(ctx.label), - ) - -# https://github.com/bazelbuild/bazel/issues/18966 -def _list_or_depset_to_list(list_or_depset): - if hasattr(list_or_depset, "to_list"): - return list_or_depset.to_list() - return list_or_depset - -def get_source_jars(output): - if hasattr(output, "source_jars"): - return _list_or_depset_to_list(output.source_jars) - if hasattr(output, "source_jar"): - return [output.source_jar] - return [] - -def library_artifact(java_output, rule_kind = None): - """Creates a LibraryArtifact representing a given java_output.""" - if java_output == None or java_output.class_jar == None: - return None - src_jars = get_source_jars(java_output) - - if rule_kind != None and rule_kind.startswith("scala"): - interface_jar = None - else: - interface_jar = artifact_location(java_output.ijar) - - return struct_omit_none( - interface_jar = interface_jar, - jar = artifact_location(java_output.class_jar), - source_jar = artifact_location(src_jars[0]) if src_jars else None, - source_jars = [artifact_location(f) for f in src_jars], - ) - -def annotation_processing_jars(generated_class_jar, generated_source_jar): - """Creates a LibraryArtifact representing Java annotation processing jars.""" - src_jar = generated_source_jar - return struct_omit_none( - jar = artifact_location(generated_class_jar), - source_jar = artifact_location(src_jar), - source_jars = [artifact_location(src_jar)] if src_jar else None, - ) - -def jars_from_output(output): - """Collect jars for intellij-resolve-files from Java output.""" - if output == None: - return [] - source_jars = get_source_jars(output) - return [ - jar - for jar in ([output.ijar if len(source_jars) > 0 and output.ijar else output.class_jar] + source_jars) - if jar != None and not jar.is_source - ] - -def _collect_target_from_attr(rule_attrs, attr_name, result): - """Collects the targets from the given attr into the result.""" - if not hasattr(rule_attrs, attr_name): - return - attr_value = getattr(rule_attrs, attr_name) - type_name = type(attr_value) - if type_name == "Target": - result.append(attr_value) - elif type_name == "list": - result.extend(attr_value) - -def collect_targets_from_attrs(rule_attrs, attrs): - """Returns a list of targets from the given attributes.""" - result = [] - for attr_name in attrs: - _collect_target_from_attr(rule_attrs, attr_name, result) - return [target for target in result if is_valid_aspect_target(target)] - -def targets_to_labels(targets): - """Returns a set of label strings for the given targets.""" - return depset([str(target.label) for target in targets]) - -def list_omit_none(value): - """Returns a list of the value, or the empty list if None.""" - return [value] if value else [] - -def is_valid_aspect_target(target): - """Returns whether the target has had the aspect run on it.""" - return IntelliJInfo in target - -def get_aspect_ids(ctx): - """Returns the all aspect ids, filtering 
out self.""" - aspect_ids = None - if hasattr(ctx, "aspect_ids"): - aspect_ids = ctx.aspect_ids - else: - return None - return [aspect_id for aspect_id in aspect_ids if "intellij_info_aspect" not in aspect_id] - -def _is_language_specific_proto_library(ctx, target, semantics): - """Returns True if the target is a proto library with attached language-specific aspect.""" - if ctx.rule.kind != "proto_library": - return False - if java_info_in_target(target): - return True - if CcInfo in target: - return True - if semantics.go.is_proto_library(target, ctx): - return True - return False - -def stringify_label(label): - """Stringifies a label, making sure any leading '@'s are stripped from main repo labels.""" - s = str(label) - - # If the label is in the main repo, make sure any leading '@'s are stripped so that tests are - # okay with the fixture setups. - return s.lstrip("@") if s.startswith("@@//") or s.startswith("@//") else s - -def make_target_key(label, aspect_ids): - """Returns a TargetKey proto struct from a target.""" - return struct_omit_none( - aspect_ids = tuple(aspect_ids) if aspect_ids else None, - label = stringify_label(label), - ) - -def make_dep(dep, dependency_type): - """Returns a Dependency proto struct.""" - return struct( - dependency_type = dependency_type, - target = dep[IntelliJInfo].target_key, - ) - -def make_deps(deps, dependency_type): - """Returns a list of Dependency proto structs.""" - return [make_dep(dep, dependency_type) for dep in deps] - -def make_dep_from_label(label, dependency_type): - """Returns a Dependency proto struct from a label.""" - return struct( - dependency_type = dependency_type, - target = struct(label = stringify_label(label)), - ) - -def update_sync_output_groups(groups_dict, key, new_set): - """Updates all sync-relevant output groups associated with 'key'. - - This is currently the [key] output group itself, together with [key]-outputs - and [key]-direct-deps. - - Args: - groups_dict: the output groups dict, from group name to artifact depset. - key: the base output group name. - new_set: a depset of artifacts to add to the output groups. 
- """ - update_set_in_dict(groups_dict, key, new_set) - update_set_in_dict(groups_dict, key + "-outputs", new_set) - update_set_in_dict(groups_dict, key + "-direct-deps", new_set) - -def update_set_in_dict(input_dict, key, other_set): - """Updates depset in dict, merging it with another depset.""" - input_dict[key] = depset(transitive = [input_dict.get(key, depset()), other_set]) - -def _get_output_mnemonic(ctx): - """Gives the output directory mnemonic for some target context.""" - return ctx.bin_dir.path.split("/")[1] - -def _get_python_version(ctx): - if ctx.attr._flag_hack[FlagHackInfo].incompatible_py2_outputs_are_suffixed: - if _get_output_mnemonic(ctx).find("-py2-") != -1: - return PY2 - return PY3 - else: - if _get_output_mnemonic(ctx).find("-py3-") != -1: - return PY3 - return PY2 - -_SRCS_VERSION_MAPPING = { - "PY2": SRC_PY2, - "PY3": SRC_PY3, - "PY2AND3": SRC_PY2AND3, - "PY2ONLY": SRC_PY2ONLY, - "PY3ONLY": SRC_PY3ONLY, -} - -def _get_python_srcs_version(ctx): - srcs_version = getattr(ctx.rule.attr, "srcs_version", "PY2AND3") - return _SRCS_VERSION_MAPPING.get(srcs_version, default = SRC_PY2AND3) - -def _do_starlark_string_expansion(ctx, name, strings, extra_targets = [], tokenization = True): - # first, expand all starlark predefined paths: - # location, locations, rootpath, rootpaths, execpath, execpaths - strings = [ctx.expand_location(value, targets = extra_targets) for value in strings] - - # then expand any regular GNU make style variables - return expand_make_variables(ctx, tokenization, strings) - -##### Builders for individual parts of the aspect output - -def collect_py_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): - """Updates Python-specific output groups, returns false if not a Python target.""" - if not py_info_in_target(target) or _is_language_specific_proto_library(ctx, target, semantics): - return False - - py_semantics = getattr(semantics, "py", None) - if py_semantics: - py_launcher = py_semantics.get_launcher(target, ctx) - else: - py_launcher = None - - sources = sources_from_target(ctx) - to_build = get_py_info(target).transitive_sources - args = getattr(ctx.rule.attr, "args", []) - data_deps = getattr(ctx.rule.attr, "data", []) - args = _do_starlark_string_expansion(ctx, "args", args, data_deps, tokenization = False) - imports = getattr(ctx.rule.attr, "imports", []) - is_code_generator = False - - # If there are apparently no sources found from `srcs` and the target has a rule name which is - # one of the ones pre-specified to the aspect as being a code-generator for Python then - # interpret the outputs of the target specified in the PyInfo as being sources. - - if 0 == len(sources) and ctx.rule.kind in get_code_generator_rule_names(ctx, "python"): - def provider_import_to_attr_import(provider_import): - """\ - Remaps the imports from PyInfo - - The imports that are supplied on the `PyInfo` are relative to the runfiles and so are - not the same as those which might be supplied on an attribute of `py_library`. This - function will remap those back so they look as if they were `imports` attributes on - the rule. The form of the runfiles import is `//`. - The actual `workspace_name` is not interesting such that the first part can be simply - stripped. Next the package to the Label is stripped leaving a path that would have been - supplied on an `imports` attribute to a Rule. - """ - - # Other code in this file appears to assume *NIX path component separators? 
- - provider_import_parts = [p for p in provider_import.split("/")] - package_parts = [p for p in ctx.label.package.split("/")] - - if 0 == len(provider_import_parts): - return None - - scratch_parts = provider_import_parts[1:] # remove the workspace name or _main - - for p in package_parts: - if 0 != len(provider_import_parts) and scratch_parts[0] == p: - scratch_parts = scratch_parts[1:] - else: - return None - - return "/".join(scratch_parts) - - def provider_imports_to_attr_imports(): - result = [] - - for provider_import in get_py_info(target).imports.to_list(): - attr_import = provider_import_to_attr_import(provider_import) - if attr_import: - result.append(attr_import) - - return result - - if get_py_info(target).imports: - imports.extend(provider_imports_to_attr_imports()) - - runfiles = target[DefaultInfo].default_runfiles - - if runfiles and runfiles.files: - sources.extend([artifact_location(f) for f in runfiles.files.to_list()]) - - is_code_generator = True - - ide_info["py_ide_info"] = struct_omit_none( - launcher = py_launcher, - python_version = _get_python_version(ctx), - sources = sources, - srcs_version = _get_python_srcs_version(ctx), - args = args, - imports = imports, - is_code_generator = is_code_generator, - ) - - update_sync_output_groups(output_groups, "intellij-info-py", depset([ide_info_file])) - update_sync_output_groups(output_groups, "intellij-compile-py", to_build) - update_sync_output_groups(output_groups, "intellij-resolve-py", to_build) - return True - -def _collect_generated_go_sources(target, ctx, semantics): - """Returns a depset of go source files generated by this target.""" - if semantics.go.is_proto_library(target, ctx): - return semantics.go.get_proto_library_generated_srcs(target) - else: - return None - -def collect_go_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): - """Updates Go-specific output groups, returns false if not a recognized Go target.""" - sources = [] - generated = [] - cgo = False - - # currently there's no Go Skylark API, with the only exception being proto_library targets - if ctx.rule.kind in [ - "go_binary", - "go_library", - "go_test", - "go_source", - "go_appengine_binary", - "go_appengine_library", - "go_appengine_test", - ]: - sources = [f for src in getattr(ctx.rule.attr, "srcs", []) for f in src.files.to_list()] - generated = [f for f in sources if not f.is_source] - cgo = getattr(ctx.rule.attr, "cgo", False) - elif ctx.rule.kind == "go_wrap_cc": - genfiles = target.files.to_list() - go_genfiles = [f for f in genfiles if f.basename.endswith(".go")] - if go_genfiles: - sources = go_genfiles - generated = go_genfiles - else: - # if the .go file isn't in 'files', build the .a and .x files instead - generated = genfiles - else: - generated_sources = _collect_generated_go_sources(target, ctx, semantics) - if not generated_sources: - return False - sources = generated_sources - generated = generated_sources - - import_path = None - go_semantics = getattr(semantics, "go", None) - if go_semantics: - import_path = go_semantics.get_import_path(ctx) - - library_labels = [] - if ctx.rule.kind in ("go_test", "go_library", "go_appengine_test"): - if getattr(ctx.rule.attr, "library", None) != None: - library_labels = [stringify_label(ctx.rule.attr.library.label)] - elif getattr(ctx.rule.attr, "embed", None) != None: - for library in ctx.rule.attr.embed: - if library[IntelliJInfo].kind == "go_source" or library[IntelliJInfo].kind == "go_proto_library": - l = 
library[IntelliJInfo].output_groups["intellij-sources-go-outputs"].to_list() - sources += l - generated += [f for f in l if not f.is_source] - else: - library_labels.append(stringify_label(library.label)) - - ide_info["go_ide_info"] = struct_omit_none( - import_path = import_path, - library_labels = library_labels, - sources = [artifact_location(f) for f in sources], - cgo = cgo, - ) - - compile_files = target[OutputGroupInfo].compilation_outputs if hasattr(target[OutputGroupInfo], "compilation_outputs") else depset([]) - compile_files = depset(generated, transitive = [compile_files]) - - update_sync_output_groups(output_groups, "intellij-info-go", depset([ide_info_file])) - update_sync_output_groups(output_groups, "intellij-compile-go", compile_files) - update_sync_output_groups(output_groups, "intellij-resolve-go", depset(generated)) - update_sync_output_groups(output_groups, "intellij-sources-go", depset(sources)) - return True - -def collect_cpp_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): - """Updates C++-specific output groups, returns false if not a C++ target.""" - - if CcInfo not in target: - return False - - # ignore cc_proto_library, attach to proto_library with aspect attached instead - if ctx.rule.kind == "cc_proto_library": - return False - - # Go targets always provide CcInfo. Usually it's empty, but even if it isn't we don't handle it - if ctx.rule.kind.startswith("go_"): - return False - - sources = artifacts_from_target_list_attr(ctx, "srcs") - headers = artifacts_from_target_list_attr(ctx, "hdrs") - textual_headers = artifacts_from_target_list_attr(ctx, "textual_hdrs") - - target_copts = [] - if hasattr(ctx.rule.attr, "copts"): - target_copts += ctx.rule.attr.copts - extra_targets = [] - if hasattr(ctx.rule.attr, "additional_compiler_inputs"): - extra_targets += ctx.rule.attr.additional_compiler_inputs - if hasattr(semantics, "cc") and hasattr(semantics.cc, "get_default_copts"): - target_copts += semantics.cc.get_default_copts(ctx) - - target_copts = _do_starlark_string_expansion(ctx, "copt", target_copts, extra_targets) - - compilation_context = target[CcInfo].compilation_context - - # Merge current compilation context with context of implementation dependencies. 
- if hasattr(ctx.rule.attr, "implementation_deps"): - implementation_deps = ctx.rule.attr.implementation_deps - compilation_context = cc_common.merge_compilation_contexts( - compilation_contexts = - [compilation_context] + [impl[CcInfo].compilation_context for impl in implementation_deps], - ) - - # external_includes available since bazel 7 - external_includes = getattr(compilation_context, "external_includes", depset()).to_list() - - c_info = struct_omit_none( - header = headers, - source = sources, - target_copt = target_copts, - textual_header = textual_headers, - transitive_define = compilation_context.defines.to_list(), - transitive_include_directory = compilation_context.includes.to_list(), - transitive_quote_include_directory = compilation_context.quote_includes.to_list(), - # both system and external includes are add using `-isystem` - transitive_system_include_directory = compilation_context.system_includes.to_list() + external_includes, - include_prefix = getattr(ctx.rule.attr, "include_prefix", None), - strip_include_prefix = getattr(ctx.rule.attr, "strip_include_prefix", None), - ) - ide_info["c_ide_info"] = c_info - resolve_files = compilation_context.headers - - # TODO(brendandouglas): target to cpp files only - compile_files = target[OutputGroupInfo].compilation_outputs if hasattr(target[OutputGroupInfo], "compilation_outputs") else depset([]) - - update_sync_output_groups(output_groups, "intellij-info-cpp", depset([ide_info_file])) - update_sync_output_groups(output_groups, "intellij-compile-cpp", compile_files) - update_sync_output_groups(output_groups, "intellij-resolve-cpp", resolve_files) - return True - -def collect_c_toolchain_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): - """Updates cc_toolchain-relevant output groups, returns false if not a cc_toolchain target.""" - - # The other toolchains like the JDK might also have ToolchainInfo but it's not a C++ toolchain, - # so check kind as well. - # TODO(jvoung): We are temporarily getting info from cc_toolchain_suite - # https://github.com/bazelbuild/bazel/commit/3aedb2f6de80630f88ffb6b60795c44e351a5810 - # but will switch back to cc_toolchain providing CcToolchainProvider once we migrate C++ rules - # to generic platforms and toolchains. 
- if ctx.rule.kind != "cc_toolchain" and ctx.rule.kind != "cc_toolchain_suite" and ctx.rule.kind != "cc_toolchain_alias": - return False - if cc_common.CcToolchainInfo not in target: - return False - - # cc toolchain to access compiler flags - cpp_toolchain = target[cc_common.CcToolchainInfo] - - # cpp fragment to access bazel options - cpp_fragment = ctx.fragments.cpp - - copts = cpp_fragment.copts - cxxopts = cpp_fragment.cxxopts - conlyopts = cpp_fragment.conlyopts - - feature_configuration = cc_common.configure_features( - ctx = ctx, - cc_toolchain = cpp_toolchain, - requested_features = ctx.features, - unsupported_features = ctx.disabled_features + UNSUPPORTED_FEATURES, - ) - c_variables = cc_common.create_compile_variables( - feature_configuration = feature_configuration, - cc_toolchain = cpp_toolchain, - user_compile_flags = copts + conlyopts, - ) - cpp_variables = cc_common.create_compile_variables( - feature_configuration = feature_configuration, - cc_toolchain = cpp_toolchain, - user_compile_flags = copts + cxxopts, - ) - c_options = cc_common.get_memory_inefficient_command_line( - feature_configuration = feature_configuration, - action_name = ACTION_NAMES.c_compile, - variables = c_variables, - ) - cpp_options = cc_common.get_memory_inefficient_command_line( - feature_configuration = feature_configuration, - action_name = ACTION_NAMES.cpp_compile, - variables = cpp_variables, - ) - - if (get_registry_flag(ctx, "_cpp_use_get_tool_for_action")): - c_compiler = cc_common.get_tool_for_action( - feature_configuration = feature_configuration, - action_name = ACTION_NAMES.c_compile, - ) - cpp_compiler = cc_common.get_tool_for_action( - feature_configuration = feature_configuration, - action_name = ACTION_NAMES.cpp_compile, - ) - else: - c_compiler = str(cpp_toolchain.compiler_executable) - cpp_compiler = str(cpp_toolchain.compiler_executable) - - c_toolchain_info = struct_omit_none( - built_in_include_directory = [str(d) for d in cpp_toolchain.built_in_include_directories], - c_option = c_options, - cpp_option = cpp_options, - c_compiler = c_compiler, - cpp_compiler = cpp_compiler, - target_name = cpp_toolchain.target_gnu_system_name, - ) - ide_info["c_toolchain_ide_info"] = c_toolchain_info - update_sync_output_groups(output_groups, "intellij-info-cpp", depset([ide_info_file])) - return True - -def get_java_provider(target): - """Find a provider exposing java compilation/outputs data.""" - - # Check for kt providers before JavaInfo. e.g. kt targets have - # JavaInfo, but their data lives in the "kt" provider and not JavaInfo. - # See https://github.com/bazelbuild/intellij/pull/1202 - if hasattr(target, "kt") and hasattr(target.kt, "outputs"): - return target.kt - java_info = get_java_info(target) - if java_info: - return java_info - if hasattr(java_common, "JavaPluginInfo") and java_common.JavaPluginInfo in target: - return target[java_common.JavaPluginInfo] - return None - -def _collect_generated_files(java): - """Collects generated files from a Java target""" - if hasattr(java, "java_outputs"): - return [ - (outputs.generated_class_jar, outputs.generated_source_jar) - for outputs in java.java_outputs - if outputs.generated_class_jar != None - ] - - # Handles Bazel versions before 5.0.0. 
- if (hasattr(java, "annotation_processing") and java.annotation_processing and java.annotation_processing.enabled): - return [(java.annotation_processing.class_jar, java.annotation_processing.source_jar)] - return [] - -def collect_java_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): - """Updates Java-specific output groups, returns false if not a Java target.""" - java = get_java_provider(target) - if not java: - return False - if hasattr(java, "java_outputs") and java.java_outputs: - java_outputs = java.java_outputs - elif hasattr(java, "outputs") and java.outputs: - java_outputs = java.outputs.jars - else: - return False - - java_semantics = semantics.java if hasattr(semantics, "java") else None - if java_semantics and java_semantics.skip_target(target, ctx): - return False - - ide_info_files = [] - sources = sources_from_target(ctx) - jars = [library_artifact(output, ctx.rule.kind) for output in java_outputs] - class_jars = [output.class_jar for output in java_outputs if output and output.class_jar] - output_jars = [jar for output in java_outputs for jar in jars_from_output(output)] - resolve_files = output_jars - compile_files = class_jars - - gen_jars = [] - for generated_class_jar, generated_source_jar in _collect_generated_files(java): - gen_jars.append(annotation_processing_jars(generated_class_jar, generated_source_jar)) - resolve_files += [ - jar - for jar in [ - generated_class_jar, - generated_source_jar, - ] - if jar != None and not jar.is_source - ] - compile_files += [ - jar - for jar in [generated_class_jar] - if jar != None and not jar.is_source - ] - - jdeps = None - jdeps_file = None - if java_semantics and hasattr(java_semantics, "get_filtered_jdeps"): - jdeps_file = java_semantics.get_filtered_jdeps(target) - if jdeps_file == None and hasattr(java, "outputs") and hasattr(java.outputs, "jdeps") and java.outputs.jdeps: - jdeps_file = java.outputs.jdeps - if jdeps_file: - jdeps = artifact_location(jdeps_file) - resolve_files.append(jdeps_file) - - java_sources, gen_java_sources, srcjars = divide_java_sources(ctx) - - if java_semantics: - srcjars = java_semantics.filter_source_jars(target, ctx, srcjars) - - package_manifest = None - if java_sources: - package_manifest = build_java_package_manifest(ctx, target, java_sources, ".java-manifest") - ide_info_files.append(package_manifest) - - filtered_gen_jar = None - if java_sources and (gen_java_sources or srcjars): - filtered_gen_jar, filtered_gen_resolve_files = _build_filtered_gen_jar( - ctx, - target, - java_outputs, - gen_java_sources, - srcjars, - ) - resolve_files += filtered_gen_resolve_files - - # Custom lint checks are incorporated as java plugins. We collect them here and register them with the IDE so that the IDE can also run the same checks. 
- plugin_processor_jar_files = [] - if hasattr(ctx.rule.attr, "_android_lint_plugins"): - plugin_processor_jar_files += [ - jar - for p in getattr(ctx.rule.attr, "_android_lint_plugins", []) - for jar in _android_lint_plugin_jars(p) - ] - - if hasattr(java, "annotation_processing") and java.annotation_processing and hasattr(java.annotation_processing, "processor_classpath"): - plugin_processor_jar_files += java.annotation_processing.processor_classpath.to_list() - resolve_files += plugin_processor_jar_files - plugin_processor_jars = [annotation_processing_jars(jar, None) for jar in depset(plugin_processor_jar_files).to_list()] - - java_info = struct_omit_none( - filtered_gen_jar = filtered_gen_jar, - generated_jars = gen_jars, - jars = jars, - jdeps = jdeps, - main_class = getattr(ctx.rule.attr, "main_class", None), - package_manifest = artifact_location(package_manifest), - sources = sources, - test_class = getattr(ctx.rule.attr, "test_class", None), - plugin_processor_jars = plugin_processor_jars, - ) - - ide_info["java_ide_info"] = java_info - ide_info_files.append(ide_info_file) - update_sync_output_groups(output_groups, "intellij-info-java", depset(ide_info_files)) - update_sync_output_groups(output_groups, "intellij-compile-java", depset(compile_files)) - update_sync_output_groups(output_groups, "intellij-resolve-java", depset(resolve_files)) - - # also add transitive hjars + src jars, to catch implicit deps - if hasattr(java, "transitive_compile_time_jars"): - update_set_in_dict(output_groups, "intellij-resolve-java-direct-deps", java.transitive_compile_time_jars) - update_set_in_dict(output_groups, "intellij-resolve-java-direct-deps", java.transitive_source_jars) - return True - -def _android_lint_plugin_jars(target): - java_info = get_java_info(target) - if java_info: - return java_info.transitive_runtime_jars.to_list() - else: - return [] - -def _package_manifest_file_argument(f): - artifact = artifact_location(f) - is_external = "1" if is_external_artifact(f.owner) else "0" - return artifact.root_execution_path_fragment + "," + artifact.relative_path + "," + is_external - -def build_java_package_manifest(ctx, target, source_files, suffix): - """Builds the java package manifest for the given source files.""" - output = ctx.actions.declare_file(target.label.name + suffix) - - args = ctx.actions.args() - args.add("--output_manifest") - args.add(output.path) - args.add_joined( - "--sources", - source_files, - join_with = ":", - map_each = _package_manifest_file_argument, - ) - - # Bazel has an option to put your command line args in a file, and then pass the name of that file as the only - # argument to your executable. The PackageParser supports taking args in this way, so we can pass in an args file - # as "@filename". - # Bazel Persistent Workers take their input as a file that contains the argument that will be parsed and turned - # into a WorkRequest proto and read on stdin. It also wants an argument of the form "@filename". We can use the - # params file as an arg file. - # Thus if we always use a params file, we can support both persistent worker mode and local (regular) mode.
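The comment block above explains the convention that the next lines of build_java_package_manifest implement: spill every argument into a params file and pass a single "@<path>" argument, so the same action can run either as a plain process or as a persistent worker. A hedged sketch of that convention in a self-contained rule follows; the _tool worker executable and its label are hypothetical stand-ins, not something this patch provides.

def _params_demo_impl(ctx):
    out = ctx.actions.declare_file(ctx.label.name + ".txt")
    args = ctx.actions.args()
    args.add("--output", out)
    args.add_all("--sources", ctx.files.srcs)

    # Always write a params file and pass "@<params-path>" as the sole argument,
    # so both regular and persistent-worker invocations see the same interface.
    args.use_param_file("@%s", use_always = True)
    args.set_param_file_format("multiline")

    ctx.actions.run(
        executable = ctx.executable._tool,  # hypothetical worker binary
        inputs = ctx.files.srcs,
        outputs = [out],
        arguments = [args],
        execution_requirements = {"supports-workers": "1"},
    )
    return [DefaultInfo(files = depset([out]))]

params_demo = rule(
    implementation = _params_demo_impl,
    attrs = {
        "srcs": attr.label_list(allow_files = True),
        "_tool": attr.label(
            default = "//tools:worker",  # hypothetical label
            executable = True,
            cfg = "exec",
        ),
    },
)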
- args.use_param_file("@%s", use_always = True) - args.set_param_file_format("multiline") - - run_jar( - ctx = ctx, - jar = ctx.file._package_parser, - inputs = source_files, - outputs = [output], - arguments = [args], - mnemonic = "JavaPackageManifest", - progress_message = "Parsing java package strings for " + str(target.label), - execution_requirements = { - "supports-workers": "1", - "requires-worker-protocol": "proto", - }, - ) - return output - -def _build_filtered_gen_jar(ctx, target, java_outputs, gen_java_sources, srcjars): - """Filters the passed jar to contain only classes from the given manifest.""" - jar_artifacts = [] - source_jar_artifacts = [] - for jar in java_outputs: - if jar.ijar: - jar_artifacts.append(jar.ijar) - elif jar.class_jar: - jar_artifacts.append(jar.class_jar) - if hasattr(jar, "source_jars") and jar.source_jars: - source_jar_artifacts.extend(_list_or_depset_to_list(jar.source_jars)) - elif hasattr(jar, "source_jar") and jar.source_jar: - source_jar_artifacts.append(jar.source_jar) - - if len(source_jar_artifacts) == 0 or len(jar_artifacts) == 0: - jar_artifacts.extend([jar.class_jar for jar in java_outputs if jar.class_jar]) - - filtered_jar = ctx.actions.declare_file(target.label.name + "-filtered-gen.jar") - filtered_source_jar = ctx.actions.declare_file(target.label.name + "-filtered-gen-src.jar") - args = [] - for jar in jar_artifacts: - args += ["--filter_jar", jar.path] - for jar in source_jar_artifacts: - args += ["--filter_source_jar", jar.path] - args += ["--filtered_jar", filtered_jar.path] - args += ["--filtered_source_jar", filtered_source_jar.path] - if gen_java_sources: - for java_file in gen_java_sources: - args += ["--keep_java_file", java_file.path] - if srcjars: - for source_jar in srcjars: - args += ["--keep_source_jar", source_jar.path] - run_jar( - ctx = ctx, - jar = ctx.file._jar_filter, - inputs = jar_artifacts + source_jar_artifacts + gen_java_sources + srcjars, - outputs = [filtered_jar, filtered_source_jar], - arguments = args, - mnemonic = "JarFilter", - progress_message = "Filtering generated code for " + str(target.label), - ) - output_jar = struct( - jar = artifact_location(filtered_jar), - source_jar = artifact_location(filtered_source_jar), - ) - intellij_resolve_files = [filtered_jar, filtered_source_jar] - return output_jar, intellij_resolve_files - -def divide_java_sources(ctx): - """Divide sources into plain java, generated java, and srcjars.""" - - java_sources = [] - gen_java_sources = [] - srcjars = [] - if hasattr(ctx.rule.attr, "srcs"): - srcs = ctx.rule.attr.srcs - for src in srcs: - for f in src.files.to_list(): - if f.basename.endswith(".java"): - if f.is_source: - java_sources.append(f) - else: - gen_java_sources.append(f) - elif f.basename.endswith(".srcjar"): - srcjars.append(f) - - return java_sources, gen_java_sources, srcjars - -def collect_android_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): - """Updates Android-specific output groups, returns true if any Android-specific info was collected.""" - handled = False - handled = _collect_android_ide_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled - handled = _collect_android_instrumentation_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled - handled = _collect_aar_import_info(ctx, ide_info, ide_info_file, output_groups) or handled - handled = _collect_android_sdk_info(ctx, ide_info, ide_info_file, output_groups) or handled - - if handled: - # do this once to avoid adding
unnecessary nesting to the depset - # (https://docs.bazel.build/versions/master/skylark/performance.html#reduce-the-number-of-calls-to-depset) - update_sync_output_groups(output_groups, "intellij-info-android", depset([ide_info_file])) - return handled - -def _collect_android_ide_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): - """Populates ide_info proto and intellij_resolve_android output group - - Updates ide_info proto with android_ide_info, and intellij_resolve_android with android - resolve files. It returns false on targets other than android_library and android_binary, as this preserves - consistent functionality with the previous condition of the presence of the .android legacy - provider. - """ - if ctx.rule.kind not in ["android_library", "android_binary", "kt_android_library"]: - return False - - android_semantics = semantics.android if hasattr(semantics, "android") else None - extra_ide_info = android_semantics.extra_ide_info(target, ctx) if android_semantics else {} - - if hasattr(android_common, "AndroidIdeInfo"): - android = target[android_common.AndroidIdeInfo] - else: - # Backwards compatibility: supports android struct provider - legacy_android = getattr(target, "android") - - # Transform into AndroidIdeInfo form - android = struct( - java_package = legacy_android.java_package, - manifest = legacy_android.manifest, - idl_source_jar = getattr(legacy_android.idl.output, "source_jar", None), - idl_class_jar = getattr(legacy_android.idl.output, "class_jar", None), - defines_android_resources = legacy_android.defines_resources, - idl_import_root = getattr(legacy_android.idl, "import_root", None), - resource_jar = legacy_android.resource_jar, - signed_apk = legacy_android.apk, - apks_under_test = legacy_android.apks_under_test, - ) - - output_jar = struct( - class_jar = android.idl_class_jar, - ijar = None, - source_jar = android.idl_source_jar, - ) if android.idl_class_jar else None - - resources = [] - res_folders = [] - resolve_files = jars_from_output(output_jar) - if hasattr(ctx.rule.attr, "resource_files"): - for artifact_path_fragments, res_files in get_res_artifacts(ctx.rule.attr.resource_files).items(): - # Generate unique ArtifactLocation for resource directories. - root = to_artifact_location(*artifact_path_fragments) - resources.append(root) - - # Generate aar - aar_file_name = target.label.name.replace("/", "-") - aar_file_name += "-" + str(hash(root.root_execution_path_fragment + root.relative_path + aar_file_name)) - - aar = ctx.actions.declare_file(aar_file_name + ".aar") - args = ctx.actions.args() - - # using a param file to get around the argument length limitation - # the name of the param file (%s) is automatically filled in by blaze - args.use_param_file("@%s") - args.set_param_file_format("multiline") - - args.add("--aar", aar) - args.add("--manifest_file", android.manifest) - args.add_joined("--resources", res_files, join_with = ",") - args.add("--resource_root", root.relative_path if root.is_source else root.root_execution_path_fragment + "/" + root.relative_path) - - run_jar( - ctx = ctx, - jar = ctx.file._create_aar, - outputs = [aar], - inputs = [android.manifest] + res_files, - arguments = [args], - mnemonic = "CreateAar", - progress_message = "Generating " + aar_file_name + ".aar for target " + str(target.label), - ) - resolve_files.append(aar) - - # Generate unique ResFolderLocation for resource files.
- res_folders.append(struct_omit_none(aar = artifact_location(aar), root = root)) - - instruments = None - if hasattr(ctx.rule.attr, "instruments") and ctx.rule.attr.instruments: - instruments = stringify_label(ctx.rule.attr.instruments.label) - - render_resolve_jar = None - if android_semantics and hasattr(android_semantics, "build_render_resolve_jar"): - render_resolve_jar = android_semantics.build_render_resolve_jar(target, ctx) - - if render_resolve_jar: - update_sync_output_groups(output_groups, "intellij-render-resolve-android", depset([render_resolve_jar])) - - android_info = struct_omit_none( - java_package = android.java_package, - idl_import_root = getattr(android, "idl_import_root", None), - manifest = artifact_location(android.manifest), - manifest_values = [struct_omit_none(key = key, value = value) for key, value in ctx.rule.attr.manifest_values.items()] if hasattr(ctx.rule.attr, "manifest_values") else None, - apk = artifact_location(android.signed_apk), - dependency_apk = [artifact_location(apk) for apk in android.apks_under_test], - has_idl_sources = android.idl_class_jar != None, - idl_jar = library_artifact(output_jar), - generate_resource_class = android.defines_android_resources, - resources = resources, - res_folders = res_folders, - resource_jar = library_artifact(android.resource_jar), - instruments = instruments, - render_resolve_jar = artifact_location(render_resolve_jar) if render_resolve_jar else None, - **extra_ide_info - ) - - if android.manifest and not android.manifest.is_source: - resolve_files.append(android.manifest) - - # b/176209293: expose resource jar to make sure empty libraries - # know they are remote output artifacts - if android.resource_jar: - resolve_files += [jar for jar in jars_from_output(android.resource_jar)] - - ide_info["android_ide_info"] = android_info - update_sync_output_groups(output_groups, "intellij-resolve-android", depset(resolve_files)) - return True - -def _collect_android_instrumentation_info(target, ctx, semantics, ide_info, ide_info_file, output_groups): - """Updates ide_info proto with android_instrumentation_info, returns false if not an android_instrumentation_test target.""" - if not ctx.rule.kind == "android_instrumentation_test": - return False - - android_instrumentation_info = struct_omit_none( - test_app = stringify_label(ctx.rule.attr.test_app.label), - target_device = str(ctx.rule.attr.target_device.label), - ) - ide_info["android_instrumentation_info"] = android_instrumentation_info - return True - -def _collect_android_sdk_info(ctx, ide_info, ide_info_file, output_groups): - """Updates android_sdk-relevant groups, returns false if not an android_sdk target.""" - if ctx.rule.kind != "android_sdk": - return False - android_jar_file = ctx.rule.attr.android_jar.files.to_list()[0] - ide_info["android_sdk_ide_info"] = struct( - android_jar = artifact_location(android_jar_file), - ) - return True - -def _collect_aar_import_info(ctx, ide_info, ide_info_file, output_groups): - """Updates ide_info proto with aar_import-relevant groups, returns false if not an aar_import target.""" - if ctx.rule.kind != "aar_import": - return False - if not hasattr(ctx.rule.attr, "aar"): - return False - aar_file = ctx.rule.attr.aar.files.to_list()[0] - ide_info["android_aar_ide_info"] = struct_omit_none( - aar = artifact_location(aar_file), - java_package = getattr(ctx.rule.attr, "package", None), - ) - update_sync_output_groups(output_groups, "intellij-resolve-android", depset([aar_file])) - return True - -def build_test_info(ctx): -
"""Build TestInfo.""" - if not is_test_rule(ctx): - return None - return struct_omit_none( - size = ctx.rule.attr.size, - ) - -def is_test_rule(ctx): - kind_string = ctx.rule.kind - return kind_string.endswith("_test") - -def collect_java_toolchain_info(target, ide_info, ide_info_file, output_groups): - """Updates java_toolchain-relevant output groups, returns false if not a java_toolchain target.""" - if hasattr(target, "java_toolchain"): - toolchain = target.java_toolchain - elif java_common.JavaToolchainInfo != platform_common.ToolchainInfo and \ - java_common.JavaToolchainInfo in target: - toolchain = target[java_common.JavaToolchainInfo] - else: - return False - javac_jars = [] - if hasattr(toolchain, "tools"): - javac_jars = [ - artifact_location(f) - for f in toolchain.tools.to_list() - if f.basename.endswith(".jar") - ] - ide_info["java_toolchain_ide_info"] = struct_omit_none( - javac_jars = javac_jars, - source_version = toolchain.source_version, - target_version = toolchain.target_version, - ) - update_sync_output_groups(output_groups, "intellij-info-java", depset([ide_info_file])) - return True - -def artifact_to_path(artifact): - return artifact.root_execution_path_fragment + "/" + artifact.relative_path - -def collect_kotlin_toolchain_info(target, ctx, ide_info, ide_info_file, output_groups): - """Updates kotlin_toolchain-relevant output groups, returns false if not a kotlin_toolchain target.""" - if ctx.rule.kind == "_kt_toolchain" and platform_common.ToolchainInfo in target: - kt = target[platform_common.ToolchainInfo] - elif hasattr(target, "kt") and hasattr(target.kt, "language_version"): - kt = target.kt # Legacy struct provider mechanism - else: - return False - - if not hasattr(kt, "language_version"): - return False - ide_info["kt_toolchain_ide_info"] = struct( - language_version = kt.language_version, - ) - update_sync_output_groups(output_groups, "intellij-info-kt", depset([ide_info_file])) - return True - -def _is_proto_library_wrapper(target, ctx): - """Returns True if the target is an empty shim around a proto library.""" - if not ctx.rule.kind.endswith("proto_library") or ctx.rule.kind == "proto_library": - return False - - # treat any *proto_library rule with a single proto_library dep as a shim - deps = collect_targets_from_attrs(ctx.rule.attr, ["deps"]) - return len(deps) == 1 and IntelliJInfo in deps[0] and deps[0][IntelliJInfo].kind == "proto_library" - -def _get_forwarded_deps(target, ctx): - """Returns the list of deps of this target to forward. - - Used to handle wrapper/shim targets which are really just pointers to a - different target (for example, java_proto_library) - """ - if _is_proto_library_wrapper(target, ctx): - return collect_targets_from_attrs(ctx.rule.attr, ["deps"]) - return [] - -def _is_analysis_test(target): - """Returns if the target is an analysis test. - - Rules created with analysis_test=True cannot create write actions, so the - aspect should skip them. 
- """ - return AnalysisTestResultInfo in target - -##### Main aspect function - -def intellij_info_aspect_impl(target, ctx, semantics): - """Aspect implementation function.""" - - tags = ctx.rule.attr.tags - if "no-ide" in tags: - return [] - - if _is_analysis_test(target): - return [] - - rule_attrs = ctx.rule.attr - - # Collect direct dependencies - direct_dep_targets = collect_targets_from_attrs( - rule_attrs, - semantics_extra_deps(DEPS, semantics, "extra_deps"), - ) - - # Collect direct toolchain type-based dependencies - if hasattr(semantics, "toolchains_propagation"): - direct_dep_targets.extend( - semantics.toolchains_propagation.collect_toolchain_deps( - ctx, - semantics.toolchains_propagation.toolchain_types, - ), - ) - - direct_deps = make_deps(direct_dep_targets, COMPILE_TIME) - - # Add exports from direct dependencies - exported_deps_from_deps = [] - for dep in direct_dep_targets: - exported_deps_from_deps = exported_deps_from_deps + dep[IntelliJInfo].export_deps - - # Combine into all compile time deps - compiletime_deps = direct_deps + exported_deps_from_deps - - # Propagate my own exports - export_deps = [] - direct_exports = [] - if java_info_in_target(target): - direct_exports = collect_targets_from_attrs(rule_attrs, ["exports"]) - export_deps.extend(make_deps(direct_exports, COMPILE_TIME)) - - # Collect transitive exports - for export in direct_exports: - export_deps.extend(export[IntelliJInfo].export_deps) - - if ctx.rule.kind == "android_library" or ctx.rule.kind == "kt_android_library": - # Empty android libraries export all their dependencies. - if not hasattr(rule_attrs, "srcs") or not ctx.rule.attr.srcs: - export_deps.extend(compiletime_deps) - - # Deduplicate the entries - export_deps = depset(export_deps).to_list() - - # runtime_deps - runtime_dep_targets = collect_targets_from_attrs( - rule_attrs, - RUNTIME_DEPS, - ) - runtime_deps = make_deps(runtime_dep_targets, RUNTIME) - all_deps = depset(compiletime_deps + runtime_deps).to_list() - - # extra prerequisites - extra_prerequisite_targets = collect_targets_from_attrs( - rule_attrs, - semantics_extra_deps(PREREQUISITE_DEPS, semantics, "extra_prerequisites"), - ) - - forwarded_deps = _get_forwarded_deps(target, ctx) + direct_exports - - # Roll up output files from my prerequisites - prerequisites = direct_dep_targets + runtime_dep_targets + extra_prerequisite_targets + direct_exports - output_groups = dict() - for dep in prerequisites: - for k, v in dep[IntelliJInfo].output_groups.items(): - if dep in forwarded_deps: - # unconditionally roll up deps for these targets - output_groups[k] = output_groups[k] + [v] if k in output_groups else [v] - continue - - # roll up outputs of direct deps into '-direct-deps' output group - if k.endswith("-direct-deps"): - continue - if k.endswith("-outputs"): - directs = k[:-len("outputs")] + "direct-deps" - output_groups[directs] = output_groups[directs] + [v] if directs in output_groups else [v] - continue - - # everything else gets rolled up transitively - output_groups[k] = output_groups[k] + [v] if k in output_groups else [v] - - # Convert output_groups from lists to depsets after the lists are finalized. This avoids - # creating and growing depsets gradually, as that results in depsets many levels deep: - # a construct which would give the build system some trouble. 
- for k, v in output_groups.items(): - output_groups[k] = depset(transitive = output_groups[k]) - - # Initialize the ide info dict, and corresponding output file - # This will be passed to each language-specific handler to fill in as required - file_name = target.label.name - - # bazel allows target names differing only by case, so append a hash to support - # case-insensitive file systems - file_name = file_name + "-" + str(hash(file_name)) - aspect_ids = get_aspect_ids(ctx) - if aspect_ids: - aspect_hash = hash(".".join(aspect_ids)) - file_name = file_name + "-" + str(aspect_hash) - file_name = file_name + ".intellij-info.txt" - ide_info_file = ctx.actions.declare_file(file_name) - - target_key = make_target_key(target.label, aspect_ids) - ide_info = dict( - build_file_artifact_location = build_file_artifact_location(ctx), - features = ctx.features, - key = target_key, - kind_string = ctx.rule.kind, - tags = tags, - deps = list(all_deps), - ) - - # Collect test info - ide_info["test_info"] = build_test_info(ctx) - - handled = False - handled = collect_py_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled - handled = collect_cpp_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled - handled = collect_c_toolchain_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled - handled = collect_go_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled - handled = collect_java_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled - handled = collect_java_toolchain_info(target, ide_info, ide_info_file, output_groups) or handled - handled = collect_android_info(target, ctx, semantics, ide_info, ide_info_file, output_groups) or handled - handled = collect_kotlin_toolchain_info(target, ctx, ide_info, ide_info_file, output_groups) or handled - - # Any extra ide info - if hasattr(semantics, "extra_ide_info"): - handled = semantics.extra_ide_info(target, ctx, ide_info, ide_info_file, output_groups) or handled - - # Add to generic output group if it's not handled by a language-specific handler - if not handled: - update_sync_output_groups(output_groups, "intellij-info-generic", depset([ide_info_file])) - - # Output the ide information file. - info = struct_omit_none(**ide_info) - ctx.actions.write(ide_info_file, proto.encode_text(info)) - - # Return providers. 
- return [ - IntelliJInfo( - export_deps = export_deps, - kind = ctx.rule.kind, - output_groups = output_groups, - target_key = target_key, - ), - OutputGroupInfo(**output_groups), - ] - -def semantics_extra_deps(base, semantics, name): - if not hasattr(semantics, name): - return base - extra_deps = getattr(semantics, name) - return base + extra_deps - -def make_intellij_info_aspect(aspect_impl, semantics, **kwargs): - """Creates the aspect given the semantics.""" - tool_label = semantics.tool_label - flag_hack_label = semantics.flag_hack_label - deps = semantics_extra_deps(DEPS, semantics, "extra_deps") - runtime_deps = RUNTIME_DEPS - prerequisite_deps = semantics_extra_deps(PREREQUISITE_DEPS, semantics, "extra_prerequisites") - - attr_aspects = deps + runtime_deps + prerequisite_deps - - attrs = { - "_package_parser": attr.label( - default = tool_label("PackageParser_deploy.jar"), - allow_single_file = True, - ), - "_jar_filter": attr.label( - default = tool_label("JarFilter_deploy.jar"), - allow_single_file = True, - ), - "_flag_hack": attr.label( - default = flag_hack_label, - ), - "_create_aar": attr.label( - default = tool_label("CreateAar_deploy.jar"), - allow_single_file = True, - ), - "_java_runtime": attr.label( - default = "@bazel_tools//tools/jdk:current_java_runtime", - cfg = "exec", - ), - } - - # add attrs required by semantics - if hasattr(semantics, "attrs"): - attrs.update(semantics.attrs) - - return aspect( - attr_aspects = attr_aspects, - attrs = attrs, - fragments = ["cpp"], - required_aspect_providers = [java_info_reference(), [CcInfo]] + semantics.extra_required_aspect_providers, - implementation = aspect_impl, - **kwargs - ) diff --git a/.ijwb/aspects/java_classpath.bzl b/.ijwb/aspects/java_classpath.bzl deleted file mode 100644 index 7ad254f751..0000000000 --- a/.ijwb/aspects/java_classpath.bzl +++ /dev/null @@ -1,37 +0,0 @@ -"""An aspect which extracts the runtime classpath from a java target.""" - -load(":java_info.bzl", "get_java_info", "java_info_in_target") - -def _runtime_classpath_impl(target, ctx): - """The top level aspect implementation function. - - Args: - target: Essentially a struct representing a BUILD target. - - ctx: The context object that can be used to access attributes and generate - outputs and actions. - - Returns: - A struct with only the output_groups provider. 
- """ - ctx = ctx # unused argument - return struct(output_groups = { - "runtime_classpath": _get_runtime_jars(target), - }) - -def _get_runtime_jars(target): - java_info = get_java_info(target) - if not java_info: - return depset() - if java_info.compilation_info: - return java_info.compilation_info.runtime_classpath - - # JavaInfo constructor doesn't fill in compilation info, so just return the - # full transitive set of runtime jars - # https://github.com/bazelbuild/bazel/issues/10170 - return java_info.transitive_runtime_jars - -def _aspect_def(impl): - return aspect(implementation = impl) - -java_classpath_aspect = _aspect_def(_runtime_classpath_impl) diff --git a/.ijwb/aspects/java_info.bzl b/.ijwb/aspects/java_info.bzl deleted file mode 100644 index 9ee0d32ea5..0000000000 --- a/.ijwb/aspects/java_info.bzl +++ /dev/null @@ -1,23 +0,0 @@ -# TEMPLATE-INCLUDE-BEGIN -# TEMPLATE-INCLUDE-END - -def java_info_in_target(target): - -# TEMPLATE-INCLUDE-BEGIN - return JavaInfo in target -# TEMPLATE-INCLUDE-END - -def get_java_info(target): - -# TEMPLATE-INCLUDE-BEGIN - if JavaInfo in target: - return target[JavaInfo] - else: - return None -# TEMPLATE-INCLUDE-END - -def java_info_reference(): - -# TEMPLATE-INCLUDE-BEGIN - return [JavaInfo] -# TEMPLATE-INCLUDE-END diff --git a/.ijwb/aspects/make_variables.bzl b/.ijwb/aspects/make_variables.bzl deleted file mode 100644 index 0ffd697921..0000000000 --- a/.ijwb/aspects/make_variables.bzl +++ /dev/null @@ -1,216 +0,0 @@ -# Copyright 2020 The Bazel Authors. All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Copied from: https://github.com/bazelbuild/bazel/blob/6f7faa659e5eb3e56c8a6274ebcb86884703d603/src/main/starlark/builtins_bzl/common/cc/cc_helper.bzl - -"""Utility functions to expand make variables. Implementation taken from cc_helper. """ - -def expand_make_variables(ctx, tokenization, unexpanded_tokens, additional_make_variable_substitutions = {}): - tokens = [] - targets = [] - for additional_compiler_input in getattr(ctx.attr, "additional_compiler_inputs", []): - targets.append(additional_compiler_input) - for token in unexpanded_tokens: - if tokenization: - expanded_token = _expand(ctx, token, additional_make_variable_substitutions, targets = targets) - _tokenize(tokens, expanded_token) - else: - exp = _expand_single_make_variable(ctx, token, additional_make_variable_substitutions) - if exp != None: - _tokenize(tokens, exp) - else: - tokens.append(_expand(ctx, token, additional_make_variable_substitutions, targets = targets)) - return tokens - -# Tries to expand a single make variable from token. -# If token has additional characters other than ones -# corresponding to make variable returns None. 
-def _expand_single_make_variable(ctx, token, additional_make_variable_substitutions = {}): - if len(token) < 3: - return None - if token[0] != "$" or token[1] != "(" or token[len(token) - 1] != ")": - return None - unexpanded_var = token[2:len(token) - 1] - expanded_var = _expand_nested_variable(ctx, additional_make_variable_substitutions, unexpanded_var) - return expanded_var - - -def _expand_nested_variable(ctx, additional_vars, exp, execpath = True, targets = []): - # If the make variable is a predefined path variable (like $(location ...)) - # we will expand it first. - if exp.find(" ") != -1: - if not execpath: - if exp.startswith("location"): - exp = exp.replace("location", "rootpath", 1) - data_targets = [] - if ctx.attr.data != None: - data_targets = ctx.attr.data - - # Make sure we do not duplicate targets. - unified_targets_set = {} - for data_target in data_targets: - unified_targets_set[data_target] = True - for target in targets: - unified_targets_set[target] = True - return ctx.expand_location("$({})".format(exp), targets = unified_targets_set.keys()) - - # Recursively expand nested make variables, but since there is no recursion - # in Starlark, we will do it via a for loop. - unbounded_recursion = True - - # The only way to check whether unbounded recursion is happening - # is to look at the depth of the recursion. - # 10 seems to be a reasonable number, since it is highly unexpected - # to have nested make variables which expand more than 10 times. - for _ in range(10): - exp = _lookup_var(ctx, additional_vars, exp) - if len(exp) >= 3 and exp[0] == "$" and exp[1] == "(" and exp[len(exp) - 1] == ")": - # Try to expand once more. - exp = exp[2:len(exp) - 1] - continue - unbounded_recursion = False - break - - if unbounded_recursion: - fail("potentially unbounded recursion during expansion of {}".format(exp)) - return exp - -def _lookup_var(ctx, additional_vars, var): - expanded_make_var = additional_vars.get(var) - if expanded_make_var != None: - return expanded_make_var - - expanded_make_var = ctx.var.get(var) - if expanded_make_var != None: - return expanded_make_var - - # ctx.rule.var is only available in Bazel 9+ - expanded_make_var = getattr(ctx.rule, "var", {}).get(var) - if expanded_make_var != None: - return expanded_make_var - - fail("{}: {} not defined".format(ctx.label, "$(" + var + ")")) - -def _expand(ctx, expression, additional_make_variable_substitutions, execpath = True, targets = []): - idx = 0 - last_make_var_end = 0 - result = [] - n = len(expression) - for _ in range(n): - if idx >= n: - break - if expression[idx] != "$": - idx += 1 - continue - - idx += 1 - - # We've met a $$ pattern, so the $ is escaped. - if idx < n and expression[idx] == "$": - idx += 1 - result.append(expression[last_make_var_end:idx - 1]) - last_make_var_end = idx - # We might have found a potential start of a Make Variable. - - elif idx < n and expression[idx] == "(": - # Try to find the closing parenthesis. - make_var_start = idx - make_var_end = make_var_start - for j in range(idx + 1, n): - if expression[j] == ")": - make_var_end = j - break - - # Note we cannot go out of the string's bounds here, - # because of this check. - # If start of the variable is different from the end, - # we found a make variable.
- if make_var_start != make_var_end: - # Some clarifications: - # *****$(MAKE_VAR_1)*******$(MAKE_VAR_2)***** - # ^ ^ ^ - # | | | - # last_make_var_end make_var_start make_var_end - result.append(expression[last_make_var_end:make_var_start - 1]) - make_var = expression[make_var_start + 1:make_var_end] - exp = _expand_nested_variable(ctx, additional_make_variable_substitutions, make_var, execpath, targets) - result.append(exp) - - # Update indexes. - idx = make_var_end + 1 - last_make_var_end = idx - - # Add the last substring which would be skipped by the for loop. - if last_make_var_end < n: - result.append(expression[last_make_var_end:n]) - - return "".join(result) - -def _tokenize(options, options_string): - token = [] - force_token = False - quotation = "\0" - length = len(options_string) - - # Since it is impossible to modify the loop variable inside a loop - # in Starlark, and there is also no while loop, I have to - # use this ugly hack. - i = -1 - for _ in range(length): - i += 1 - if i >= length: - break - c = options_string[i] - if quotation != "\0": - # In quotation. - if c == quotation: - # End quotation. - quotation = "\0" - elif c == "\\" and quotation == "\"": - i += 1 - if i == length: - fail("backslash at the end of the string: {}".format(options_string)) - c = options_string[i] - if c != "\\" and c != "\"": - token.append("\\") - token.append(c) - else: - # Regular char, in quotation. - token.append(c) - else: - # Not in quotation. - if c == "'" or c == "\"": - # Begin single or double quotation. - quotation = c - force_token = True - elif c == " " or c == "\t": - # Space not quoted. - if force_token or len(token) > 0: - options.append("".join(token)) - token = [] - force_token = False - elif c == "\\": - # Backslash not quoted. - i += 1 - if i == length: - fail("backslash at the end of the string: {}".format(options_string)) - token.append(options_string[i]) - else: - # Regular char, not quoted.
- token.append(c) - if quotation != "\0": - fail("unterminated quotation at the end of the string: {}".format(options_string)) - - if force_token or len(token) > 0: - options.append("".join(token)) diff --git a/.ijwb/aspects/python_info.bzl b/.ijwb/aspects/python_info.bzl deleted file mode 100644 index bcf6c76d63..0000000000 --- a/.ijwb/aspects/python_info.bzl +++ /dev/null @@ -1,24 +0,0 @@ -# TEMPLATE-INCLUDE-BEGIN -load("@rules_python//python:defs.bzl", RulesPyInfo = "PyInfo") -# TEMPLATE-INCLUDE-END - -def py_info_in_target(target): - -# TEMPLATE-INCLUDE-BEGIN - if RulesPyInfo in target: - return True - if PyInfo in target: - return True - return False -# TEMPLATE-INCLUDE-END - -def get_py_info(target): - -# TEMPLATE-INCLUDE-BEGIN - if RulesPyInfo in target: - return target[RulesPyInfo] - if PyInfo in target: - return target[PyInfo] - return None -# TEMPLATE-INCLUDE-END - diff --git a/.ijwb/aspects/xcode_query.bzl b/.ijwb/aspects/xcode_query.bzl deleted file mode 100644 index 60193b82d2..0000000000 --- a/.ijwb/aspects/xcode_query.bzl +++ /dev/null @@ -1,22 +0,0 @@ -provider_attrs = ["xcode_version", "default_macos_sdk_version"] - -def all_items_are_true(items): - for item in items: - if item == False: - return False - - return True - -def hasattrs(obj, attrs): - return all_items_are_true([hasattr(obj, attr) for attr in attrs]) - -def format(target): - all_providers = providers(target) - for key in all_providers: - provider = all_providers[key] - - if hasattrs(provider, provider_attrs): - attrs = [getattr(provider, attr) for attr in provider_attrs] - return "{} {}".format(attrs[0], attrs[1]) - - return ""
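To summarize the make_variables.bzl helpers removed above: _expand substitutes $(VAR) references and collapses $$ into a literal $, while _tokenize splits the expanded string with shell-like quoting. A sketch of their expected behavior follows; the values in the comments are traced from the code by hand, not produced by running it:

def _make_variables_examples(ctx):
    # _tokenize splits a raw option string roughly the way a shell would.
    options = []
    _tokenize(options, "a \"b c\" d\\ e")
    # options is now ["a", "b c", "d e"]: quotes group words, and a
    # backslash outside quotes escapes the following character.

    # _expand substitutes $(VAR) references and unescapes $$.
    # With the substitutions {"VAR": "x"} in scope:
    #   _expand(ctx, "out/$(VAR)/lib", {"VAR": "x"})  -> "out/x/lib"
    #   _expand(ctx, "$$ORIGIN", {})                  -> "$ORIGIN"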