diff --git a/.changes/unreleased/Features-20230616-163045.yaml b/.changes/unreleased/Features-20230616-163045.yaml
new file mode 100644
index 00000000000..f2e5c6aa5d4
--- /dev/null
+++ b/.changes/unreleased/Features-20230616-163045.yaml
@@ -0,0 +1,6 @@
+kind: Features
+body: Added semantic models to the DAG and partial parsing module
+time: 2023-06-16T16:30:45.513314-04:00
+custom:
+  Author: peterallenwebb
+  Issue: "7800"
diff --git a/core/dbt/compilation.py b/core/dbt/compilation.py
index c45713b786e..52c4e4dddd1 100644
--- a/core/dbt/compilation.py
+++ b/core/dbt/compilation.py
@@ -179,6 +179,9 @@ def link_node(self, node: GraphMemberNode, manifest: Manifest):
     def link_graph(self, manifest: Manifest):
         for source in manifest.sources.values():
             self.add_node(source.unique_id)
+        for semantic_node in manifest.semantic_nodes.values():
+            self.add_node(semantic_node.unique_id)
+
         for node in manifest.nodes.values():
             self.link_node(node, manifest)
         for exposure in manifest.exposures.values():
diff --git a/core/dbt/contracts/graph/manifest.py b/core/dbt/contracts/graph/manifest.py
index 5df82b79672..5ce162b6af2 100644
--- a/core/dbt/contracts/graph/manifest.py
+++ b/core/dbt/contracts/graph/manifest.py
@@ -762,6 +762,9 @@ def build_flat_graph(self):
             "nodes": {k: v.to_dict(omit_none=False) for k, v in self.nodes.items()},
             "sources": {k: v.to_dict(omit_none=False) for k, v in self.sources.items()},
             "public_nodes": {k: v.to_dict(omit_none=False) for k, v in self.public_nodes.items()},
+            "semantic_nodes": {
+                k: v.to_dict(omit_none=False) for k, v in self.semantic_nodes.items()
+            },
         }

     def build_disabled_by_file_id(self):
@@ -822,6 +825,7 @@ def get_resource_fqns(self) -> Mapping[str, PathSet]:
             self.nodes.values(),
             self.sources.values(),
             self.metrics.values(),
+            self.semantic_nodes.values(),
         )
         for resource in all_resources:
             resource_type_plural = resource.resource_type.pluralize()
@@ -857,6 +861,8 @@ def deepcopy(self):
             public_nodes={k: _deepcopy(v) for k, v in self.public_nodes.items()},
             files={k: _deepcopy(v) for k, v in self.files.items()},
             state_check=_deepcopy(self.state_check),
+            publications={k: _deepcopy(v) for k, v in self.publications.items()},
+            semantic_nodes={k: _deepcopy(v) for k, v in self.semantic_nodes.items()},
         )
         copy.build_flat_graph()
         return copy
@@ -869,6 +875,7 @@ def build_parent_and_child_maps(self):
                 self.exposures.values(),
                 self.metrics.values(),
                 self.public_nodes.values(),
+                self.semantic_nodes.values(),
             )
         )
         forward_edges, backward_edges = build_node_edges(edge_members)
@@ -933,6 +940,8 @@ def expect(self, unique_id: str) -> GraphMemberNode:
             return self.exposures[unique_id]
         elif unique_id in self.metrics:
             return self.metrics[unique_id]
+        elif unique_id in self.semantic_nodes:
+            return self.semantic_nodes[unique_id]
         else:
             # something terrible has happened
             raise dbt.exceptions.DbtInternalError(
@@ -1002,7 +1011,9 @@ def pydantic_semantic_manifest(self) -> PydanticSemanticManifest:
         return pydantic_semantic_manifest

     def resolve_refs(
-        self, source_node: GraphMemberNode, current_project: str
+        self,
+        source_node: ModelNode,
+        current_project: str,  # TODO: ModelNode is overly restrictive typing
     ) -> List[MaybeNonSource]:
         resolved_refs: List[MaybeNonSource] = []
         for ref in source_node.refs:
@@ -1297,6 +1308,8 @@ def __reduce_ex__(self, protocol):
             self.disabled,
             self.env_vars,
             self.public_nodes,
+            self.publications,
+            self.semantic_nodes,
             self._doc_lookup,
             self._source_lookup,
             self._ref_lookup,
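The compilation and manifest changes above are what place semantic models in the DAG: link_graph now registers every entry of manifest.semantic_nodes as a graph node, and build_parent_and_child_maps includes them in the edge members passed to build_node_edges. A minimal sketch of that idea, using networkx directly rather than dbt's Linker and a toy two-member manifest (the semanticmodel.<project>.<name> unique IDs follow the form used in the tests below):

import networkx as nx

# Toy "manifest": unique_id -> upstream unique_ids (what depends_on.nodes records).
toy_manifest = {
    "model.test.fct_revenue": [],
    "semanticmodel.test.revenue": ["model.test.fct_revenue"],
}

graph = nx.DiGraph()
for unique_id, parents in toy_manifest.items():
    graph.add_node(unique_id)  # every member, semantic models included, becomes a node
    for parent_id in parents:
        graph.add_edge(parent_id, unique_id)  # edge from the measured model to the semantic model

assert list(graph.predecessors("semanticmodel.test.revenue")) == ["model.test.fct_revenue"]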
diff --git a/core/dbt/contracts/graph/nodes.py b/core/dbt/contracts/graph/nodes.py
index a3a00441a41..14a42088223 100644
--- a/core/dbt/contracts/graph/nodes.py
+++ b/core/dbt/contracts/graph/nodes.py
@@ -1478,12 +1478,14 @@ class NodeRelation(dbtClassMixin):
 class SemanticModel(GraphNode):
     model: str
     node_relation: Optional[NodeRelation]
+    depends_on: DependsOn = field(default_factory=DependsOn)
     description: Optional[str] = None
     defaults: Optional[Defaults] = None
     entities: Sequence[Entity] = field(default_factory=list)
     measures: Sequence[Measure] = field(default_factory=list)
     dimensions: Sequence[Dimension] = field(default_factory=list)
     metadata: Optional[SourceFileMetadata] = None
+    created_at: float = field(default_factory=lambda: time.time())  # REVIEW: Needed?

     @property
     def entity_references(self) -> List[LinkableElementReference]:
@@ -1534,6 +1536,18 @@ def partition(self) -> Optional[Dimension]:
     def reference(self) -> SemanticModelReference:
         return SemanticModelReference(semantic_model_name=self.name)

+    @property
+    def depends_on_nodes(self):
+        return self.depends_on.nodes
+
+    @property
+    def depends_on_public_nodes(self):
+        return self.depends_on.public_nodes
+
+    @property
+    def depends_on_macros(self):
+        return self.depends_on.macros
+

 # ====================================
 # Patches
@@ -1641,6 +1655,7 @@ def alias(self):
     ResultNode,
     Exposure,
     Metric,
+    SemanticModel,
 ]

 # All "nodes" (or node-like objects) in this file
diff --git a/core/dbt/parser/manifest.py b/core/dbt/parser/manifest.py
index 48596f3e29b..5675ea4fcc3 100644
--- a/core/dbt/parser/manifest.py
+++ b/core/dbt/parser/manifest.py
@@ -586,6 +586,7 @@ def check_for_model_deprecations(self):
                 resolved_refs = self.manifest.resolve_refs(node, self.root_project.project_name)
                 resolved_model_refs = [r for r in resolved_refs if isinstance(r, ModelNode)]
+                node.depends_on

                 for resolved_ref in resolved_model_refs:
                     if resolved_ref.deprecation_date:
@@ -1198,6 +1199,7 @@ def process_semantic_models(self) -> None:
                     schema_name=refd_node.schema,
                     database=refd_node.database,
                 )
+                semantic_model.depends_on.add_node(refd_node.unique_id)

 # nodes: node and column descriptions
 # sources: source and table descriptions, column descriptions
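Together, the nodes.py and parser changes give SemanticModel the same dependency plumbing other graph members have: a DependsOn field that process_semantic_models fills with the unique ID of the model being measured, exposed through depends_on_nodes for the graph builder. A stripped-down sketch of the pattern, with simplified, hypothetical types (the real DependsOn also tracks macros and public nodes):

from dataclasses import dataclass, field
from typing import List


@dataclass
class DependsOnSketch:
    nodes: List[str] = field(default_factory=list)

    def add_node(self, value: str) -> None:
        if value not in self.nodes:
            self.nodes.append(value)


@dataclass
class SemanticModelSketch:
    name: str
    depends_on: DependsOnSketch = field(default_factory=DependsOnSketch)

    @property
    def depends_on_nodes(self) -> List[str]:
        return self.depends_on.nodes


sm = SemanticModelSketch(name="revenue")
sm.depends_on.add_node("model.test.fct_revenue")  # mirrors the add_node call in process_semantic_models
assert sm.depends_on_nodes == ["model.test.fct_revenue"]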
source_element["name"]) - self.merge_patch(schema_file, "sources", source_element) + self.schedule_for_parsing("sources", unique_id, self.delete_schema_source) elif unique_id in self.saved_manifest.exposures: - exposure = self.saved_manifest.exposures[unique_id] - file_id = exposure.file_id - if file_id in self.saved_files and file_id not in self.file_diff["deleted"]: - schema_file = self.saved_files[file_id] - exposures = [] - if "exposures" in schema_file.dict_from_yaml: - exposures = schema_file.dict_from_yaml["exposures"] - exposure_element = self.get_schema_element(exposures, exposure.name) - if exposure_element: - self.delete_schema_exposure(schema_file, exposure_element) - self.merge_patch(schema_file, "exposures", exposure_element) + self.schedule_for_parsing("exposures", unique_id, self.delete_schema_exposure) elif unique_id in self.saved_manifest.metrics: - metric = self.saved_manifest.metrics[unique_id] - file_id = metric.file_id - if file_id in self.saved_files and file_id not in self.file_diff["deleted"]: - schema_file = self.saved_files[file_id] - metrics = [] - if "metrics" in schema_file.dict_from_yaml: - metrics = schema_file.dict_from_yaml["metrics"] - metric_element = self.get_schema_element(metrics, metric.name) - if metric_element: - self.delete_schema_metric(schema_file, metric_element) - self.merge_patch(schema_file, "metrics", metric_element) + self.schedule_for_parsing("metrics", unique_id, self.delete_schema_metric) + elif unique_id in self.saved_manifest.semantic_nodes: + self.schedule_for_parsing( + "semantic_nodes", unique_id, self.delete_schema_semantic_model + ) elif unique_id in self.saved_manifest.macros: macro = self.saved_manifest.macros[unique_id] file_id = macro.file_id @@ -447,6 +421,19 @@ def schedule_nodes_for_parsing(self, unique_ids): self.saved_files[file_id] = deepcopy(self.new_files[file_id]) self.add_to_pp_files(self.saved_files[file_id]) + def schedule_for_parsing(self, key, unique_id, delete_element): + element = getattr(self.saved_manifest, key)[unique_id] + file_id = element.file_id + if file_id in self.saved_files and file_id not in self.file_diff["deleted"]: + schema_file = self.saved_files[file_id] + elements = [] + if key in schema_file.dict_from_yaml: + elements = schema_file.dict_from_yaml[key] + schema_element = self.get_schema_element(elements, element.name) + if schema_element: + delete_element(schema_file, schema_element) + self.merge_patch(schema_file, key, schema_element) + def delete_macro_file(self, source_file, follow_references=False): self.check_for_special_deleted_macros(source_file) self.handle_macro_file_links(source_file, follow_references) @@ -528,7 +515,7 @@ def schedule_macro_nodes_for_parsing(self, unique_ids): patch = self.get_schema_element(patch_list, name) if patch: if key in ["models", "seeds", "snapshots"]: - self.delete_schema_mssa_links(schema_file, key, patch) + self.delete_schema_mssa_links(key, schema_file, patch) self.merge_patch(schema_file, key, patch) if unique_id in schema_file.node_patches: schema_file.node_patches.remove(unique_id) @@ -538,7 +525,6 @@ def schedule_macro_nodes_for_parsing(self, unique_ids): # This is a source patch; need to re-parse orig source self.remove_source_override_target(patch) self.delete_schema_source(schema_file, patch) - self.remove_tests(schema_file, "sources", patch["name"]) self.merge_patch(schema_file, "sources", patch) else: file_id = node.file_id @@ -610,26 +596,15 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict # models, seeds, 
@@ -610,26 +596,15 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict

         # models, seeds, snapshots, analyses
         for dict_key in ["models", "seeds", "snapshots", "analyses"]:
-            key_diff = self.get_diff_for(dict_key, saved_yaml_dict, new_yaml_dict)
-            if key_diff["changed"]:
-                for elem in key_diff["changed"]:
-                    self.delete_schema_mssa_links(schema_file, dict_key, elem)
-                    self.merge_patch(schema_file, dict_key, elem)
-            if key_diff["deleted"]:
-                for elem in key_diff["deleted"]:
-                    self.delete_schema_mssa_links(schema_file, dict_key, elem)
-            if key_diff["added"]:
-                for elem in key_diff["added"]:
-                    self.merge_patch(schema_file, dict_key, elem)
-            # Handle schema file updates due to env_var changes
-            if dict_key in env_var_changes and dict_key in new_yaml_dict:
-                for name in env_var_changes[dict_key]:
-                    if name in key_diff["changed_or_deleted_names"]:
-                        continue
-                    elem = self.get_schema_element(new_yaml_dict[dict_key], name)
-                    if elem:
-                        self.delete_schema_mssa_links(schema_file, dict_key, elem)
-                        self.merge_patch(schema_file, dict_key, elem)
+            delete_function = functools.partial(self.delete_schema_mssa_links, dict_key)
+            self.handle_key_changes(
+                schema_file,
+                dict_key,
+                env_var_changes,
+                saved_yaml_dict,
+                new_yaml_dict,
+                delete_function,
+            )

         # sources
         dict_key = "sources"
@@ -639,14 +614,12 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict
                 if "overrides" in source:  # This is a source patch; need to re-parse orig source
                     self.remove_source_override_target(source)
                 self.delete_schema_source(schema_file, source)
-                self.remove_tests(schema_file, dict_key, source["name"])
                 self.merge_patch(schema_file, dict_key, source)
         if source_diff["deleted"]:
             for source in source_diff["deleted"]:
                 if "overrides" in source:  # This is a source patch; need to re-parse orig source
                     self.remove_source_override_target(source)
                 self.delete_schema_source(schema_file, source)
-                self.remove_tests(schema_file, dict_key, source["name"])
         if source_diff["added"]:
             for source in source_diff["added"]:
                 if "overrides" in source:  # This is a source patch; need to re-parse orig source
@@ -662,100 +635,78 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict
                 if "overrides" in source:
                     self.remove_source_override_target(source)
                 self.delete_schema_source(schema_file, source)
-                self.remove_tests(schema_file, dict_key, source["name"])
                 self.merge_patch(schema_file, dict_key, source)

-        # macros
-        dict_key = "macros"
-        macro_diff = self.get_diff_for(dict_key, saved_yaml_dict, new_yaml_dict)
-        if macro_diff["changed"]:
-            for macro in macro_diff["changed"]:
-                self.delete_schema_macro_patch(schema_file, macro)
-                self.merge_patch(schema_file, dict_key, macro)
-        if macro_diff["deleted"]:
-            for macro in macro_diff["deleted"]:
-                self.delete_schema_macro_patch(schema_file, macro)
-        if macro_diff["added"]:
-            for macro in macro_diff["added"]:
-                self.merge_patch(schema_file, dict_key, macro)
-        # Handle schema file updates due to env_var changes
-        if dict_key in env_var_changes and dict_key in new_yaml_dict:
-            for name in env_var_changes[dict_key]:
-                if name in macro_diff["changed_or_deleted_names"]:
-                    continue
-                elem = self.get_schema_element(new_yaml_dict[dict_key], name)
-                if elem:
-                    self.delete_schema_macro_patch(schema_file, elem)
-                    self.merge_patch(schema_file, dict_key, elem)
-
-        # exposures
-        dict_key = "exposures"
-        exposure_diff = self.get_diff_for(dict_key, saved_yaml_dict, new_yaml_dict)
-        if exposure_diff["changed"]:
-            for exposure in exposure_diff["changed"]:
-                self.delete_schema_exposure(schema_file, exposure)
-                self.merge_patch(schema_file, dict_key, exposure)
exposure_diff["deleted"]: - for exposure in exposure_diff["deleted"]: - self.delete_schema_exposure(schema_file, exposure) - if exposure_diff["added"]: - for exposure in exposure_diff["added"]: - self.merge_patch(schema_file, dict_key, exposure) - # Handle schema file updates due to env_var changes - if dict_key in env_var_changes and dict_key in new_yaml_dict: - for name in env_var_changes[dict_key]: - if name in exposure_diff["changed_or_deleted_names"]: - continue - elem = self.get_schema_element(new_yaml_dict[dict_key], name) - if elem: - self.delete_schema_exposure(schema_file, elem) - self.merge_patch(schema_file, dict_key, elem) - - # metrics - dict_key = "metrics" - metric_diff = self.get_diff_for("metrics", saved_yaml_dict, new_yaml_dict) - if metric_diff["changed"]: - for metric in metric_diff["changed"]: - self.delete_schema_metric(schema_file, metric) - self.merge_patch(schema_file, dict_key, metric) - if metric_diff["deleted"]: - for metric in metric_diff["deleted"]: - self.delete_schema_metric(schema_file, metric) - if metric_diff["added"]: - for metric in metric_diff["added"]: - self.merge_patch(schema_file, dict_key, metric) - # Handle schema file updates due to env_var changes - if dict_key in env_var_changes and dict_key in new_yaml_dict: - for name in env_var_changes[dict_key]: - if name in metric_diff["changed_or_deleted_names"]: - continue - elem = self.get_schema_element(new_yaml_dict[dict_key], name) - if elem: - self.delete_schema_metric(schema_file, elem) - self.merge_patch(schema_file, dict_key, elem) - - # groups - dict_key = "groups" - group_diff = self.get_diff_for("groups", saved_yaml_dict, new_yaml_dict) - if group_diff["changed"]: - for group in group_diff["changed"]: - self.delete_schema_group(schema_file, group) - self.merge_patch(schema_file, dict_key, group) - if group_diff["deleted"]: - for group in group_diff["deleted"]: - self.delete_schema_group(schema_file, group) - if group_diff["added"]: - for group in group_diff["added"]: - self.merge_patch(schema_file, dict_key, group) + self.handle_key_changes( + schema_file, + "macros", + env_var_changes, + saved_yaml_dict, + new_yaml_dict, + self.delete_schema_macro_patch, + ) + self.handle_key_changes( + schema_file, + "exposures", + env_var_changes, + saved_yaml_dict, + new_yaml_dict, + self.delete_schema_exposure, + ) + self.handle_key_changes( + schema_file, + "metrics", + env_var_changes, + saved_yaml_dict, + new_yaml_dict, + self.delete_schema_metric, + ) + self.handle_key_changes( + schema_file, + "groups", + env_var_changes, + saved_yaml_dict, + new_yaml_dict, + self.delete_schema_group, + ) + self.handle_key_changes( + schema_file, + "semantic_models", + env_var_changes, + saved_yaml_dict, + new_yaml_dict, + self.delete_schema_semantic_model, + ) + + def handle_key_changes( + self, + schema_file, + dict_key, + env_var_changes, + saved_yaml_dict, + new_yaml_dict, + delete_element, + ): + key_diff = self.get_diff_for(dict_key, saved_yaml_dict, new_yaml_dict) + if key_diff["changed"]: + for elem in key_diff["changed"]: + delete_element(schema_file, elem) + self.merge_patch(schema_file, dict_key, elem) + if key_diff["deleted"]: + for elem in key_diff["deleted"]: + delete_element(schema_file, elem) + if key_diff["added"]: + for elem in key_diff["added"]: + self.merge_patch(schema_file, dict_key, elem) # Handle schema file updates due to env_var changes if dict_key in env_var_changes and dict_key in new_yaml_dict: for name in env_var_changes[dict_key]: - if name in group_diff["changed_or_deleted_names"]: + 
if name in key_diff["changed_or_deleted_names"]: continue - elem = self.get_schema_element(new_yaml_dict[dict_key], name) - if elem: - self.delete_schema_group(schema_file, elem) - self.merge_patch(schema_file, dict_key, elem) + schema_elem = self.get_schema_element(new_yaml_dict[dict_key], name) + if schema_elem: + delete_element(schema_file, schema_elem) + self.merge_patch(schema_file, dict_key, schema_elem) # Take a "section" of the schema file yaml dictionary from saved and new schema files # and determine which parts have changed @@ -818,7 +769,7 @@ def merge_patch(self, schema_file, key, patch): # For model, seed, snapshot, analysis schema dictionary keys, # delete the patches and tests from the patch - def delete_schema_mssa_links(self, schema_file, dict_key, elem): + def delete_schema_mssa_links(self, dict_key, schema_file, elem): # find elem node unique_id in node_patches prefix = key_to_prefix[dict_key] elem_unique_ids = [] @@ -887,6 +838,8 @@ def delete_schema_source(self, schema_file, source_dict): schema_file.sources.remove(unique_id) self.schedule_referencing_nodes_for_parsing(unique_id) + self.remove_tests(schema_file, "sources", source_name) + def delete_schema_macro_patch(self, schema_file, macro): # This is just macro patches that need to be reapplied macro_unique_id = None @@ -942,6 +895,19 @@ def delete_schema_metric(self, schema_file, metric_dict): elif unique_id in self.saved_manifest.disabled: self.delete_disabled(unique_id, schema_file.file_id) + # semantic models are created from schema files, but are not referred to by other nodes + def delete_schema_semantic_model(self, schema_file, semantic_model_dict): + semantic_model_name = semantic_model_dict["name"] + semantic_models = schema_file.semantic_nodes.copy() + for unique_id in semantic_models: + if unique_id in self.saved_manifest.semantic_nodes: + semantic_model = self.saved_manifest.semantic_nodes[unique_id] + if semantic_model.name == semantic_model_name: + self.saved_manifest.semantic_nodes.pop(unique_id) + schema_file.semantic_nodes.remove(unique_id) + elif unique_id in self.saved_manifest.disabled: + self.delete_disabled(unique_id, schema_file.file_id) + def get_schema_element(self, elem_list, elem_name): for element in elem_list: if "name" in element and element["name"] == elem_name: @@ -970,7 +936,6 @@ def remove_source_override_target(self, source_dict): (orig_file, orig_source) = self.get_source_override_file_and_dict(source_dict) if orig_source: self.delete_schema_source(orig_file, orig_source) - self.remove_tests(orig_file, "sources", orig_source["name"]) self.merge_patch(orig_file, "sources", orig_source) self.add_to_pp_files(orig_file) diff --git a/tests/functional/partial_parsing/test_pp_semantic_models.py b/tests/functional/partial_parsing/test_pp_semantic_models.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/semantic_models/test_semantic_model_parsing.py b/tests/functional/semantic_models/test_semantic_model_parsing.py index e0fc2cd72a7..7c5bb560e3d 100644 --- a/tests/functional/semantic_models/test_semantic_model_parsing.py +++ b/tests/functional/semantic_models/test_semantic_model_parsing.py @@ -1,7 +1,11 @@ import pytest +from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity + from dbt.cli.main import dbtRunner from dbt.contracts.graph.manifest import Manifest +from dbt.tests.util import write_file + schema_yml = """models: - name: fct_revenue @@ -57,3 +61,22 @@ def test_semantic_model_parsing(self, project): assert 
diff --git a/tests/functional/partial_parsing/test_pp_semantic_models.py b/tests/functional/partial_parsing/test_pp_semantic_models.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/functional/semantic_models/test_semantic_model_parsing.py b/tests/functional/semantic_models/test_semantic_model_parsing.py
index e0fc2cd72a7..7c5bb560e3d 100644
--- a/tests/functional/semantic_models/test_semantic_model_parsing.py
+++ b/tests/functional/semantic_models/test_semantic_model_parsing.py
@@ -1,7 +1,11 @@
 import pytest

+from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity
+
 from dbt.cli.main import dbtRunner
 from dbt.contracts.graph.manifest import Manifest
+from dbt.tests.util import write_file
+

 schema_yml = """models:
   - name: fct_revenue
@@ -57,3 +61,22 @@ def test_semantic_model_parsing(self, project):
         assert len(manifest.semantic_nodes) == 1
         semantic_model = manifest.semantic_nodes["semanticmodel.test.revenue"]
         assert semantic_model.node_relation.alias == "fct_revenue"
+
+    def test_semantic_model_partial_parsing(self, project):
+        # First, use the default schema.yml to define our semantic model, and
+        # run the dbt parse command
+        runner = dbtRunner()
+        result = runner.invoke(["parse"])
+
+        # Next, modify the default schema.yml to change a detail of the semantic
+        # model.
+        modified_schema_yml = schema_yml.replace("time_granularity: day", "time_granularity: week")
+        write_file(modified_schema_yml, project.project_root, "models", "schema.yml")
+
+        # Now, run the dbt parse command again.
+        result = runner.invoke(["parse"])
+
+        # Finally, verify that the manifest reflects the partially parsed change
+        manifest = result.result
+        semantic_model = manifest.semantic_nodes["semanticmodel.test.revenue"]
+        assert semantic_model.dimensions[0].type_params.time_granularity == TimeGranularity.WEEK
diff --git a/tests/unit/test_manifest.py b/tests/unit/test_manifest.py
index c5cae56c293..e10613e0c3f 100644
--- a/tests/unit/test_manifest.py
+++ b/tests/unit/test_manifest.py
@@ -335,6 +335,9 @@ def setUp(self):
                 original_file_path="schema.yml",
             ),
         }
+
+        self.semantic_nodes = {}
+
         for exposure in self.exposures.values():
             exposure.validate(exposure.to_dict(omit_none=True))
         for metric in self.metrics.values():
@@ -475,15 +478,27 @@ def test_build_flat_graph(self):
         flat_metrics = flat_graph["metrics"]
         flat_nodes = flat_graph["nodes"]
         flat_sources = flat_graph["sources"]
+        flat_semantic_nodes = flat_graph["semantic_nodes"]
         self.assertEqual(
             set(flat_graph),
-            set(["exposures", "groups", "nodes", "sources", "metrics", "public_nodes"]),
+            set(
+                [
+                    "exposures",
+                    "groups",
+                    "nodes",
+                    "sources",
+                    "metrics",
+                    "public_nodes",
+                    "semantic_nodes",
+                ]
+            ),
         )
         self.assertEqual(set(flat_exposures), set(self.exposures))
         self.assertEqual(set(flat_groups), set(self.groups))
         self.assertEqual(set(flat_metrics), set(self.metrics))
         self.assertEqual(set(flat_nodes), set(self.nested_nodes))
         self.assertEqual(set(flat_sources), set(self.sources))
+        self.assertEqual(set(flat_semantic_nodes), set(self.semantic_nodes))
         for node in flat_nodes.values():
             self.assertEqual(frozenset(node), REQUIRED_PARSED_NODE_KEYS)
@@ -1001,13 +1016,24 @@ def test_build_flat_graph(self):
             selectors={},
             files={},
             exposures={},
+            semantic_nodes={},
         )
         manifest.build_flat_graph()
         flat_graph = manifest.flat_graph
         flat_nodes = flat_graph["nodes"]
         self.assertEqual(
             set(flat_graph),
-            set(["exposures", "groups", "metrics", "nodes", "sources", "public_nodes"]),
+            set(
+                [
+                    "exposures",
+                    "groups",
+                    "metrics",
+                    "nodes",
+                    "sources",
+                    "public_nodes",
+                    "semantic_nodes",
+                ]
+            ),
         )
         self.assertEqual(set(flat_nodes), set(self.nested_nodes))
         compiled_count = 0