Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Added baseline for workflow linter #1613

Merged
merged 26 commits into from
May 7, 2024
Merged
Show file tree
Hide file tree
Changes from 20 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions src/databricks/labs/ucx/contexts/application.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
from databricks.labs.ucx.hive_metastore.udfs import UdfsCrawler
from databricks.labs.ucx.hive_metastore.verification import VerifyHasMetastore
from databricks.labs.ucx.installer.workflows import DeployedWorkflows
from databricks.labs.ucx.source_code.jobs import WorkflowLinter
from databricks.labs.ucx.source_code.notebooks.loaders import NotebookResolver, NotebookLoader, WorkspaceNotebookLoader
from databricks.labs.ucx.source_code.files import FileLoader, LocalFileResolver
from databricks.labs.ucx.source_code.path_lookup import PathLookup
Expand Down Expand Up @@ -398,6 +399,15 @@
def dependency_graph_builder(self):
    """Builder for source-code dependency graphs, sharing this context's resolver and path lookup."""
    return DependencyGraphBuilder(self.dependency_resolver, self.path_lookup)

@cached_property
def workflow_linter(self):
    """Linter that scans deployed workflows (jobs) for migration problems.

    Wires together the workspace client, the dependency-graph builder, the
    migrated-tables index and the known-packages whitelist.
    """
    return WorkflowLinter(
        self.workspace_client,
        self.dependency_graph_builder,
        self.tables_migrator.index(),
        self.whitelist,
    )


class CliContext(GlobalContext, abc.ABC):
@cached_property
Expand Down
2 changes: 1 addition & 1 deletion src/databricks/labs/ucx/contexts/workspace_cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -166,4 +166,4 @@

@cached_property
def notebook_loader(self) -> NotebookLoader:
    """Loader for notebooks on the local filesystem (CLI context overrides the workspace loader)."""
    return LocalNotebookLoader()
52 changes: 22 additions & 30 deletions src/databricks/labs/ucx/mixins/fixtures.py
Original file line number Diff line number Diff line change
Expand Up @@ -566,6 +566,8 @@ def create(
) -> str:
if path is None:
path = f"/Users/{ws.current_user.me().user_name}/sdk-{make_random(4)}"
elif isinstance(path, pathlib.Path):
nfx marked this conversation as resolved.
Show resolved Hide resolved
path = str(path)
if content is None:
content = io.BytesIO(b"print(1)")
path = str(path)
Expand Down Expand Up @@ -754,43 +756,33 @@ def create(*, instance_pool_name=None, node_type_id=None, **kwargs):

@pytest.fixture
def make_job(ws, make_random, make_notebook):
    """Fixture factory that creates a Databricks job running a (possibly generated) notebook.

    The returned `create` callable accepts an optional `notebook_path` (str or
    pathlib.Path); when omitted, a throwaway notebook is created via `make_notebook`.
    Extra keyword arguments are forwarded to `ws.jobs.create`, except `spark_conf`,
    which is moved onto the task's new-cluster spec.
    """

    def create(notebook_path: str | None = None, **kwargs):
        task_spark_conf = None
        if "name" not in kwargs:
            kwargs["name"] = f"sdk-{make_random(4)}"
        if "spark_conf" in kwargs:
            # spark_conf belongs on the cluster spec, not on jobs.create itself
            task_spark_conf = kwargs.pop("spark_conf")
        if isinstance(notebook_path, pathlib.Path):
            notebook_path = str(notebook_path)
        if not notebook_path:
            notebook_path = make_notebook()
        assert notebook_path is not None
        if "tasks" not in kwargs:
            kwargs["tasks"] = [
                jobs.Task(
                    task_key=make_random(4),
                    description=make_random(4),
                    new_cluster=compute.ClusterSpec(
                        num_workers=1,
                        node_type_id=ws.clusters.select_node_type(local_disk=True, min_memory_gb=16),
                        spark_version=ws.clusters.select_spark_version(latest=True),
                        spark_conf=task_spark_conf,
                    ),
                    notebook_task=jobs.NotebookTask(notebook_path=notebook_path),
                    timeout_seconds=0,
                )
            ]
        job = ws.jobs.create(**kwargs)
        logger.info(f"Job: {ws.config.host}#job/{job.job_id}")
        return job
Expand Down
2 changes: 1 addition & 1 deletion src/databricks/labs/ucx/source_code/dbfs.py
Original file line number Diff line number Diff line change
def lint(self, code: str) -> Iterable[Advice]:
    """Parse `code` and yield advice for every hard-coded DBFS reference found by the visitor."""
    tree = ast.parse(code)
    visitor = DetectDbfsVisitor()
    visitor.visit(tree)
    # generator form: callers get a lazy stream of advices, consistent with the Linter protocol
    yield from visitor.get_advices()


class FromDbfsFolder(Linter):
Expand Down
156 changes: 54 additions & 102 deletions src/databricks/labs/ucx/source_code/files.py
Original file line number Diff line number Diff line change
@@ -1,24 +1,21 @@
from __future__ import annotations # for type hints

import ast
import logging
from pathlib import Path
from collections.abc import Callable

from databricks.labs.ucx.source_code.path_lookup import PathLookup
from databricks.sdk.service.workspace import Language

from databricks.labs.ucx.source_code.languages import Languages
from databricks.labs.ucx.source_code.notebooks.cells import CellLanguage
from databricks.labs.ucx.source_code.notebooks.base import NOTEBOOK_HEADER
from databricks.labs.ucx.source_code.python_linter import PythonLinter, ASTLinter
from databricks.labs.ucx.source_code.graph import (
DependencyGraph,
SourceContainer,
DependencyProblem,
DependencyLoader,
Dependency,
BaseDependencyResolver,
MaybeDependency,
)

logger = logging.getLogger(__name__)
Expand All @@ -32,68 +29,22 @@
# using CellLanguage so we can reuse the facilities it provides
self._language = CellLanguage.of_language(language)

def build_dependency_graph(self, parent: DependencyGraph) -> list[DependencyProblem]:
    """Register this file's dependencies on `parent` and return the problems found.

    Only Python sources are analysed; other languages are skipped with a warning.
    Problems reported by the graph builder are re-anchored to this file's path so
    advice points at the right source file.
    """
    if self._language is not CellLanguage.PYTHON:
        logger.warning(f"Unsupported language: {self._language.language}")
        return []
    maybe = parent.build_graph_from_python_source(self._original_code)
    return [problem.replace(source_path=self._path) for problem in maybe.problems]

@property
def path(self) -> Path:
    # filesystem path of this local file
    return self._path

def __repr__(self):
    return f"<LocalFile {self._path}>"

Check warning on line 47 in src/databricks/labs/ucx/source_code/files.py

View check run for this annotation

Codecov / codecov/patch

src/databricks/labs/ucx/source_code/files.py#L47

Added line #L47 was not covered by tests


class LocalFileMigrator:
Expand Down Expand Up @@ -148,34 +99,17 @@


class FileLoader(DependencyLoader):
    """Loads local files as dependency source containers.

    Paths are resolved through the caller-supplied PathLookup rather than state
    held by the loader, so one loader instance can serve multiple lookups.
    """

    def load_dependency(self, path_lookup: PathLookup, dependency: Dependency) -> SourceContainer | None:
        """Resolve the dependency's path via `path_lookup` and load it as a Python LocalFile.

        Returns None when the path cannot be resolved to an existing file.
        """
        absolute_path = path_lookup.resolve(dependency.path)
        if not absolute_path:
            return None
        return LocalFile(absolute_path, absolute_path.read_text("utf-8"), Language.PYTHON)

    def exists(self, path: Path) -> bool:
        # NOTE(review): checks the path as-given; resolution against search paths happens in PathLookup
        return path.exists()

    def __repr__(self):
        return "FileLoader()"


class LocalFileResolver(BaseDependencyResolver):
Expand All @@ -187,16 +121,34 @@
def with_next_resolver(self, resolver: BaseDependencyResolver) -> BaseDependencyResolver:
    """Return a copy of this resolver with `resolver` as the next link in the resolution chain."""
    return LocalFileResolver(self._file_loader, resolver)

def resolve_local_file(self, path_lookup, path: Path) -> MaybeDependency:
    """Resolve `path` via the lookup; wrap a hit as a file Dependency, otherwise delegate down the chain."""
    absolute_path = path_lookup.resolve(path)
    if absolute_path:
        return MaybeDependency(Dependency(self._file_loader, absolute_path), [])
    return super().resolve_local_file(path_lookup, path)

def resolve_import(self, path_lookup: PathLookup, name: str) -> MaybeDependency:
    """Resolve an import `name` (absolute or relative, dotted) to a local file dependency.

    Tries both `<pkg/mod>.py` and `<pkg/mod>/__init__.py` against the path lookup;
    falls through to the next resolver in the chain when neither exists.
    """
    parts = []
    # Relative imports use leading dots. A single leading dot indicates a relative import, starting with
    # the current package. Two or more leading dots indicate a relative import to the parent(s) of the current
    # package, one level per dot after the first.
    # see https://docs.python.org/3/reference/import.html#package-relative-imports
    for i, rune in enumerate(name):
        if not i and rune == '.':  # leading single dot
            parts.append(path_lookup.cwd.as_posix())
            continue
        if rune != '.':
            parts.append(name[i:].replace('.', '/'))
            break
        parts.append("..")
    for candidate in (f'{"/".join(parts)}.py', f'{"/".join(parts)}/__init__.py'):
        relative_path = Path(candidate)
        absolute_path = path_lookup.resolve(relative_path)
        if not absolute_path:
            continue
        dependency = Dependency(self._file_loader, absolute_path)
        return MaybeDependency(dependency, [])
    return super().resolve_import(path_lookup, name)

def __repr__(self):
    # stateless resolver: a fixed repr is sufficient for debugging
    return "LocalFileResolver()"
Loading