1 change: 1 addition & 0 deletions .generator/Dockerfile
@@ -139,4 +139,5 @@ RUN chmod a+rx ./cli.py
COPY .generator/parse_googleapis_content.py .
RUN chmod a+rx ./parse_googleapis_content.py

ENV ENABLE_PERF_LOGS=1
ENTRYPOINT ["python3.14", "./cli.py"]
113 changes: 95 additions & 18 deletions .generator/cli.py
@@ -23,6 +23,7 @@
import subprocess
import sys
import tempfile
import time
import yaml
from datetime import date, datetime
from functools import lru_cache
@@ -31,6 +32,40 @@
import build.util
import parse_googleapis_content

logging.basicConfig(stream=sys.stdout, level=logging.INFO)

Review comment (high): Calling logging.basicConfig() at the module level is generally discouraged because it can interfere with the logging configuration of other modules that import this one. It's better to move this configuration into a main function or an if __name__ == '__main__': block to ensure it only runs when the script is executed directly.
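A minimal sketch of the suggested change, assuming the module has a main()-style entry point (the function body below is illustrative, not taken from cli.py):

```python
import logging
import sys

logger = logging.getLogger(__name__)


def main() -> None:
    # Configure logging only when the script runs as an entry point, so that
    # importing this module never overrides the caller's logging configuration.
    logging.basicConfig(stream=sys.stdout, level=logging.INFO)
    logger.info("cli starting")


if __name__ == "__main__":
    main()
```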


import functools

Review comment (medium): To adhere to PEP 8, this import should be moved to the top of the file with other standard library imports.
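For illustration, the standard-library modules already used in cli.py would then sit together in one block at the top of the file:

```python
# Standard library imports grouped in a single block, alphabetically (PEP 8).
import functools
import logging
import os
import subprocess
import sys
import tempfile
import time
```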


PERF_LOGGING_ENABLED = os.environ.get("ENABLE_PERF_LOGS") == "1"

if PERF_LOGGING_ENABLED:
    perf_logger = logging.getLogger("performance_metrics")
    perf_logger.setLevel(logging.INFO)
    perf_handler = logging.FileHandler("performance_metrics.log", mode='w')
    perf_formatter = logging.Formatter('%(asctime)s | %(message)s', datefmt='%H:%M:%S')
    perf_handler.setFormatter(perf_formatter)
    perf_logger.addHandler(perf_handler)
    perf_logger.propagate = False

def track_time(func):
    """
    Decorator. Usage: @track_time
    If logging is OFF, it returns the original function (Zero Overhead).
    If logging is ON, it wraps the function to measure execution time.
    """
    if not PERF_LOGGING_ENABLED:
        return func

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        start_time = time.perf_counter()
        try:
            return func(*args, **kwargs)
        finally:
            duration = time.perf_counter() - start_time
            perf_logger.info(f"{func.__name__:<30} | {duration:.4f} seconds")

    return wrapper
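A brief usage sketch of this decorator (the function is purely illustrative; it assumes the imports above and ENABLE_PERF_LOGS=1 in the environment):

```python
@track_time
def _example_workload() -> None:
    # Simulate a unit of work; its duration is appended to
    # performance_metrics.log as a line like "_example_workload | 0.2501 seconds".
    time.sleep(0.25)


_example_workload()
```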

try:
import synthtool
@@ -320,8 +355,9 @@ def _get_library_id(request_data: Dict) -> str:
return library_id


@track_time
def _run_post_processor(output: str, library_id: str, is_mono_repo: bool):
"""Runs the synthtool post-processor on the output directory.
"""Runs the synthtool post-processor (templates) and Ruff formatter (lint/format).

Args:
output(str): Path to the directory in the container where code
@@ -331,25 +367,58 @@ def _run_post_processor(output: str, library_id: str, is_mono_repo: bool):
"""
os.chdir(output)
path_to_library = f"packages/{library_id}" if is_mono_repo else "."
logger.info("Running Python post-processor...")

# 1. Run Synthtool (Templates & Fixers only)
# Note: This relies on 'nox' being disabled in your environment (via run_fast.sh shim)
# to avoid the slow formatting step inside owlbot.
logger.info("Running Python post-processor (Templates & Fixers)...")
if SYNTHTOOL_INSTALLED:
if is_mono_repo:
python_mono_repo.owlbot_main(path_to_library)
else:
# Some repositories have customizations in `librarian.py`.
# If this file exists, run those customizations instead of `owlbot_main`
if Path(f"{output}/librarian.py").exists():
subprocess.run(["python3.14", f"{output}/librarian.py"])
try:
if is_mono_repo:
python_mono_repo.owlbot_main(path_to_library)
else:
python.owlbot_main()
else:
raise SYNTHTOOL_IMPORT_ERROR # pragma: NO COVER

# If there is no noxfile, run `isort`` and `black` on the output.
# This is required for proto-only libraries which are not GAPIC.
if not Path(f"{output}/{path_to_library}/noxfile.py").exists():
subprocess.run(["isort", output])
subprocess.run(["black", output])
# Handle custom librarian scripts if present
if Path(f"{output}/librarian.py").exists():
subprocess.run(["python3.14", f"{output}/librarian.py"])
else:
python.owlbot_main()
except Exception as e:
logger.warning(f"Synthtool warning (non-fatal): {e}")

# 2. Run RUFF (Fast Formatter & Import Sorter)
# This replaces both 'isort' and 'black' and runs in < 1 second.
# We hardcode flags here to match Black defaults so you don't need config files.
# logger.info("🚀 Running Ruff (Fast Formatter)...")
# try:
# # STEP A: Fix Imports (like isort)
# subprocess.run(
# [
# "ruff", "check",
# "--select", "I", # Only run Import sorting rules
# "--fix", # Auto-fix them
# "--line-length=88", # Match Black default
# "--known-first-party=google", # Prevent 'google' moving to 3rd party block
# output
# ],
# check=False,
# stdout=subprocess.DEVNULL,
# stderr=subprocess.DEVNULL
# )

# # STEP B: Format Code (like black)
# subprocess.run(
# [
# "ruff", "format",
# "--line-length=88", # Match Black default
# output
# ],
# check=False,
# stdout=subprocess.DEVNULL,
# stderr=subprocess.DEVNULL
# )
# except FileNotFoundError:
# logger.warning("⚠️ Ruff binary not found. Code will be unformatted.")
# logger.warning(" Please run: pip install ruff")

logger.info("Python post-processor ran successfully.")
Review comment on lines 368 to 423 (high): This change removes the existing isort and black formatting step and replaces it with a commented-out block for ruff. This means that currently no formatting will be applied, which seems like an unintentional regression. If ruff is not ready to be used, the old formatting logic should be kept. If ruff is ready, this block should be uncommented.
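One possible way to resolve this, sketched purely as an editor's suggestion (the helper name and structure are assumptions, not the PR author's code): prefer ruff when the binary is available and fall back to the existing isort/black commands so the output is never left unformatted.

```python
import shutil
import subprocess


def _format_output(output: str) -> None:
    """Format generated code, preferring ruff with an isort/black fallback."""
    if shutil.which("ruff"):
        # Sort imports (isort-style rules), then apply black-compatible formatting.
        subprocess.run(
            ["ruff", "check", "--select", "I", "--fix", "--line-length=88", output],
            check=False,
        )
        subprocess.run(["ruff", "format", "--line-length=88", output], check=False)
    else:
        # Keep the previous behaviour rather than skipping formatting entirely.
        subprocess.run(["isort", output], check=False)
        subprocess.run(["black", output], check=False)
```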


@@ -389,6 +458,7 @@ def _add_header_to_files(directory: str) -> None:
f.writelines(lines)


@track_time
def _copy_files_needed_for_post_processing(
output: str, input: str, library_id: str, is_mono_repo: bool
):
@@ -435,6 +505,7 @@ def _copy_files_needed_for_post_processing(
)


@track_time
def _clean_up_files_after_post_processing(
output: str, library_id: str, is_mono_repo: bool
):
@@ -581,6 +652,7 @@ def _get_repo_name_from_repo_metadata(base: str, library_id: str, is_mono_repo:
return repo_name


@track_time
def _generate_repo_metadata_file(
output: str, library_id: str, source: str, apis: List[Dict], is_mono_repo: bool
):
@@ -622,6 +694,7 @@ def _generate_repo_metadata_file(
_write_json_file(output_repo_metadata, metadata_content)


@track_time
def _copy_readme_to_docs(output: str, library_id: str, is_mono_repo: bool):
"""Copies the README.rst file for a generated library to docs/README.rst.

@@ -663,6 +736,7 @@ def _copy_readme_to_docs(output: str, library_id: str, is_mono_repo: bool):
f.write(content)


@track_time
def handle_generate(
librarian: str = LIBRARIAN_DIR,
source: str = SOURCE_DIR,
@@ -711,6 +785,7 @@ def handle_generate(
_run_post_processor(output, library_id, is_mono_repo)
_copy_readme_to_docs(output, library_id, is_mono_repo)
_clean_up_files_after_post_processing(output, library_id, is_mono_repo)

except Exception as e:
raise ValueError("Generation failed.") from e
logger.info("'generate' command executed.")
@@ -924,6 +999,7 @@ def _stage_gapic_library(tmp_dir: str, staging_dir: str) -> None:
shutil.copytree(tmp_dir, staging_dir, dirs_exist_ok=True)


@track_time
def _generate_api(
api_path: str,
library_id: str,
@@ -1744,6 +1820,7 @@ def handle_release_stage(
output=args.output,
input=args.input,
)

elif args.command == "build":
args.func(librarian=args.librarian, repo=args.repo)
elif args.command == "release-stage":
1 change: 1 addition & 0 deletions .generator/requirements.in
@@ -5,3 +5,4 @@ starlark-pyo3>=2025.1
build
black==23.7.0
isort==5.11.0
ruff

Review comment (medium): To ensure reproducible builds, it's best practice to pin dependencies to a specific version. Please consider pinning ruff to a specific version (e.g., ruff==0.1.6). Also, it's a good practice to end files with a newline character.
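Applying that suggestion, the tail of requirements.in would read roughly as follows (the pinned version is the reviewer's example, not a vetted recommendation):

```
build
black==23.7.0
isort==5.11.0
ruff==0.1.6
```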

32 changes: 32 additions & 0 deletions .librarian/generate-request.json
@@ -0,0 +1,32 @@
{
"id": "google-cloud-discoveryengine",
"version": "0.4.0",
"apis": [
{
"path": "google/cloud/discoveryengine/v1",
"service_config": "discoveryengine_v1.yaml"
},
{
"path": "google/cloud/discoveryengine/v1beta",
"service_config": "discoveryengine_v1beta.yaml"
},
{
"path": "google/cloud/discoveryengine/v1alpha",
"service_config": "discoveryengine_v1alpha.yaml"
}
],
"source_roots": [
"packages/google-cloud-discoveryengine"
],
"preserve_regex": [
"packages/google-cloud-discoveryengine/CHANGELOG.md",
"docs/CHANGELOG.md",
"samples/README.txt",
"samples/snippets/README.rst",
"tests/system"
],
"remove_regex": [
"packages/google-cloud-discoveryengine/"
],
"tag_format": "{id}-v{version}"
}

Review comment (medium): It's a common convention for text files to end with a newline character. Please add a newline at the end of this file.
