
Commit 5991c65

[doc build] use rayci.anyscale.dev to fetch doc build cache
stop fetching from s3 directly

Signed-off-by: Lonnie Liu <[email protected]>
1 parent 1e38c94 commit 5991c65

File tree

1 file changed (+15, -21 lines)


doc/load_doc_cache.py

Lines changed: 15 additions & 21 deletions
@@ -1,20 +1,17 @@
-import boto3
-import botocore
 import subprocess
 import tarfile
 import os
-import click
-from botocore import UNSIGNED
-from botocore.client import Config
 import time
+
+import click
 import requests
 
 S3_BUCKET = "ray-ci-results"
 DOC_BUILD_DIR_S3 = "doc_build"
 LAST_BUILD_CUTOFF = 3  # how many days ago to consider a build outdated
 PENDING_FILES_PATH = "pending_files.txt"
 ENVIRONMENT_PICKLE = "_build/doctrees/environment.pickle"
-DOC_BUILD_S3_URL = "https://ray-ci-results.s3.us-west-2.amazonaws.com/doc_build"
+DOC_BUILD_S3_URL = "https://rayci.anyscale.dev/doc-build-cache"
 
 
 def find_latest_master_commit():
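
For illustration only: with the constant change above, archive URLs are composed the same way as before, just against the new host. The commit hash below is a made-up placeholder.

DOC_BUILD_S3_URL = "https://rayci.anyscale.dev/doc-build-cache"

commit = "0123456789abcdef0123456789abcdef01234567"  # hypothetical hash
print(f"{DOC_BUILD_S3_URL}/{commit}.tgz")
# -> https://rayci.anyscale.dev/doc-build-cache/0123456789abcdef0123456789abcdef01234567.tgz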
@@ -34,16 +31,16 @@ def find_latest_master_commit():
         .split("\n")
     )
     for commit in latest_commits:
-        result = requests.head(f"{DOC_BUILD_S3_URL}/{commit}.tgz")
-        if result.status_code == 200:
-            return commit
+        with requests.head(f"{DOC_BUILD_S3_URL}/{commit}.tgz", allow_redirects=True) as response:
+            if response.status_code == 200:
+                return commit
     raise Exception(
-        "No cache found for latest master commit."
+        f"No cache found for latest master commit {commit}. "
         "Please merge with upstream master or use 'make develop'."
     )
 
 
-def fetch_cache_from_s3(commit, target_file_path):
+def fetch_cache(commit, target_file_path):
     """
     Fetch doc cache archive from ray-ci-results S3 bucket
 
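
A minimal standalone sketch of the existence probe the hunk above introduces. It assumes the cache endpoint may answer the HEAD request with a redirect before the final 200, which is why allow_redirects=True matters; the helper name cache_exists is invented here for illustration.

import requests

DOC_BUILD_S3_URL = "https://rayci.anyscale.dev/doc-build-cache"


def cache_exists(commit: str) -> bool:
    # HEAD is enough to check for the archive without downloading it;
    # follow redirects so a 200 at the final location counts as a hit
    # (assumption: the endpoint may forward to a backing object store).
    with requests.head(f"{DOC_BUILD_S3_URL}/{commit}.tgz", allow_redirects=True) as response:
        return response.status_code == 200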
@@ -52,15 +49,12 @@ def fetch_cache_from_s3(commit, target_file_path):
         target_file_path: The file path to save the doc cache archive
     """
     # Create an S3 client
-    s3 = boto3.client("s3", config=Config(signature_version=UNSIGNED))
-    s3_file_path = f"{DOC_BUILD_DIR_S3}/{commit}.tgz"
-    try:
-        print(f"Fetching doc cache from commit {commit}...")
-        s3.download_file(S3_BUCKET, s3_file_path, target_file_path)
-        print(f"Successfully downloaded {s3_file_path} to {target_file_path}")
-    except botocore.exceptions.ClientError as e:
-        print(f"Failed to download {s3_file_path} from S3: {str(e)}")
-        raise e
+    with requests.get(f"{DOC_BUILD_S3_URL}/{commit}.tgz", allow_redirects=True) as response:
+        response.raise_for_status()
+        with open(target_file_path, "wb") as f:
+            for chunk in response.iter_content(chunk_size=8192):
+                f.write(chunk)
+    print(f"Successfully downloaded {target_file_path}")
 
 
 def extract_cache(cache_path: str, doc_dir: str):
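
Because the new fetch_cache does not pass stream=True, requests buffers the full response body before iter_content slices it. If the doc archives grow large, a streamed variant would be a small change; the sketch below shows that alternative, not what the commit does, and fetch_cache_streamed is an invented name.

import requests


def fetch_cache_streamed(url: str, target_file_path: str) -> None:
    # stream=True defers the body download; iter_content then yields
    # chunks as they arrive instead of slicing an in-memory buffer.
    with requests.get(url, allow_redirects=True, stream=True) as response:
        response.raise_for_status()
        with open(target_file_path, "wb") as f:
            for chunk in response.iter_content(chunk_size=8192):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)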
@@ -150,7 +144,7 @@ def main(ray_dir: str) -> None:
 
     cache_path = f"{ray_dir}/doc.tgz"
     # Fetch cache of that commit from S3 to cache_path
-    fetch_cache_from_s3(latest_master_commit, cache_path)
+    fetch_cache(latest_master_commit, cache_path)
     # Extract cache to override ray/doc directory
     extract_cache(cache_path, f"{ray_dir}/doc")
     os.remove(cache_path)
