-import boto3
-import botocore
 import subprocess
 import tarfile
 import os
-import click
-from botocore import UNSIGNED
-from botocore.client import Config
 import time
+
+import click
 import requests
 
-S3_BUCKET = "ray-ci-results"
-DOC_BUILD_DIR_S3 = "doc_build"
 LAST_BUILD_CUTOFF = 3  # how many days ago to consider a build outdated
 PENDING_FILES_PATH = "pending_files.txt"
 ENVIRONMENT_PICKLE = "_build/doctrees/environment.pickle"
-DOC_BUILD_S3_URL = "https://ray-ci-results.s3.us-west-2.amazonaws.com/doc_build"
+DOC_BUILD_CACHE_URL = "https://rayci.anyscale.dev/doc-build-cache"
 
 
 def find_latest_master_commit():
@@ -34,33 +29,29 @@ def find_latest_master_commit():
         .split("\n")
     )
     for commit in latest_commits:
-        result = requests.head(f"{DOC_BUILD_S3_URL}/{commit}.tgz")
-        if result.status_code == 200:
-            return commit
+        with requests.head(f"{DOC_BUILD_CACHE_URL}/{commit}.tgz", allow_redirects=True) as response:
+            if response.status_code == 200:
+                return commit
     raise Exception(
-        "No cache found for latest master commit."
+        "No cache found for latest master commit. "
         "Please merge with upstream master or use 'make develop'."
     )
 
 
-def fetch_cache_from_s3(commit, target_file_path):
+def fetch_cache(commit, target_file_path):
     """
-    Fetch doc cache archive from ray-ci-results S3 bucket
+    Fetch doc cache archive from rayci.anyscale.dev
 
     Args:
         commit: The commit hash of the doc cache to fetch
         target_file_path: The file path to save the doc cache archive
     """
-    # Create an S3 client
-    s3 = boto3.client("s3", config=Config(signature_version=UNSIGNED))
-    s3_file_path = f"{DOC_BUILD_DIR_S3}/{commit}.tgz"
-    try:
-        print(f"Fetching doc cache from commit {commit}...")
-        s3.download_file(S3_BUCKET, s3_file_path, target_file_path)
-        print(f"Successfully downloaded {s3_file_path} to {target_file_path}")
-    except botocore.exceptions.ClientError as e:
-        print(f"Failed to download {s3_file_path} from S3: {str(e)}")
-        raise e
+    with requests.get(f"{DOC_BUILD_CACHE_URL}/{commit}.tgz", allow_redirects=True) as response:
+        response.raise_for_status()
+        with open(target_file_path, "wb") as f:
+            for chunk in response.iter_content(chunk_size=8192):
+                f.write(chunk)
+    print(f"Successfully downloaded {target_file_path}")
 
 
 def extract_cache(cache_path: str, doc_dir: str):
@@ -149,8 +140,8 @@ def main(ray_dir: str) -> None:
         f.write("\n".join(filenames))
 
     cache_path = f"{ray_dir}/doc.tgz"
-    # Fetch cache of that commit from S3 to cache_path
-    fetch_cache_from_s3(latest_master_commit, cache_path)
+    # Fetch cache of that commit from the build cache archive to cache_path
+    fetch_cache(latest_master_commit, cache_path)
     # Extract cache to override ray/doc directory
     extract_cache(cache_path, f"{ray_dir}/doc")
     os.remove(cache_path)