-import boto3
-import botocore
 import subprocess
 import tarfile
 import os
-import click
-from botocore import UNSIGNED
-from botocore.client import Config
 import time
+
+import click
 import requests
 
 S3_BUCKET = "ray-ci-results"
 DOC_BUILD_DIR_S3 = "doc_build"
 LAST_BUILD_CUTOFF = 3  # how many days ago to consider a build outdated
 PENDING_FILES_PATH = "pending_files.txt"
 ENVIRONMENT_PICKLE = "_build/doctrees/environment.pickle"
-DOC_BUILD_S3_URL = "https://ray-ci-results.s3.us-west-2.amazonaws.com/doc_build"
+DOC_BUILD_S3_URL = "https://rayci.anyscale.dev/doc-build-cache"
 
 
 def find_latest_master_commit():
@@ -34,16 +31,16 @@ def find_latest_master_commit():
         .split("\n")
     )
     for commit in latest_commits:
-        result = requests.head(f"{DOC_BUILD_S3_URL}/{commit}.tgz")
-        if result.status_code == 200:
-            return commit
+        with requests.head(f"{DOC_BUILD_S3_URL}/{commit}.tgz", allow_redirects=True) as response:
+            if response.status_code == 200:
+                return commit
     raise Exception(
-        "No cache found for latest master commit. "
+        f"No cache found for latest master commit {commit}. "
         "Please merge with upstream master or use 'make develop'."
     )
 
 
-def fetch_cache_from_s3(commit, target_file_path):
+def fetch_cache(commit, target_file_path):
     """
     Fetch doc cache archive from ray-ci-results S3 bucket
 
@@ -52,15 +49,12 @@ def fetch_cache_from_s3(commit, target_file_path):
         target_file_path: The file path to save the doc cache archive
     """
-    # Create an S3 client
-    s3 = boto3.client("s3", config=Config(signature_version=UNSIGNED))
-    s3_file_path = f"{DOC_BUILD_DIR_S3}/{commit}.tgz"
-    try:
-        print(f"Fetching doc cache from commit {commit}...")
-        s3.download_file(S3_BUCKET, s3_file_path, target_file_path)
-        print(f"Successfully downloaded {s3_file_path} to {target_file_path}")
-    except botocore.exceptions.ClientError as e:
-        print(f"Failed to download {s3_file_path} from S3: {str(e)}")
-        raise e
+    # Download the doc cache archive over HTTP from the doc build cache
+    with requests.get(f"{DOC_BUILD_S3_URL}/{commit}.tgz", allow_redirects=True) as response:
+        response.raise_for_status()
+        with open(target_file_path, "wb") as f:
+            for chunk in response.iter_content(chunk_size=8192):
+                f.write(chunk)
+    print(f"Successfully downloaded {target_file_path}")
 
 
 def extract_cache(cache_path: str, doc_dir: str):
@@ -150,7 +144,7 @@ def main(ray_dir: str) -> None:
 
     cache_path = f"{ray_dir}/doc.tgz"
     # Fetch cache of that commit from S3 to cache_path
-    fetch_cache_from_s3(latest_master_commit, cache_path)
+    fetch_cache(latest_master_commit, cache_path)
     # Extract cache to override ray/doc directory
     extract_cache(cache_path, f"{ray_dir}/doc")
     os.remove(cache_path)
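
For reference, a minimal standalone sketch of the HTTP download path this change switches to. It assumes only the requests package and the DOC_BUILD_S3_URL constant from the diff; the stream=True flag is an addition in this sketch (the diff omits it) so the archive is streamed to disk rather than buffered fully in memory, and the usage line at the bottom is purely illustrative.

    import requests

    DOC_BUILD_S3_URL = "https://rayci.anyscale.dev/doc-build-cache"

    def fetch_cache(commit: str, target_file_path: str) -> None:
        # Follow redirects from the cache endpoint and stream the .tgz to disk.
        url = f"{DOC_BUILD_S3_URL}/{commit}.tgz"
        with requests.get(url, stream=True, allow_redirects=True) as response:
            # Fail loudly on 403/404 instead of silently writing an error body.
            response.raise_for_status()
            with open(target_file_path, "wb") as f:
                for chunk in response.iter_content(chunk_size=8192):
                    f.write(chunk)

    # Illustrative usage (commit sha is hypothetical):
    # fetch_cache("0123abc", "/tmp/doc.tgz")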