@@ -34,10 +34,11 @@ def add_job_properties(jobs: List[Dict], prefix: str) -> List[Job]:
3434 """
3535 modified_jobs = []
3636 for job in jobs :
37- job = dict (job )
38- job ["image" ] = get_job_image (job )
39- job ["name" ] = f"{ prefix } - { job ['name' ]} "
40- modified_jobs .append (job )
37+ # Create a copy of the `job` dictionary to avoid modifying `jobs`
38+ new_job = dict (job )
39+ new_job ["image" ] = get_job_image (new_job )
40+ new_job ["name" ] = f"{ prefix } - { new_job ['name' ]} "
41+ modified_jobs .append (new_job )
4142 return modified_jobs
4243
4344
@@ -46,11 +47,15 @@ def add_base_env(jobs: List[Job], environment: Dict[str, str]) -> List[Job]:
4647 Prepends `environment` to the `env` attribute of each job.
4748 The `env` of each job has higher precedence than `environment`.
4849 """
50+ modified_jobs = []
4951 for job in jobs :
5052 env = environment .copy ()
5153 env .update (job .get ("env" , {}))
52- job ["env" ] = env
53- return jobs
54+
55+ new_job = dict (job )
56+ new_job ["env" ] = env
57+ modified_jobs .append (new_job )
58+ return modified_jobs
5459
5560
5661@dataclasses .dataclass
@@ -188,28 +193,49 @@ def format_run_type(run_type: WorkflowRunType) -> str:
188193 raise AssertionError ()
189194
190195
191- def get_job_image (job ) -> str :
196+ def get_job_image (job : Job ) -> str :
192197 """
193198 By default, the Docker image of a job is based on its name.
194199 However, it can be overridden by its IMAGE environment variable.
195200 """
196- return job .get ("env" , {}).get ("IMAGE" , job ["name" ])
201+ env = job .get ("env" , {})
202+ # Return the IMAGE environment variable if it exists, otherwise return the job name
203+ return env .get ("IMAGE" , job ["name" ])
197204
198205
199- def run_workflow_locally (job_data : Dict [str , Any ], job_name : str , pr_jobs : bool ):
200- DOCKER_DIR = Path (__file__ ).absolute ().parent .parent / "docker"
206+ def is_linux_job (job : Job ) -> bool :
207+ return "ubuntu" in job ["os" ]
208+
201209
202- jobs = job_data ["pr" ] if pr_jobs else job_data ["auto" ]
203- jobs = [job for job in jobs if job .get ("name" ) == job_name ]
210+ def find_linux_job (job_data : Dict [str , Any ], job_name : str , pr_jobs : bool ) -> Job :
211+ candidates = job_data ["pr" ] if pr_jobs else job_data ["auto" ]
212+ jobs = [job for job in candidates if job .get ("name" ) == job_name ]
204213 if len (jobs ) == 0 :
205- raise Exception (f"Job `{ job_name } ` not found in { 'pr' if pr_jobs else 'auto' } jobs" )
214+ available_jobs = "\n " .join (sorted (job ["name" ] for job in candidates if is_linux_job (job )))
215+ raise Exception (f"""Job `{ job_name } ` not found in { 'pr' if pr_jobs else 'auto' } jobs.
216+ The following jobs are available:
217+ { available_jobs } """ )
206218 assert len (jobs ) == 1
219+
207220 job = jobs [0 ]
208- if "ubuntu" not in job [ "os" ] :
221+ if not is_linux_job ( job ) :
209222 raise Exception ("Only Linux jobs can be executed locally" )
223+ return job
224+
225+
226+ def run_workflow_locally (job_data : Dict [str , Any ], job_name : str , pr_jobs : bool ):
227+ DOCKER_DIR = Path (__file__ ).absolute ().parent .parent / "docker"
228+
229+ job = find_linux_job (job_data , job_name = job_name , pr_jobs = pr_jobs )
210230
211231 custom_env = {}
212- custom_env ["DEPLOY" ] = "1"
232+ # Replicate src/ci/scripts/setup-environment.sh
233+ # Adds custom environment variables to the job
234+ if job_name .startswith ("dist-" ):
235+ if job_name .endswith ("-alt" ):
236+ custom_env ["DEPLOY_ALT" ] = "1"
237+ else :
238+ custom_env ["DEPLOY" ] = "1"
213239 custom_env .update ({k : str (v ) for (k , v ) in job .get ("env" , {}).items ()})
214240
215241 args = [
@@ -222,27 +248,42 @@ def run_workflow_locally(job_data: Dict[str, Any], job_name: str, pr_jobs: bool)
222248 env = os .environ .copy ()
223249 env .update (custom_env )
224250
225- process = subprocess .Popen (args , env = env )
226- try :
227- process .wait ()
228- except KeyboardInterrupt :
229- process .kill ()
251+ subprocess .run (args , env = env )
230252
231253
232- if __name__ == "__main__" :
233- logging . basicConfig ( level = logging . INFO )
254+ def calculate_job_matrix ( job_data : Dict [ str , Any ]) :
255+ github_ctx = get_github_ctx ( )
234256
235- with open ( JOBS_YAML_PATH ) as f :
236- data = yaml . safe_load ( f )
257+ run_type = find_run_type ( github_ctx )
258+ logging . info ( f"Job type: { run_type } " )
237259
260+ with open (CI_DIR / "channel" ) as f :
261+ channel = f .read ().strip ()
262+
263+ jobs = []
264+ if run_type is not None :
265+ jobs = calculate_jobs (run_type , job_data )
266+ jobs = skip_jobs (jobs , channel )
267+
268+ if not jobs :
269+ raise Exception ("Scheduled job list is empty, this is an error" )
270+
271+ run_type = format_run_type (run_type )
272+
273+ logging .info (f"Output:\n { yaml .dump (dict (jobs = jobs , run_type = run_type ), indent = 4 )} " )
274+ print (f"jobs={ json .dumps (jobs )} " )
275+ print (f"run_type={ run_type } " )
276+
277+
278+ def create_cli_parser ():
238279 parser = argparse .ArgumentParser (
239280 prog = "ci.py" ,
240281 description = "Generate or run CI workflows"
241282 )
242- generate_matrix = argparse .ArgumentParser ()
243283 subparsers = parser .add_subparsers (help = "Command to execute" , dest = "command" , required = True )
244- subparsers .add_parser ("calculate-job-matrix" )
245- run_parser = subparsers .add_parser ("run-local" )
284+ subparsers .add_parser ("calculate-job-matrix" ,
285+ help = "Generate a matrix of jobs that should be executed in CI" )
285+ run_parser = subparsers .add_parser ("run-local" , help = "Run a CI job locally (on Linux)" )
246287 run_parser .add_argument (
247288 "job_name" ,
248289 help = "CI job that should be executed. By default, a merge (auto) "
@@ -253,30 +294,20 @@ def run_workflow_locally(job_data: Dict[str, Any], job_name: str, pr_jobs: bool)
253294 action = "store_true" ,
254295 help = "Run a PR job instead of an auto job"
255296 )
256- args = parser . parse_args ()
297+ return parser
257298
258- if args .command == "calculate-job-matrix" :
259- github_ctx = get_github_ctx ()
260299
261- run_type = find_run_type (github_ctx )
262- logging .info (f"Job type: { run_type } " )
263-
264- with open (CI_DIR / "channel" ) as f :
265- channel = f .read ().strip ()
266-
267- jobs = []
268- if run_type is not None :
269- jobs = calculate_jobs (run_type , data )
270- jobs = skip_jobs (jobs , channel )
300+ if __name__ == "__main__" :
301+ logging .basicConfig (level = logging .INFO )
271302
272- if not jobs :
273- raise Exception ( "Scheduled job list is empty, this is an error" )
303+ with open ( JOBS_YAML_PATH ) as f :
304+ data = yaml . safe_load ( f )
274305
275- run_type = format_run_type (run_type )
306+ parser = create_cli_parser ()
307+ args = parser .parse_args ()
276308
277- logging .info (f"Output:\n { yaml .dump (dict (jobs = jobs , run_type = run_type ), indent = 4 )} " )
278- print (f"jobs={ json .dumps (jobs )} " )
279- print (f"run_type={ run_type } " )
309+ if args .command == "calculate-job-matrix" :
310+ calculate_job_matrix (data )
280311 elif args .command == "run-local" :
281312 run_workflow_locally (data , args .job_name , args .pr )
282313 else :
0 commit comments