Skip to content

Commit

Permalink
[python] Fix multiprocessing pool on Python >=3.8
Browse files Browse the repository at this point in the history
Worker pools are now created with the "spawn" (or "forkserver") start method
instead of the default "fork", which could otherwise cause lockups and hung
processes on Python >= 3.8.
  • Loading branch information
salkinium committed Feb 5, 2023
1 parent 1e5fc12 commit f4e5d35
Show file tree
Hide file tree
Showing 4 changed files with 14 additions and 11 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/linux.yml
Original file line number Diff line number Diff line change
Expand Up @@ -389,7 +389,7 @@ jobs:
run: |
export TERM=xterm-256color
export COLUMNS=120
python3 tools/scripts/docs_modm_io_generator.py -t -c -j4 -d
python3 tools/scripts/docs_modm_io_generator.py -t -c -j4
- name: Upload Doxypress Documentation
uses: actions/upload-artifact@v2
with:
Expand Down
5 changes: 3 additions & 2 deletions test/all/run_all.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,9 @@
import random
import tempfile
import argparse
import platform
import subprocess
import multiprocessing
import multiprocessing as mp
from pathlib import Path
from collections import defaultdict

Expand Down Expand Up @@ -287,7 +288,7 @@ def build_device(run):

print("Using {} parallel jobs for {} devices".format(args.jobs, len(devices)))
try:
with multiprocessing.Pool(args.jobs) as pool:
with mp.get_context("spawn").Pool(args.jobs) as pool:
test_runs = pool.map(build_device,
[TestRun(x, cache_dir, cache_limit) for x in devices])

Expand Down
8 changes: 4 additions & 4 deletions tools/scripts/docs_modm_io_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,17 +12,17 @@

import os
import sys
import json
import shutil
import zipfile
import tempfile
import argparse
import datetime
import multiprocessing
import os, sys
import platform
import multiprocessing as mp
from pathlib import Path
from jinja2 import Environment, FileSystemLoader
from collections import defaultdict
import json

def repopath(path):
return Path(__file__).absolute().parents[2] / path
Expand Down Expand Up @@ -136,7 +136,7 @@ def main():
print("Starting to generate documentation...")
template_overview(output_dir, device_list, board_list, template_path)
print("... for {} devices, estimated memory footprint is {} MB".format(len(device_list) + len(board_list), (len(device_list)*70)+2000))
with multiprocessing.Pool(args.jobs) as pool:
with mp.get_context("forkserver").Pool(args.jobs) as pool:
# We can only pass one argument to pool.map
devices = ["{}|{}|{}||{}".format(modm_path, tempdir, dev, args.deduplicate) for dev in device_list]
devices += ["{}|{}|{}|{}|{}".format(modm_path, tempdir, dev, brd, args.deduplicate) for (brd, dev) in board_list]
Expand Down
10 changes: 6 additions & 4 deletions tools/scripts/examples_compile.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
import argparse
import platform
import subprocess
import multiprocessing
import multiprocessing as mp
from pathlib import Path

is_running_in_ci = (os.getenv("CIRCLECI") is not None or
Expand Down Expand Up @@ -107,22 +107,24 @@ def compile_examples(paths, jobs, split, part):
if split > 1:
chunk_size = math.ceil(len(projects) / args.split)
projects = projects[chunk_size*args.part:min(chunk_size*(args.part+1), len(projects))]

ctx = mp.get_context("spawn")
# first generate all projects
with multiprocessing.Pool(jobs) as pool:
with ctx.Pool(jobs) as pool:
projects = pool.map(generate, projects)
results += projects.count(None)

# Filter projects for successful generation
projects = [p for p in projects if p is not None]
# Then build the successfully generated ones
with multiprocessing.Pool(jobs) as pool:
with ctx.Pool(jobs) as pool:
projects = pool.map(build, projects)
results += projects.count(None)

# Filter projects for successful compilation and runablity
projects = [p for p in projects if p is not None and "CI: run" in p.read_text()]
# Then run the successfully compiled ones
with multiprocessing.Pool(jobs) as pool:
with ctx.Pool(jobs) as pool:
projects = pool.map(run, projects)
results += projects.count(None)

Expand Down

0 comments on commit f4e5d35

Please sign in to comment.