#!/usr/bin/env python

"""Run the given command in all packages in parallel.

Handy for quick verification test runs, but annoying in that the output of
all the packages is interleaved.

    $ ./parallel ./setup.py lint

This will `cd` into each package, run `./setup.py lint`, then `cd ..`, all in
parallel, with a separate process for each package.  The number of worker
processes is decided by ProcessPoolExecutor (the number of CPUs, by default).

Replace "lint" with any of "test", "clean", "build_sphinx" (for docs), etc.

Also consider:

    $ ./parallel pip install -e .[dev]  # install all packages in editable mode

    $ ./parallel pip uninstall $(basename $(pwd))
"""

from concurrent.futures import ProcessPoolExecutor
from os import chdir
from subprocess import CalledProcessError, check_output, STDOUT
from sys import argv

PACKAGES = [
    "contract_addresses",
    "contract_artifacts",
    "contract_wrappers",
    "json_schemas",
    "sra_client",
    "order_utils",
    "middlewares",
]


def run_cmd_on_package(package: str):
    """cd into the package dir, run the given command, then cd back out."""
    chdir(package)
    command = " ".join(argv[1:])  # joined form is used only for error messages
    try:
        # Pass argv[1:] through as-is rather than re-splitting `command`, so
        # that arguments containing spaces survive intact.
        check_output(argv[1:], stderr=STDOUT)
    except CalledProcessError as error:
        raise RuntimeError(
            f"Failure return code received from command `{command}` in"
            + f" package {package}, which produced the following output:\n"
            + f"{error.output.decode('utf-8')}"
        ) from error
    finally:
        chdir("..")


if __name__ == "__main__":
    # The __main__ guard keeps the pool from being re-created when worker
    # processes re-import this module (e.g. under the "spawn" start method).
    with ProcessPoolExecutor() as executor:
        for _ in executor.map(run_cmd_on_package, PACKAGES):
            # map() yields each task's return value in order; iterating
            # drives the tasks to completion.  We don't care about the
            # values themselves, so just `pass`.  If a task raised an
            # exception, it is re-raised here as iteration reaches it.
            pass
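

# Optional variant, a sketch only: the docstring above notes that interleaved
# output is annoying.  One way around that (an illustration, not part of the
# original script) is to have each task *return* its combined output, so the
# parent process can print each package's output in one block.  To use it,
# map this function instead of run_cmd_on_package and print each yielded
# result.
def run_cmd_on_package_buffered(package: str) -> str:
    """Like run_cmd_on_package, but return the command's combined output."""
    chdir(package)
    try:
        output = check_output(argv[1:], stderr=STDOUT)
        return f"=== {package} ===\n{output.decode('utf-8')}"
    except CalledProcessError as error:
        raise RuntimeError(
            f"Failure return code received from command"
            + f" `{' '.join(argv[1:])}` in package {package}, which produced"
            + f" the following output:\n{error.output.decode('utf-8')}"
        ) from error
    finally:
        chdir("..")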