Mirror of https://projects.blender.org/blender/blender.git (synced 2025-01-22 07:22:12 -05:00)
Tools: add --jobs arguments to clang checker and code-clean tool
Setting jobs to one is useful for debugging, and reducing the number of processes can be useful when running code_clean as a background task.
Parent: 537c585aa6
Commit: b1daf6bfad
3 changed files with 53 additions and 18 deletions
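For context, a minimal sketch (not code from this commit) of the jobs convention the patch follows in both tools; the helper name resolve_job_count is made up for illustration. Zero auto-detects a process count, values above one enable multiprocessing, and one keeps the run single-process:

    import multiprocessing

    def resolve_job_count(jobs: int) -> int:
        # Hypothetical helper: 0 (the default) auto-detects,
        # mirroring the patch, which uses twice the core count.
        if jobs <= 0:
            return multiprocessing.cpu_count() * 2
        # Any explicit value is used as-is; 1 means a single process.
        return jobs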
@@ -474,7 +474,7 @@ check_struct_comments: .FORCE
 	@cd "$(BUILD_DIR)" ; \
 	$(PYTHON) \
 		"$(BLENDER_DIR)/build_files/cmake/cmake_static_check_clang.py" \
-		--checks=struct_comments --match=".*"
+		--checks=struct_comments --match=".*" --jobs=$(NPROCS)

check_clang_array: .FORCE
 	@$(CMAKE_CONFIG)
@@ -47,9 +47,6 @@ ClangSourceLocation = Any

 USE_VERBOSE = os.environ.get("VERBOSE", None) is not None

-# Turn off for debugging.
-USE_MULTIPROCESS = True
-
 CLANG_BIND_DIR = os.environ.get("CLANG_BIND_DIR")
 CLANG_LIB_DIR = os.environ.get("CLANG_LIB_DIR")

@@ -440,6 +437,7 @@ def source_info_filter(
 def run_checks_on_project(
         check_ids: Sequence[str],
         regex_list: Sequence[re.Pattern[str]],
+        jobs: int,
 ) -> None:
     source_info = project_source_info.build_info(ignore_prefix_list=CHECKER_IGNORE_PREFIX)
     source_defines = project_source_info.build_defines_as_args()
@@ -464,9 +462,11 @@ def run_checks_on_project(

     import multiprocessing

-    if USE_MULTIPROCESS:
-        job_total = multiprocessing.cpu_count() + 1
-        with multiprocessing.Pool(processes=job_total) as pool:
+    if jobs <= 0:
+        jobs = multiprocessing.cpu_count() * 2
+
+    if jobs > 1:
+        with multiprocessing.Pool(processes=jobs) as pool:
             # No `istarmap`, use an intermediate function.
             for result in pool.imap(check_source_file_for_imap, all_args):
                 if result:
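As background for the hunk above, this is a self-contained sketch of the dispatch pattern it uses: a multiprocessing.Pool with imap when more than one job is requested, otherwise a plain in-process loop so breakpoints and tracebacks stay simple. The worker check_one and the file list are hypothetical stand-ins, not the tool's real API:

    import multiprocessing
    from typing import Sequence

    def check_one(path: str) -> str:
        # Hypothetical stand-in for the per-file check.
        return "checked " + path

    def run_all(paths: Sequence[str], jobs: int) -> None:
        if jobs <= 0:
            jobs = multiprocessing.cpu_count() * 2
        if jobs > 1:
            with multiprocessing.Pool(processes=jobs) as pool:
                for result in pool.imap(check_one, paths):
                    print(result)
        else:
            # Single process (--jobs=1): useful for debugging.
            for path in paths:
                print(check_one(path))

    if __name__ == "__main__":
        run_all(["a.c", "b.c"], jobs=1)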
@@ -515,6 +515,17 @@ def create_parser(checkers_all: Sequence[str]) -> argparse.ArgumentParser:
             "Multiple checkers may be passed at once (comma separated, no spaces)."),
         required=True,
     )
+    parser.add_argument(
+        "--jobs",
+        dest="jobs",
+        type=int,
+        default=0,
+        help=(
+            "The number of processes to use. "
+            "Defaults to zero which detects the available cores, 1 is single threaded (useful for debugging)."
+        ),
+        required=False,
+    )

     return parser

@@ -536,7 +547,12 @@ def main() -> int:
             print("Error in expression: \"{:s}\"\n  {!r}".format(expr, ex))
             return 1

-    run_checks_on_project(args.checks.split(','), regex_list)
+    run_checks_on_project(
+        args.checks.split(','),
+        regex_list,
+        args.jobs,
+    )

     return 0

@@ -33,8 +33,6 @@ from typing import (
 # List of (source_file, all_arguments)
 ProcessedCommands = List[Tuple[str, str]]

-USE_MULTIPROCESS = True
-
 VERBOSE = False

 # Print the output of the compiler (_very_ noisy, only useful for troubleshooting compiler issues).
@@ -1472,8 +1470,11 @@ def run_edits_on_directory(
         build_dir: str,
         regex_list: List[re.Pattern[str]],
         edits_to_apply: Sequence[str],
-        skip_test: bool = False,
+        skip_test: bool,
+        jobs: int,
 ) -> int:
+    import multiprocessing
+
     # currently only supports ninja or makefiles
     build_file_ninja = os.path.join(build_dir, "build.ninja")
     build_file_make = os.path.join(build_dir, "Makefile")
@@ -1490,6 +1491,9 @@ def run_edits_on_directory(
         )
         return 1

+    if jobs <= 0:
+        jobs = multiprocessing.cpu_count() * 2
+
     if args is None:
         # Error will have been reported.
         return 1
@@ -1566,7 +1570,7 @@ def run_edits_on_directory(
     shared_edit_data = edit_generator_class.setup()

     try:
-        if USE_MULTIPROCESS:
+        if jobs > 1:
             args_expanded = [(
                 c,
                 output_from_build_args(build_args, build_cwd),
@@ -1576,10 +1580,8 @@ def run_edits_on_directory(
                 skip_test,
                 shared_edit_data,
             ) for (c, build_args, build_cwd) in args_with_cwd]
-            import multiprocessing
-            job_total = multiprocessing.cpu_count()
-            pool = multiprocessing.Pool(processes=job_total * 2)
-            pool.starmap(wash_source_with_edits, args_expanded)
+            pool = multiprocessing.Pool(processes=jobs)
+            pool.starmap(wash_source_with_edit_list, args_expanded)
             del args_expanded
         else:
             # now we have commands
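The code_clean hunk above sizes the pool from the new jobs argument and dispatches with Pool.starmap, the variant used when the worker takes several positional arguments packed into tuples. A minimal sketch of that pattern follows; the worker wash_one and its argument list are hypothetical stand-ins, not the tool's real API:

    import multiprocessing

    def wash_one(source: str, skip_test: bool) -> None:
        # Hypothetical stand-in for the per-file edit pass.
        print("editing", source, "skip_test =", skip_test)

    if __name__ == "__main__":
        args_expanded = [("a.c", False), ("b.c", False)]
        jobs = 2  # in the real tool this comes from --jobs
        with multiprocessing.Pool(processes=jobs) as pool:
            # Each tuple in args_expanded is unpacked into wash_one's arguments.
            pool.starmap(wash_one, args_expanded)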
@@ -1603,7 +1605,7 @@


 def create_parser(edits_all: Sequence[str]) -> argparse.ArgumentParser:
-    from textwrap import indent, dedent
+    from textwrap import indent

     # Create docstring for edits.
     edits_all_docs = []
@@ -1651,6 +1653,17 @@ def create_parser(edits_all: Sequence[str]) -> argparse.ArgumentParser:
         ),
         required=False,
     )
+    parser.add_argument(
+        "--jobs",
+        dest="jobs",
+        type=int,
+        default=0,
+        help=(
+            "The number of processes to use. "
+            "Defaults to zero which detects the available cores, 1 is single threaded (useful for debugging)."
+        ),
+        required=False,
+    )

     return parser

@@ -1683,7 +1696,13 @@ def main() -> int:
         ))
         return 1

-    return run_edits_on_directory(build_dir, regex_list, edits_all_from_args, args.skip_test)
+    return run_edits_on_directory(
+        build_dir,
+        regex_list,
+        edits_all_from_args,
+        args.skip_test,
+        args.jobs,
+    )


 if __name__ == "__main__":