Add 'system python' validation for some py scripts.
The goal of this test is to import some critical Python scripts with the system Python of the build machine. The main aim is to ensure that these scripts remain usable by all buildbot machines, as some of them run fairly outdated Python versions.

Current status:
* Scripts in `build_files` and `docs` are checked.
* Some Python scripts in `build_files` were 'reverted' to stay compatible with the oldest currently required Python version (3.6).
* A few scripts are excluded from the test, mostly because they use Blender's `bpy` module, which means they are only intended to be run with Blender's own Python anyway.
* The test is only enabled for the Linux buildbots for now, as they use the oldest Python by far.

Notes:
* Some more scripts are likely to be moved around in the future.
* Whether these tests need to be enabled on the Windows or macOS platforms remains an open question.

Pull Request: https://projects.blender.org/blender/blender/pulls/130746
Parent: 2040b8e864
Commit: 2c9ab53273
10 changed files with 252 additions and 45 deletions
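Most of the Python changes below revert type annotations from the newer `str | None` / `list[str]` spellings (PEP 604 and PEP 585) back to `typing.Union` / `typing.List`, because those newer forms are evaluated when a function is defined and fail at import time on the Python 3.6 interpreters still used by some buildbots. A minimal sketch of the difference, using a hypothetical helper name; this snippet is illustrative only and is not part of the commit:

from typing import List, Union


# Python 3.6 compatible: unions and generic containers come from `typing`.
def find_script(name: str) -> Union[List[str], None]:
    # Hypothetical helper, used only to illustrate the annotation style.
    return None


# The equivalent annotation below is a runtime error on Python < 3.10 (PEP 604 `X | Y`)
# and < 3.9 (PEP 585 `list[str]`), because it is evaluated at function-definition time:
#
#   def find_script(name: str) -> list[str] | None:
#       return None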
(changed file)

@@ -878,6 +878,24 @@ mark_as_advanced(WITH_TESTS_SINGLE_BINARY)
set(TEST_PYTHON_EXE "" CACHE PATH "Python executable to run unit tests")
mark_as_advanced(TEST_PYTHON_EXE)

+
+# System python tests.
+option(WITH_SYSTEM_PYTHON_TESTS "\
+Enable tests validating some build-related scripts against the 'system' version of Python \
+(buildbots currently can use significantly older versions of Python than Blender's)"
+  OFF
+)
+mark_as_advanced(WITH_SYSTEM_PYTHON_TESTS)
+# We could use `find_package (Python3 COMPONENTS Interpreter)` to set that value automatically.
+# However, on some buildbots this will give the default python version of the current virtual environment,
+# which may differ from the OS default python version.
+# And it would set that global 'python3 exec path' CMake value for all CMake scripts, which could have
+# unexpected and dangerous side effects.
+# So this has to be set explicitely for all builders.
+set(TEST_SYSTEM_PYTHON_EXE "" CACHE PATH "Python executable used to run 'system python' tests")
+mark_as_advanced(TEST_SYSTEM_PYTHON_EXE)
+
+
# Documentation
if(UNIX AND NOT APPLE)
  option(WITH_DOC_MANPAGE "Create a manual page (Unix manpage)" OFF)
(changed file)

@@ -18,3 +18,8 @@ set(HIPRT_COMPILER_PARALLEL_JOBS 6 CACHE STRING "" FORCE)
set(SYCL_OFFLINE_COMPILER_PARALLEL_JOBS 6 CACHE STRING "" FORCE)

set(WITH_LINUX_OFFICIAL_RELEASE_TESTS ON CACHE BOOL "" FORCE)
+
+# Validate that some python scripts in out `build_files` and `docs` directories
+# can be used with the builder's system python.
+set(WITH_SYSTEM_PYTHON_TESTS ON CACHE BOOL "" FORCE)
+set(TEST_SYSTEM_PYTHON_EXE "/usr/bin/python3.6" CACHE PATH "" FORCE)
(changed file)

@@ -8,6 +8,10 @@
import re
import sys
+
+from typing import (
+    Union,
+)

cmakelists_file = sys.argv[-1]


@@ -22,7 +26,7 @@ def count_backslashes_before_pos(file_data: str, pos: int) -> int:
    return slash_count


-def extract_cmake_string_at_pos(file_data: str, pos_beg: int) -> str | None:
+def extract_cmake_string_at_pos(file_data: str, pos_beg: int) -> Union[str, None]:
    assert file_data[pos_beg - 1] == '"'

    pos = pos_beg
(changed file)

@@ -27,7 +27,11 @@ __all__ = (
    "init",
)

-from collections.abc import (
+from typing import (
+    List,
+    Tuple,
+    Union,
+    # Proxies for `collections.abc`
    Callable,
    Iterator,
)

@@ -82,7 +86,7 @@ def init(cmake_path: str) -> bool:

def source_list(
        path: str,
-        filename_check: Callable[[str], bool] | None = None,
+        filename_check: Union[Callable[[str], bool], None] = None,
) -> Iterator[str]:
    for dirpath, dirnames, filenames in os.walk(path):
        # skip '.git'

@@ -129,8 +133,8 @@ def is_project_file(filename: str) -> bool:


def cmake_advanced_info() -> (
-        tuple[list[str], list[tuple[str, str]]] |
-        tuple[None, None]
+        Union[Tuple[List[str], List[Tuple[str, str]]],
+              Tuple[None, None]]
):
    """ Extract includes and defines from cmake.
    """

@@ -212,12 +216,15 @@ def cmake_advanced_info() -> (
    return includes, defines


-def cmake_cache_var(var: str) -> str | None:
+def cmake_cache_var(var: str) -> Union[str, None]:
+    def l_strip_gen(cache_file):
+        for l in cache_file:
+            yield l.strip()
+
    with open(os.path.join(CMAKE_DIR, "CMakeCache.txt"), encoding='utf-8') as cache_file:
        lines = [
-            l_strip for l in cache_file
-            if (l_strip := l.strip())
-            if not l_strip.startswith(("//", "#"))
+            l_strip for l_strip in l_strip_gen(cache_file)
+            if l_strip and not l_strip.startswith(("//", "#"))
        ]

    for l in lines:

@@ -226,7 +233,7 @@ def cmake_cache_var(var: str) -> str | None:
    return None


-def cmake_compiler_defines() -> list[str] | None:
+def cmake_compiler_defines() -> Union[List[str], None]:
    compiler = cmake_cache_var("CMAKE_C_COMPILER")  # could do CXX too

    if compiler is None:

@@ -248,5 +255,5 @@ def cmake_compiler_defines() -> list[str] | None:
    return lines


-def project_name_get() -> str | None:
+def project_name_get() -> Union[str, None]:
    return cmake_cache_var("CMAKE_PROJECT_NAME")
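The comprehension rewrite in the `cmake_cache_var()` hunk above exists for a related reason: the assignment expression (`:=`, the "walrus" operator) is a syntax error before Python 3.8, so the stripping is moved into a small generator. A rough sketch of the pattern, with hypothetical names and data, not taken from the commit:

def stripped_non_comment_lines(lines):
    # Pre-3.8 friendly replacement for:
    #   [l_strip for l in lines if (l_strip := l.strip()) if not l_strip.startswith("#")]
    def strip_gen(lines):
        for l in lines:
            yield l.strip()
    return [l_strip for l_strip in strip_gen(lines) if l_strip and not l_strip.startswith("#")]


print(stripped_non_comment_lines(["  a = 1 \n", "   \n", "# comment\n"]))  # ['a = 1']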
(changed file)

@@ -2,6 +2,7 @@
#
# SPDX-License-Identifier: GPL-2.0-or-later

+
__all__ = (
    "build_info",
    "SOURCE_DIR",

@@ -23,8 +24,10 @@ import subprocess
from typing import (
    Any,
    IO,
-)
-from collections.abc import (
+    List,
+    Tuple,
+    Union,
+    # Proxies for `collections.abc`
    Callable,
    Iterator,
    Sequence,

@@ -56,7 +59,7 @@ def is_c_any(filename: str) -> bool:
CMAKE_DIR = "."


-def cmake_cache_var_iter() -> Iterator[tuple[str, str, str]]:
+def cmake_cache_var_iter() -> Iterator[Tuple[str, str, str]]:
    import re
    re_cache = re.compile(r'([A-Za-z0-9_\-]+)?:?([A-Za-z0-9_\-]+)?=(.*)$')
    with open(join(CMAKE_DIR, "CMakeCache.txt"), 'r', encoding='utf-8') as cache_file:

@@ -67,7 +70,7 @@ def cmake_cache_var_iter() -> Iterator[tuple[str, str, str]]:
            yield (var, type_ or "", val)


-def cmake_cache_var(var: str) -> str | None:
+def cmake_cache_var(var: str) -> Union[str, None]:
    for var_iter, _type_iter, value_iter in cmake_cache_var_iter():
        if var == var_iter:
            return value_iter

@@ -82,7 +85,7 @@ def cmake_cache_var_or_exit(var: str) -> str:
    return value


-def do_ignore(filepath: str, ignore_prefix_list: Sequence[str] | None) -> bool:
+def do_ignore(filepath: str, ignore_prefix_list: Union[Sequence[str], None]) -> bool:
    if ignore_prefix_list is None:
        return False

@@ -90,7 +93,7 @@ def do_ignore(filepath: str, ignore_prefix_list: Sequence[str] | None) -> bool:
    return any([relpath.startswith(prefix) for prefix in ignore_prefix_list])


-def makefile_log() -> list[str]:
+def makefile_log() -> List[str]:
    import subprocess
    import time

@@ -130,8 +133,8 @@ def makefile_log() -> list[str]:
def build_info(
        use_c: bool = True,
        use_cxx: bool = True,
-        ignore_prefix_list: list[str] | None = None,
-) -> list[tuple[str, list[str], list[str]]]:
+        ignore_prefix_list: Union[List[str], None] = None,
+) -> List[Tuple[str, List[str], List[str]]]:
    makelog = makefile_log()

    source = []

@@ -149,7 +152,7 @@ def build_info(
    print("parsing make log ...")

    for line in makelog:
-        args_orig: str | list[str] = line.split()
+        args_orig: Union[str, List[str]] = line.split()
        args = [fake_compiler if c in compilers else c for c in args_orig]
        if args == args_orig:
            # No compilers in the command, skip.

@@ -216,7 +219,7 @@ def build_defines_as_source() -> str:
    return stdout.read().strip().decode('ascii')


-def build_defines_as_args() -> list[str]:
+def build_defines_as_args() -> List[str]:
    return [
        ("-D" + "=".join(l.split(maxsplit=2)[1:]))
        for l in build_defines_as_source().split("\n")

@@ -224,7 +227,7 @@ def build_defines_as_args() -> list[str]:
    ]


-def process_make_non_blocking(proc: subprocess.Popen[Any]) -> subprocess.Popen[Any]:
+def process_make_non_blocking(proc: subprocess.Popen) -> subprocess.Popen:
    import fcntl
    for fh in (proc.stderr, proc.stdout):
        if fh is None:

@@ -238,11 +241,11 @@ def process_make_non_blocking(proc: subprocess.Popen[Any]) -> subprocess.Popen[A
# could be moved elsewhere!, this just happens to be used by scripts that also
# use this module.
def queue_processes(
-        process_funcs: Sequence[tuple[Callable[..., subprocess.Popen[Any]], tuple[Any, ...]]],
+        process_funcs: Sequence[Tuple[Callable[..., subprocess.Popen], Tuple[Any, ...]]],
        *,
        job_total: int = -1,
        sleep: float = 0.1,
-        process_finalize: Callable[[subprocess.Popen[Any], bytes, bytes], int | None] | None = None,
+        process_finalize: Union[Callable[[subprocess.Popen, bytes, bytes], Union[int, None]], None] = None,
) -> None:
    """ Takes a list of function arg pairs, each function must return a process
    """

@@ -266,18 +269,21 @@ def queue_processes(

    if process_finalize is not None:
        def poll_and_finalize(
-                p: subprocess.Popen[Any],
-                stdout: list[bytes],
-                stderr: list[bytes],
-        ) -> int | None:
+                p: subprocess.Popen,
+                stdout: List[bytes],
+                stderr: List[bytes],
+        ) -> Union[int, None]:
            assert p.stdout is not None
-            if data := p.stdout.read():
+            data = p.stdout.read()
+            if data:
                stdout.append(data)
            assert p.stderr is not None
-            if data := p.stderr.read():
+            data = p.stderr.read()
+            if data:
                stderr.append(data)

-            if (returncode := p.poll()) is not None:
+            returncode = p.poll()
+            if returncode is not None:
                data_stdout, data_stderr = p.communicate()
                if data_stdout:
                    stdout.append(data_stdout)

@@ -287,13 +293,13 @@ def queue_processes(
            return returncode
    else:
        def poll_and_finalize(
-                p: subprocess.Popen[Any],
-                stdout: list[bytes],
-                stderr: list[bytes],
-        ) -> int | None:
+                p: subprocess.Popen,
+                stdout: List[bytes],
+                stderr: List[bytes],
+        ) -> Union[int, None]:
            return p.poll()

-    processes: list[tuple[subprocess.Popen[Any], list[bytes], list[bytes]]] = []
+    processes: List[Tuple[subprocess.Popen, List[bytes], List[bytes]]] = []
    for func, args in process_funcs:
        # wait until a thread is free
        while 1:
(changed file)

@@ -37,7 +37,9 @@ import string
import setuptools
import sys

-from collections.abc import (
+from typing import (
+    Tuple,
+    # Proxies for `collections.abc`
    Iterator,
    Sequence,
)

@@ -95,7 +97,7 @@ def find_dominating_file(
# ------------------------------------------------------------------------------
# CMake Cache Access

-def cmake_cache_var_iter(filepath_cmake_cache: str) -> Iterator[tuple[str, str, str]]:
+def cmake_cache_var_iter(filepath_cmake_cache: str) -> Iterator[Tuple[str, str, str]]:
    re_cache = re.compile(r"([A-Za-z0-9_\-]+)?:?([A-Za-z0-9_\-]+)?=(.*)$")
    with open(filepath_cmake_cache, "r", encoding="utf-8") as cache_file:
        for l in cache_file:
(changed file)

@@ -13,8 +13,8 @@ from pathlib import Path
from typing import (
    TextIO,
    Any,
-)
-from collections.abc import (
+    Union,
+    # Proxies for `collections.abc`
    Iterable,
)


@@ -95,7 +95,7 @@ def manifest_path(tarball: Path) -> Path:
    return without_suffix.with_name(f"{name}-manifest.txt")


-def packages_path(current_directory: Path, cli_args: Any) -> Path | None:
+def packages_path(current_directory: Path, cli_args: Any) -> Union[Path, None]:
    if not cli_args.include_packages:
        return None


@@ -116,7 +116,7 @@ def create_manifest(
    version: make_utils.BlenderVersion,
    outpath: Path,
    blender_srcdir: Path,
-    packages_dir: Path | None,
+    packages_dir: Union[Path, None],
) -> None:
    print(f'Building manifest of files: "{outpath}"...', end="", flush=True)
    with outpath.open("w", encoding="utf-8") as outfile:

@@ -164,7 +164,7 @@ def create_tarball(
    tarball: Path,
    manifest: Path,
    blender_srcdir: Path,
-    packages_dir: Path | None,
+    packages_dir: Union[Path, None],
) -> None:
    print(f'Creating archive: "{tarball}" ...', end="", flush=True)


@@ -238,7 +238,7 @@ def git_ls_files(directory: Path = Path(".")) -> Iterable[Path]:
        yield path


-def git_command(*cli_args: bytes | str | Path) -> Iterable[str]:
+def git_command(*cli_args: Union[bytes, str, Path]) -> Iterable[str]:
    """Generator, yields lines of output from a Git command."""
    command = ("git", *cli_args)
(changed file)

@@ -20,6 +20,7 @@ import time

from typing import (
    TextIO,
+    Dict,
)


@@ -31,7 +32,7 @@ def man_format(data: str) -> str:
    return data


-def blender_extract_info(blender_bin: str) -> dict[str, str]:
+def blender_extract_info(blender_bin: str) -> Dict[str, str]:
    blender_env = {
        "ASAN_OPTIONS": (
            os.environ.get("ASAN_OPTIONS", "") +
(changed file)

@@ -21,6 +21,13 @@ file(MAKE_DIRECTORY ${TEST_OUT_DIR}/blendfile_io)
# message(FATAL_ERROR "CMake test directory not found!")
# endif()

+if(WITH_SYSTEM_PYTHON_TESTS)
+  if(NOT EXISTS "${TEST_SYSTEM_PYTHON_EXE}")
+    message(ERROR "'System Python' tests requested but no valid system python path, set TEST_SYSTEM_PYTHON_EXE.")
+    set(WITH_SYSTEM_PYTHON_TESTS OFF)
+  endif()
+endif()
+
# Run Blender command with parameters.
function(add_blender_test_impl testname envvars_list exe)
  add_test(

@@ -116,6 +123,29 @@ function(add_render_test testname testscript)
  add_python_test(${testname} ${testscript} ${_args})
endfunction()

+# Run Python script outside Blender, using system default Python3 interpreter,
+# NOT the one specified in `TEST_PYTHON_EXE`.
+function(add_system_python_test testname testscript)
+  if(NOT WITH_SYSTEM_PYTHON_TESTS)
+    return()
+  endif()
+
+  add_test(
+    NAME ${testname}
+    COMMAND ${TEST_SYSTEM_PYTHON_EXE} ${testscript} ${ARGN}
+    WORKING_DIRECTORY $<TARGET_FILE_DIR:blender>
+  )
+  blender_test_set_envvars("${testname}" "")
+endfunction()
+
+# ------------------------------------------------------------------------------
+# TESTS USING SYSTEM PYTHON
+add_system_python_test(
+  script_validate_on_system_python
+  ${CMAKE_CURRENT_LIST_DIR}/system_python/load_tool_scripts.py
+  --root-dir ${CMAKE_SOURCE_DIR}
+)
+
# ------------------------------------------------------------------------------
# GENERAL PYTHON CORRECTNESS TESTS
add_blender_test(
tests/python/system_python/load_tool_scripts.py (new file, 134 lines)

@@ -0,0 +1,134 @@
# SPDX-FileCopyrightText: 2024 Blender Authors
#
# SPDX-License-Identifier: GPL-2.0-or-later


import os
import sys
import pathlib
import importlib.util
import contextlib
import traceback


# For some reasons, these two modules do not work with 'file-path-based' import method used by this script.
# So 'pre-load' them here instead.
# They are _not used_ directly by this script.
import multiprocessing
import typing


# Allow-list of directories, relative to the given `--root-dir` argument.
INCLUDED_DIRS = {
    "build_files",
    # Used to generate the manual and API documentations.
    "doc",
}

# Block-list of paths to python modules, relative to the given `--root-dir` argument.
EXCLUDED_FILE_PATHS = {
    # Require `bpy` module.
    "doc/python_api/sphinx_doc_gen.py",

    # XXX These scripts execute on import! bad, need to be fixed or removed.
    # FIXME: Should be reasonably trivial to fix/cleanup for most of them.
    "doc/python_api/conf.py",
}


@contextlib.contextmanager
def add_to_sys_path(syspath):
    old_path = sys.path
    old_modules = sys.modules
    sys.modules = old_modules.copy()
    sys.path = sys.path[:]
    sys.path.insert(0, str(syspath))
    try:
        yield
    finally:
        sys.path = old_path
        sys.modules = old_modules


def import_module(file_path):
    """Returns `...` if not a python module, `True` if it's a package, `False` otherwise."""
    file_path = pathlib.Path(file_path)
    if file_path.suffix != ".py":
        return ...
    file_name = file_path.name
    if not file_name:
        return ...
    is_package = file_name == "__init__.py"
    if is_package:
        assert (len(file_path.parts) >= 2)
        module_name = file_path.parts[-2]
    else:
        module_name = file_path.stem

    with add_to_sys_path(file_path.parent):
        spec = importlib.util.spec_from_file_location(
            module_name, file_path, submodule_search_locations=file_path.parent)
        module = importlib.util.module_from_spec(spec)
        sys.modules[module_name] = module
        spec.loader.exec_module(module)

    return module_name, is_package


def import_modules(root_dir):
    print("+++", sys.executable)

    included_directories = [os.path.join(root_dir, p) for p in INCLUDED_DIRS]
    excluded_file_paths = {os.path.join(root_dir, p) for p in EXCLUDED_FILE_PATHS}

    has_failures = False
    directories = included_directories[:]
    while directories:
        path = directories.pop(0)
        sub_directories = []
        is_package = False
        with os.scandir(path) as it:
            for entry in it:
                if entry.is_dir():
                    if entry.name.startswith('.'):
                        continue
                    sub_directories.append(entry.path)
                    continue
                if not entry.is_file():
                    continue
                if not entry.name.endswith(".py"):
                    continue
                if entry.path in excluded_file_paths:
                    continue
                try:
                    is_current_package = import_module(entry.path)
                    is_package = is_package or is_current_package
                except SystemExit as e:
                    has_failures = True
                    print(f"+++ Failed to import {entry.path} (module called `sys.exit`), {e}")
                except Exception as e:
                    has_failures = True
                    print(f"+++ Failed to import {entry.path} ({e.__class__}), {e}")
                    traceback.print_tb(e.__traceback__)
                    print("\n\n")
        # Do not attempt to import individual modules of a package. For now assume that if the top-level package can
        # be imported, it is good enough. This may have to be revisited at some point though. Currently there are
        # no packages in target directories anyway.
        if not is_package:
            directories += sub_directories

    if has_failures:
        raise Exception("Some module imports failed")


def main():
    import sys
    if sys.argv[1] == "--root-dir":
        root_dir = sys.argv[2]
        import_modules(root_dir=root_dir)
    else:
        raise Exception("Missing --root-dir parameter")


if __name__ == "__main__":
    main()
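To reproduce what the `script_validate_on_system_python` CTest entry does outside of CTest, the new script can be run directly with the interpreter of interest. A minimal sketch, assuming a Blender source checkout at `/path/to/blender` and that the chosen interpreter (for example the buildbots' `/usr/bin/python3.6`) is installed; this is illustrative and not part of the commit:

import subprocess

# Any interpreter to validate against; the Linux buildbots use /usr/bin/python3.6.
python_exe = "/usr/bin/python3.6"

subprocess.run(
    [python_exe,
     "tests/python/system_python/load_tool_scripts.py",
     "--root-dir", "/path/to/blender"],
    check=True,  # The script raises (and exits non-zero) if any import fails.
)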