Mirror of https://projects.blender.org/blender/blender.git (synced 2025-01-22 07:22:12 -05:00)
Merge branch 'main' into brush-assets-project
Commit ac7d0c4841: 14 changed files with 231 additions and 79 deletions
@@ -72,7 +72,8 @@ watch_check_ruff:
 check_pylint:
 	@cd "$(BASE_DIR)" && \
 		pylint $(PY_FILES) \
-		--disable=C0103,C0111,C0201,C0301,C0302,C0415,R1702,R1705,R0902,R0903,R0913,E0611,E0401,I1101,R0801,C0209,W0511,W0718,W0719,C0413,R0911,R0912,R0914,R0915 \
+		--enable=useless-suppression \
+		--disable=C0103,C0111,C0201,C0301,C0302,C0415,R0401,R1702,R1705,R0902,R0903,R0913,E0611,E0401,I1101,R0801,C0209,W0511,W0718,W0719,C0413,R0911,R0912,R0914,R0915 \
 		--msg-template='{abspath}:{line}:{column}: {msg_id}: {msg} ({symbol})'

 watch_check_pylint:
 	@cd "$(BASE_DIR)" && \
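Note: `--enable=useless-suppression` makes pylint report suppression comments that no longer match any real warning (message I0021), which is what motivates the many `# pylint: disable-next=...` comments added and moved throughout this merge. A minimal sketch (not part of the diff; the import is illustrative):

    # Once `os` is actually used, the suppression below stops matching a
    # real warning, and `useless-suppression` (I0021) reports it.
    # pylint: disable-next=unused-import
    import os

    print(os.sep)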
@@ -400,7 +400,6 @@ def monkeypatch_extenions_repos_update_pre_impl():


 def monkeypatch_extenions_repos_update_post_impl():
     import os
     # pylint: disable-next=redefined-outer-name
     from . import bl_extension_ops

     repo_cache_store = repo_cache_store_ensure()
@@ -437,7 +436,7 @@ def monkeypatch_extensions_repos_update_pre(*_):
     except Exception as ex:
         print_debug("ERROR", str(ex))
     try:
-        monkeypatch_extensions_repos_update_pre._fn_orig()
+        monkeypatch_extensions_repos_update_pre.fn_orig()
     except Exception as ex:
         print_debug("ERROR", str(ex))

@@ -446,7 +445,7 @@ def monkeypatch_extensions_repos_update_pre(*_):
 def monkeypatch_extenions_repos_update_post(*_):
     print_debug("POST:")
     try:
-        monkeypatch_extenions_repos_update_post._fn_orig()
+        monkeypatch_extenions_repos_update_post.fn_orig()
     except Exception as ex:
         print_debug("ERROR", str(ex))
     try:
@@ -458,40 +457,50 @@ def monkeypatch_extenions_repos_update_post(*_):
 def monkeypatch_install():
     import addon_utils

+    # pylint: disable-next=protected-access
     handlers = bpy.app.handlers._extension_repos_update_pre
+    # pylint: disable-next=protected-access
     fn_orig = addon_utils._initialize_extension_repos_pre

     fn_override = monkeypatch_extensions_repos_update_pre
     for i, fn in enumerate(handlers):
         if fn is fn_orig:
             handlers[i] = fn_override
-            fn_override._fn_orig = fn_orig
+            fn_override.fn_orig = fn_orig
             break

+    # pylint: disable-next=protected-access
     handlers = bpy.app.handlers._extension_repos_update_post
+    # pylint: disable-next=protected-access
     fn_orig = addon_utils._initialize_extension_repos_post

     fn_override = monkeypatch_extenions_repos_update_post
     for i, fn in enumerate(handlers):
         if fn is fn_orig:
             handlers[i] = fn_override
-            fn_override._fn_orig = fn_orig
+            fn_override.fn_orig = fn_orig
             break


 def monkeypatch_uninstall():
+    # pylint: disable-next=protected-access
     handlers = bpy.app.handlers._extension_repos_update_pre

     fn_override = monkeypatch_extensions_repos_update_pre
     for i, fn in enumerate(handlers):
         if fn is fn_override:
-            handlers[i] = fn_override._fn_orig
-            del fn_override._fn_orig
+            handlers[i] = fn_override.fn_orig
+            del fn_override.fn_orig
             break

+    # pylint: disable-next=protected-access
     handlers = bpy.app.handlers._extension_repos_update_post

     fn_override = monkeypatch_extenions_repos_update_post
     for i, fn in enumerate(handlers):
         if fn is fn_override:
-            handlers[i] = fn_override._fn_orig
-            del fn_override._fn_orig
+            handlers[i] = fn_override.fn_orig
+            del fn_override.fn_orig
             break

@@ -636,9 +645,11 @@ def register():
     from bl_ui.space_userpref import USERPREF_MT_interface_theme_presets
     USERPREF_MT_interface_theme_presets.append(theme_preset_draw)

+    # pylint: disable-next=protected-access
     handlers = bpy.app.handlers._extension_repos_sync
     handlers.append(extenion_repos_sync)

+    # pylint: disable-next=protected-access
     handlers = bpy.app.handlers._extension_repos_files_clear
     handlers.append(extenion_repos_files_clear)
@@ -676,10 +687,12 @@ def unregister():
     from bl_ui.space_userpref import USERPREF_MT_interface_theme_presets
     USERPREF_MT_interface_theme_presets.remove(theme_preset_draw)

+    # pylint: disable-next=protected-access
     handlers = bpy.app.handlers._extension_repos_sync
     if extenion_repos_sync in handlers:
         handlers.remove(extenion_repos_sync)

+    # pylint: disable-next=protected-access
     handlers = bpy.app.handlers._extension_repos_files_clear
     if extenion_repos_files_clear in handlers:
         handlers.remove(extenion_repos_files_clear)
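Note: the `_fn_orig` -> `fn_orig` rename stores the wrapped handler under a public attribute name, so reading it back (`fn_override.fn_orig`) no longer trips pylint's protected-access check. A minimal sketch of the swap pattern on a plain list instead of `bpy.app.handlers` (all names illustrative):

    def fn_orig():
        print("original")

    def fn_override():
        print("before")
        fn_override.fn_orig()  # public attribute: no protected-access warning

    def install(handlers):
        for i, fn in enumerate(handlers):
            if fn is fn_orig:
                handlers[i] = fn_override
                fn_override.fn_orig = fn_orig
                break

    def uninstall(handlers):
        for i, fn in enumerate(handlers):
            if fn is fn_override:
                handlers[i] = fn_override.fn_orig
                del fn_override.fn_orig
                break

    handlers = [fn_orig]
    install(handlers)
    handlers[0]()  # "before" then "original"
    uninstall(handlers)
    handlers[0]()  # "original"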
@@ -404,7 +404,7 @@ def repo_iter_valid_only(context, *, exclude_remote, exclude_system):
         if (not repo_item.use_remote_url) and (repo_item.source == 'SYSTEM'):
             continue
         # Ignore repositories that have invalid settings.
-        directory, remote_url = repo_paths_or_none(repo_item)
+        directory, _remote_url = repo_paths_or_none(repo_item)
         if directory is None:
             continue
         yield repo_item
@@ -494,16 +494,17 @@ def repo_cache_store_refresh_from_prefs(repo_cache_store, include_disabled=False
     return repos


-def _preferences_ensure_disabled(*, repo_item, pkg_id_sequence, default_set):
+def _preferences_ensure_disabled(
+        *,
+        repo_item,  # `RepoItem`
+        pkg_id_sequence,  # `List[str]`
+        default_set,  # `bool`
+        error_fn,  # `Callable[[Exception], None]`
+):  # `-> Dict[str, Tuple[bool, bool]]`
     import sys
     import addon_utils

     result = {}
-    errors = []
-
-    def handle_error(ex):
-        print("Error:", ex)
-        errors.append(str(ex))

     modules_clear = []
@@ -530,7 +531,7 @@ def _preferences_ensure_disabled(*, repo_item, pkg_id_sequence, default_set):
         if not hasattr(repo_module, pkg_id):
             print("Repo module \"{:s}.{:s}\" not a sub-module!".format(".".join(module_base_elem), pkg_id))

-        addon_utils.disable(addon_module_name, default_set=default_set, handle_error=handle_error)
+        addon_utils.disable(addon_module_name, default_set=default_set, handle_error=error_fn)

         modules_clear.append(pkg_id)
@@ -569,11 +570,12 @@ def _preferences_ensure_disabled(*, repo_item, pkg_id_sequence, default_set):
                 continue
             delattr(repo_module, pkg_id)

-    return result, errors
+    return result


 def _preferences_ensure_enabled(*, repo_item, pkg_id_sequence, result, handle_error):
     import addon_utils
+    _ = repo_item, pkg_id_sequence
     for addon_module_name, (loaded_default, loaded_state) in result.items():
         # The module was not loaded, so no need to restore it.
         if not loaded_state:
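Note: `_preferences_ensure_disabled` no longer accumulates error strings into a local list; callers now pass an `error_fn` callback (typically `lambda ex: self.report({'ERROR'}, str(ex))`) and receive only the result. A simplified sketch of the callback pattern, with illustrative names:

    registry = {"a": 1}  # stand-in for enabled add-ons

    def disable_all(names, *, error_fn):
        result = {}
        for name in names:
            try:
                result[name] = registry.pop(name)  # may raise KeyError
            except Exception as ex:
                error_fn(ex)
        return result

    result = disable_all(["a", "b"], error_fn=lambda ex: print("ERROR:", ex))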
@@ -639,6 +641,7 @@ def _preferences_ui_redraw():
 def _preferences_ui_refresh_addons():
     import addon_utils
     # TODO: make a public method.
+    # pylint: disable-next=protected-access
     addon_utils.modules._is_first = True

@@ -928,6 +931,7 @@ def _extensions_repo_sync_wheels(repo_cache_store, extensions_enabled):
     local_dir = os.path.join(extensions, ".local")

     # WARNING: bad level call, avoid making this a public function just now.
+    # pylint: disable-next=protected-access
     addon_utils._extension_sync_wheels(
         local_dir=local_dir,
         wheel_list=wheel_list,
@@ -1347,6 +1351,7 @@ class EXTENSIONS_OT_repo_sync(Operator, _ExtCmdMixIn):
             repos_lock.append(repo_item.directory)

         # Lock repositories.
+        # pylint: disable-next=attribute-defined-outside-init
         self.repo_lock = bl_extension_utils.RepoLock(
             repo_directories=repos_lock,
             cookie=cookie_from_session(),
@@ -1444,6 +1449,7 @@ class EXTENSIONS_OT_repo_sync_all(Operator, _ExtCmdMixIn):
             repos_lock.append(repo_item.directory)

         # Lock repositories.
+        # pylint: disable-next=attribute-defined-outside-init
         self.repo_lock = bl_extension_utils.RepoLock(
             repo_directories=repos_lock,
             cookie=cookie_from_session(),
@@ -1496,11 +1502,15 @@ class EXTENSIONS_OT_repo_refresh_all(Operator):
             # Re-generate JSON meta-data from TOML files (needed for offline repository).
             repo_cache_store.refresh_remote_from_directory(
                 directory=repo_item.directory,
+                # NOTE: this isn't a problem as the callback isn't stored.
+                # pylint: disable-next=cell-var-from-loop
                 error_fn=lambda ex: self._exceptions_as_report(repo_item.name, ex),
                 force=True,
             )
             repo_cache_store.refresh_local_from_directory(
                 directory=repo_item.directory,
+                # NOTE: this isn't a problem as the callback isn't stored.
+                # pylint: disable-next=cell-var-from-loop
                 error_fn=lambda ex: self._exceptions_as_report(repo_item.name, ex),
             )

@@ -1532,7 +1542,9 @@ class EXTENSIONS_OT_repo_enable_from_drop(Operator):
         print(self.repo_index)
         if (repo := repo_lookup_by_index_or_none_with_report(self.repo_index, self.report)) is None:
             return {'CANCELLED'}
+        # pylint: disable-next=attribute-defined-outside-init
         self._repo_name = repo.name
+        # pylint: disable-next=attribute-defined-outside-init
         self._repo_remote_url = repo.remote_url

         wm = context.window_manager
@@ -1599,8 +1611,11 @@ class EXTENSIONS_OT_package_upgrade_all(Operator, _ExtCmdMixIn):

     def exec_command_iter(self, is_modal):
         from . import bl_extension_utils
+        # pylint: disable-next=attribute-defined-outside-init
         self._repo_directories = set()
+        # pylint: disable-next=attribute-defined-outside-init
         self._addon_restore = []
+        # pylint: disable-next=attribute-defined-outside-init
         self._theme_restore = _preferences_theme_state_create()

         use_active_only = self.use_active_only
@@ -1690,6 +1705,7 @@ class EXTENSIONS_OT_package_upgrade_all(Operator, _ExtCmdMixIn):
             return None

         # Lock repositories.
+        # pylint: disable-next=attribute-defined-outside-init
         self.repo_lock = bl_extension_utils.RepoLock(
             repo_directories=list(self._repo_directories),
             cookie=cookie_from_session(),
@@ -1698,10 +1714,11 @@ class EXTENSIONS_OT_package_upgrade_all(Operator, _ExtCmdMixIn):
             return None

         for repo_item, pkg_id_sequence in handle_addons_info:
-            result, errors = _preferences_ensure_disabled(
+            result = _preferences_ensure_disabled(
                 repo_item=repo_item,
                 pkg_id_sequence=pkg_id_sequence,
                 default_set=False,
+                error_fn=lambda ex: self.report({'ERROR'}, str(ex)),
             )
             self._addon_restore.append((repo_item, pkg_id_sequence, result))

@@ -1765,7 +1782,9 @@ class EXTENSIONS_OT_package_install_marked(Operator, _ExtCmdMixIn):
             error_fn=self.error_fn_from_exception,
         ))
         repo_pkg_map = _pkg_marked_by_repo(repo_cache_store, pkg_manifest_remote_all)
+        # pylint: disable-next=attribute-defined-outside-init
         self._repo_directories = set()
+        # pylint: disable-next=attribute-defined-outside-init
         self._repo_map_packages_addon_only = []
         package_count = 0

@@ -1819,6 +1838,7 @@ class EXTENSIONS_OT_package_install_marked(Operator, _ExtCmdMixIn):
             return None

         # Lock repositories.
+        # pylint: disable-next=attribute-defined-outside-init
         self.repo_lock = bl_extension_utils.RepoLock(
             repo_directories=list(self._repo_directories),
             cookie=cookie_from_session(),
@@ -1919,7 +1939,9 @@ class EXTENSIONS_OT_package_uninstall_marked(Operator, _ExtCmdMixIn):
         repo_pkg_map = _pkg_marked_by_repo(repo_cache_store, pkg_manifest_local_all)
         package_count = 0

+        # pylint: disable-next=attribute-defined-outside-init
         self._repo_directories = set()
+        # pylint: disable-next=attribute-defined-outside-init
         self._theme_restore = _preferences_theme_state_create()

         # Track add-ons to disable before uninstalling.
@@ -1958,6 +1980,7 @@ class EXTENSIONS_OT_package_uninstall_marked(Operator, _ExtCmdMixIn):
             return None

         # Lock repositories.
+        # pylint: disable-next=attribute-defined-outside-init
         self.repo_lock = bl_extension_utils.RepoLock(
             repo_directories=list(self._repo_directories),
             cookie=cookie_from_session(),
@@ -1966,11 +1989,12 @@ class EXTENSIONS_OT_package_uninstall_marked(Operator, _ExtCmdMixIn):
             return None

         for repo_item, pkg_id_sequence in handle_addons_info:
-            # No need to store the result (`_`) because the add-ons aren't going to be enabled again.
-            _, errors = _preferences_ensure_disabled(
+            # No need to store the result because the add-ons aren't going to be enabled again.
+            _preferences_ensure_disabled(
                 repo_item=repo_item,
                 pkg_id_sequence=pkg_id_sequence,
                 default_set=True,
+                error_fn=lambda ex: self.report({'ERROR'}, str(ex)),
             )

         return bl_extension_utils.CommandBatch(
@@ -2066,7 +2090,9 @@ class EXTENSIONS_OT_package_install_files(Operator, _ExtCmdMixIn):
             pkg_is_legacy_addon,
         )

+        # pylint: disable-next=attribute-defined-outside-init
         self._addon_restore = []
+        # pylint: disable-next=attribute-defined-outside-init
         self._theme_restore = _preferences_theme_state_create()

         # Happens when run from scripts and this argument isn't passed in.
@@ -2138,7 +2164,9 @@ class EXTENSIONS_OT_package_install_files(Operator, _ExtCmdMixIn):
             return None

         # Collect package ID's.
+        # pylint: disable-next=attribute-defined-outside-init
         self.repo_directory = directory
+        # pylint: disable-next=attribute-defined-outside-init
         self.pkg_id_sequence = pkg_id_sequence

         # Detect upgrade.
@@ -2151,15 +2179,17 @@ class EXTENSIONS_OT_package_install_files(Operator, _ExtCmdMixIn):
         if pkg_manifest_local is not None:
             pkg_id_sequence_upgrade = [pkg_id for pkg_id in pkg_id_sequence if pkg_id in pkg_manifest_local]
             if pkg_id_sequence_upgrade:
-                result, errors = _preferences_ensure_disabled(
+                result = _preferences_ensure_disabled(
                     repo_item=repo_item,
                     pkg_id_sequence=pkg_id_sequence_upgrade,
                     default_set=False,
+                    error_fn=lambda ex: self.report({'ERROR'}, str(ex)),
                 )
                 self._addon_restore.append((repo_item, pkg_id_sequence_upgrade, result))
         del repo_cache_store, pkg_manifest_local

         # Lock repositories.
+        # pylint: disable-next=attribute-defined-outside-init
         self.repo_lock = bl_extension_utils.RepoLock(
             repo_directories=[repo_item.directory],
             cookie=cookie_from_session(),
@@ -2503,7 +2533,9 @@ class EXTENSIONS_OT_package_install(Operator, _ExtCmdMixIn):
         if not self._is_ready_to_execute():
             return None

+        # pylint: disable-next=attribute-defined-outside-init
         self._addon_restore = []
+        # pylint: disable-next=attribute-defined-outside-init
         self._theme_restore = _preferences_theme_state_create()

         directory = _repo_dir_and_index_get(self.repo_index, self.repo_directory, self.report)
@@ -2531,15 +2563,17 @@ class EXTENSIONS_OT_package_install(Operator, _ExtCmdMixIn):

         if is_installed:
             pkg_id_sequence = (pkg_id,)
-            result, errors = _preferences_ensure_disabled(
+            result = _preferences_ensure_disabled(
                 repo_item=repo_item,
                 pkg_id_sequence=pkg_id_sequence,
                 default_set=False,
+                error_fn=lambda ex: self.report({'ERROR'}, str(ex)),
             )
             self._addon_restore.append((repo_item, pkg_id_sequence, result))
             del pkg_id_sequence

         # Lock repositories.
+        # pylint: disable-next=attribute-defined-outside-init
         self.repo_lock = bl_extension_utils.RepoLock(
             repo_directories=[repo_item.directory],
             cookie=cookie_from_session(),
@@ -2729,7 +2763,7 @@ class EXTENSIONS_OT_package_install(Operator, _ExtCmdMixIn):
         )
         layout = self.layout

-        _repo_index, repo_name, pkg_id, item_remote = self._drop_variables
+        _repo_index, repo_name, _pkg_id, item_remote = self._drop_variables

         layout.label(text="Do you want to install the following {:s}?".format(item_remote.type))

@@ -2809,7 +2843,6 @@ class EXTENSIONS_OT_package_install(Operator, _ExtCmdMixIn):
         repo_from_url_name,  # `str`
         url,  # `str`
     ):
         import string
         from .bl_extension_utils import (
             platform_from_this_system,
         )
@@ -2945,6 +2978,7 @@ class EXTENSIONS_OT_package_uninstall(Operator, _ExtCmdMixIn):
     def exec_command_iter(self, is_modal):
         from . import bl_extension_utils

+        # pylint: disable-next=attribute-defined-outside-init
         self._theme_restore = _preferences_theme_state_create()

         directory = _repo_dir_and_index_get(self.repo_index, self.repo_directory, self.report)
@@ -2959,13 +2993,16 @@ class EXTENSIONS_OT_package_uninstall(Operator, _ExtCmdMixIn):
             self.report({'ERROR'}, "Package ID not set")
             return None

-        _, errors = _preferences_ensure_disabled(
+        # No need to store the result because the add-ons aren't going to be enabled again.
+        _preferences_ensure_disabled(
             repo_item=repo_item,
             pkg_id_sequence=(pkg_id,),
             default_set=True,
+            error_fn=lambda ex: self.report({'ERROR'}, str(ex)),
         )

         # Lock repositories.
+        # pylint: disable-next=attribute-defined-outside-init
         self.repo_lock = bl_extension_utils.RepoLock(
             repo_directories=[repo_item.directory],
             cookie=cookie_from_session(),
@@ -3388,7 +3425,7 @@ class EXTENSIONS_OT_userpref_tags_set(Operator):
             return {'CANCELLED'}

         tags_clear(wm, tags_attr)
-        if self.value is False:
+        if value is False:
             tags_refresh(wm, tags_attr, default_value=False)

         _preferences_ui_redraw()
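Note: the repeated `cell-var-from-loop` suppressions in this file mark lambdas that close over the loop variable (`repo_item`); that is safe here only because each lambda is invoked before the next iteration rather than stored. A sketch of the late-binding pitfall the check guards against:

    # Stored closures all see the final value of the loop variable:
    callbacks = [lambda: print(i) for i in range(3)]
    for cb in callbacks:
        cb()  # 2, 2, 2

    # Calling immediately inside the loop reads the current value:
    for i in range(3):
        (lambda: print(i))()  # 0, 1, 2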
@@ -561,6 +561,30 @@ def addons_panel_draw_items(
     return module_names


+def addons_panel_draw_error_duplicates(layout):
+    import addon_utils
+    box = layout.box()
+    row = box.row()
+    row.label(text="Multiple add-ons with the same name found!")
+    row.label(icon='ERROR')
+    box.label(text="Delete one of each pair to resolve:")
+    for (addon_name, addon_file, addon_path) in addon_utils.error_duplicates:
+        box.separator()
+        sub_col = box.column(align=True)
+        sub_col.label(text=addon_name + ":")
+        sub_col.label(text=" " + addon_file)
+        sub_col.label(text=" " + addon_path)
+
+
+def addons_panel_draw_error_generic(layout, lines):
+    box = layout.box()
+    sub = box.row()
+    sub.label(text=lines[0])
+    sub.label(icon='ERROR')
+    for l in lines[1:]:
+        box.label(text=l)
+
+
 def addons_panel_draw_impl(
         self,
         context,  # `bpy.types.Context`
@@ -580,14 +604,26 @@ def addons_panel_draw_impl(

     from . import repo_cache_store_ensure

+    layout = self.layout
+
+    # First show any errors, this should be an exceptional situation that should be resolved,
+    # otherwise add-ons may not behave correctly.
+    if addon_utils.error_duplicates:
+        addons_panel_draw_error_duplicates(layout)
+    if addon_utils.error_encoding:
+        addons_panel_draw_error_generic(
+            layout, (
+                "One or more add-ons do not have UTF-8 encoding",
+                "(see console for details)",
+            ),
+        )
+
     repo_cache_store = repo_cache_store_ensure()

     # This isn't elegant, but the preferences aren't available on registration.
     if not repo_cache_store.is_init():
         repo_cache_store_refresh_from_prefs(repo_cache_store)

-    layout = self.layout
-
     prefs = context.preferences

     # Define a top-most column to place warnings (if-any).
@@ -716,6 +752,7 @@ def addons_panel_draw(panel, context):
 # Light weight wrapper for extension local and remote extension manifest data.
 # Used for display purposes. Includes some information for filtering.

+# pylint: disable-next=wrong-import-order
 from collections import namedtuple

 ExtensionUI = namedtuple(
@@ -863,15 +900,14 @@ class ExtensionUI_FilterParams:
         if is_addon:
             if is_installed:
-                # Currently we only need to know the module name once installed.
-                addon_module_name = repo_module_prefix + pkg_id
+                # pylint: disable-next=possibly-used-before-assignment
+                addon_module_name = repo_module_prefix + pkg_id
                 is_enabled = addon_module_name in self.addons_enabled
-
             else:
                 is_enabled = False
                 addon_module_name = None
         elif is_theme:
+            # pylint: disable-next=possibly-used-before-assignment
             is_enabled = (repo_index, pkg_id) == self.active_theme_info
             addon_module_name = None
         else:
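Note: `possibly-used-before-assignment` fires when a name is bound on only some paths; here `repo_module_prefix` is assigned conditionally earlier in the method, and the suppression documents the invariant instead of restructuring. An illustrative sketch:

    def module_name(is_installed, configured):
        if configured:
            prefix = "bl_ext."
        if is_installed:
            # pylint warns: `prefix` is bound only when `configured` is true.
            # If callers guarantee installed implies configured, a
            # disable-next comment records that invariant.
            return prefix + "name"
        return None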
@@ -292,8 +292,9 @@ def command_output_from_json_0(
 # Internal Functions.
 #


+# pylint: disable-next=useless-return
 def repositories_validate_or_errors(repos: Sequence[str]) -> Optional[InfoItemSeq]:
     _ = repos
     return None

@@ -853,6 +854,8 @@ class CommandBatch:
     def _exec_blocking_single(
             self,
             report_fn: Callable[[str, str], None],
+            # TODO: investigate using this or removing it.
+            # pylint: disable-next=unused-argument
             request_exit_fn: Callable[[], bool],
     ) -> bool:
         for cmd in self._batch:
@@ -1298,6 +1301,7 @@ def pkg_manifest_params_compatible_or_error(
         item=item,
         filter_blender_version=this_blender_version,
         filter_platform=this_platform,
+        # pylint: disable-next=unnecessary-lambda
         skip_message_fn=lambda msg: result_report.append(msg),
         error_fn=error_fn,
     )
@@ -1891,6 +1895,7 @@ class RepoCacheStore:
     ) -> Optional[Dict[str, PkgManifest_Normalized]]:
         for repo_entry in self._repos:
             if directory == repo_entry.directory:
+                # pylint: disable-next=protected-access
                 return repo_entry._json_data_refresh(force=force, error_fn=error_fn)
         raise ValueError("Directory {:s} not a known repo".format(directory))

@@ -1927,6 +1932,7 @@ class RepoCacheStore:
             # While we could yield a valid manifest here,
             # leave it to the caller to skip "remote" data for local-only repositories.
             if repo_entry.remote_url:
+                # pylint: disable-next=protected-access
                 yield repo_entry._json_data_ensure(
                     check_files=check_files,
                     ignore_missing=ignore_missing,
@@ -232,6 +232,7 @@ def force_exit_ok_enable() -> None:
 def execfile(filepath: str) -> Dict[str, Any]:
     global_namespace = {"__file__": filepath, "__name__": "__main__"}
     with open(filepath, "rb") as fh:
+        # pylint: disable-next=exec-used
         exec(compile(fh.read(), filepath, 'exec'), global_namespace)
     return global_namespace

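Note: `execfile` compiles and executes a script, then hands back its namespace. A hypothetical call (the path and key below are made up):

    namespace = execfile("/tmp/example_script.py")
    print(namespace.get("SOME_SETTING"))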
@@ -446,6 +447,7 @@ def sha256_from_file_or_error(
     (exact hashing method may change).
     """
     try:
+        # pylint: disable-next=consider-using-with
         fh_context = open(filepath, 'rb')
     except Exception as ex:
         return "error opening file: {:s}".format(str(ex))
@@ -531,6 +533,8 @@ def rmtree_with_fallback_or_error(
     if sys.version_info >= (3, 12):
         shutil.rmtree(path, onexc=lambda *args: errors.append(args))
     else:
+        # Ignore as the deprecated logic is only used for older Python versions.
+        # pylint: disable-next=deprecated-argument
         shutil.rmtree(path, onerror=lambda *args: errors.append((args[0], args[1], args[2][1])))

     # Happy path (for practically all cases).
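Note: Python 3.12 replaced `shutil.rmtree`'s deprecated `onerror` (which receives a `(function, path, exc_info)` triple) with `onexc` (which receives the exception object directly); that is why the fallback branch unpacks `args[2][1]`. A sketch of the same dispatch, using an example path that does not exist so the handler fires:

    import shutil
    import sys

    errors = []
    if sys.version_info >= (3, 12):
        shutil.rmtree("/tmp/does-not-exist", onexc=lambda *args: errors.append(args))
    else:
        shutil.rmtree("/tmp/does-not-exist", onerror=lambda *args: errors.append((args[0], args[1], args[2][1])))
    print(errors)  # the FileNotFoundError is captured, not raised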
@@ -642,7 +646,7 @@ def pkg_manifest_from_dict_and_validate_impl(
     for key in PkgManifest._fields:
         val = data.get(key, ...)
         if val is ...:
-            # pylint: disable-next=no-member
+            # pylint: disable-next=no-member,protected-access
             val = PkgManifest._field_defaults.get(key, ...)
         # `pkg_manifest_is_valid_or_error{_all}` will have caught this, assert all the same.
         assert val is not ...
@@ -820,6 +824,7 @@ def pkg_manifest_from_archive_and_validate(
         strict: bool,
 ) -> Union[PkgManifest, str]:
     try:
+        # pylint: disable-next=consider-using-with
         zip_fh_context = zipfile.ZipFile(filepath, mode="r")
     except Exception as ex:
         return "Error extracting archive \"{:s}\"".format(str(ex))
@@ -836,6 +841,7 @@ def pkg_is_legacy_addon(filepath: str) -> bool:
         return True

     try:
+        # pylint: disable-next=consider-using-with
         zip_fh_context = zipfile.ZipFile(filepath, mode="r")
     except Exception:
         return False
@@ -1450,12 +1456,12 @@ def pkg_manifest_tags_valid_or_error(
 #
 # However manifests from servers that don't adhere to strict rules are not prevented from loading.

-# pylint: disable-next=useless-return
 def pkg_manifest_validate_field_nop(
         value: Any,
         strict: bool,
 ) -> Optional[str]:
     _ = strict, value
+    # pylint: disable-next=useless-return
     return None

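Note: moving the `useless-return` suppression next to the `return None` keeps the explicit return, which matters because the validators share the `(value, strict) -> Optional[str]` shape (an error string or None). A minimal sketch of that convention:

    from typing import Any, Optional

    def validate_nop(value: Any, strict: bool) -> Optional[str]:
        _ = strict, value
        # Explicit None keeps the same shape as validators that return errors.
        # pylint: disable-next=useless-return
        return None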
@@ -1698,9 +1704,16 @@ def pkg_manifest_validate_field_wheels(
     filename_spec = "{distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl"

     for wheel in value:
         if "\"" in wheel:
             return "wheel paths must not contain quotes, found {!r}".format(wheel)
         if "\\" in wheel:
             return "wheel paths must use forward slashes, found {!r}".format(wheel)

         if (error := pkg_manifest_validate_field_any_non_empty_string_stripped_no_control_chars(
                 wheel, True,
         )) is not None:
             return "wheel paths detected: {:s}, found {!r}".format(error, wheel)

         wheel_filename = os.path.basename(wheel)
         if not wheel_filename.lower().endswith(".whl"):
             return "wheel paths must end with \".whl\", found {!r}".format(wheel)
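Note: wheel filenames follow `{distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl`, so once a path is validated, the platform tag is simply the last dash-separated field. A sketch with a made-up filename:

    import os

    wheel = "./wheels/example_pkg-1.0.0-py3-none-manylinux2014_x86_64.whl"
    name = os.path.splitext(os.path.basename(wheel))[0]
    parts = name.split("-")
    assert len(parts) >= 5  # guaranteed by the validation above
    print(parts[-1])  # "manylinux2014_x86_64"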
@@ -1809,11 +1822,11 @@ def pkg_manifest_is_valid_or_error_impl(
             is_default_value = False
             x_val = data.get(x_key, ...)
             if x_val is ...:
-                # pylint: disable-next=no-member
+                # pylint: disable-next=no-member, protected-access
                 x_val = PkgManifest._field_defaults.get(x_key, ...)
                 if from_repo:
                     if x_val is ...:
-                        # pylint: disable-next=no-member
+                        # pylint: disable-next=no-member, protected-access
                         x_val = PkgManifest_Archive._field_defaults.get(x_key, ...)
                 if x_val is ...:
                     error_list.append("missing \"{:s}\"".format(x_key))
@@ -1898,6 +1911,9 @@ def pkg_manifest_dict_apply_build_generated_table(manifest_dict: Dict[str, Any])
     if (platforms := build_generated.get("platforms")) is not None:
         manifest_dict["platforms"] = platforms

+    if (wheels := build_generated.get("wheels")) is not None:
+        manifest_dict["wheels"] = wheels
+

 # -----------------------------------------------------------------------------
 # Standalone Utilities
@@ -1986,6 +2002,35 @@ def blender_platform_compatible_with_wheel_platform(platform: str, wheel_platfor
     return platform == platform_blender


+def blender_platform_compatible_with_wheel_platform_from_filepath(platform: str, wheel_filepath: str) -> bool:
+    wheel_filename = os.path.splitext(os.path.basename(wheel_filepath))[0]
+
+    wheel_filename_split = wheel_filename.split("-")
+    # This should be unreachable because the manifest has been validated, add assert.
+    assert len(wheel_filename_split) >= 5, "Internal error, manifest validation disallows this"
+
+    wheel_platform = wheel_filename_split[-1]
+
+    return blender_platform_compatible_with_wheel_platform(platform, wheel_platform)
+
+
+def paths_filter_wheels_by_platform(
+        wheels: List[str],
+        platform: str,
+) -> List[str]:
+    """
+    All paths are wheels with filenames that follow the wheel spec.
+    Return wheels which are compatible with the ``platform``.
+    """
+    wheels_result: List[str] = []
+
+    for wheel_filepath in wheels:
+        if blender_platform_compatible_with_wheel_platform_from_filepath(platform, wheel_filepath):
+            wheels_result.append(wheel_filepath)
+
+    return wheels_result
+
+
 def build_paths_filter_wheels_by_platform(
         build_paths: List[Tuple[str, str]],
         platform: str,
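Note: a hypothetical call of the new helper (the wheel names are made up; "windows-x64" follows Blender's extension platform naming):

    wheels = [
        "./wheels/dep-1.0-cp311-cp311-win_amd64.whl",
        "./wheels/dep-1.0-cp311-cp311-manylinux2014_x86_64.whl",
    ]
    print(paths_filter_wheels_by_platform(wheels, "windows-x64"))
    # Expected to keep only the win_amd64 wheel, assuming "windows-x64"
    # maps to the "win_amd64" tag in the compatibility check above.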
@@ -1997,17 +2042,7 @@ def build_paths_filter_wheels_by_platform(
     build_paths_for_platform: List[Tuple[str, str]] = []

     for item in build_paths:
-        # Both the absolute/relative path can be used to get the filename.
-        # Use the relative since it's likely to be shorter.
-        wheel_filename = os.path.splitext(os.path.basename(item[1]))[0]
-
-        wheel_filename_split = wheel_filename.split("-")
-        # This should be unreachable because the manifest has been validated, add assert.
-        assert len(wheel_filename_split) >= 5, "Internal error, manifest validation disallows this"
-
-        wheel_platform = wheel_filename_split[-1]
-
-        if blender_platform_compatible_with_wheel_platform(platform, wheel_platform):
+        if blender_platform_compatible_with_wheel_platform_from_filepath(platform, item[1]):
             build_paths_for_platform.append(item)

     return build_paths_for_platform
@@ -2221,7 +2256,7 @@ def pkg_manifest_toml_is_valid_or_error(filepath: str, strict: bool) -> Tuple[Op
     return None, result


-def pkg_manifest_detect_duplicates(pkg_idname: str, pkg_items: List[PkgManifest]) -> Optional[str]:
+def pkg_manifest_detect_duplicates(pkg_items: List[PkgManifest]) -> Optional[str]:
     """
     When a repository includes multiple packages with the same ID, ensure they don't conflict.

@@ -3177,7 +3212,7 @@ class subcmd_server:
             for pkg_idname, pkg_items in repo_data_idname_map.items():
                 if len(pkg_items) == 1:
                     continue
-                if (error := pkg_manifest_detect_duplicates(pkg_idname, pkg_items)) is not None:
+                if (error := pkg_manifest_detect_duplicates(pkg_items)) is not None:
                     msglog.warn("archive found with duplicates for id {:s}: {:s}".format(pkg_idname, error))

             if html:
@@ -3325,6 +3360,7 @@ class subcmd_client:
         directories_to_clean: List[str] = []
         with CleanupPathsContext(files=(), directories=directories_to_clean):
             try:
+                # pylint: disable-next=consider-using-with
                 zip_fh_context = zipfile.ZipFile(filepath_archive, mode="r")
             except Exception as ex:
                 msglog.error("Error extracting archive: {:s}".format(str(ex)))
@@ -3527,9 +3563,6 @@ class subcmd_client:
                 has_fatal_error = True
                 continue

-            def error_handle(ex: Exception) -> None:
-                msglog.error("{:s}: {:s}".format(pkg_idname, str(ex)))
-
             pkg_info_list = [
                 pkg_info for pkg_info in pkg_info_list
                 if not repository_filter_skip(
@@ -3537,7 +3570,10 @@ class subcmd_client:
                     filter_blender_version=blender_version_tuple,
                     filter_platform=platform_this,
                     skip_message_fn=None,
-                    error_fn=error_handle,
+                    error_fn=lambda ex: any_as_none(
+                        # pylint: disable-next=cell-var-from-loop
+                        msglog.error("{:s}: {:s}".format(pkg_idname, str(ex))),
+                    ),
                 )
             ]

@@ -3653,7 +3689,7 @@ class subcmd_client:

         # Validate:
         if filename_archive_size_test != archive_size_expected:
-            msglog.error("Archive size mismatch \"{:s}\", expected {:d}, was {:d}".format(
+            msglog.fatal_error("Archive size mismatch \"{:s}\", expected {:d}, was {:d}".format(
                 pkg_idname,
                 archive_size_expected,
                 filename_archive_size_test,
@@ -3661,7 +3697,7 @@ class subcmd_client:
             return False
         filename_archive_hash_test = "sha256:" + sha256.hexdigest()
         if filename_archive_hash_test != archive_hash_expected:
-            msglog.error("Archive checksum mismatch \"{:s}\", expected {:s}, was {:s}".format(
+            msglog.fatal_error("Archive checksum mismatch \"{:s}\", expected {:s}, was {:s}".format(
                 pkg_idname,
                 archive_hash_expected,
                 filename_archive_hash_test,
@@ -3747,9 +3783,8 @@ class subcmd_client:

         if (error := rmtree_with_fallback_or_error(filepath_local_pkg)) is not None:
             msglog.error("Failure to remove \"{:s}\" with error ({:s})".format(pkg_idname, error))
-            continue
-
-        msglog.status("Removed \"{:s}\"".format(pkg_idname))
+        else:
+            msglog.status("Removed \"{:s}\"".format(pkg_idname))

         filepath_local_cache_archive = os.path.join(local_cache_dir, pkg_idname + PKG_EXT)
         if os.path.exists(filepath_local_cache_archive):
@@ -3757,12 +3792,13 @@ class subcmd_client:

         if user_dir:
             filepath_user_pkg = os.path.join(user_dir, pkg_idname)
-            if os.path.isdir(filepath_user_pkg):
+            if os.path.exists(filepath_user_pkg):
                 if (error := rmtree_with_fallback_or_error(filepath_user_pkg)) is not None:
                     msglog.error(
                         "Failure to remove \"{:s}\" user files with error ({:s})".format(pkg_idname, error),
                     )
-                    continue
+                else:
+                    msglog.status("Removed cache \"{:s}\"".format(pkg_idname))

         return True

@@ -3942,7 +3978,7 @@ class subcmd_author:
             del build_paths_extra_canonical

         except Exception as ex:
-            msglog.status("Error building path list \"{:s}\"".format(str(ex)))
+            msglog.fatal_error("Error building path list \"{:s}\"".format(str(ex)))
             return False

         request_exit = False
@@ -3994,9 +4030,10 @@ class subcmd_author:

         with CleanupPathsContext(files=(outfile_temp,), directories=()):
             try:
+                # pylint: disable-next=consider-using-with
                 zip_fh_context = zipfile.ZipFile(outfile_temp, 'w', zipfile.ZIP_DEFLATED, compresslevel=9)
             except Exception as ex:
-                msglog.status("Error creating archive \"{:s}\"".format(str(ex)))
+                msglog.fatal_error("Error creating archive \"{:s}\"".format(str(ex)))
                 return False

             with contextlib.closing(zip_fh_context) as zip_fh:
@@ -4011,13 +4048,25 @@ class subcmd_author:
                         b"# This must not be included in source manifests.\n",
                         b"[build.generated]\n",
                         "platforms = [\"{:s}\"]\n".format(platform).encode("utf-8"),
+                        # Including wheels simplifies server side check as this list can be tested
+                        # without the server having to filter by platform too.
+                        b"wheels = [",
+                        ", ".join([
+                            # NOTE: accept no string escaping as the rules for wheel paths
+                            # are already strict so strings don't require quoting.
+                            "\"{:s}\"".format(wheel) for wheel in paths_filter_wheels_by_platform(
+                                manifest.wheels or [],
+                                platform,
+                            )
+                        ]).encode("utf-8"),
+                        b"]\n"
                         b"# END GENERATED CONTENT.\n",
                     ))
                     try:
                         with open(filepath_abs, "rb") as temp_fh:
                             zip_data_override = temp_fh.read() + zip_data_override
                     except Exception as ex:
-                        msglog.status("Error overriding manifest \"{:s}\"".format(str(ex)))
+                        msglog.fatal_error("Error overriding manifest \"{:s}\"".format(str(ex)))
                         return False

                 # Handy for testing that sub-directories:
@@ -4029,7 +4078,7 @@ class subcmd_author:
                     else:
                         zip_fh.write(filepath_abs, filepath_rel, compress_type=compress_type)
                 except Exception as ex:
-                    msglog.status("Error adding to archive \"{:s}\"".format(str(ex)))
+                    msglog.fatal_error("Error adding to archive \"{:s}\"".format(str(ex)))
                     return False

                 if verbose:
@@ -4140,6 +4189,7 @@ class subcmd_author:
         # extract the archive into a temporary directory and run validation there.

         try:
+            # pylint: disable-next=consider-using-with
             zip_fh_context = zipfile.ZipFile(pkg_source_archive, mode="r")
         except Exception as ex:
             msglog.status("Error extracting archive \"{:s}\"".format(str(ex)))
@@ -4147,14 +4197,14 @@ class subcmd_author:

         with contextlib.closing(zip_fh_context) as zip_fh:
             if (archive_subdir := pkg_zipfile_detect_subdir_or_none(zip_fh)) is None:
-                msglog.status("Error, archive has no manifest: \"{:s}\"".format(PKG_MANIFEST_FILENAME_TOML))
+                msglog.fatal_error("Error, archive has no manifest: \"{:s}\"".format(PKG_MANIFEST_FILENAME_TOML))
                 return False
             # Demote errors to status as the function of this action is to check the manifest is stable.
             manifest = pkg_manifest_from_zipfile_and_validate_all_errors(zip_fh, archive_subdir, strict=True)
             if isinstance(manifest, list):
-                msglog.status("Error parsing TOML in \"{:s}\"".format(pkg_source_archive))
+                msglog.fatal_error("Error parsing TOML in \"{:s}\"".format(pkg_source_archive))
                 for error_msg in manifest:
-                    msglog.status(error_msg)
+                    msglog.fatal_error(error_msg)
                 return False

             if valid_tags_filepath:
@@ -4182,7 +4232,7 @@ class subcmd_author:
             ok = True
             for filepath in expected_files:
                 if zip_fh.NameToInfo.get(filepath) is None:
-                    msglog.status("Error, file missing from {:s}: \"{:s}\"".format(
+                    msglog.fatal_error("Error, file missing from {:s}: \"{:s}\"".format(
                         manifest.type,
                         filepath,
                     ))
@@ -4785,6 +4835,7 @@ def main(
         # While this is typically the case, is only guaranteed to be `TextIO` so check `reconfigure` is available.
         if not isinstance(fh, io.TextIOWrapper):
            continue
+        # pylint: disable-next=no-member; False positive.
        if fh.encoding.lower().partition(":")[0] == "utf-8":
            continue
        fh.reconfigure(encoding="utf-8")
@@ -91,7 +91,9 @@ class HTTPServerContext:
         http_thread.daemon = True
         http_thread.start()

+        # pylint: disable-next=attribute-defined-outside-init
         self._http_thread = http_thread
+        # pylint: disable-next=attribute-defined-outside-init
         self._http_server = http_server

     def __exit__(self, _type: Any, _value: Any, traceback: Any) -> None:
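Note: `attribute-defined-outside-init` fires because the context manager creates its attributes in `__enter__` rather than `__init__`; the suppressions keep the lazy design. An illustrative sketch:

    class ResourceContext:
        def __enter__(self):
            # Created lazily, on purpose.
            # pylint: disable-next=attribute-defined-outside-init
            self._resource = object()
            return self

        def __exit__(self, *_args):
            del self._resource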
@@ -111,6 +111,7 @@ classifiers = [
         cwd=temp_dir,
         stderr=subprocess.PIPE,
         stdout=subprocess.PIPE,
+        check=True,
     )

     result = search(temp_dir, lambda entry: entry.name.endswith(".whl"))
@@ -15,9 +15,8 @@ import sys
 import tempfile
 import tomllib
 import unittest
-import zipfile
-
 import unittest.util
+import zipfile

 from typing import (
     Any,
@@ -291,6 +291,8 @@ def run_blender(
         },
         stderr=subprocess.PIPE,
         stdout=subprocess.PIPE,
+        # Allow the caller to read a non-zero return-code.
+        check=False,
     )
     stdout = output.stdout.decode("utf-8")
     stderr = output.stderr.decode("utf-8")
@@ -754,11 +756,8 @@ class TestPlatform(TestWithTempBlenderUser_MixIn, unittest.TestCase):


 def main() -> None:
-    global TEMP_DIR_BLENDER_USER
-    global TEMP_DIR_REMOTE
-    global TEMP_DIR_LOCAL
-    global TEMP_DIR_TMPDIR
-    global TEMP_DIR_REMOTE_AS_URL
+    # pylint: disable-next=global-statement
+    global TEMP_DIR_BLENDER_USER, TEMP_DIR_REMOTE, TEMP_DIR_LOCAL, TEMP_DIR_TMPDIR, TEMP_DIR_REMOTE_AS_URL

     with tempfile.TemporaryDirectory() as temp_prefix:
         TEMP_DIR_BLENDER_USER = os.path.join(temp_prefix, "bl_ext_blender")
@@ -2617,7 +2617,6 @@ class USERPREF_PT_addons(AddOnPanel, Panel):
         if filter in {"All", "Enabled"}:
-            # Append missing scripts
             # First collect scripts that are used but have no script file.
             module_names = {mod.__name__ for mod in addon_modules}
             missing_modules = {
                 addon_module_name for addon_module_name in used_addon_module_name_map
                 if addon_module_name not in module_names
@@ -25,6 +25,7 @@
 #include "BLI_map.hh"
 #include "BLI_math_vector_types.hh"
 #include "BLI_span.hh"
+#include "BLI_task.hh"

 #include "DNA_customdata_types.h"
 #include "DNA_material_types.h"
@@ -269,6 +270,14 @@ bool USDMeshReader::topology_changed(const Mesh *existing_mesh, const double mot
     normal_interpolation_ = mesh_prim_.GetNormalsInterpolation();
   }

+  /* Blender expects mesh normals to actually be normalized. */
+  MutableSpan<pxr::GfVec3f> usd_data(normals_.data(), normals_.size());
+  threading::parallel_for(usd_data.index_range(), 4096, [&](const IndexRange range) {
+    for (const int normal_i : range) {
+      usd_data[normal_i].Normalize();
+    }
+  });
+
   return positions_.size() != existing_mesh->verts_num ||
          face_counts_.size() != existing_mesh->faces_num ||
          face_indices_.size() != existing_mesh->corners_num;
@@ -9853,7 +9853,7 @@ static void rna_def_modifier_grease_pencil_multiply(BlenderRNA *brna)
   RNA_def_property_int_sdna(prop, nullptr, "duplications");
   RNA_def_property_range(prop, 0, 999);
   RNA_def_property_ui_range(prop, 1, 10, 1, 1);
-  RNA_def_property_ui_text(prop, "duplicates", "How many copies of strokes be displayed");
+  RNA_def_property_ui_text(prop, "Duplicates", "How many copies of strokes be displayed");
   RNA_def_property_update(prop, 0, "rna_Modifier_update");

   prop = RNA_def_property(srna, "distance", PROP_FLOAT, PROP_DISTANCE);
@@ -195,7 +195,6 @@ def canonical_author_map() -> Dict[str, str]:
         "Daniel Salazar <zanqdo@gmail.com>": (
             "Daniel Salazar <zanqdo>",
             "Daniel Salazar <zanqdo@noreply.localhost>",
-            "Daniel Santana <dgsantana>",
             "ZanQdo <zanqdo@gmail.com>",
             "zanqdo <zanqdo@gmail.com>",
         ),