feat: Working logging, before- and after-deps.

We have a far more sane approach to nodeps now, which
allows us to essentially have two loggers - one that is
very useful, pretty, and clear, but requires a 'rich'
dependency, and one that is simple.
In this spirit, we factored out services/ too.

We can also set the initial console log level now when
packing the .zip.

There's still work to do with the actual flow for deps
installing / uninstalling.
But it should be far more robust now.

Finally, we have a barebones working `quartodoc`-based docs site.
It's super clever; see <https://github.com/machow/quartodoc>.
As it's "just" a quarto project with some python autodiscovery,
fleshing it out with e.g. math, images, diagrams, and so forth
should be exceptionally easy.

As we develop, various linter-guided fixes are being realized.
This will be a long process, best done as we spiff everything up
in preparation for general release.
main
Sofus Albert Høgsbro Rose 2024-03-21 18:45:38 +01:00
parent 0fbf201d08
commit 6f665b891d
Signed by: so-rose
GPG Key ID: AD901CB0F3701434
46 changed files with 1020 additions and 367 deletions

6
doc/.gitignore vendored 100644
View File

@ -0,0 +1,6 @@
/.quarto/
_site
_sidebar.yml
_site
objects.json
reference

29
doc/_quarto.yml 100644
View File

@ -0,0 +1,29 @@
project:
type: website
metadata-files:
- _sidebar.yml
quartodoc:
# Python Package
source_dir: ../src
package: blender_maxwell
parser: google
# Style
style: pkgdown
title: Package Reference
# Write Sidebar Data to Dedicated Metadata File
sidebar: _sidebar.yml
sections:
- title: Blender Maxwell API
desc: Root package for the Blender Maxwell addon
contents:
- register
- unregister
- name: preferences
children: embedded

View File

@ -6,22 +6,23 @@ authors = [
{ name = "Sofus Albert Høgsbro Rose", email = "blender-maxwell@sofusrose.com" }
]
dependencies = [
"tidy3d~=2.6.1",
"pydantic~=2.6.4",
"sympy~=1.12",
"scipy~=1.12.0",
"trimesh~=4.2.0",
"networkx~=3.2.1",
"rtree~=1.2.0",
"tidy3d==2.6.*",
"pydantic==2.6.*",
"sympy==1.12",
"scipy==1.12.*",
"trimesh==4.2.*",
"networkx==3.2.*",
"rich==12.5.*",
"rtree==1.2.*",
# Pin Blender 4.1.0-Compatible Versions
## The dependency resolver will report if anything is wonky.
"urllib3==1.26.8",
"requests==2.27.1",
"numpy==1.24.3",
"idna==3.3",
"charset-normalizer==2.0.10",
"certifi==2021.10.8",
# Pin Blender 4.1.0-Compatible Versions
## The dependency resolver will report if anything is wonky.
"urllib3==1.26.8",
"requests==2.27.1",
"numpy==1.24.3",
"idna==3.3",
"charset-normalizer==2.0.10",
"certifi==2021.10.8",
]
readme = "README.md"
requires-python = "~= 3.11"
@ -39,7 +40,8 @@ dev-dependencies = [
]
[tool.rye.scripts]
dev = "python ./scripts/run.py"
dev = "python ./src/scripts/dev.py"
pack = "python ./src/scripts/pack.py"
####################
@ -47,7 +49,8 @@ dev = "python ./scripts/run.py"
####################
[tool.ruff]
target-version = "py311"
line-length = 79
line-length = 88
pycodestyle.max-doc-length = 120
[tool.ruff.lint]
task-tags = ["TODO"]
@ -62,8 +65,8 @@ select = [
"ERA", # eradicate ## Ban Commented Code
"TRY", # tryceratops ## Exception Handling Style
"B", # flake8-bugbear ## Opinionated, Probable-Bug Patterns
#"N", # pep8-naming ## TODO: Force Good Naming Conventions
#"D", # pydocstyle ## TODO: Force docstrings
"N", # pep8-naming
"D", # pydocstyle
"SIM", # flake8-simplify ## Sanity-Check for Code Simplification
"SLF", # flake8-self ## Ban Private Member Access
"RUF", # Ruff-specific rules ## Extra Good-To-Have Rules
@ -100,6 +103,15 @@ ignore = [
"B008", # FastAPI uses this for Depends(), Security(), etc. .
"E701", # class foo(Parent): pass or if simple: return are perfectly elegant
"ERA001", # 'Commented-out code' seems to be just about anything to ruff
# Line Length - Controversy Incoming
## Hot Take: Let the Formatter Worry about Line Length
## - Yes dear reader, I'm with you. Soft wrap can go too far.
## - ...but also, sometimes there are real good reasons not to split.
## - Ex. I think 'one sentence per line' docstrings are a valid thing.
## - Overlong lines tend to be a code smell anyway
## - We'll see if my hot takes survive the week :)
"E501", # Let Formatter Worry about Line Length
]
####################

View File

@ -57,7 +57,7 @@ matplotlib==3.8.3
# via tidy3d
mpmath==1.3.0
# via sympy
networkx==3.2.1
networkx==3.2
numpy==1.24.3
# via contourpy
# via h5py
@ -77,9 +77,9 @@ partd==1.4.1
# via dask
pillow==10.2.0
# via matplotlib
pydantic==2.6.4
pydantic==2.6.0
# via tidy3d
pydantic-core==2.16.3
pydantic-core==2.16.1
# via pydantic
pygments==2.17.2
# via rich
@ -104,7 +104,7 @@ requests==2.27.1
# via tidy3d
responses==0.23.1
# via tidy3d
rich==12.5.1
rich==12.5.0
# via tidy3d
rtree==1.2.0
ruff==0.3.2
@ -117,7 +117,7 @@ shapely==2.0.3
six==1.16.0
# via python-dateutil
sympy==1.12
tidy3d==2.6.1
tidy3d==2.6.0
toml==0.10.2
# via tidy3d
toolz==0.12.1

View File

@ -56,7 +56,7 @@ matplotlib==3.8.3
# via tidy3d
mpmath==1.3.0
# via sympy
networkx==3.2.1
networkx==3.2
numpy==1.24.3
# via contourpy
# via h5py
@ -76,9 +76,9 @@ partd==1.4.1
# via dask
pillow==10.2.0
# via matplotlib
pydantic==2.6.4
pydantic==2.6.0
# via tidy3d
pydantic-core==2.16.3
pydantic-core==2.16.1
# via pydantic
pygments==2.17.2
# via rich
@ -103,7 +103,7 @@ requests==2.27.1
# via tidy3d
responses==0.23.1
# via tidy3d
rich==12.5.1
rich==12.5.0
# via tidy3d
rtree==1.2.0
s3transfer==0.5.2
@ -115,7 +115,7 @@ shapely==2.0.3
six==1.16.0
# via python-dateutil
sympy==1.12
tidy3d==2.6.1
tidy3d==2.6.0
toml==0.10.2
# via tidy3d
toolz==0.12.1

View File

View File

@ -1,16 +1,16 @@
import tomllib
from pathlib import Path
import bpy
from . import info
from .nodeps.utils import simple_logger
from . import operators_nodeps, preferences, registration
from .utils import pydeps
from .utils import logger as _logger
simple_logger.sync_bootstrap_logging(
console_level=info.BOOTSTRAP_LOG_LEVEL,
)
log = _logger.get()
PATH_ADDON_ROOT = Path(__file__).resolve().parent
with (PATH_ADDON_ROOT / 'pyproject.toml').open('rb') as f:
PROJ_SPEC = tomllib.load(f)
from . import nodeps, preferences, registration # noqa: E402
from .nodeps.utils import pydeps # noqa: E402
log = simple_logger.get(__name__)
####################
# - Addon Information
@ -33,22 +33,18 @@ bl_info = {
## The mechanism is 'dumb': the output of 'ruff fmt' MUST be the basis for replacing
def ADDON_PREFS():
return bpy.context.preferences.addons[
PROJ_SPEC['project']['name']
].preferences
####################
# - Load and Register Addon
####################
log.info('Loading Before-Deps BL_REGISTER')
BL_REGISTER__BEFORE_DEPS = [
*operators_nodeps.BL_REGISTER,
*nodeps.operators.BL_REGISTER,
*preferences.BL_REGISTER,
]
def BL_REGISTER__AFTER_DEPS(path_deps: Path):
log.info('Loading After-Deps BL_REGISTER')
with pydeps.importable_addon_deps(path_deps):
from . import node_trees, operators
return [
@ -57,11 +53,18 @@ def BL_REGISTER__AFTER_DEPS(path_deps: Path):
]
def BL_KEYMAP_ITEM_DEFS(path_deps: Path):
log.info('Loading Before-Deps BL_KEYMAP_ITEM_DEFS')
BL_KEYMAP_ITEM_DEFS__BEFORE_DEPS = [
*nodeps.operators.BL_KEYMAP_ITEM_DEFS,
]
def BL_KEYMAP_ITEM_DEFS__AFTER_DEPS(path_deps: Path):
log.info('Loading After-Deps BL_KEYMAP_ITEM_DEFS')
with pydeps.importable_addon_deps(path_deps):
from . import operators
return [
*operators.BL_KMI_REGISTER,
*operators.BL_KEYMAP_ITEM_DEFS,
]
@ -69,29 +72,44 @@ def BL_KEYMAP_ITEM_DEFS(path_deps: Path):
# - Registration
####################
def register():
"""Register the Blender addon."""
log.info('Starting %s Registration', info.ADDON_NAME)
# Register Barebones Addon for Dependency Installation
registration.register_classes(BL_REGISTER__BEFORE_DEPS)
registration.register_keymap_items(BL_KEYMAP_ITEM_DEFS__BEFORE_DEPS)
# Retrieve PyDeps Path from Addon Preferences
addon_prefs = ADDON_PREFS()
path_pydeps = addon_prefs.path_addon_pydeps
if (addon_prefs := info.addon_prefs()) is None:
unregister()
msg = f'Addon preferences not found; aborting registration of {info.ADDON_NAME}'
raise RuntimeError(msg)
log.debug('Found Addon Preferences')
# Retrieve PyDeps Path
path_pydeps = addon_prefs.pydeps_path
log.info('Loaded PyDeps Path from Addon Prefs: %s', path_pydeps)
# If Dependencies are Satisfied, Register Everything
if pydeps.check_pydeps(path_pydeps):
registration.register_classes(BL_REGISTER__AFTER_DEPS())
registration.register_keymap_items(BL_KEYMAP_ITEM_DEFS())
log.info('PyDeps Satisfied: Loading Addon %s', info.ADDON_NAME)
registration.register_classes(BL_REGISTER__AFTER_DEPS(path_pydeps))
registration.register_keymap_items(BL_KEYMAP_ITEM_DEFS__AFTER_DEPS(path_pydeps))
else:
# Delay Registration
log.info(
'PyDeps Invalid: Delaying Addon Registration of %s',
info.ADDON_NAME,
)
registration.delay_registration(
registration.EVENT__DEPS_SATISFIED,
classes_cb=BL_REGISTER__AFTER_DEPS,
keymap_item_defs_cb=BL_KEYMAP_ITEM_DEFS,
keymap_item_defs_cb=BL_KEYMAP_ITEM_DEFS__AFTER_DEPS,
)
# TODO: A popup before the addon fully loads or something like that?
## TODO: Communicate that deps must be installed and all that?
## TODO: bpy Popup to Deal w/Dependency Errors
def unregister():
"""Unregister the Blender addon."""
log.info('Starting %s Unregister', info.ADDON_NAME)
registration.unregister_classes()
registration.unregister_keymap_items()
log.info('Finished %s Unregister', info.ADDON_NAME)

View File

@ -0,0 +1,43 @@
import tomllib
from pathlib import Path
import bpy
####################
# - Addon Info
####################
PATH_ADDON_ROOT = Path(__file__).resolve().parent
# Addon Information
## bl_info is filled with PROJ_SPEC when packing the .zip.
with (PATH_ADDON_ROOT / 'pyproject.toml').open('rb') as f:
PROJ_SPEC = tomllib.load(f)
ADDON_NAME = PROJ_SPEC['project']['name']
ADDON_VERSION = PROJ_SPEC['project']['version']
# PyDeps Path Info
## requirements.lock is written when packing the .zip.
## By default, the addon pydeps are kept in the addon dir.
PATH_REQS = PATH_ADDON_ROOT / 'requirements.lock'
DEFAULT_PATH_DEPS = PATH_ADDON_ROOT / '.addon_dependencies'
# Logging Info
## By default, the addon file log writes to the addon dir.
## The initial .log_level contents are written when packing the .zip.
## Subsequent changes are managed by nodeps.utils.simple_logger.py.
DEFAULT_LOG_PATH = PATH_ADDON_ROOT / 'addon.log'
DEFAULT_LOG_PATH.touch(exist_ok=True)
PATH_BOOTSTRAP_LOG_LEVEL = PATH_ADDON_ROOT / '.bootstrap_log_level'
with PATH_BOOTSTRAP_LOG_LEVEL.open('r') as f:
BOOTSTRAP_LOG_LEVEL = int(f.read().strip())
####################
# - Addon Getters
####################
def addon_prefs() -> bpy.types.AddonPreferences | None:
if (addon := bpy.context.preferences.addons.get(ADDON_NAME)) is None:
return None
return addon.preferences

View File

@ -3,7 +3,3 @@ from . import maxwell_sim_nodes
BL_REGISTER = [
*maxwell_sim_nodes.BL_REGISTER,
]
BL_NODE_CATEGORIES = [
*maxwell_sim_nodes.BL_NODE_CATEGORIES,
]

View File

@ -16,7 +16,3 @@ BL_REGISTER = [
*nodes.BL_REGISTER,
*categories.BL_REGISTER,
]
BL_NODE_CATEGORIES = [
*categories.BL_NODE_CATEGORIES,
]

View File

@ -1,16 +1,16 @@
import typing as typ
import typing_extensions as typx
import pydantic as pyd
import sympy as sp
import sympy.physics.units as spu
import bpy
import sympy as sp
import sympy.physics.units as spu
import typing_extensions as typx
from ...utils import extra_sympy_units as spuex
from ...utils import logger as _logger
from . import contracts as ct
from .contracts import SocketType as ST
from . import sockets as sck
from .contracts import SocketType as ST
log = _logger.get(__name__)
# TODO: Caching?
# TODO: Move the manual labor stuff to contracts
@ -38,7 +38,7 @@ for socket_type in ST:
sck,
socket_type.value.removesuffix('SocketType') + 'SocketDef',
):
print('Missing SocketDef for', socket_type.value)
log.warning('Missing SocketDef for %s', socket_type.value)
####################

View File

@ -1,3 +1,5 @@
# ruff: noqa: I001
####################
# - String Types
####################
@ -5,6 +7,7 @@ from .bl import SocketName
from .bl import PresetName
from .bl import ManagedObjName
from .bl import BLEnumID
from .bl import BLColorRGBA
@ -54,3 +57,29 @@ from .data_flows import DataFlowKind
# - Schemas
####################
from . import schemas
####################
# - Export
####################
__all__ = [
'SocketName',
'PresetName',
'ManagedObjName',
'BLEnumID',
'BLColorRGBA',
'Icon',
'TreeType',
'SocketType',
'SOCKET_UNITS',
'SOCKET_COLORS',
'SOCKET_SHAPES',
'BL_SOCKET_DESCR_TYPE_MAP',
'BL_SOCKET_DIRECT_TYPE_MAP',
'BL_SOCKET_DESCR_ANNOT_STRING',
'NodeType',
'NodeCategory',
'NODE_CAT_LABELS',
'ManagedObjType',
'DataFlowKind',
'schemas',
]

View File

@ -1,9 +1,6 @@
import typing as typ
import pydantic as pyd
import typing_extensions as pytypes_ext
import bpy
####################
# - Pure BL Types
####################

View File

@ -1,4 +1,11 @@
from .preset_def import PresetDef
from .socket_def import SocketDef
from .managed_obj import ManagedObj
from .managed_obj_def import ManagedObjDef
from .preset_def import PresetDef
from .socket_def import SocketDef
__all__ = [
'SocketDef',
'ManagedObj',
'ManagedObjDef',
'PresetDef',
]

View File

@ -19,8 +19,4 @@ class ManagedObj(typ.Protocol):
def free(self): ...
def bl_select(self):
"""If this is a managed Blender object, and the operation "select this in Blender" makes sense, then do so.
Else, do nothing.
"""
def bl_select(self): ...

View File

@ -1,9 +1,7 @@
import typing as typ
from dataclasses import dataclass
import pydantic as pyd
from ..bl import PresetName, SocketName, BLEnumID
from .managed_obj import ManagedObj

View File

@ -2,7 +2,7 @@ import typing as typ
import pydantic as pyd
from ..bl import PresetName, SocketName, BLEnumID
from ..bl import PresetName, SocketName
class PresetDef(pyd.BaseModel):

View File

@ -7,9 +7,12 @@ import bpy
import pydantic as pyd
import typing_extensions as typx
from ....utils import logger
from .. import contracts as ct
from .. import sockets
log = logger.get(__name__)
CACHE: dict[str, typ.Any] = {} ## By Instance UUID
## NOTE: CACHE does not persist between file loads.
@ -56,6 +59,7 @@ class MaxwellSimNode(bpy.types.Node):
####################
def __init_subclass__(cls, **kwargs: typ.Any):
super().__init_subclass__(**kwargs)
log.debug('Initializing Node: %s', cls.node_type)
# Setup Blender ID for Node
if not hasattr(cls, 'node_type'):

View File

@ -1,16 +1,10 @@
import functools
import tempfile
from pathlib import Path
import typing as typ
from pathlib import Path
import bpy
import sympy as sp
import pydantic as pyd
import tidy3d as td
import tidy3d.web as td_web
from ......utils import tdcloud
from ......services import tdcloud
from .... import contracts as ct
from .... import sockets
from ... import base

View File

@ -1,17 +1,6 @@
import json
import tempfile
import functools
import typing as typ
import json
from pathlib import Path
import bpy
import sympy as sp
import pydantic as pyd
import tidy3d as td
import tidy3d.web as _td_web
from ......utils import tdcloud
from ......services import tdcloud
from .... import contracts as ct
from .... import sockets
from ... import base

View File

@ -1,14 +1,9 @@
import typing as typ
import tempfile
import bpy
import pydantic as pyd
import tidy3d as td
import tidy3d.web as _td_web
from .....utils import tdcloud
from .. import base
from .....services import tdcloud
from ... import contracts as ct
from .. import base
####################
@ -21,7 +16,6 @@ class ReloadFolderList(bpy.types.Operator):
@classmethod
def poll(cls, context):
space = context.space_data
return (
tdcloud.IS_AUTHENTICATED
and hasattr(context, 'socket')
@ -94,7 +88,7 @@ class Tidy3DCloudTaskBLSocket(base.MaxwellSimSocket):
existing_folder_id: bpy.props.EnumProperty(
name='Folder of Cloud Tasks',
description='An existing folder on the Tidy3D Cloud',
items=lambda self, context: self.retrieve_folders(context),
items=lambda self, _: self.retrieve_folders(),
update=(
lambda self, context: self.sync_prop('existing_folder_id', context)
),
@ -102,7 +96,7 @@ class Tidy3DCloudTaskBLSocket(base.MaxwellSimSocket):
existing_task_id: bpy.props.EnumProperty(
name='Existing Cloud Task',
description='An existing task on the Tidy3D Cloud, within the given folder',
items=lambda self, context: self.retrieve_tasks(context),
items=lambda self, _: self.retrieve_tasks(),
update=(
lambda self, context: self.sync_prop('existing_task_id', context)
),
@ -122,14 +116,14 @@ class Tidy3DCloudTaskBLSocket(base.MaxwellSimSocket):
# - Property Methods
####################
def sync_existing_folder_id(self, context):
folder_task_ids = self.retrieve_tasks(context)
folder_task_ids = self.retrieve_tasks()
self.existing_task_id = folder_task_ids[0][0]
## There's guaranteed to at least be one element, even if it's "NONE".
self.sync_prop('existing_folder_id', context)
def retrieve_folders(self, context) -> list[tuple]:
def retrieve_folders(self) -> list[tuple]:
folders = tdcloud.TidyCloudFolders.folders()
if not folders:
return [('NONE', 'None', 'No folders')]
@ -143,7 +137,7 @@ class Tidy3DCloudTaskBLSocket(base.MaxwellSimSocket):
for folder_id, cloud_folder in folders.items()
]
def retrieve_tasks(self, context) -> list[tuple]:
def retrieve_tasks(self) -> list[tuple]:
if (
cloud_folder := tdcloud.TidyCloudFolders.folders().get(
self.existing_folder_id
@ -212,7 +206,7 @@ class Tidy3DCloudTaskBLSocket(base.MaxwellSimSocket):
# Propagate along Link
if self.is_linked:
msg = (
f'Cannot sync newly created task to linked Cloud Task socket.'
'Cannot sync newly created task to linked Cloud Task socket.'
)
raise ValueError(msg)
## TODO: A little aggressive. Is there a good use case?
@ -230,7 +224,7 @@ class Tidy3DCloudTaskBLSocket(base.MaxwellSimSocket):
# Propagate along Link
if self.is_linked:
msg = (
f'Cannot sync newly created task to linked Cloud Task socket.'
'Cannot sync newly created task to linked Cloud Task socket.'
)
raise ValueError(msg)
## TODO: A little aggressive. Is there a good use case?

View File

@ -0,0 +1,3 @@
from . import operators, utils
__all__ = ['operators', 'utils']

View File

@ -0,0 +1,14 @@
from . import install_deps, uninstall_deps
BL_REGISTER = [
*install_deps.BL_REGISTER,
*uninstall_deps.BL_REGISTER,
]
BL_KEYMAP_ITEM_DEFS = [
*install_deps.BL_KEYMAP_ITEM_DEFS,
*uninstall_deps.BL_KEYMAP_ITEM_DEFS,
]
__all__ = []

View File

@ -4,7 +4,10 @@ from pathlib import Path
import bpy
from .. import registration
from ... import registration
from ..utils import pydeps, simple_logger
log = simple_logger.get(__name__)
class InstallPyDeps(bpy.types.Operator):
@ -12,15 +15,30 @@ class InstallPyDeps(bpy.types.Operator):
bl_label = 'Install BLMaxwell Python Deps'
path_addon_pydeps: bpy.props.StringProperty(
name='Path to Addon Python Dependencies'
name='Path to Addon Python Dependencies',
default='',
)
path_addon_reqs: bpy.props.StringProperty(
name='Path to Addon Python Dependencies'
name='Path to Addon Python Dependencies',
default='',
)
@classmethod
def poll(cls, _: bpy.types.Context):
return not pydeps.DEPS_OK
def execute(self, _: bpy.types.Context):
if self.path_addon_pydeps == '' or self.path_addon_reqs == '':
msg = f"A path for operator {self.bl_idname} isn't set"
raise ValueError(msg)
path_addon_pydeps = Path(self.path_addon_pydeps)
path_addon_reqs = Path(self.path_addon_reqs)
log.info(
'Running Install PyDeps w/requirements.txt (%s) to path: %s',
path_addon_reqs,
path_addon_pydeps,
)
# Create the Addon-Specific Folder (if Needed)
## It MUST, however, have a parent already
@ -34,21 +52,23 @@ class InstallPyDeps(bpy.types.Operator):
# Install Deps w/Bundled pip
try:
subprocess.check_call(
[
str(python_exec),
'-m',
'pip',
'install',
'-r',
str(path_addon_reqs),
'--target',
str(path_addon_pydeps),
]
cmdline = [
str(python_exec),
'-m',
'pip',
'install',
'-r',
str(path_addon_reqs),
'--target',
str(path_addon_pydeps),
]
log.info(
'Running pip w/cmdline: %s',
' '.join(cmdline),
)
except subprocess.CalledProcessError as e:
msg = f'Failed to install dependencies: {str(e)}'
self.report({'ERROR'}, msg)
subprocess.check_call(cmdline)
except subprocess.CalledProcessError:
log.exception('Failed to install PyDeps')
return {'CANCELLED'}
registration.run_delayed_registration(
@ -64,3 +84,4 @@ class InstallPyDeps(bpy.types.Operator):
BL_REGISTER = [
InstallPyDeps,
]
BL_KEYMAP_ITEM_DEFS = []

View File

@ -0,0 +1,85 @@
import subprocess
import sys
from pathlib import Path
import bpy
from .. import registration
from ..utils import logger as _logger
log = _logger.get(__name__)
class InstallPyDeps(bpy.types.Operator):
bl_idname = 'blender_maxwell.nodeps__addon_install_popup'
bl_label = 'Popup to Install BLMaxwell Python Deps'
path_addon_pydeps: bpy.props.StringProperty(
name='Path to Addon Python Dependencies',
default='',
)
path_addon_reqs: bpy.props.StringProperty(
name='Path to Addon Python Dependencies',
default='',
)
# TODO: poll()
def execute(self, _: bpy.types.Context):
if self.path_addon_pydeps == '' or self.path_addon_reqs == '':
msg = f"A path for operator {self.bl_idname} isn't set"
raise ValueError(msg)
path_addon_pydeps = Path(self.path_addon_pydeps)
path_addon_reqs = Path(self.path_addon_reqs)
log.info(
'Running Install PyDeps w/requirements.txt (%s) to path: %s',
path_addon_reqs,
path_addon_pydeps,
)
# Create the Addon-Specific Folder (if Needed)
## It MUST, however, have a parent already
path_addon_pydeps.mkdir(parents=False, exist_ok=True)
# Determine Path to Blender's Bundled Python
## bpy.app.binary_path_python was deprecated in 2.91.
## sys.executable points to the correct bundled Python.
## See <https://developer.blender.org/docs/release_notes/2.91/python_api/>
python_exec = Path(sys.executable)
# Install Deps w/Bundled pip
try:
cmdline = [
str(python_exec),
'-m',
'pip',
'install',
'-r',
str(path_addon_reqs),
'--target',
str(path_addon_pydeps),
]
log.info(
'Running pip w/cmdline: %s',
' '.join(cmdline),
)
subprocess.check_call(cmdline)
except subprocess.CalledProcessError:
log.exception('Failed to install PyDeps')
return {'CANCELLED'}
registration.run_delayed_registration(
registration.EVENT__ON_DEPS_INSTALLED,
path_addon_pydeps,
)
return {'FINISHED'}
####################
# - Blender Registration
####################
BL_REGISTER = [
InstallPyDeps,
]
BL_KEYMAP_ITEM_DEFS = []

View File

@ -1,9 +1,9 @@
import shutil
from pathlib import Path
import bpy
from ..utils import pydeps
from .. import registration
class UninstallPyDeps(bpy.types.Operator):
@ -14,7 +14,12 @@ class UninstallPyDeps(bpy.types.Operator):
name='Path to Addon Python Dependencies'
)
@classmethod
def poll(cls, _: bpy.types.Context):
return pydeps.DEPS_OK
def execute(self, _: bpy.types.Context):
path_addon_pydeps = Path(self.path_addon_pydeps)
if (
pydeps.check_pydeps()
and self.path_addon_pydeps.exists()
@ -35,3 +40,4 @@ class UninstallPyDeps(bpy.types.Operator):
BL_REGISTER = [
UninstallPyDeps,
]
BL_KEYMAP_ITEM_DEFS = []

View File

@ -0,0 +1,3 @@
from . import pydeps
__all__ = ['pydeps']

View File

@ -4,17 +4,10 @@ import os
import sys
from pathlib import Path
from . import logger as _logger
from ... import info
from . import simple_logger
log = _logger.get()
####################
# - Constants
####################
PATH_ADDON_ROOT = Path(__file__).resolve().parent.parent
PATH_REQS = PATH_ADDON_ROOT / 'requirements.txt'
DEFAULT_PATH_DEPS = PATH_ADDON_ROOT / '.addon_dependencies'
DEFAULT_PATH_DEPS.mkdir(exist_ok=True)
log = simple_logger.get(__name__)
####################
# - Globals
@ -29,21 +22,39 @@ DEPS_ISSUES: list[str] | None = None
@contextlib.contextmanager
def importable_addon_deps(path_deps: Path):
os_path = os.fspath(path_deps)
log.info('Adding Path to sys.path: %s', str(os_path))
sys.path.insert(0, os_path)
try:
yield
finally:
log.info('Removing Path from sys.path: %s', str(os_path))
sys.path.remove(os_path)
@contextlib.contextmanager
def syspath_from_bpy_prefs() -> bool:
import bpy
addon_prefs = bpy.context.preferences.addons[info.ADDON_NAME].preferences
if hasattr(addon_prefs, 'path_addon_pydeps'):
log.info('Retrieved PyDeps Path from Addon Prefs')
path_pydeps = addon_prefs.path_addon_pydeps
with importable_addon_deps(path_pydeps):
yield True
else:
log.info("Couldn't PyDeps Path from Addon Prefs")
yield False
####################
# - Check PyDeps
####################
def _check_pydeps(
path_requirementstxt: Path,
path_requirementslock: Path,
path_deps: Path,
) -> dict[str, tuple[str, str]]:
"""Check if packages defined in a 'requirements.txt' file are currently installed.
"""Check if packages defined in a 'requirements.lock' file are currently installed.
Returns a list of any issues (if empty, then all dependencies are correctly satisfied).
"""
@ -54,7 +65,7 @@ def _check_pydeps(
See <https://peps.python.org/pep-0426/#name>"""
return deplock.lower().replace('_', '-')
with path_requirementstxt.open('r') as file:
with path_requirementslock.open('r') as file:
required_depslock = {
conform_pypi_package_deplock(line)
for raw_line in file.readlines()
@ -108,14 +119,15 @@ def check_pydeps(path_deps: Path):
global DEPS_OK # noqa: PLW0603
global DEPS_ISSUES # noqa: PLW0603
if len(_issues := _check_pydeps(PATH_REQS, path_deps)) > 0:
# log.debug('Package Check Failed:', end='\n\t')
# log.debug(*_issues, sep='\n\t')
if len(issues := _check_pydeps(info.PATH_REQS, path_deps)) > 0:
log.info('PyDeps Check Failed')
log.debug('%s', ', '.join(issues))
DEPS_OK = False
DEPS_ISSUES = _issues
DEPS_ISSUES = issues
else:
log.info('PyDeps Check Succeeded')
DEPS_OK = True
DEPS_ISSUES = _issues
DEPS_ISSUES = []
return DEPS_OK

View File

@ -0,0 +1,166 @@
import logging
import typing as typ
from pathlib import Path
LogLevel: typ.TypeAlias = int
LogHandler: typ.TypeAlias = typ.Any ## TODO: Can we do better?
####################
# - Constants
####################
LOG_LEVEL_MAP: dict[str, LogLevel] = {
'DEBUG': logging.DEBUG,
'INFO': logging.INFO,
'WARNING': logging.WARNING,
'ERROR': logging.ERROR,
'CRITICAL': logging.CRITICAL,
}
SIMPLE_LOGGER_PREFIX = 'simple::'
STREAM_LOG_FORMAT = 11*' ' + '%(levelname)-8s %(message)s (%(name)s)'
FILE_LOG_FORMAT = STREAM_LOG_FORMAT
####################
# - Globals
####################
CACHE = {
'console_level': None,
'file_path': None,
'file_level': logging.NOTSET,
}
####################
# - Logging Handlers
####################
def console_handler(level: LogLevel) -> logging.StreamHandler:
stream_formatter = logging.Formatter(STREAM_LOG_FORMAT)
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(stream_formatter)
stream_handler.setLevel(level)
return stream_handler
def file_handler(path_log_file: Path, level: LogLevel) -> logging.FileHandler:
file_formatter = logging.Formatter(FILE_LOG_FORMAT)
file_handler = logging.FileHandler(path_log_file)
file_handler.setFormatter(file_formatter)
file_handler.setLevel(level)
return file_handler
####################
# - Logger Setup
####################
def setup_logger(
cb_console_handler: typ.Callable[[LogLevel], LogHandler],
cb_file_handler: typ.Callable[[Path, LogLevel], LogHandler],
logger: logging.Logger,
console_level: LogLevel | None,
file_path: Path | None,
file_level: LogLevel,
):
# Delegate Level Semantics to Log Handlers
## This lets everything through
logger.setLevel(logging.DEBUG)
# DO NOT Propagate to Root Logger
## This looks like 'double messages'
logger.propagate = False
## See SO/6729268/log-messages-appearing-twice-with-python-logging
# Clear Existing Handlers
if logger.handlers:
logger.handlers.clear()
# Add Console Logging Handler
if console_level is not None:
logger.addHandler(cb_console_handler(console_level))
# Add File Logging Handler
if file_path is not None:
logger.addHandler(cb_file_handler(file_path, file_level))
def get(module_name):
logger = logging.getLogger(SIMPLE_LOGGER_PREFIX + module_name)
# Reuse Cached Arguments from Last sync_*
setup_logger(
console_handler,
file_handler,
logger,
console_level=CACHE['console_level'],
file_path=CACHE['file_path'],
file_level=CACHE['file_level'],
)
return logger
####################
# - Logger Sync
####################
def sync_bootstrap_logging(
console_level: LogLevel | None = None,
file_path: Path | None = None,
file_level: LogLevel = logging.NOTSET,
):
CACHE['console_level'] = console_level
CACHE['file_path'] = file_path
CACHE['file_level'] = file_level
logger_logger = logging.getLogger(__name__)
for name in logging.root.manager.loggerDict:
logger = logging.getLogger(name)
setup_logger(
console_handler,
file_handler,
logger,
console_level=console_level,
file_path=file_path,
file_level=file_level,
)
logger_logger.info("Bootstrapped Logging w/Settings %s", str(CACHE))
def sync_loggers(
cb_console_handler: typ.Callable[[LogLevel], LogHandler],
cb_file_handler: typ.Callable[[Path, LogLevel], LogHandler],
console_level: LogLevel | None,
file_path: Path | None,
file_level: LogLevel,
):
"""Update all loggers to conform to the given per-handler on/off state and log level."""
CACHE['console_level'] = console_level
CACHE['file_path'] = file_path
CACHE['file_level'] = file_level
for name in logging.root.manager.loggerDict:
logger = logging.getLogger(name)
setup_logger(
cb_console_handler,
cb_file_handler,
logger,
console_level=console_level,
file_path=file_path,
file_level=file_level,
)
####################
# - Logger Iteration
####################
def loggers():
return [
logging.getLogger(name) for name in logging.root.manager.loggerDict
]
def simple_loggers():
return [
logging.getLogger(name)
for name in logging.root.manager.loggerDict
if name.startswith(SIMPLE_LOGGER_PREFIX)
]

View File

@ -3,6 +3,6 @@ from . import connect_viewer
BL_REGISTER = [
*connect_viewer.BL_REGISTER,
]
BL_KMI_REGISTER = [
*connect_viewer.BL_KMI_REGISTER,
BL_KEYMAP_ITEM_DEFS = [
*connect_viewer.BL_KEYMAP_ITEM_DEFS,
]

View File

@ -1,33 +0,0 @@
import sys
import shutil
import subprocess
from pathlib import Path
import bpy
from . import types
class BlenderMaxwellUninstallDependenciesOperator(bpy.types.Operator):
bl_idname = types.BlenderMaxwellUninstallDependencies
bl_label = 'Uninstall Dependencies for Blender Maxwell Addon'
def execute(self, context):
addon_dir = Path(__file__).parent.parent
# addon_specific_folder = addon_dir / '.dependencies'
addon_specific_folder = Path(
'/home/sofus/src/college/bsc_ge/thesis/code/.cached-dependencies'
)
shutil.rmtree(addon_specific_folder)
return {'FINISHED'}
####################
# - Blender Registration
####################
BL_REGISTER = [
BlenderMaxwellUninstallDependenciesOperator,
]
BL_KMI_REGISTER = []

View File

@ -1,5 +1,9 @@
import bpy
from ..utils import logger as logger
log = logger.get(__name__)
class ConnectViewerNode(bpy.types.Operator):
bl_idname = 'blender_maxwell.connect_viewer_node'
@ -55,7 +59,7 @@ BL_REGISTER = [
ConnectViewerNode,
]
BL_KMI_REGISTER = [
BL_KEYMAP_ITEM_DEFS = [
{
'_': (
ConnectViewerNode.bl_idname,

View File

@ -1,7 +0,0 @@
from . import install_deps
from . import uninstall_deps
BL_REGISTER = [
*install_deps.BL_REGISTER,
*uninstall_deps.BL_REGISTER,
]

View File

@ -1,149 +1,283 @@
import tomllib
import logging
from pathlib import Path
import bpy
from . import registration
from .operators_nodeps import install_deps, uninstall_deps
from .utils import logger as _logger
from .utils import pydeps
from . import info, registration
from .nodeps.operators import install_deps, uninstall_deps
from .nodeps.utils import pydeps, simple_logger
####################
# - Constants
####################
log = _logger.get()
PATH_ADDON_ROOT = Path(__file__).resolve().parent
with (PATH_ADDON_ROOT / 'pyproject.toml').open('rb') as f:
PROJ_SPEC = tomllib.load(f)
log = simple_logger.get(__name__)
####################
# - Preferences
####################
class BlenderMaxwellAddonPreferences(bpy.types.AddonPreferences):
bl_idname = PROJ_SPEC['project']['name'] ## MUST match addon package name
class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
"""Manages user preferences and settings for the Blender Maxwell addon.
"""
bl_idname = info.ADDON_NAME ## MUST match addon package name
####################
# - Properties
####################
# Default PyDeps Path
use_default_path_addon_pydeps: bpy.props.BoolProperty(
# Use of Default PyDeps Path
use_default_pydeps_path: bpy.props.BoolProperty(
name='Use Default PyDeps Path',
description='Whether to use the default PyDeps path',
default=True,
update=lambda self, context: self.sync_use_default_path_addon_pydeps(
context
),
update=lambda self, context: self.sync_use_default_pydeps_path(context),
)
cache_path_addon_pydeps: bpy.props.StringProperty(
cache__pydeps_path_while_using_default: bpy.props.StringProperty(
name='Cached Addon PyDeps Path',
default=(_default_pydeps_path := str(pydeps.DEFAULT_PATH_DEPS)),
) ## Cache for use when toggling use of default pydeps path.
## Must default to same as raw_path_* if default=True on use_default_*
default=(_default_pydeps_path := str(info.DEFAULT_PATH_DEPS)),
)
# Custom PyDeps Path
raw_path_addon_pydeps: bpy.props.StringProperty(
bl__pydeps_path: bpy.props.StringProperty(
name='Addon PyDeps Path',
description='Path to Addon Python Dependencies',
subtype='FILE_PATH',
default=_default_pydeps_path,
update=lambda self, context: self.sync_path_addon_pydeps(context),
update=lambda self, _: self.sync_pydeps_path(),
)
prev_raw_path_addon_pydeps: bpy.props.StringProperty(
cache__backup_pydeps_path: bpy.props.StringProperty(
name='Previous Addon PyDeps Path',
default=_default_pydeps_path,
) ## Use to restore raw_path_addon_pydeps after non-validated change.
)
# Log Settings
use_log_console: bpy.props.BoolProperty(
name='Log to Console',
description='Whether to use the console for addon logging',
default=True,
update=lambda self, _: self.sync_addon_logging(),
)
bl__log_level_console: bpy.props.EnumProperty(
name='Console Log Level',
description='Level of addon logging to expose in the console',
items=[
('DEBUG', 'Debug', 'Debug'),
('INFO', 'Info', 'Info'),
('WARNING', 'Warning', 'Warning'),
('ERROR', 'Error', 'Error'),
('CRITICAL', 'Critical', 'Critical'),
],
default='DEBUG',
update=lambda self, _: self.sync_addon_logging(),
)
use_log_file: bpy.props.BoolProperty(
name='Log to File',
description='Whether to use a file for addon logging',
default=True,
update=lambda self, _: self.sync_addon_logging(),
)
bl__log_file_path: bpy.props.StringProperty(
name='Log Path',
description='Path to the Addon Log File',
subtype='FILE_PATH',
default=str(info.DEFAULT_LOG_PATH),
update=lambda self, _: self.sync_addon_logging(),
)
bl__log_level_file: bpy.props.EnumProperty(
name='File Log Level',
description='Level of addon logging to expose in the file',
items=[
('DEBUG', 'Debug', 'Debug'),
('INFO', 'Info', 'Info'),
('WARNING', 'Warning', 'Warning'),
('ERROR', 'Error', 'Error'),
('CRITICAL', 'Critical', 'Critical'),
],
default='DEBUG',
update=lambda self, _: self.sync_addon_logging(),
)
# TODO: LOGGING SETTINGS
####################
# - Property Sync
####################
def sync_use_default_path_addon_pydeps(self, _: bpy.types.Context):
# Switch to Default
if self.use_default_path_addon_pydeps:
self.cache_path_addon_pydeps = self.raw_path_addon_pydeps
self.raw_path_addon_pydeps = str(
pydeps.DEFAULT_PATH_DEPS.resolve()
)
# Switch from Default
else:
self.raw_path_addon_pydeps = self.cache_path_addon_pydeps
self.cache_path_addon_pydeps = ''
def sync_path_addon_pydeps(self, _: bpy.types.Context):
# Error if Default Path is in Use
if self.use_default_path_addon_pydeps:
self.raw_path_addon_pydeps = self.prev_raw_path_addon_pydeps
msg = "Can't update pydeps path while default path is being used"
raise ValueError(msg)
# Error if Dependencies are All Installed
if pydeps.DEPS_OK:
self.raw_path_addon_pydeps = self.prev_raw_path_addon_pydeps
msg = "Can't update pydeps path while dependencies are installed"
raise ValueError(msg)
# Update PyDeps
## This also updates pydeps.DEPS_OK and pydeps.DEPS_ISSUES.
## The result is used to run any delayed registrations...
## ...which might be waiting for deps to be satisfied.
if pydeps.check_pydeps(self.path_addon_pydeps):
registration.run_delayed_registration(
registration.EVENT__DEPS_SATISFIED,
self.path_addon_pydeps,
)
self.prev_raw_path_addon_pydeps = self.raw_path_addon_pydeps
####################
# - Property Methods
####################
@property
def path_addon_pydeps(self) -> Path:
return Path(bpy.path.abspath(self.raw_path_addon_pydeps))
def pydeps_path(self) -> Path:
return Path(bpy.path.abspath(self.bl__pydeps_path))
@path_addon_pydeps.setter
def path_addon_pydeps(self, value: Path) -> None:
self.raw_path_addon_pydeps = str(value.resolve())
@pydeps_path.setter
def pydeps_path(self, value: Path) -> None:
self.bl__pydeps_path = str(value.resolve())
@property
def log_path(self) -> Path:
return Path(bpy.path.abspath(self.bl__log_file_path))
####################
# - Property Sync
####################
def sync_addon_logging(self, only_sync_logger: logging.Logger | None = None):
    """Apply the preference-configured logging setup to one logger, or to all.

    Selects the logging backend based on dependency availability: when the
    addon's PyDeps are satisfied, the rich-enabled `.utils.logger` is imported
    from the PyDeps path; otherwise the dependency-free `simple_logger` is used.

    Parameters:
        only_sync_logger: When given, only this logger is reconfigured;
            when None, all known loggers are synced.
    """
    # Pick the logging backend: rich-enabled when deps are installed,
    # the no-deps simple logger otherwise.
    if pydeps.DEPS_OK:
        log.info('Getting Logger (DEPS_OK = %s)', str(pydeps.DEPS_OK))
        # Importing .utils.logger requires the PyDeps path on sys.path.
        with pydeps.importable_addon_deps(self.pydeps_path):
            from .utils import logger
    else:
        log.info('Getting Simple Logger (DEPS_OK = %s)', str(pydeps.DEPS_OK))
        logger = simple_logger

    # Retrieve Configured Log Levels
    log_level_console = logger.LOG_LEVEL_MAP[self.bl__log_level_console]
    log_level_file = logger.LOG_LEVEL_MAP[self.bl__log_level_file]
    # None disables the corresponding handler in the setup functions below.
    log_setup_kwargs = {
        'console_level': log_level_console if self.use_log_console else None,
        'file_path': self.log_path if self.use_log_file else None,
        'file_level': log_level_file,
    }

    # Sync Single Logger / All Loggers
    if only_sync_logger is not None:
        logger.setup_logger(
            logger.console_handler,
            logger.file_handler,
            only_sync_logger,
            **log_setup_kwargs,
        )
        return
    logger.sync_loggers(
        logger.console_handler,
        logger.file_handler,
        **log_setup_kwargs,
    )
def sync_use_default_pydeps_path(self, _: bpy.types.Context):
# Switch to Default
if self.use_default_pydeps_path:
log.info(
'Switching to Default PyDeps Path %s',
str(info.DEFAULT_PATH_DEPS.resolve()),
)
self.cache__pydeps_path_while_using_default = self.bl__pydeps_path
self.bl__pydeps_path = str(info.DEFAULT_PATH_DEPS.resolve())
# Switch from Default
else:
log.info(
'Switching from Default PyDeps Path %s to Cached PyDeps Path %s',
str(info.DEFAULT_PATH_DEPS.resolve()),
self.cache__pydeps_path_while_using_default,
)
self.bl__pydeps_path = self.cache__pydeps_path_while_using_default
self.cache__pydeps_path_while_using_default = ''
def sync_pydeps_path(self):
if self.cache__backup_pydeps_path != self.bl__pydeps_path:
log.info(
'Syncing PyDeps Path from/to: %s => %s',
self.cache__backup_pydeps_path,
self.bl__pydeps_path,
)
else:
log.info(
'Syncing PyDeps Path In-Place @ %s',
str(self.bl__pydeps_path),
)
# Error: Default Path in Use
if self.use_default_pydeps_path:
self.bl__pydeps_path = self.cache__backup_pydeps_path
msg = "Can't update pydeps path while default path is being used"
raise ValueError(msg)
# Error: PyDeps Already Installed
if pydeps.DEPS_OK:
self.bl__pydeps_path = self.cache__backup_pydeps_path
msg = "Can't update pydeps path while dependencies are installed"
raise ValueError(msg)
# Re-Check PyDeps
log.info(
'Checking PyDeps of New Path %s',
str(self.pydeps_path),
)
if pydeps.check_pydeps(self.pydeps_path):
# Re-Sync Loggers
## We can now upgrade to the fancier loggers.
self.sync_addon_logging()
# Run Delayed Registrations
## Since the deps are OK, we can now register the whole addon.
registration.run_delayed_registration(
registration.EVENT__DEPS_SATISFIED,
self.pydeps_path,
)
# Backup New PyDeps Path
self.cache__backup_pydeps_path = self.bl__pydeps_path
####################
# - UI
####################
def draw(self, _: bpy.types.Context) -> None:
layout = self.layout
num_pydeps_issues = (
len(pydeps.DEPS_ISSUES) if pydeps.DEPS_ISSUES is not None else 0
)
num_pydeps_issues = len(pydeps.DEPS_ISSUES) if pydeps.DEPS_ISSUES else 0
# Box w/Split: Log Level
box = layout.box()
row = box.row()
row.alignment = 'CENTER'
row.label(text='Logging')
split = box.split(factor=0.5)
## Split Col: Console Logging
col = split.column()
row = col.row()
row.prop(self, 'use_log_console', toggle=True)
row = col.row()
row.enabled = self.use_log_console
row.prop(self, 'bl__log_level_console')
## Split Col: File Logging
col = split.column()
row = col.row()
row.prop(self, 'use_log_file', toggle=True)
row = col.row()
row.enabled = self.use_log_file
row.prop(self, 'bl__log_file_path')
row = col.row()
row.enabled = self.use_log_file
row.prop(self, 'bl__log_level_file')
# Box: Dependency Status
box = layout.box()
## Row: Header
row = box.row(align=True)
row.alignment = 'CENTER'
row.label(text='Addon-Specific Python Deps')
row.label(text='Python Dependencies')
## Row: Toggle Default PyDeps Path
row = box.row(align=True)
row.enabled = not pydeps.DEPS_OK
row.prop(
self,
'use_default_path_addon_pydeps',
'use_default_pydeps_path',
text='Use Default PyDeps Install Path',
toggle=True,
)
## Row: Current PyDeps Path
row = box.row(align=True)
row.enabled = (
not pydeps.DEPS_OK and not self.use_default_path_addon_pydeps
)
row.prop(self, 'raw_path_addon_pydeps', text='PyDeps Install Path')
row.enabled = not pydeps.DEPS_OK and not self.use_default_pydeps_path
row.prop(self, 'bl__pydeps_path', text='PyDeps Install Path')
## Row: More Information Panel
row = box.row(align=True)
header, panel = row.panel('pydeps_issues', default_closed=True)
header.label(text=f'Dependency Conflicts ({num_pydeps_issues})')
col = box.column(align=True)
header, panel = col.panel('pydeps_issues', default_closed=True)
header.label(text=f'Install Mismatches ({num_pydeps_issues})')
if panel is not None:
grid = panel.grid_flow()
for issue in pydeps.DEPS_ISSUES:
@ -151,27 +285,25 @@ class BlenderMaxwellAddonPreferences(bpy.types.AddonPreferences):
## Row: Install
row = box.row(align=True)
row.enabled = not pydeps.DEPS_OK
op = row.operator(
install_deps.InstallPyDeps.bl_idname,
text='Install PyDeps',
)
op.path_addon_pydeps = str(self.path_addon_pydeps)
op.path_addon_reqs = str(pydeps.PATH_REQS)
op.path_addon_pydeps = str(self.pydeps_path)
op.path_addon_reqs = str(info.PATH_REQS)
## Row: Uninstall
row = box.row(align=True)
row.enabled = pydeps.DEPS_OK
op = row.operator(
uninstall_deps.UninstallPyDeps.bl_idname,
text='Uninstall PyDeps',
)
op.path_addon_pydeps = str(self.path_addon_pydeps)
op.path_addon_pydeps = str(self.pydeps_path)
####################
# - Blender Registration
####################
BL_REGISTER = [
BlenderMaxwellAddonPreferences,
BLMaxwellAddonPrefs,
]

View File

@ -3,9 +3,9 @@ from pathlib import Path
import bpy
from .utils import logger as _logger
from .nodeps.utils import simple_logger
log = _logger.get()
log = simple_logger.get(__name__)
# TODO: More types for these things!
DelayedRegKey: typ.TypeAlias = str
@ -33,18 +33,28 @@ EVENT__DEPS_SATISFIED: str = 'on_deps_satisfied'
# - Class Registration
####################
def register_classes(bl_register: list):
    """Register Blender classes with `bpy`, tracking them in `REG__CLASSES`.

    Registration is idempotent: a class whose `bl_idname` matches an
    already-tracked class is skipped instead of re-registered.

    Parameters:
        bl_register: Blender classes (operators, panels, preferences, ...)
            to register.
    """
    log.info('Registering %s Classes', len(bl_register))
    # BUG FIX: the previous guard tested `cls.bl_idname in REG__CLASSES`,
    # comparing an idname *string* against tracked *class objects* — it could
    # never match, so duplicates were always re-registered. Compare against
    # the tracked classes' idnames instead.
    registered_idnames = {
        getattr(reg_cls, 'bl_idname', None) for reg_cls in REG__CLASSES
    }
    for cls in bl_register:
        cls_idname = getattr(cls, 'bl_idname', None)
        if cls_idname is not None and cls_idname in registered_idnames:
            msg = f'Skipping register of {cls.bl_idname}'
            log.info(msg)
            continue
        log.debug(
            'Registering Class %s',
            repr(cls),
        )
        bpy.utils.register_class(cls)
        # Track so unregister_classes() can undo in reverse order.
        REG__CLASSES.append(cls)
        if cls_idname is not None:
            registered_idnames.add(cls_idname)
def unregister_classes():
    """Unregister every tracked Blender class, newest-first, emptying the registry."""
    log.info('Unregistering %s Classes', len(REG__CLASSES))
    # Pop from the end so classes are unregistered in reverse registration
    # order, leaving REG__CLASSES empty when done.
    while REG__CLASSES:
        bl_cls = REG__CLASSES.pop()
        log.debug(
            'Unregistering Class %s',
            repr(bl_cls),
        )
        bpy.utils.unregister_class(bl_cls)
@ -61,8 +71,13 @@ def register_keymap_items(keymap_item_defs: list[dict]):
name='Node Editor',
space_type='NODE_EDITOR',
)
log.info(
'Registered Keymap %s',
str(BL_KEYMAP),
)
# Register Keymaps
log.info('Registering %s Keymap Items', len(keymap_item_defs))
for keymap_item_def in keymap_item_defs:
keymap_item = BL_KEYMAP.keymap_items.new(
*keymap_item_def['_'],
@ -70,6 +85,11 @@ def register_keymap_items(keymap_item_defs: list[dict]):
shift=keymap_item_def['shift'],
alt=keymap_item_def['alt'],
)
log.debug(
'Registered Keymap Item %s with spec %s',
repr(keymap_item),
keymap_item_def,
)
REG__KEYMAP_ITEMS.append(keymap_item)
@ -77,11 +97,20 @@ def unregister_keymap_items():
global BL_KEYMAP # noqa: PLW0603
# Unregister Keymaps
log.info('Unregistering %s Keymap Items', len(REG__KEYMAP_ITEMS))
for keymap_item in reversed(REG__KEYMAP_ITEMS):
log.debug(
'Unregistered Keymap Item %s',
repr(keymap_item),
)
BL_KEYMAP.keymap_items.remove(keymap_item)
# Lazy-Unload BL_NODE_KEYMAP
if BL_KEYMAP is not None:
log.info(
'Unregistered Keymap %s',
repr(BL_KEYMAP),
)
REG__KEYMAP_ITEMS.clear()
BL_KEYMAP = None
@ -99,6 +128,11 @@ def delay_registration(
raise ValueError(msg)
def register_cb(path_deps: Path):
log.info(
'Running Delayed Registration (key %s) with PyDeps: %s',
delayed_reg_key,
path_deps,
)
register_classes(classes_cb(path_deps))
register_keymap_items(keymap_item_defs_cb(path_deps))

View File

@ -0,0 +1,3 @@
from . import tdcloud
__all__ = ['tdcloud']

View File

@ -30,17 +30,16 @@ IS_AUTHENTICATED = False
def is_online():
global IS_ONLINE
return IS_ONLINE
def set_online():
global IS_ONLINE
global IS_ONLINE # noqa: PLW0603
IS_ONLINE = True
def set_offline():
global IS_ONLINE
global IS_ONLINE # noqa: PLW0603
IS_ONLINE = False
@ -48,8 +47,7 @@ def set_offline():
# - Cloud Authentication
####################
def check_authentication() -> bool:
global IS_AUTHENTICATED
global IS_ONLINE
global IS_AUTHENTICATED # noqa: PLW0603
# Check Previous Authentication
## If we authenticated once, we presume that it'll work again.
@ -95,10 +93,10 @@ class TidyCloudFolders:
try:
cloud_folders = td_web.core.task_core.Folder.list()
set_online()
except td.exceptions.WebError:
except td.exceptions.WebError as ex:
set_offline()
msg = 'Tried to get cloud folders, but cannot connect to cloud'
raise RuntimeError(msg)
raise RuntimeError(msg) from ex
folders = {
cloud_folder.folder_id: cloud_folder
@ -117,12 +115,12 @@ class TidyCloudFolders:
try:
cloud_folder = td_web.core.task_core.Folder.create(folder_name)
set_online()
except td.exceptions.WebError:
except td.exceptions.WebError as ex:
set_offline()
msg = (
'Tried to create cloud folder, but cannot connect to cloud'
)
raise RuntimeError(msg)
raise RuntimeError(msg) from ex
if cls.cache_folders is None:
cls.cache_folders = {}
@ -185,9 +183,11 @@ class TidyCloudTasks:
- `cloud_task.get_log(path)`: GET the run log. Remember to use `NamedTemporaryFile` if a stringified log is desired.
"""
cache_tasks: dict[CloudTaskID, CloudTask] = {}
cache_folder_tasks: dict[CloudFolderID, set[CloudTaskID]] = {}
cache_task_info: dict[CloudTaskID, CloudTaskInfo] = {}
cache_tasks: typ.ClassVar[dict[CloudTaskID, CloudTask]] = {}
cache_folder_tasks: typ.ClassVar[
dict[CloudFolderID, set[CloudTaskID]]
] = {}
cache_task_info: typ.ClassVar[dict[CloudTaskID, CloudTaskInfo]] = {}
@classmethod
def clear_cache(cls):
@ -217,12 +217,12 @@ class TidyCloudTasks:
try:
folder_tasks = cloud_folder.list_tasks()
set_online()
except td.exceptions.WebError:
except td.exceptions.WebError as ex:
set_offline()
msg = (
'Tried to get tasks of a cloud folder, but cannot access cloud'
)
raise RuntimeError(msg)
raise RuntimeError(msg) from ex
# No Tasks: Empty Set
if folder_tasks is None:
@ -250,9 +250,7 @@ class TidyCloudTasks:
)
## Task by-Folder Cache
cls.cache_folder_tasks[cloud_folder.folder_id] = {
task_id for task_id in cloud_tasks
}
cls.cache_folder_tasks[cloud_folder.folder_id] = set(cloud_tasks)
return cloud_tasks
@ -287,14 +285,14 @@ class TidyCloudTasks:
folder_name=cloud_folder.folder_name,
)
set_online()
except td.exceptions.WebError:
except td.exceptions.WebError as ex:
set_offline()
msg = 'Tried to create cloud task, but cannot access cloud'
raise RuntimeError(msg)
raise RuntimeError(msg) from ex
# Upload Simulation to Cloud Task
if upload_progress_cb is not None:
upload_progress_cb = lambda uploaded_bytes: None
raise NotImplementedError
try:
cloud_task.upload_simulation(
stub,
@ -302,10 +300,10 @@ class TidyCloudTasks:
# progress_callback=upload_progress_cb,
)
set_online()
except td.exceptions.WebError:
except td.exceptions.WebError as ex:
set_offline()
msg = 'Tried to upload simulation to cloud task, but cannot access cloud'
raise RuntimeError(msg)
raise RuntimeError(msg) from ex
# Populate Caches
## Direct Task Cache
@ -348,10 +346,10 @@ class TidyCloudTasks:
try:
cloud_task.delete()
set_online()
except td.exceptions.WebError:
except td.exceptions.WebError as ex:
set_offline()
msg = 'Tried to delete cloud task, but cannot access cloud'
raise RuntimeError(msg)
raise RuntimeError(msg) from ex
# Populate Caches
## Direct Task Cache
@ -377,7 +375,6 @@ class TidyCloudTasks:
# Repopulate All Caches
## By deleting the folder ID, all tasks within will be reloaded
del cls.cache_folder_tasks[folder_id]
folder_tasks = cls.tasks(cloud_folder)
return cls.tasks(cloud_folder)[task_id]
@ -395,10 +392,9 @@ class TidyCloudTasks:
# Repopulate All Caches
## By deleting the folder ID, all tasks within will be reloaded
del cls.cache_folder_tasks[folder_id]
folder_tasks = cls.tasks(cloud_folder)
return {
task_id: cls.cache_tasks[task_id]
task_id: cls.tasks(cloud_folder)[task_id]
for task_id in cls.cache_folder_tasks[folder_id]
}
@ -410,9 +406,9 @@ class TidyCloudTasks:
try:
new_cloud_task.abort()
set_online()
except td.exceptions.WebError:
except td.exceptions.WebError as ex:
set_offline()
msg = 'Tried to abort cloud task, but cannot access cloud'
raise RuntimeError(msg)
raise RuntimeError(msg) from ex
return cls.update_task(cloud_task)

View File

@ -0,0 +1,17 @@
from ..nodeps.utils import pydeps
from . import (
analyze_geonodes,
blender_type_enum,
extra_sympy_units,
logger,
pydantic_sympy,
)
__all__ = [
'pydeps',
'analyze_geonodes',
'blender_type_enum',
'extra_sympy_units',
'logger',
'pydantic_sympy',
]

View File

@ -6,7 +6,7 @@ INVALID_BL_SOCKET_TYPES = {
def interface(
geo_nodes,
geo_nodes, ## TODO: bpy type
direc: typx.Literal['INPUT', 'OUTPUT'],
):
"""Returns 'valid' GeoNodes interface sockets, meaning that:

View File

@ -2,7 +2,7 @@ import enum
class BlenderTypeEnum(str, enum.Enum):
def _generate_next_value_(name, start, count, last_values):
def _generate_next_value_(name, *_):
return name

View File

@ -1,7 +1,10 @@
import functools
import sympy as sp
import sympy.physics.units as spu
from . import pydeps
with pydeps.syspath_from_bpy_prefs():
import sympy as sp
import sympy.physics.units as spu
####################

View File

@ -1,31 +1,66 @@
import logging
from pathlib import Path
LOGGER = logging.getLogger('blender_maxwell')
import rich.console
import rich.logging
from .. import info
from ..nodeps.utils import simple_logger
from ..nodeps.utils.simple_logger import (
LOG_LEVEL_MAP, # noqa: F401
LogLevel,
loggers, # noqa: F401
setup_logger, # noqa: F401
simple_loggers, # noqa: F401
sync_loggers, # noqa: F401
)
def get():
if LOGGER is None:
# Set Sensible Defaults
LOGGER.setLevel(logging.DEBUG)
# FORMATTER = logging.Formatter(
# '%(asctime)-15s %(levelname)8s %(name)s %(message)s'
# )
# Add Stream Handler
STREAM_HANDLER = logging.StreamHandler()
# STREAM_HANDLER.setFormatter(FORMATTER)
LOGGER.addHandler(STREAM_HANDLER)
return LOGGER
####################
# - Logging Handlers
####################
def console_handler(level: LogLevel) -> rich.logging.RichHandler:
    """Build a rich-powered console log handler at the given level.

    The handler writes to stderr with truecolor output and rich tracebacks;
    messages are formatted bare, with '[%X]'-style timestamps.
    """
    ## TODO: Should be 'auto'; bl_run.py hijinks are interfering
    err_console = rich.console.Console(color_system='truecolor', stderr=True)
    handler = rich.logging.RichHandler(
        level=level,
        console=err_console,
        rich_tracebacks=True,
    )
    handler.setFormatter(
        logging.Formatter(
            '%(message)s',
            datefmt='[%X]',
        )
    )
    return handler
def set_level(level):
LOGGER.setLevel(level)
def file_handler(
    path_log_file: Path, level: LogLevel
) -> rich.logging.RichHandler:
    """Create a file log handler by delegating to the dependency-free simple logger.

    NOTE(review): the return annotation claims `rich.logging.RichHandler`, but
    this returns whatever `simple_logger.file_handler` produces — presumably a
    plain stdlib handler; confirm and correct the annotation.
    """
    return simple_logger.file_handler(path_log_file, level)
def enable_logfile():
raise NotImplementedError
####################
# - Logger Setup
####################
def get(module_name):
    """Return the logger for *module_name*, configured from addon preferences.

    Raises:
        RuntimeError: If the addon preferences are not (yet) available.
    """
    prefs = info.addon_prefs()
    if prefs is None:
        msg = 'Addon preferences not defined'
        raise RuntimeError(msg)

    new_logger = logging.getLogger(module_name)
    # Let the preferences apply the configured handlers to this one logger.
    prefs.sync_addon_logging(only_sync_logger=new_logger)
    return new_logger
def disable_logfile():
raise NotImplementedError
####################
# - Logger Sync
####################
#def upgrade_simple_loggers():
# """Upgrades simple loggers to rich-enabled loggers."""
# for logger in simple_loggers():
# setup_logger(console_handler, file_handler, logger)

View File

@ -3,6 +3,7 @@
See <https://github.com/dfelinto/blender/blob/master/release/scripts/modules/addon_utils.py>
"""
import logging
import shutil
import sys
import traceback
@ -10,9 +11,17 @@ from pathlib import Path
import bpy
sys.path.insert(0, str(Path(__file__).resolve().parent))
import info
import pack
PATH_SCRIPT = str(Path(__file__).resolve().parent)
sys.path.insert(0, str(PATH_SCRIPT))
import info # noqa: E402
import pack # noqa: E402
sys.path.remove(str(PATH_SCRIPT))
# Set Bootstrap Log Level
## This will be the log-level of both console and file logs, at first...
## ...until the addon preferences have been loaded.
BOOTSTRAP_LOG_LEVEL = logging.DEBUG
## TODO: Preferences item that allows using BLMaxwell 'starter.blend' as Blender's default starter blendfile.
@ -113,19 +122,22 @@ def install_addon(addon_name: str, addon_zip: Path) -> None:
msg = f"Couldn't enable addon {addon_name}"
raise RuntimeError(msg)
# Set Dev Path for Addon Dependencies
addon_prefs = bpy.context.preferences.addons[addon_name].preferences
addon_prefs.use_default_path_addon_pydeps = False
addon_prefs.path_addon_pydeps = info.PATH_ADDON_DEV_DEPS
# Save User Preferences
bpy.ops.wm.save_userpref()
def setup_for_development(addon_name: str, path_addon_dev_deps: Path) -> None:
addon_prefs = bpy.context.preferences.addons[addon_name].preferences
# PyDeps Path
addon_prefs.use_default_pydeps_path = False
addon_prefs.pydeps_path = path_addon_dev_deps
####################
# - Entrypoint
####################
if __name__ == '__main__':
def main():
# Delete Addon (maybe; possibly restart)
delete_addon_if_loaded(info.ADDON_NAME)
@ -139,6 +151,7 @@ if __name__ == '__main__':
info.PATH_ADDON_ZIP,
info.PATH_ROOT / 'pyproject.toml',
info.PATH_ROOT / 'requirements.lock',
initial_log_level=BOOTSTRAP_LOG_LEVEL,
) as path_zipped:
try:
install_addon(info.ADDON_NAME, path_zipped)
@ -146,6 +159,9 @@ if __name__ == '__main__':
traceback.print_exc()
install_failed = True
# Setup Addon for Development Use
setup_for_development(info.ADDON_NAME, info.PATH_ADDON_DEV_DEPS)
# Load Development .blend
## TODO: We need a better (also final-deployed-compatible) solution for what happens when a user opened a .blend file without installing dependencies!
if not install_failed:
@ -153,3 +169,7 @@ if __name__ == '__main__':
else:
bpy.ops.wm.quit_blender()
sys.exit(info.STATUS_NOINSTALL_ADDON)
if __name__ == '__main__':
main()

View File

@ -1,3 +1,4 @@
# noqa: INP001
import os
import subprocess
from pathlib import Path
@ -43,7 +44,7 @@ def run_blender(py_script: Path, print_live: bool = False):
####################
# - Run Blender w/Clean Addon Reinstall
####################
if __name__ == '__main__':
def main():
return_code, output = run_blender(info.PATH_BL_RUN, print_live=False)
if return_code == info.STATUS_UNINSTALLED_ADDON:
return_code, output = run_blender(info.PATH_BL_RUN, print_live=True)
@ -52,3 +53,6 @@ if __name__ == '__main__':
raise ValueError(msg)
elif return_code != 0:
print(''.join(output)) # noqa: T201
if __name__ == "__main__":
main()

View File

@ -1,9 +1,9 @@
import tomllib
from pathlib import Path
PATH_ROOT = Path(__file__).resolve().parent.parent
PATH_RUN = PATH_ROOT / 'scripts' / 'run.py'
PATH_BL_RUN = PATH_ROOT / 'scripts' / 'bl_run.py'
PATH_ROOT = Path(__file__).resolve().parent.parent.parent
PATH_SRC = PATH_ROOT / 'src'
PATH_BL_RUN = PATH_SRC / 'scripts' / 'bl_run.py'
PATH_BUILD = PATH_ROOT / 'build'
PATH_BUILD.mkdir(exist_ok=True)
@ -41,6 +41,8 @@ PATH_ADDON_ZIP = (
PATH_ADDON_BLEND_STARTER = PATH_ADDON_PKG / 'blenders' / 'starter.blend'
BOOTSTRAP_LOG_LEVEL_FILENAME = '.bootstrap_log_level'
# Install the ZIPped Addon
####################
# - Development Information

View File

@ -1,4 +1,7 @@
# noqa: INP001
import contextlib
import logging
import tempfile
import typing as typ
import zipfile
@ -6,6 +9,8 @@ from pathlib import Path
import info
LogLevel: typ.TypeAlias = int
_PROJ_VERSION_STR = str(
tuple(int(el) for el in info.PROJ_SPEC['project']['version'].split('.'))
)
@ -18,12 +23,14 @@ BL_INFO_REPLACEMENTS = {
@contextlib.contextmanager
def zipped_addon(
def zipped_addon( # noqa: PLR0913
path_addon_pkg: Path,
path_addon_zip: Path,
path_pyproject_toml: Path,
path_requirements_lock: Path,
initial_log_level: LogLevel = logging.INFO,
replace_if_exists: bool = False,
remove_after_close: bool = True,
) -> typ.Iterator[Path]:
"""Context manager exposing a folder as a (temporary) zip file.
The .zip file is deleted afterwards.
@ -69,25 +76,43 @@ def zipped_addon(
# Install pyproject.toml @ /pyproject.toml of Addon
f_zip.write(
path_pyproject_toml,
str(
(Path(path_addon_pkg.name) / Path(path_pyproject_toml.name))
.with_suffix('')
.with_suffix('.toml')
),
str(Path(path_addon_pkg.name) / Path(path_pyproject_toml.name)),
)
# Install requirements.lock @ /requirements.txt of Addon
f_zip.write(
path_requirements_lock,
str(
(Path(path_addon_pkg.name) / Path(path_requirements_lock.name))
.with_suffix('')
.with_suffix('.txt')
),
str(Path(path_addon_pkg.name) / Path(path_requirements_lock.name)),
)
# Set Initial Log-Level
f_zip.writestr(
str(Path(path_addon_pkg.name) / info.BOOTSTRAP_LOG_LEVEL_FILENAME),
str(initial_log_level),
)
# Delete the ZIP
try:
yield path_addon_zip
finally:
path_addon_zip.unlink()
if remove_after_close:
path_addon_zip.unlink()
####################
# - Run Blender w/Clean Addon Reinstall
####################
def main():
    """Pack the addon into its distribution .zip and leave it on disk.

    Zips the addon package together with `pyproject.toml` and
    `requirements.lock` at `info.PATH_ADDON_ZIP`, overwriting any existing
    archive (`replace_if_exists=True`) and keeping the result after the
    context exits (`remove_after_close=False`).
    """
    with zipped_addon(
        path_addon_pkg=info.PATH_ADDON_PKG,
        path_addon_zip=info.PATH_ADDON_ZIP,
        path_pyproject_toml=info.PATH_ROOT / 'pyproject.toml',
        path_requirements_lock=info.PATH_ROOT / 'requirements.lock',
        replace_if_exists=True,
        remove_after_close=False,
    ):
        # TODO: GPG signature for distribution
        pass
if __name__ == "__main__":
main()