refactor: Big changes to data flow and deps loading

main
Sofus Albert Høgsbro Rose 2024-04-19 16:53:24 +02:00
parent ff5d71aeff
commit 9960cd3480
Signed by: so-rose
GPG Key ID: AD901CB0F3701434
98 changed files with 2555 additions and 1336 deletions

View File

@@ -9,3 +9,4 @@ trim_trailing_whitespace = false
 [*.yml]
 indent_style = space
+indent_size = 2

View File

@@ -1,10 +1,44 @@
+####################
+# - Project Config
+####################
 project:
   type: website
+  output-dir: _site
 
-# Website Configuration
+format:
+  html:
+    toc: true
+
+filters:
+  - interlinks
+
+interlinks:
+  sources:
+    numpy:
+      url: https://numpy.org/doc/stable/
+    matplotlib:
+      url: https://matplotlib.org/stable/
+    python:
+      url: https://docs.python.org/3/
+
+metadata-files:
+  # Sidebar for /pydocs Paths
+  - pydocs/_sidebar.yml
+
+####################
+# - Website Config
+####################
 website:
+  title: "Blender Maxwell"
+  description: "A Blender-based design and analysis tool for electromagnetic simulations"
   page-footer: "Copyright 2024, Sofus Albert Høgsbro Rose"
+  repo-url: https://github.com/so-rose/blender_maxwell/
+  repo-actions: [issue]
+  page-navigation: true
   navbar:
+    background: primary
+    pinned: true
+    search: true
     left:
       - file: index.qmd
         text: Home
@@ -18,19 +52,12 @@ website:
       - text: Report a Bug
        url: https://github.com/so-rose/blender_maxwell/issues/new/choose
 
-# Auto-Generated Metadata
-metadata-files:
-  # Sidebar for /pydocs Paths
-  - pydocs/_sidebar.yml
-
 ####################
-# - quartodoc - Autogenerated Python Docs
+# - Quartodoc Config
 ####################
 quartodoc:
   # Output
   dir: pydocs
-  #out_index: _api_index.qmd
   sidebar: pydocs/_sidebar.yml
 
   # Python Package
@@ -43,8 +70,13 @@ quartodoc:
   title: "Blender Maxwell"
 
   # Options
+  renderer:
+    style: markdown
+    #show_signature: true
+    show_signature_annotations: false
+    display_name: name
   options:
-    include_private: true
+    #include_private: true
     include_empty: true
     include_attributes: true
     signature_name: "short"
@@ -57,16 +89,11 @@ quartodoc:
       desc: Build/packaging scripts for developing and publishing the addon.
       package: scripts
       contents:
-        - name: info
-          children: embedded
-        - name: pack
-          children: embedded
-        - name: dev
-          children: embedded
-        - name: bl_delete_addon
-          children: embedded
-        - name: bl_install_addon
-          children: embedded
+        - info
+        - pack
+        - dev
+        - bl_delete_addon
+        - bl_install_addon
 
 ####################
 # - bl_maxwell
@@ -74,54 +101,39 @@ quartodoc:
     - title: "`bl_maxwell`"
       desc: Root package for the addon.
       contents:
-        - name: info
-          children: embedded
-        - name: preferences
-          children: embedded
-        - name: registration
-          children: embedded
+        - preferences
+        - registration
 
     - subtitle: "`bl_maxwell.assets`"
       desc: Blender assets bundled w/Blender Maxwell
      contents:
-        - name: assets
-          children: embedded
-        - name: assets.import_geonodes
-          children: embedded
+        - assets
+        - assets.import_geonodes
 
     - subtitle: "`bl_maxwell.nodeps`"
      desc: No-Dependency
      contents:
-        - name: operators
-          children: embedded
+        - operators
 
     - subtitle: "`bl_maxwell.utils`"
      desc: Utilities wo/shared global state.
      contents:
-        - name: utils.analyze_geonodes
-          children: embedded
-        - name: utils.blender_type_enum
-          children: embedded
-        - name: utils.extra_sympy_units
-          children: embedded
-        - name: utils.logger
-          children: embedded
-        - name: utils.pydantic_sympy
-          children: embedded
+        - utils.analyze_geonodes
+        - utils.blender_type_enum
+        - utils.extra_sympy_units
+        - utils.logger
+        - utils.pydantic_sympy
 
     - subtitle: "`bl_maxwell.services`"
      desc: Utilities w/shared global state.
      contents:
-        - name: services.tdcloud
-          children: embedded
+        - services.tdcloud
 
     - subtitle: "`bl_maxwell.operators`"
      desc: General Blender operators.
      contents:
-        - name: operators.bl_append
-          children: embedded
-        - name: operators.connect_viewer
-          children: embedded
+        - operators.bl_append
+        - operators.connect_viewer
 
 ####################
 # - ..maxwell_sim_nodes
@@ -130,73 +142,49 @@ quartodoc:
       desc: Maxwell Simulation Design/Viz Node Tree.
       package: blender_maxwell.node_trees.maxwell_sim_nodes
       contents:
-        - name: bl_socket_map
-          children: embedded
-        - name: categories
-          children: embedded
-        - name: bl_cache
-          children: embedded
-        - name: node_tree
-          children: embedded
+        - bl_socket_map
+        - categories
+        - bl_cache
+        - node_tree
 
     - subtitle: "`contracts`"
       desc: Constants and interfaces for identifying resources.
       package: blender_maxwell.node_trees.maxwell_sim_nodes.contracts
       contents:
         # General
-        - name: bl
-          children: embedded
-        - name: data_flows
-          children: embedded
-        - name: icons
-          children: embedded
-        - name: trees
-          children: embedded
+        - flow_kinds
+        - flow_kinds.FlowKind
+        - flow_kinds.LazyValueFuncFlow
+        - icons
+        - tree_types
 
         # Managed Objects
-        - name: managed_obj_type
-          children: embedded
+        - mobj_types
 
         # Nodes
-        - name: node_types
-          children: embedded
-        - name: node_cats
-          children: embedded
-        - name: node_cat_labels
-          children: embedded
+        - node_types
+        - category_types
+        - category_labels
 
         # Sockets
-        - name: socket_types
-          children: embedded
-        - name: socket_colors
-          children: embedded
-        - name: socket_from_bl_desc
-          children: embedded
-        - name: socket_from_bl_direct
-          children: embedded
-        - name: socket_shapes
-          children: embedded
-        - name: socket_units
-          children: embedded
-        - name: unit_systems
-          children: embedded
+        - socket_types
+        - socket_colors
+        - bl_socket_types
+        - bl_socket_desc_map
+        - socket_units
+        - unit_systems
 
     - subtitle: "`managed_objs`"
       desc: Maxwell Simulation Design/Viz Node Tree
       package: blender_maxwell.node_trees.maxwell_sim_nodes.managed_objs
       contents:
-        - name: managed_bl_collection
-          children: embedded
-        - name: managed_bl_empty
-          children: embedded
-        - name: managed_bl_image
-          children: embedded
-        - name: managed_bl_mesh
-          children: embedded
-        - name: managed_bl_modifier
-          children: embedded
+        - managed_bl_collection
+        - managed_bl_empty
+        - managed_bl_image
+        - managed_bl_mesh
+        - managed_bl_modifier
 
 ####################
 # - ..maxwell_sim_nodes.nodes
@@ -205,10 +193,8 @@ quartodoc:
       desc: Maxwell Simulation Node Sockets
       package: blender_maxwell.node_trees.maxwell_sim_nodes.sockets
       contents:
-        - name: base
-          children: embedded
-        - name: scan_socket_defs
-          children: embedded
+        - base
+        - scan_socket_defs
 
 ####################
 # - ..maxwell_sim_nodes.nodes
@@ -217,7 +203,5 @@ quartodoc:
       desc: Maxwell Simulation Nodes
       package: blender_maxwell.node_trees.maxwell_sim_nodes.nodes
       contents:
-        - name: base
-          children: embedded
-        - name: events
-          children: embedded
+        - base
+        - events

View File

View File

@@ -107,6 +107,7 @@ ignore = [
     "B008", # FastAPI uses this for Depends(), Security(), etc. .
     "E701", # class foo(Parent): pass or if simple: return are perfectly elegant
     "ERA001", # 'Commented-out code' seems to be just about anything to ruff
+    "F722", # jaxtyping uses type annotations that ruff sees as "syntax error"
 
     # Line Length - Controversy Incoming
     ## Hot Take: Let the Formatter Worry about Line Length

View File

@@ -1,13 +1,44 @@
+"""A Blender-based system for electromagnetic simulation design and analysis, with deep Tidy3D integration.
+
+# `bl_info`
+`bl_info` declares information about the addon to Blender.
+
+However, it is not _dynamically_ read: Blender traverses it using `ast.parse`.
+This makes it difficult to synchronize `bl_info` with the project's `pyproject.toml`.
+
+As a workaround, **the addon zip-packer will replace `bl_info` entries**.
+
+The following `bl_info` entries are currently replaced when the ZIP is built:
+
+- `description`: To match the description in `pyproject.toml`.
+- `version`: To match the version in `pyproject.toml`.
+
+For more information, see `scripts.pack.BL_INFO_REPLACEMENTS`.
+
+**NOTE**: The find/replace procedure is "dumb" (aka. no regex, no `ast` traversal, etc.).
+This is surprisingly robust, so long as use of the deterministic code-formatter `ruff fmt` is enforced.
+
+Still. Be careful around `bl_info`.
+
+Attributes:
+    bl_info: Information about the addon declared to Blender.
+    BL_REGISTER_BEFORE_DEPS: Blender classes to register before dependencies are verified as installed.
+    BL_HOTKEYS: Blender keymap item defs to register before dependencies are verified as installed.
+"""
+
 from pathlib import Path
 
-from . import info
+import bpy
+
+from . import contracts as ct
 from .nodeps.utils import simple_logger
 
 simple_logger.sync_bootstrap_logging(
-    console_level=info.BOOTSTRAP_LOG_LEVEL,
+    console_level=ct.addon.BOOTSTRAP_LOG_LEVEL,
 )
 
-from . import nodeps, preferences, registration  # noqa: E402
+from . import preferences, registration  # noqa: E402
+from .nodeps import operators as nodeps_operators  # noqa: E402
 from .nodeps.utils import pydeps  # noqa: E402
 
 log = simple_logger.get(__name__)
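For illustration only: the module docstring above describes a deliberately "dumb" find/replace over `bl_info`. The following is a hypothetical sketch of that idea; the helper name and the shape of the replacement mapping are assumptions, not the real `scripts.pack.BL_INFO_REPLACEMENTS` (which is not part of this diff).

```python
# Hypothetical sketch of the "dumb" bl_info find/replace described above.
# Helper name and mapping shape are assumptions; the real logic lives in
# scripts.pack.BL_INFO_REPLACEMENTS, which is not shown in this commit.
import tomllib
from pathlib import Path


def sync_bl_info(
    path_init_py: Path, path_pyproject: Path, replacements: dict[str, str]
) -> str:
    """Plain string replacement over the addon's __init__.py - no regex, no ast.

    `replacements` maps exact source lines (as emitted by 'ruff fmt') to template
    lines that are filled from the `[project]` table of pyproject.toml.
    """
    project = tomllib.loads(path_pyproject.read_text())['project']
    text = path_init_py.read_text()
    for old_line, new_line_template in replacements.items():
        text = text.replace(old_line, new_line_template.format(**project))
    return text
```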
@@ -15,9 +46,6 @@ log = simple_logger.get(__name__)
 ####################
 # - Addon Information
 ####################
-# The following parameters are replaced when packing the addon ZIP
-## - description
-## - version
 bl_info = {
     'name': 'Maxwell PDE Sim and Viz',
     'blender': (4, 1, 0),
@@ -28,24 +56,36 @@ bl_info = {
     'wiki_url': 'https://git.sofus.io/dtu-courses/bsc_thesis',
     'tracker_url': 'https://git.sofus.io/dtu-courses/bsc_thesis/issues',
 }
-## bl_info MUST readable via. ast.parse
-## See scripts/pack.py::BL_INFO_REPLACEMENTS for active replacements
-## The mechanism is a 'dumb' - output of 'ruff fmt' MUST be basis for replacing
 
 ####################
 # - Load and Register Addon
 ####################
-log.info('Loading Before-Deps BL_REGISTER')
-BL_REGISTER__BEFORE_DEPS = [
-    *nodeps.operators.BL_REGISTER,
+BL_REGISTER_BEFORE_DEPS: list[ct.BLClass] = [
+    *nodeps_operators.BL_REGISTER,
     *preferences.BL_REGISTER,
 ]
+## TODO: BL_HANDLERS and BL_SOCKET_DEFS
 
+BL_HOTKEYS_BEFORE_DEPS: list[ct.KeymapItemDef] = [
+    *nodeps_operators.BL_HOTKEYS,
+]
 
-def BL_REGISTER__AFTER_DEPS(path_deps: Path):
-    log.info('Loading After-Deps BL_REGISTER')
-    with pydeps.importable_addon_deps(path_deps):
+
+def load_main_blclasses(path_pydeps: Path) -> list[ct.BLClass]:
+    """Imports all addon classes that rely on Python dependencies.
+
+    Notes:
+        `sys.path` is modified while executing this function.
+
+    Parameters:
+        path_pydeps: The path to the Python dependencies.
+
+    Returns:
+        An ordered list of Blender classes to register.
+    """
+    with pydeps.importable_addon_deps(path_pydeps):
         from . import assets, node_trees, operators
     return [
         *operators.BL_REGISTER,
@@ -54,64 +94,114 @@ def BL_REGISTER__AFTER_DEPS(path_deps: Path):
     ]
 
 
-log.info('Loading Before-Deps BL_KEYMAP_ITEM_DEFS')
-BL_KEYMAP_ITEM_DEFS__BEFORE_DEPS = [
-    *nodeps.operators.BL_KEYMAP_ITEM_DEFS,
-]
-
-
-def BL_KEYMAP_ITEM_DEFS__AFTER_DEPS(path_deps: Path):
-    log.info('Loading After-Deps BL_KEYMAP_ITEM_DEFS')
+def load_main_blhotkeys(path_deps: Path) -> list[ct.KeymapItemDef]:
+    """Imports all keymap item defs that rely on Python dependencies.
+
+    Notes:
+        `sys.path` is modified while executing this function.
+
+    Parameters:
+        path_deps: The path to the Python dependencies.
+
+    Returns:
+        An ordered list of Blender keymap item defs to register.
+    """
     with pydeps.importable_addon_deps(path_deps):
         from . import assets, operators
     return [
-        *operators.BL_KEYMAP_ITEM_DEFS,
-        *assets.BL_KEYMAP_ITEM_DEFS,
+        *operators.BL_HOTKEYS,
+        *assets.BL_HOTKEYS,
     ]
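Both loader callbacks above note that `sys.path` is modified while they run. Purely as an illustration, a context manager with the behavior attributed to `pydeps.importable_addon_deps` (whose real implementation is not in this diff) might look like this:

```python
# Hypothetical sketch only; the real pydeps.importable_addon_deps is not shown
# in this diff and may differ (ex. it may deliberately leave the path installed).
import contextlib
import sys
from pathlib import Path


@contextlib.contextmanager
def importable_addon_deps(path_pydeps: Path):
    """Temporarily prepend the addon's dependency folder to sys.path."""
    sys.path.insert(0, str(path_pydeps))
    try:
        yield
    finally:
        sys.path.remove(str(path_pydeps))
```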
 ####################
 # - Registration
 ####################
-def register():
-    """Register the Blender addon."""
-    log.info('Starting %s Registration', info.ADDON_NAME)
-
-    # Register Barebones Addon (enough for PyDeps Installability)
-    registration.register_classes(BL_REGISTER__BEFORE_DEPS)
-    registration.register_keymap_items(BL_KEYMAP_ITEM_DEFS__BEFORE_DEPS)
-
-    # Retrieve PyDeps Path from Addon Preferences
-    if (addon_prefs := info.addon_prefs()) is None:
-        unregister()
-        msg = f'Addon preferences not found; aborting registration of {info.ADDON_NAME}'
-        raise RuntimeError(msg)
-
-    # Retrieve PyDeps Path
-    path_pydeps = addon_prefs.pydeps_path
-    log.info('Loaded PyDeps Path from Addon Prefs: %s', path_pydeps)
-
-    if pydeps.check_pydeps(path_pydeps):
-        log.info('PyDeps Satisfied: Loading Addon %s', info.ADDON_NAME)
-        addon_prefs.sync_addon_logging()
-        registration.register_classes(BL_REGISTER__AFTER_DEPS(path_pydeps))
-        registration.register_keymap_items(BL_KEYMAP_ITEM_DEFS__AFTER_DEPS(path_pydeps))
-    else:
-        log.info(
-            'PyDeps Invalid: Delaying Addon Registration of %s',
-            info.ADDON_NAME,
-        )
-        registration.delay_registration(
-            registration.EVENT__DEPS_SATISFIED,
-            classes_cb=BL_REGISTER__AFTER_DEPS,
-            keymap_item_defs_cb=BL_KEYMAP_ITEM_DEFS__AFTER_DEPS,
-        )
-        ## TODO: bpy Popup to Deal w/Dependency Errors
-
-
-def unregister():
-    """Unregister the Blender addon."""
-    log.info('Starting %s Unregister', info.ADDON_NAME)
+@bpy.app.handlers.persistent
+def manage_pydeps(*_):
+    # ct.addon.operator(
+    #     ct.OperatorType.ManagePyDeps,
+    #     'INVOKE_DEFAULT',
+    #     path_addon_pydeps='',
+    #     path_addon_reqs='',
+    # )
+    ct.addon.prefs().on_addon_pydeps_changed(show_popup_if_deps_invalid=True)
+
+
+def register() -> None:
+    """Implements a multi-stage addon registration, which accounts for Python dependency management.
+
+    # Multi-Stage Registration
+    The trouble is that many classes in our addon might require Python dependencies.
+
+    ## Stage 1: Barebones Addon
+    Many classes in our addon might require Python dependencies.
+    However, they may not yet be installed.
+
+    To solve this bootstrapping problem in a streamlined manner, we only **guarantee** the registration of a few key classes, including:
+
+    - `AddonPreferences`: The addon preferences provide an interface for the user to fix Python dependency problems, thereby triggering subsequent stages.
+    - `InstallPyDeps`: An operator that installs missing Python dependencies, using Blender's embedded `pip`.
+    - `UninstallPyDeps`: An operator that uninstalls Python dependencies.
+
+    **These classes provide just enough interface to help the user install the missing Python dependencies**.
+
+    ## Stage 2: Declare Delayed Registration
+    We may not be able to register any classes that rely on Python dependencies.
+    However, we can use `registration.delay_registration()` to **delay the registration until it is determined that the Python dependencies are satisfied**.
+    For now, we just pass a callback that will import + return a list of classes to register (`load_main_blclasses()`) when the time comes.
+
+    ## Stage 3: Trigger "PyDeps Changed"
+    The addon preferences is responsible for storing (and exposing to the user) the path to the Python dependencies.
+    Thus, the addon preferences method `on_addon_pydeps_changed()` has the responsibility for checking when the dependencies are valid, and running the delayed registrations (and any other delayed setup) in response.
+    In general, `on_addon_pydeps_changed()` runs whenever the PyDeps path is changed, but it can also be run manually.
+
+    As the last part of this process, that's exactly what `register()` does: Runs `on_addon_pydeps_changed()` manually.
+    Depending on the addon preferences (which persist), one of two things can happen:
+
+    1. **Deps Satisfied**: The addon will load without issue: The just-declared "delayed registrations" will run immediately, and all is well.
+    2. **Deps Not Satisfied**: The user must take action to fix the conflicts due to Python dependencies, before the addon can load. **A popup will show to help the user do so.**
+
+    Notes:
+        Called by Blender when enabling the addon.
+    """
+    log.info('Commencing Registration of Addon: %s', ct.addon.NAME)
+    bpy.app.handlers.load_post.append(manage_pydeps)
+
+    # Register Barebones Addon
+    ## Contains all no-dependency BLClasses:
+    ## - Contains AddonPreferences.
+    ## Contains all BLClasses from 'nodeps'.
+    registration.register_classes(BL_REGISTER_BEFORE_DEPS)
+    registration.register_hotkeys(BL_HOTKEYS_BEFORE_DEPS)
+
+    # Delay Complete Registration until DEPS_SATISFIED
+    registration.delay_registration_until(
+        registration.BLRegisterEvent.DepsSatisfied,
+        then_register_classes=load_main_blclasses,
+        then_register_hotkeys=load_main_blhotkeys,
+    )
+
+    # Trigger PyDeps Check
+    ## Deps ARE OK: Delayed registration will trigger.
+    ## Deps NOT OK: User must fix the pydeps, then trigger this method.
+    ct.addon.prefs().on_addon_pydeps_changed()
+
+
+def unregister() -> None:
+    """Unregisters anything that was registered by the addon.
+
+    Notes:
+        Run by Blender when disabling the addon.
+
+        This doesn't clean `sys.modules`.
+        To fully revert to Blender's state before the addon was in use (especially various import-related caches in the Python process), Blender must be restarted.
+    """
+    log.info('Starting %s Unregister', ct.addon.NAME)
     registration.unregister_classes()
-    registration.unregister_keymap_items()
-    log.info('Finished %s Unregister', info.ADDON_NAME)
+    registration.unregister_hotkeys()
+    registration.clear_delayed_registrations()
+    log.info('Finished %s Unregister', ct.addon.NAME)
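The `register()` docstring above hinges on delayed registration. As a sketch of the underlying pattern only - the real `registration` module is not part of this diff, and its `delay_registration_until()` actually takes separate class/hotkey callbacks - the registry idea can be reduced to:

```python
# Hypothetical reduction of the delayed-registration pattern; the real
# registration.delay_registration_until() / BLRegisterEvent are not shown here.
import enum
import typing as typ
from pathlib import Path


class BLRegisterEvent(enum.StrEnum):
    DepsSatisfied = 'deps_satisfied'


# Event -> callbacks that perform the delayed registration, given the pydeps path.
_DELAYED: dict[BLRegisterEvent, list[typ.Callable[[Path], None]]] = {}


def delay_registration_until(
    event: BLRegisterEvent, then_register: typ.Callable[[Path], None]
) -> None:
    """Remember a registration callback until `event` is triggered."""
    _DELAYED.setdefault(event, []).append(then_register)


def trigger_event(event: BLRegisterEvent, path_pydeps: Path) -> None:
    """Run and clear every registration that was delayed until `event`."""
    for register_cb in _DELAYED.pop(event, []):
        register_cb(path_pydeps)
```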

View File

@@ -4,11 +4,11 @@ BL_REGISTER = [
     *import_geonodes.BL_REGISTER,
 ]
 
-BL_KEYMAP_ITEM_DEFS = [
-    *import_geonodes.BL_KEYMAP_ITEM_DEFS,
+BL_HOTKEYS = [
+    *import_geonodes.BL_HOTKEYS,
 ]
 
 __all__ = [
     'BL_REGISTER',
-    'BL_KEYMAP_ITEM_DEFS',
+    'BL_HOTKEYS',
 ]

View File

@@ -8,8 +8,6 @@ import bpy
 from blender_maxwell import contracts as ct
 from blender_maxwell.utils import logger
 
-from .. import info
-
 log = logger.get(__name__)
@@ -75,7 +73,7 @@ class GeoNodes(enum.StrEnum):
 # GeoNodes Paths
 ## Internal
-GN_INTERNAL_PATH = info.PATH_ASSETS / 'internal' / 'primitives'
+GN_INTERNAL_PATH = ct.addon.PATH_ASSETS / 'internal' / 'primitives'
 GN_INTERNAL_INPUTS_PATH = GN_INTERNAL_PATH / 'input'
 GN_INTERNAL_SOURCES_PATH = GN_INTERNAL_PATH / 'source'
 GN_INTERNAL_STRUCTURES_PATH = GN_INTERNAL_PATH / 'structure'
@@ -83,7 +81,7 @@ GN_INTERNAL_MONITORS_PATH = GN_INTERNAL_PATH / 'monitor'
 GN_INTERNAL_SIMULATIONS_PATH = GN_INTERNAL_PATH / 'simulation'
 
 ## Structures
-GN_STRUCTURES_PATH = info.PATH_ASSETS / 'structures'
+GN_STRUCTURES_PATH = ct.addon.PATH_ASSETS / 'structures'
 GN_STRUCTURES_PRIMITIVES_PATH = GN_STRUCTURES_PATH / 'primitives'
 
 GN_PARENT_PATHS: dict[GeoNodes, Path] = {
@@ -377,14 +375,14 @@ class AppendGeoNodes(bpy.types.Operator):
     asset_libraries = bpy.context.preferences.filepaths.asset_libraries
     if (
         asset_library_idx := asset_libraries.find('Blender Maxwell')
-    ) != -1 and asset_libraries['Blender Maxwell'].path != str(info.PATH_ASSETS):
+    ) != -1 and asset_libraries['Blender Maxwell'].path != str(ct.addon.PATH_ASSETS):
         bpy.ops.preferences.asset_library_remove(asset_library_idx)
 
     if 'Blender Maxwell' not in asset_libraries:
         bpy.ops.preferences.asset_library_add()
         asset_library = asset_libraries[-1]  ## Since the operator adds to the end
         asset_library.name = 'Blender Maxwell'
-        asset_library.path = str(info.PATH_ASSETS)
+        asset_library.path = str(ct.addon.PATH_ASSETS)
 
     bpy.types.WindowManager.active_asset_list = bpy.props.CollectionProperty(
         type=bpy.types.AssetHandle
@@ -398,7 +396,7 @@ BL_REGISTER = [
     AppendGeoNodes,
 ]
 
-BL_KEYMAP_ITEM_DEFS = [
+BL_HOTKEYS = [
     # {
     #     '_': [
     #         AppendGeoNodes.bl_idname,

View File

@@ -9,11 +9,19 @@ from .bl import (
     BLModifierType,
     BLNodeTreeInterfaceID,
     BLOperatorStatus,
+    BLRegionType,
+    BLSpaceType,
     KeymapItemDef,
     ManagedObjName,
     PresetName,
     SocketName,
 )
+from .operator_types import (
+    OperatorType,
+)
+from .panel_types import (
+    PanelType,
+)
 
 __all__ = [
     'addon',
@@ -26,8 +34,12 @@ __all__ = [
     'BLModifierType',
     'BLNodeTreeInterfaceID',
     'BLOperatorStatus',
+    'BLRegionType',
+    'BLSpaceType',
     'KeymapItemDef',
     'ManagedObjName',
     'PresetName',
     'SocketName',
+    'OperatorType',
+    'PanelType',
 ]

View File

@@ -1,7 +1,9 @@
+import random
 import tomllib
 from pathlib import Path
 
 import bpy
+import bpy_restrict_state
 
 PATH_ADDON_ROOT = Path(__file__).resolve().parent.parent
 with (PATH_ADDON_ROOT / 'pyproject.toml').open('rb') as f:
@@ -32,11 +34,47 @@ ADDON_CACHE.mkdir(exist_ok=True)
 
 ####################
-# - Addon Prefs Info
+# - Dynamic Addon Information
 ####################
+def is_loading() -> bool:
+    """Checks whether the addon is currently loading.
+
+    While an addon is loading, `bpy.context` is temporarily very limited.
+    For example, operators can't run while the addon is loading.
+
+    By checking whether `bpy.context` is limited like this, we can determine whether the addon is currently loading.
+
+    Notes:
+        Since `bpy_restrict_state._RestrictContext` is a very internal thing, this function may be prone to breakage on Blender updates.
+        **Keep an eye out**!
+
+    Returns:
+        Whether the addon is still loading, such that `bpy.context` is not yet fully accessible.
+    """
+    return isinstance(bpy.context, bpy_restrict_state._RestrictContext)
+
+
+def operator(name: str, *operator_args, **operator_kwargs) -> None:
+    # Parse Operator Name
+    operator_namespace, operator_name = name.split('.')
+    if operator_namespace != NAME:
+        msg = f'Tried to call operator {operator_name}, but addon operators may only use the addon operator namespace "{operator_namespace}.<name>"'
+        raise RuntimeError(msg)
+
+    # Addon Not Loading: Run Operator
+    if not is_loading():
+        operator = getattr(getattr(bpy.ops, NAME), operator_name)
+        operator(*operator_args, **operator_kwargs)
+    else:
+        msg = f'Tried to call operator "{operator_name}" while addon is loading'
+        raise RuntimeError(msg)
+
+
 def prefs() -> bpy.types.AddonPreferences | None:
     if (addon := bpy.context.preferences.addons.get(NAME)) is None:
-        return None
+        msg = 'Addon is not installed'
+        raise RuntimeError(msg)
     return addon.preferences
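For context, the helpers above are what the commented-out call in `manage_pydeps()` relies on. A hedged usage sketch, assuming the addon is installed and its `InstallPyDeps` operator is registered:

```python
# Illustrative usage of ct.addon.operator() / ct.addon.prefs(); assumes the
# addon is installed and the InstallPyDeps operator has been registered.
from blender_maxwell import contracts as ct

if not ct.addon.is_loading():
    # Resolves to roughly bpy.ops.<addon_name>.<operator_name>('INVOKE_DEFAULT')
    ct.addon.operator(ct.OperatorType.InstallPyDeps, 'INVOKE_DEFAULT')

addon_prefs = ct.addon.prefs()  ## Now raises RuntimeError if the addon isn't installed
```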

View File

@@ -1,30 +1,18 @@
 import typing as typ
 
 import bpy
-import pydantic as pyd
-import typing_extensions as typx
 
 ####################
 # - Blender Strings
 ####################
-BLEnumID = typx.Annotated[
-    str,
-    pyd.StringConstraints(
-        pattern=r'^[A-Z_]+$',
-    ),
-]
-SocketName = typx.Annotated[
-    str,
-    pyd.StringConstraints(
-        pattern=r'^[a-zA-Z0-9_]+$',
-    ),
-]
+BLEnumID = str
+SocketName = str
 
 ####################
 # - Blender Enums
 ####################
-BLImportMethod: typ.TypeAlias = typx.Literal['append', 'link']
-BLModifierType: typ.TypeAlias = typx.Literal['NODES', 'ARRAY']
+BLImportMethod: typ.TypeAlias = typ.Literal['append', 'link']
+BLModifierType: typ.TypeAlias = typ.Literal['NODES', 'ARRAY']
 BLNodeTreeInterfaceID: typ.TypeAlias = str
 
 BLIconSet: frozenset[str] = frozenset(
@@ -49,30 +37,60 @@ BLClass: typ.TypeAlias = (
 BLKeymapItem: typ.TypeAlias = typ.Any  ## TODO: Better Type
 BLColorRGBA = tuple[float, float, float, float]
 
 ####################
 # - Operators
 ####################
+BLSpaceType: typ.TypeAlias = typ.Literal[
+    'EMPTY',
+    'VIEW_3D',
+    'IMAGE_EDITOR',
+    'NODE_EDITOR',
+    'SEQUENCE_EDITOR',
+    'CLIP_EDITOR',
+    'DOPESHEET_EDITOR',
+    'GRAPH_EDITOR',
+    'NLA_EDITOR',
+    'TEXT_EDITOR',
+    'CONSOLE',
+    'INFO',
+    'TOPBAR',
+    'STATUSBAR',
+    'OUTLINER',
+    'PROPERTIES',
+    'FILE_BROWSER',
+    'SPREADSHEET',
+    'PREFERENCES',
+]
+BLRegionType: typ.TypeAlias = typ.Literal[
+    'WINDOW',
+    'HEADER',
+    'CHANNELS',
+    'TEMPORARY',
+    'UI',
+    'TOOLS',
+    'TOOL_PROPS',
+    'ASSET_SHELF',
+    'ASSET_SHELF_HEADER',
+    'PREVIEW',
+    'HUD',
+    'NAVIGATION_BAR',
+    'EXECUTE',
+    'FOOTER',
+    'TOOL_HEADER',
+    'XR',
+]
 BLOperatorStatus: typ.TypeAlias = set[
-    typx.Literal['RUNNING_MODAL', 'CANCELLED', 'FINISHED', 'PASS_THROUGH', 'INTERFACE']
+    typ.Literal['RUNNING_MODAL', 'CANCELLED', 'FINISHED', 'PASS_THROUGH', 'INTERFACE']
 ]
 
 ####################
 # - Addon Types
 ####################
 KeymapItemDef: typ.TypeAlias = typ.Any  ## TODO: Better Type
-ManagedObjName = typx.Annotated[
-    str,
-    pyd.StringConstraints(
-        pattern=r'^[a-z_]+$',
-    ),
-]
+ManagedObjName = str
 
 ####################
 # - Blender Strings
 ####################
-PresetName = typx.Annotated[
-    str,
-    pyd.StringConstraints(
-        pattern=r'^[a-zA-Z0-9_]+$',
-    ),
-]
+PresetName = str

View File

@@ -0,0 +1,15 @@
+"""Defines Operator Types as an enum, making it easy for any part of the addon to refer to any operator."""
+
+import enum
+
+from ..nodeps.utils import blender_type_enum
+from .addon import NAME as ADDON_NAME
+
+
+@blender_type_enum.prefix_values_with(f'{ADDON_NAME}.')
+class OperatorType(enum.StrEnum):
+    """Identifiers for addon-defined `bpy.types.Operator`."""
+
+    InstallPyDeps = enum.auto()
+    UninstallPyDeps = enum.auto()
+    ManagePyDeps = enum.auto()
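The `prefix_values_with` decorator itself lives in `nodeps.utils.blender_type_enum` and is not shown in this diff. A minimal sketch of a decorator with the behavior relied on above, assuming `ADDON_NAME == 'blender_maxwell'`:

```python
# Hypothetical sketch; the real blender_type_enum.prefix_values_with may differ.
import enum


def prefix_values_with(prefix: str):
    """Class decorator: rebuild a StrEnum with `prefix` prepended to every value."""

    def decorator(cls: type[enum.StrEnum]) -> type[enum.StrEnum]:
        return enum.StrEnum(
            cls.__name__,
            {name: prefix + str(member.value) for name, member in cls.__members__.items()},
        )

    return decorator


@prefix_values_with('blender_maxwell.')  ## Assumed ADDON_NAME
class OperatorType(enum.StrEnum):
    InstallPyDeps = enum.auto()  ## auto() on a StrEnum -> 'installpydeps'


assert OperatorType.InstallPyDeps == 'blender_maxwell.installpydeps'
```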

View File

@@ -0,0 +1,12 @@
+"""Defines Panel Types as an enum, making it easy for any part of the addon to refer to any panel."""
+
+import enum
+
+from blender_maxwell.nodeps.utils import blender_type_enum
+
+from .addon import NAME as ADDON_NAME
+
+
+@blender_type_enum.prefix_values_with(f'{ADDON_NAME.upper()}_PT_')
+class PanelType(enum.StrEnum):
+    """Identifiers for addon-defined `bpy.types.Panel`."""
View File

@@ -1,9 +1,3 @@
-import sympy as sp
-
-sp.printing.str.StrPrinter._default_settings['abbrev'] = True
-## In this tree, all Sympy unit printing must be abbreviated.
-## By configuring this in __init__.py, we guarantee it for all subimports.
-## (Unless, elsewhere, this setting is changed. Be careful!)
-
 from . import categories, node_tree, nodes, sockets

View File

@@ -7,8 +7,12 @@ from blender_maxwell.contracts import (
     BLModifierType,
     BLNodeTreeInterfaceID,
     BLOperatorStatus,
+    BLRegionType,
+    BLSpaceType,
     KeymapItemDef,
     ManagedObjName,
+    OperatorType,
+    PanelType,
     PresetName,
     SocketName,
     addon,
@@ -25,7 +29,7 @@ from .flow_kinds import (
     FlowKind,
     InfoFlow,
     LazyArrayRangeFlow,
-    LazyValueFlow,
+    LazyValueFuncFlow,
     ParamsFlow,
     ValueFlow,
 )
@@ -48,8 +52,12 @@ __all__ = [
     'BLModifierType',
     'BLNodeTreeInterfaceID',
     'BLOperatorStatus',
+    'BLRegionType',
+    'BLSpaceType',
     'KeymapItemDef',
     'ManagedObjName',
+    'OperatorType',
+    'PanelType',
     'PresetName',
     'SocketName',
     'addon',
@@ -74,7 +82,7 @@ __all__ = [
     'FlowKind',
     'InfoFlow',
     'LazyArrayRangeFlow',
-    'LazyValueFlow',
+    'LazyValueFuncFlow',
     'ParamsFlow',
     'ValueFlow',
 ]

View File

@@ -1,7 +1,5 @@
 import enum
 
-import typing_extensions as typx
-
 from blender_maxwell.utils.staticproperty import staticproperty
@@ -48,7 +46,7 @@ class FlowEvent(enum.StrEnum):
     # Properties
     @staticproperty
-    def flow_direction() -> typx.Literal['input', 'output']:
+    def flow_direction() -> typ.Literal['input', 'output']:
        """Describes the direction in which the event should flow.
 
        Doesn't include `FlowEvent`s that aren't meant to be triggered:

View File

@@ -6,10 +6,10 @@ from types import MappingProxyType
 import jax
 import jax.numpy as jnp
+import jaxtyping as jtyp
 import numba
 import sympy as sp
 import sympy.physics.units as spu
-import typing_extensions as typx
 
 from blender_maxwell.utils import extra_sympy_units as spux
@@ -48,7 +48,7 @@ class FlowKind(enum.StrEnum):
     Array = enum.auto()
 
     # Lazy
-    LazyValue = enum.auto()
+    LazyValueFunc = enum.auto()
     LazyArrayRange = enum.auto()
 
     # Auxiliary
@@ -107,14 +107,14 @@ class ArrayFlow:
        None if unitless.
    """
 
-    values: jax.Array
+    values: jtyp.Shaped[jtyp.Array, '...']
     unit: spu.Quantity | None = None
 
-    def correct_unit(self, real_unit: spu.Quantity) -> typ.Self:
+    def correct_unit(self, corrected_unit: spu.Quantity) -> typ.Self:
         if self.unit is not None:
-            return ArrayFlow(values=self.values, unit=real_unit)
+            return ArrayFlow(values=self.values, unit=corrected_unit)
 
-        msg = f'Tried to correct unit of unitless LazyDataValueRange "{real_unit}"'
+        msg = f'Tried to correct unit of unitless LazyDataValueRange "{corrected_unit}"'
         raise ValueError(msg)
 
     def rescale_to_unit(self, unit: spu.Quantity) -> typ.Self:
@@ -137,28 +137,119 @@ LazyFunction: typ.TypeAlias = typ.Callable[[typ.Any, ...], ValueFlow]
 @dataclasses.dataclass(frozen=True, kw_only=True)
 class LazyValueFuncFlow:
-    r"""Encapsulates a lazily evaluated data value as a composable function with bound and free arguments.
-
-    - **Bound Args**: Arguments that are realized when **defining** the lazy value.
-        Both positional values and keyword values are supported.
-    - **Free Args**: Arguments that are specified when evaluating the lazy value.
-        Both positional values and keyword values are supported.
-
-    The **root function** is encapsulated using `from_function`, and must accept arguments in the following order:
+    r"""Wraps a composable function, providing useful information and operations.
+
+    # Data Flow as Function Composition
+    When using nodes to do math, it can be a good idea to express a **flow of data as the composition of functions**.
+
+    Each node creates a new function, which uses the still-unknown (aka. **lazy**) output of the previous function to plan some calculations.
+    Some new arguments may also be added, of course.
+
+    ## Root Function
+    Of course, one needs to select a "bottom" function, which has no previous function as input.
+    Thus, the first step is to define this **root function**:
 
     $$
-        f_0:\ \ \ \ (\underbrace{b_1, b_2, ...}_{\text{Bound}}\ ,\ \underbrace{r_1, r_2, ...}_{\text{Free}}) \to \text{output}_0
+        f_0:\ \ \ \ \biggl(
+            \underbrace{a_1, a_2, ..., a_p}_{\texttt{args}},\
+            \underbrace{
+                \begin{bmatrix} k_1 \\ v_1\end{bmatrix},
+                \begin{bmatrix} k_2 \\ v_2\end{bmatrix},
+                ...,
+                \begin{bmatrix} k_q \\ v_q\end{bmatrix}
+            }_{\texttt{kwargs}}
+        \biggr) \to \text{output}_0
     $$
 
-    Subsequent **composed functions** are encapsulated from the _root function_, and are created with `root_function.compose`.
-    They must accept arguments in the following order:
+    We'll express this simple snippet like so:
+
+    ```python
+    # Presume 'A0', 'KV0' contain only the args/kwargs for f_0
+    ## 'A0', 'KV0' are of length 'p' and 'q'
+    def f_0(*args, **kwargs): ...
+
+    lazy_value_func_0 = LazyValueFuncFlow(
+        func=f_0,
+        func_args=[(a_i, type(a_i)) for a_i in A0],
+        func_kwargs={k: v for k, v in KV0},
+    )
+    output_0 = lazy_value_func_0.func(*A0_computed, **KV0_computed)
+    ```
+
+    So far so good.
+    But of course, nothing interesting has really happened yet.
+
+    ## Composing Functions
+    The key thing is the next step: The function that uses the result of $f_0$!
 
     $$
-        f_k:\ \ \ \ (\underbrace{b_1, b_2, ...}_{\text{Bound}}\ ,\ \text{output}_{k-1} ,\ \underbrace{r_p, r_{p+1}, ...}_{\text{Free}}) \to \text{output}_k
+        f_1:\ \ \ \ \biggl(
+            f_0(...),\ \
+            \underbrace{\{a_i\}_p^{p+r}}_{\texttt{args[p:]}},\
+            \underbrace{\biggl\{
+                \begin{bmatrix} k_i \\ v_i\end{bmatrix}
+            \biggr\}_q^{q+s}}_{\texttt{kwargs[p:]}}
+        \biggr) \to \text{output}_1
     $$
 
+    Notice that _$f_1$ needs the arguments of both $f_0$ and $f_1$_.
+    Tracking arguments is already getting out of hand; we already have to use `...` to keep it readable!
+
+    But doing so with `LazyValueFunc` is not so complex:
+
+    ```python
+    # Presume 'A1', 'KV1' contain only the args/kwarg names for f_1
+    ## 'A1', 'KV1' are therefore of length 'r' and 's'
+    def f_1(output_0, *args, **kwargs): ...
+
+    lazy_value_func_1 = lazy_value_func_0.compose_within(
+        enclosing_func=f_1,
+        enclosing_func_args=[(a_i, type(a_i)) for a_i in A1],
+        enclosing_func_kwargs={k: type(v) for k, v in KV1},
+    )
+
+    A_computed = A0_computed + A1_computed
+    KW_computed = KV0_computed + KV1_computed
+    output_1 = lazy_value_func_1.func(*A_computed, **KW_computed)
+    ```
+
+    We only need the arguments to $f_1$, and `LazyValueFunc` figures out how to make one function with enough arguments to call both.
+
+    ## Isn't Layering Functions Slow/Hard?
+    Imagine that each function represents the action of a node, each of which performs expensive calculations on huge `numpy` arrays (**as one does when processing electromagnetic field data**).
+    At the end, a node might run the entire procedure with all arguments:
+
+    ```python
+    output_n = lazy_value_func_n.func(*A_all, **KW_all)
+    ```
+
+    It's rough: Most non-trivial pipelines drown in the time/memory overhead of incremental `numpy` operations - individually fast, but collectively iffy.
+
+    The killer feature of `LazyValueFuncFlow` is a sprinkle of black magic:
+
+    ```python
+    func_n_jax = lazy_value_func_n.func_jax
+    output_n = func_n_jax(*A_all, **KW_all)  ## Runs on your GPU
+    ```
+
+    What happened was, **the entire pipeline** was compiled and optimized for high performance on not just your CPU, _but also (possibly) your GPU_.
+    All the layered function calls and inefficient incremental processing are **transformed into a high-performance program**.
+
+    Thank `jax` - specifically, `jax.jit` (https://jax.readthedocs.io/en/latest/_autosummary/jax.jit.html#jax.jit), which internally enables this magic with a single function call.
+
+    ## Other Considerations
+    **Auto-Differentiation**: Incredibly, `jax.jit` isn't the killer feature of `jax`. The function that comes out of `LazyValueFuncFlow` can also be differentiated with `jax.grad` (read: high-performance Jacobians for optimizing input parameters).
+    Though designed for machine learning, there's no reason other fields can't enjoy their inventions!
+
+    **Impact of Independent Caching**: JIT'ing can be slow.
+    That's why `LazyValueFuncFlow` has its own `FlowKind` "lane", which means that **only changes to the processing procedures will cause recompilation**.
+
+    Generally, adjustable values that affect the output will flow via the `Param` "lane", which has its own incremental caching, and only meets the compiled function when it's "plugged in" for final evaluation.
+    The effect is a feeling of snappiness and interactivity, even as the volume of data grows.
+
     Attributes:
-        function: The function to be lazily evaluated.
+        func: The function that the object encapsulates.
         bound_args: Arguments that will be packaged into function, which can't be later modified.
         func_kwargs: Arguments to be specified by the user at the time of use.
         supports_jax: Whether the contained `self.function` can be compiled with JAX's JIT compiler.
@@ -166,37 +257,29 @@ class LazyValueFuncFlow:
     """
 
     func: LazyFunction
-    func_kwargs: dict[str, type]
+    func_args: list[tuple[str, type]] = MappingProxyType({})
+    func_kwargs: dict[str, type] = MappingProxyType({})
     supports_jax: bool = False
     supports_numba: bool = False
 
-    @staticmethod
-    def from_func(
-        func: LazyFunction,
-        supports_jax: bool = False,
-        supports_numba: bool = False,
-        **func_kwargs: dict[str, type],
-    ) -> typ.Self:
-        return LazyValueFuncFlow(
-            func=func,
-            func_kwargs=func_kwargs,
-            supports_jax=supports_jax,
-            supports_numba=supports_numba,
-        )
-
     # Composition
     def compose_within(
         self,
         enclosing_func: LazyFunction,
+        enclosing_func_args: list[tuple[str, type]] = (),
+        enclosing_func_kwargs: dict[str, type] = MappingProxyType({}),
         supports_jax: bool = False,
         supports_numba: bool = False,
-        **enclosing_func_kwargs: dict[str, type],
     ) -> typ.Self:
         return LazyValueFuncFlow(
-            function=lambda **kwargs: enclosing_func(
-                self.func(**{k: v for k, v in kwargs if k in self.func_kwargs}),
+            func=lambda *args, **kwargs: enclosing_func(
+                self.func(
+                    *list(args[len(self.func_args) :]),
+                    **{k: v for k, v in kwargs.items() if k in self.func_kwargs},
+                ),
                 **kwargs,
             ),
+            func_args=self.func_args + enclosing_func_args,
             func_kwargs=self.func_kwargs | enclosing_func_kwargs,
             supports_jax=self.supports_jax and supports_jax,
             supports_numba=self.supports_numba and supports_numba,
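The docstring above credits `jax.jit` with collapsing the whole composed pipeline into one compiled program. A tiny, generic `jax` illustration of that effect, intentionally independent of the addon's own API:

```python
# Generic jax demonstration of "compose in Python, compile once with jax.jit";
# this does not use LazyValueFuncFlow itself.
import jax
import jax.numpy as jnp


def f_0(field):
    return jnp.abs(jnp.fft.fft(field))


def f_1(output_0, scale):
    return scale * jnp.log(output_0 + 1e-9)


def composed(field, scale):
    return f_1(f_0(field), scale)


composed_jit = jax.jit(composed)  ## The whole pipeline becomes one XLA program
result = composed_jit(jnp.linspace(0.0, 1.0, 1024), 2.0)
```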
@@ -224,89 +307,295 @@ class LazyValueFuncFlow:
 ####################
 @dataclasses.dataclass(frozen=True, kw_only=True)
 class LazyArrayRangeFlow:
-    symbols: set[sp.Symbol]
-
-    start: sp.Basic
-    stop: sp.Basic
+    r"""Represents a linearly/logarithmically spaced array using symbolic boundary expressions, with support for units and lazy evaluation.
+
+    # Advantages
+    Whenever an array can be represented like this, the advantages over an `ArrayFlow` are numerous.
+
+    ## Memory
+    `ArrayFlow` generally has a memory scaling of $O(n)$.
+    Naturally, `LazyArrayRangeFlow` is always constant, since only the boundaries and steps are stored.
+
+    ## Symbolic
+    Both boundary points are symbolic expressions, within which pre-defined `sp.Symbol`s can participate in a constrained manner (ex. an integer symbol).
+    One need not know the value of the symbols immediately - such decisions can be deferred until later in the computational flow.
+
+    ## Performant Unit-Aware Operations
+    While `ArrayFlow`s are also unit-aware, the time-cost of _any_ unit-scaling operation scales with $O(n)$.
+    `LazyArrayRangeFlow`, by contrast, scales as $O(1)$.
+
+    As a result, more complicated operations (like symbolic or unit-based) that might be difficult to perform interactively in real-time on an `ArrayFlow` will work perfectly with this object, even with added complexity.
+
+    ## High-Performance Composition and Gradient
+    With `self.as_func`, a `jax` function is produced that generates the array according to the symbolic `start`, `stop` and `steps`.
+    There are two nice things about this:
+
+    - **Gradient**: The gradient of the output array, with respect to any symbols used to define the input bounds, can easily be found using `jax.grad` over `self.as_func`.
+    - **JIT**: When `self.as_func` is composed with other `jax` functions, and `jax.jit` is run to optimize the entire thing, the "cost of array generation" _will often be optimized away significantly or entirely_.
+
+    Thus, as part of larger computations, the performance properties of `LazyArrayRangeFlow` are extremely favorable.
+
+    ## Numerical Properties
+    Since the bounds support exact (ex. rational) calculations and symbolic manipulations (_by virtue of being symbolic expressions_), the opportunities for certain kinds of numerical instability are mitigated.
+
+    Attributes:
+        start: An expression generating a scalar, unitless, complex value for the array's lower bound.
+            _Integer, rational, and real values are also supported._
+        stop: An expression generating a scalar, unitless, complex value for the array's upper bound.
+            _Integer, rational, and real values are also supported._
+        steps: The amount of steps (**inclusive**) to generate from `start` to `stop`.
+        scaling: The method of distributing `step` values between the two endpoints.
+            Generally, the linear default is sufficient.
+        unit: The unit of the generated array values.
+        int_symbols: Set of integer-valued variables from which `start` and/or `stop` are determined.
+        real_symbols: Set of real-valued variables from which `start` and/or `stop` are determined.
+        complex_symbols: Set of complex-valued variables from which `start` and/or `stop` are determined.
+    """
+
+    start: spux.ScalarUnitlessComplexExpr
+    stop: spux.ScalarUnitlessComplexExpr
     steps: int
-    scaling: typx.Literal['lin', 'geom', 'log'] = 'lin'
+    scaling: typ.Literal['lin', 'geom', 'log'] = 'lin'
 
-    unit: spu.Quantity | None = False
+    unit: spux.Unit | None = None
 
-    def correct_unit(self, real_unit: spu.Quantity) -> typ.Self:
+    int_symbols: set[spux.IntSymbol] = frozenset()
+    real_symbols: set[spux.RealSymbol] = frozenset()
+    complex_symbols: set[spux.ComplexSymbol] = frozenset()
+
+    @functools.cached_property
+    def symbols(self) -> list[sp.Symbol]:
+        """Retrieves all symbols by concatenating int, real, and complex symbols, and sorting them by name.
+
+        The order is guaranteed to be **deterministic**.
+
+        Returns:
+            All symbols valid for use in the expression.
+        """
+        return sorted(
+            self.int_symbols | self.real_symbols | self.complex_symbols,
+            key=lambda sym: sym.name,
+        )
+
+    ####################
+    # - Units
+    ####################
+    def correct_unit(self, corrected_unit: spux.Unit) -> typ.Self:
+        """Replaces the unit without rescaling the unitless bounds.
+
+        Parameters:
+            corrected_unit: The unit to replace the current unit with.
+
+        Returns:
+            A new `LazyArrayRangeFlow` with replaced unit.
+
+        Raises:
+            ValueError: If the existing unit is `None`, indicating that there is no unit to correct.
+        """
         if self.unit is not None:
             return LazyArrayRangeFlow(
-                symbols=self.symbols,
-                unit=real_unit,
                 start=self.start,
                 stop=self.stop,
                 steps=self.steps,
                 scaling=self.scaling,
+                unit=corrected_unit,
+                int_symbols=self.int_symbols,
+                real_symbols=self.real_symbols,
+                complex_symbols=self.complex_symbols,
             )
 
-        msg = f'Tried to correct unit of unitless LazyDataValueRange "{real_unit}"'
+        msg = f'Tried to correct unit of unitless LazyDataValueRange "{corrected_unit}"'
         raise ValueError(msg)
 
-    def rescale_to_unit(self, unit: spu.Quantity) -> typ.Self:
+    def rescale_to_unit(self, unit: spux.Unit) -> typ.Self:
+        """Replaces the unit, **with** rescaling of the bounds.
+
+        Parameters:
+            unit: The unit to convert the bounds to.
+
+        Returns:
+            A new `LazyArrayRangeFlow` with replaced unit.
+
+        Raises:
+            ValueError: If the existing unit is `None`, indicating that there is no unit to correct.
+        """
         if self.unit is not None:
             return LazyArrayRangeFlow(
-                symbols=self.symbols,
-                unit=unit,
                 start=spu.convert_to(self.start, unit),
                 stop=spu.convert_to(self.stop, unit),
                 steps=self.steps,
                 scaling=self.scaling,
+                unit=unit,
+                int_symbols=self.int_symbols,
+                real_symbols=self.real_symbols,
+                complex_symbols=self.complex_symbols,
             )
 
         msg = f'Tried to rescale unitless LazyDataValueRange to unit {unit}'
         raise ValueError(msg)
+    ####################
+    # - Bound Operations
+    ####################
     def rescale_bounds(
         self,
-        bound_cb: typ.Callable[[sp.Expr], sp.Expr],
+        scaler: typ.Callable[
+            [spux.ScalarUnitlessComplexExpr], spux.ScalarUnitlessComplexExpr
+        ],
         reverse: bool = False,
     ) -> typ.Self:
-        """Call a function on both bounds (start and stop), creating a new `LazyDataValueRange`."""
+        """Apply a function to the bounds, effectively rescaling the represented array.
+
+        Notes:
+            **It is presumed that the bounds are scaled with the same factor**.
+            Breaking this presumption may have unexpected results.
+
+            The scalar, unitless, complex-valuedness of the bounds must also be respected; additionally, new symbols must not be introduced.
+
+        Parameters:
+            scaler: The function that scales each bound.
+            reverse: Whether to reverse the bounds after running the `scaler`.
+
+        Returns:
+            A rescaled `LazyArrayRangeFlow`.
+        """
         return LazyArrayRangeFlow(
-            symbols=self.symbols,
-            unit=self.unit,
             start=spu.convert_to(
-                bound_cb(self.start if not reverse else self.stop), self.unit
+                scaler(self.start if not reverse else self.stop), self.unit
             ),
             stop=spu.convert_to(
-                bound_cb(self.stop if not reverse else self.start), self.unit
+                scaler(self.stop if not reverse else self.start), self.unit
             ),
             steps=self.steps,
             scaling=self.scaling,
+            unit=self.unit,
+            int_symbols=self.int_symbols,
+            real_symbols=self.real_symbols,
+            complex_symbols=self.complex_symbols,
         )
+
+    ####################
+    # - Lazy Representation
+    ####################
+    @functools.cached_property
+    def array_generator(
+        self,
+    ) -> typ.Callable[
+        [int | float | complex, int | float | complex, int],
+        jtyp.Inexact[jtyp.Array, ' steps'],
+    ]:
+        """Compute the correct `jnp.*space` array generator, where `*` is one of the supported scaling methods.
+
+        Returns:
+            A `jax` function that takes a valid `start`, `stop`, and `steps`, and returns a 1D `jax` array.
+        """
+        jnp_nspace = {
+            'lin': jnp.linspace,
+            'geom': jnp.geomspace,
+            'log': jnp.logspace,
+        }.get(self.scaling)
+        if jnp_nspace is None:
+            msg = f'ArrayFlow scaling method {self.scaling} is unsupported'
+            raise RuntimeError(msg)
+
+        return jnp_nspace
+
+    @functools.cached_property
+    def as_func(
+        self,
+    ) -> typ.Callable[[int | float | complex, ...], jtyp.Inexact[jtyp.Array, ' steps']]:
+        """Create a function that can compute the non-lazy output array as a function of the symbols in the expressions for `start` and `stop`.
+
+        Notes:
+            The ordering of the symbols is identical to `self.symbols`, which is guaranteed to be a deterministically sorted list of symbols.
+
+        Returns:
+            A function that, given values for the symbols in `self.symbols` (in that order), computes and returns the generated array.
+        """
+        # Compile JAX Functions for Start/End Expressions
+        ## FYI, JAX-in-JAX works perfectly fine.
+        start_jax = sp.lambdify(self.symbols, self.start, 'jax')
+        stop_jax = sp.lambdify(self.symbols, self.stop, 'jax')
+
+        # Compile ArrayGen Function
+        def gen_array(
+            *args: list[int | float | complex],
+        ) -> jtyp.Inexact[jtyp.Array, ' steps']:
+            return self.array_generator(start_jax(*args), stop_jax(*args), self.steps)
+
+        # Return ArrayGen Function
+        return gen_array
+
+    @functools.cached_property
+    def as_lazy_value_func(self) -> LazyValueFuncFlow:
+        """Creates a `LazyValueFuncFlow` using the output of `self.as_func`.
+
+        This is useful for ex. parameterizing the first array in the node graph, without binding an entire computed array.
+
+        Notes:
+            The function enclosed in the `LazyValueFuncFlow` is identical to the one returned by `self.as_func`.
+
+        Returns:
+            A `LazyValueFuncFlow` containing `self.as_func`, as well as appropriate supporting settings.
+        """
+        return LazyValueFuncFlow(
+            func=self.as_func,
+            func_args=[
+                (sym.name, spux.sympy_to_python_type(sym)) for sym in self.symbols
+            ],
+            supports_jax=True,
+        )
+    ####################
+    # - Realization
+    ####################
     def realize(
-        self, symbol_values: dict[sp.Symbol, ValueFlow] = MappingProxyType({})
-    ) -> ArrayFlow:
+        self,
+        symbol_values: dict[spux.Symbol, ValueFlow] = MappingProxyType({}),
+        kind: typ.Literal[FlowKind.Array, FlowKind.LazyValueFunc] = FlowKind.Array,
+    ) -> ArrayFlow | LazyValueFuncFlow:
+        """Realizes the represented array by substituting values for its symbols and generating the values.
+
+        Parameters:
+            symbol_values: Values for each symbol in `self.symbols`, used to realize the boundary expressions.
+            kind: Whether to return an eager `ArrayFlow`, or a `LazyValueFuncFlow` that generates the array on demand.
+
+        Returns:
+            An `ArrayFlow` containing the generated array, or an equivalent `LazyValueFuncFlow`.
+        """
+        if not set(self.symbols).issubset(set(symbol_values.keys())):
+            msg = f'Provided symbols ({set(symbol_values.keys())}) do not provide values for all expression symbols ({self.symbols}) that may be found in the boundary expressions (start={self.start}, stop={self.stop})'
+            raise ValueError(msg)
+
         # Realize Symbols
-        if self.unit is None:
-            start = spux.sympy_to_python(self.start.subs(symbol_values))
-            stop = spux.sympy_to_python(self.stop.subs(symbol_values))
-        else:
-            start = spux.sympy_to_python(
-                spux.scale_to_unit(self.start.subs(symbol_values), self.unit)
-            )
-            stop = spux.sympy_to_python(
-                spux.scale_to_unit(self.stop.subs(symbol_values), self.unit)
-            )
+        realized_start = spux.sympy_to_python(
+            self.start.subs({sym: symbol_values[sym.name] for sym in self.symbols})
+        )
+        realized_stop = spux.sympy_to_python(
+            self.stop.subs({sym: symbol_values[sym.name] for sym in self.symbols})
+        )
 
         # Return Linspace / Logspace
-        if self.scaling == 'lin':
-            return ArrayFlow(
-                values=jnp.linspace(start, stop, self.steps), unit=self.unit
-            )
-        if self.scaling == 'geom':
-            return ArrayFlow(jnp.geomspace(start, stop, self.steps), self.unit)
-        if self.scaling == 'log':
-            return ArrayFlow(jnp.logspace(start, stop, self.steps), self.unit)
-
-        msg = f'ArrayFlow scaling method {self.scaling} is unsupported'
-        raise RuntimeError(msg)
+        def gen_array() -> jtyp.Inexact[jtyp.Array, ' steps']:
+            return self.array_generator(realized_start, realized_stop, self.steps)
+
+        if kind == FlowKind.Array:
+            return ArrayFlow(values=gen_array(), unit=self.unit)
+        if kind == FlowKind.LazyValueFunc:
+            return LazyValueFuncFlow(func=gen_array, supports_jax=True)
+
+        msg = f'Invalid kind: {kind}'
+        raise TypeError(msg)
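To make the new `realize()` semantics concrete, an illustrative (non-authoritative) usage sketch with purely numeric bounds follows; symbolic bounds work the same way but additionally require `symbol_values`.

```python
# Illustrative sketch based on the fields/methods added above; assumes the
# flow_kinds definitions are importable and spux.sympy_to_python accepts integers.
import sympy as sp
import sympy.physics.units as spu

wl_range = LazyArrayRangeFlow(
    start=sp.Integer(500),  # unitless, symbolic-capable bounds
    stop=sp.Integer(700),
    steps=100,
    scaling='lin',
    unit=spu.nanometer,
)

eager = wl_range.realize()                                # ArrayFlow of 100 values
deferred = wl_range.realize(kind=FlowKind.LazyValueFunc)  # generated on demand
```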
 ####################
@@ -318,4 +607,35 @@ ParamsFlow: typ.TypeAlias = dict[str, typ.Any]
 ####################
 # - Lazy Value Func
 ####################
-InfoFlow: typ.TypeAlias = dict[str, typ.Any]
+@dataclasses.dataclass(frozen=True, kw_only=True)
+class InfoFlow:
+    func_args: list[tuple[str, type]] = MappingProxyType({})
+    func_kwargs: dict[str, type] = MappingProxyType({})
+
+    # Dimension Information
+    has_ndims: bool = False
+    dim_names: list[str] = ()
+    dim_idx: dict[str, ArrayFlow | LazyArrayRangeFlow] = MappingProxyType({})
+    ## TODO: Validation, esp. length of dims. Pydantic?
+
+    def compose_within(
+        self,
+        enclosing_func_args: list[tuple[str, type]] = (),
+        enclosing_func_kwargs: dict[str, type] = MappingProxyType({}),
+    ) -> typ.Self:
+        return InfoFlow(
+            func_args=self.func_args + enclosing_func_args,
+            func_kwargs=self.func_kwargs | enclosing_func_kwargs,
+        )
+
+    def call_lazy_value_func(
+        self,
+        lazy_value_func: LazyValueFuncFlow,
+        *args: list[typ.Any],
+        **kwargs: dict[str, typ.Any],
+    ) -> tuple[list[typ.Any], dict[str, typ.Any]]:
+        if lazy_value_func.supports_jax:
+            return lazy_value_func.func_jax(*args, **kwargs)
+        return lazy_value_func.func(*args, **kwargs)

View File

@@ -1,6 +1,6 @@
 import enum
 
-from blender_maxwell.blender_type_enum import BlenderTypeEnum
+from blender_maxwell.utils.blender_type_enum import BlenderTypeEnum
 
 
 class ManagedObjType(BlenderTypeEnum):

View File

@@ -8,7 +8,7 @@ from blender_maxwell.utils.blender_type_enum import (
 @append_cls_name_to_values
 class NodeType(BlenderTypeEnum):
-    #KitchenSink = enum.auto()
+    # KitchenSink = enum.auto()
 
     # Analysis
     Viz = enum.auto()
@@ -24,18 +24,18 @@ class NodeType(BlenderTypeEnum):
     UnitSystem = enum.auto()
 
     ## Inputs / Scene
-    #Time = enum.auto()
+    # Time = enum.auto()
     ## Inputs / Web Importers
     Tidy3DWebImporter = enum.auto()
     ## Inputs / File Importers
     Tidy3DFileImporter = enum.auto()
     ## Inputs / Constants
+    ExprConstant = enum.auto()
     ScientificConstant = enum.auto()
     NumberConstant = enum.auto()
     PhysicalConstant = enum.auto()
     BlenderConstant = enum.auto()
 
     # Outputs
     Viewer = enum.auto()
     ## Outputs / File Exporters
@@ -48,43 +48,43 @@ class NodeType(BlenderTypeEnum):
     PointDipoleSource = enum.auto()
     PlaneWaveSource = enum.auto()
     UniformCurrentSource = enum.auto()
-    #ModeSource = enum.auto()
-    #GaussianBeamSource = enum.auto()
-    #AstigmaticGaussianBeamSource = enum.auto()
-    #TFSFSource = enum.auto()
-    #EHEquivalenceSource = enum.auto()
-    #EHSource = enum.auto()
+    # ModeSource = enum.auto()
+    # GaussianBeamSource = enum.auto()
+    # AstigmaticGaussianBeamSource = enum.auto()
+    # TFSFSource = enum.auto()
+    # EHEquivalenceSource = enum.auto()
+    # EHSource = enum.auto()
 
     ## Sources / Temporal Shapes
     GaussianPulseTemporalShape = enum.auto()
-    #ContinuousWaveTemporalShape = enum.auto()
-    #ArrayTemporalShape = enum.auto()
+    # ContinuousWaveTemporalShape = enum.auto()
+    # ArrayTemporalShape = enum.auto()
 
     # Mediums
     LibraryMedium = enum.auto()
-    #PECMedium = enum.auto()
-    #IsotropicMedium = enum.auto()
-    #AnisotropicMedium = enum.auto()
-    #TripleSellmeierMedium = enum.auto()
-    #SellmeierMedium = enum.auto()
-    #PoleResidueMedium = enum.auto()
-    #DrudeMedium = enum.auto()
-    #DrudeLorentzMedium = enum.auto()
-    #DebyeMedium = enum.auto()
+    # PECMedium = enum.auto()
+    # IsotropicMedium = enum.auto()
+    # AnisotropicMedium = enum.auto()
+    # TripleSellmeierMedium = enum.auto()
+    # SellmeierMedium = enum.auto()
+    # PoleResidueMedium = enum.auto()
+    # DrudeMedium = enum.auto()
+    # DrudeLorentzMedium = enum.auto()
+    # DebyeMedium = enum.auto()
 
     ## Mediums / Non-Linearities
-    #AddNonLinearity = enum.auto()
-    #ChiThreeSusceptibilityNonLinearity = enum.auto()
-    #TwoPhotonAbsorptionNonLinearity = enum.auto()
-    #KerrNonLinearity = enum.auto()
+    # AddNonLinearity = enum.auto()
+    # ChiThreeSusceptibilityNonLinearity = enum.auto()
+    # TwoPhotonAbsorptionNonLinearity = enum.auto()
+    # KerrNonLinearity = enum.auto()
 
     # Structures
-    #ObjectStructure = enum.auto()
+    # ObjectStructure = enum.auto()
     GeoNodesStructure = enum.auto()
-    #ScriptedStructure = enum.auto()
+    # ScriptedStructure = enum.auto()
     ## Structures / Primitives
     BoxStructure = enum.auto()
     SphereStructure = enum.auto()
-    #CylinderStructure = enum.auto()
+    # CylinderStructure = enum.auto()
 
     # Bounds
     BoundConds = enum.auto()
@ -99,22 +99,22 @@ class NodeType(BlenderTypeEnum):
# Monitors # Monitors
EHFieldMonitor = enum.auto() EHFieldMonitor = enum.auto()
PowerFluxMonitor = enum.auto() PowerFluxMonitor = enum.auto()
#EpsilonTensorMonitor = enum.auto() # EpsilonTensorMonitor = enum.auto()
#DiffractionMonitor = enum.auto() # DiffractionMonitor = enum.auto()
## Monitors / Projected ## Monitors / Projected
#CartesianNearFieldProjectionMonitor = enum.auto() # CartesianNearFieldProjectionMonitor = enum.auto()
#ObservationAngleNearFieldProjectionMonitor = enum.auto() # ObservationAngleNearFieldProjectionMonitor = enum.auto()
#KSpaceNearFieldProjectionMonitor = enum.auto() # KSpaceNearFieldProjectionMonitor = enum.auto()
# Sims # Sims
FDTDSim = enum.auto() FDTDSim = enum.auto()
SimDomain = enum.auto() SimDomain = enum.auto()
SimGrid = enum.auto() SimGrid = enum.auto()
## Sims / Sim Grid Axis ## Sims / Sim Grid Axis
#AutomaticSimGridAxis = enum.auto() # AutomaticSimGridAxis = enum.auto()
#ManualSimGridAxis = enum.auto() # ManualSimGridAxis = enum.auto()
#UniformSimGridAxis = enum.auto() # UniformSimGridAxis = enum.auto()
#ArraySimGridAxis = enum.auto() # ArraySimGridAxis = enum.auto()
# Utilities # Utilities
Combine = enum.auto() Combine = enum.auto()

View File

@ -7,7 +7,6 @@ import jax.numpy as jnp
import matplotlib import matplotlib
import matplotlib.axis as mpl_ax import matplotlib.axis as mpl_ax
import numpy as np import numpy as np
import typing_extensions as typx
from blender_maxwell.utils import logger from blender_maxwell.utils import logger
@ -123,8 +122,8 @@ class ManagedBLImage(base.ManagedObj):
self, self,
width_px: int, width_px: int,
height_px: int, height_px: int,
color_model: typx.Literal['RGB', 'RGBA'], color_model: typ.Literal['RGB', 'RGBA'],
dtype: typx.Literal['uint8', 'float32'], dtype: typ.Literal['uint8', 'float32'],
): ):
"""Returns the managed blender image. """Returns the managed blender image.

View File

@ -2,8 +2,9 @@ import typing as typ
import bpy import bpy
import jax.numpy as jnp import jax.numpy as jnp
import sympy.physics.units as spu
from blender_maxwell.utils import logger from blender_maxwell.utils import bl_cache, logger
from ... import contracts as ct from ... import contracts as ct
from ... import sockets from ... import sockets
@ -11,11 +12,9 @@ from .. import base, events
log = logger.get(__name__) log = logger.get(__name__)
CACHE_SIM_DATA = {}
class ExtractDataNode(base.MaxwellSimNode): class ExtractDataNode(base.MaxwellSimNode):
"""Node for extracting data from other objects.""" """Node for extracting data from particular objects."""
node_type = ct.NodeType.ExtractData node_type = ct.NodeType.ExtractData
bl_label = 'Extract' bl_label = 'Extract'
@ -30,239 +29,196 @@ class ExtractDataNode(base.MaxwellSimNode):
} }
#################### ####################
# - Properties: Sim Data # - Properties
#################### ####################
sim_data__monitor_name: bpy.props.EnumProperty( extract_filter: bpy.props.EnumProperty(
name='Sim Data Monitor Name', name='Extract Filter',
description='Monitor to extract from the attached SimData', description='Data to extract from the input',
items=lambda self, context: self.search_monitors(context), search=lambda self, _, edit_text: self.search_extract_filters(edit_text),
update=lambda self, context: self.sync_prop('sim_data__monitor_name', context), update=lambda self, context: self.on_prop_changed('extract_filter', context),
) )
cache__num_monitors: bpy.props.StringProperty(default='') # Sim Data
cache__monitor_names: bpy.props.StringProperty(default='') sim_data_monitor_nametype: dict[str, str] = bl_cache.BLField({})
cache__monitor_types: bpy.props.StringProperty(default='')
def search_monitors(self, _: bpy.types.Context) -> list[tuple[str, str, str]]: # Field Data
"""Search the linked simulation data for monitors.""" field_data_components: set[str] = bl_cache.BLField(set())
# No Linked Sim Data: Return 'None'
if not self.inputs.get('Sim Data') or not self.inputs['Sim Data'].is_linked:
return [('NONE', 'None', 'No monitors')]
# Return Monitor Names def search_extract_filters(
## Special Case for No Monitors self, _: bpy.types.Context
monitor_names = ( ) -> list[tuple[str, str, str]]:
self.cache__monitor_names.split(',') if self.cache__monitor_names else [] # Sim Data
) if self.active_socket_set == 'Sim Data' and self.inputs['Sim Data'].is_linked:
monitor_types = ( return [
self.cache__monitor_types.split(',') if self.cache__monitor_types else [] (
) monitor_name,
if len(monitor_names) == 0: f'{monitor_name}',
return [('NONE', 'None', 'No monitors')] f'Monitor "{monitor_name}" ({monitor_type}) recorded by the Sim',
return [ )
( for monitor_name, monitor_type in self.sim_data_monitor_nametype.items()
monitor_name, ]
f'{monitor_name}',
f'Monitor "{monitor_name}" ({monitor_type}) recorded by the Sim',
)
for monitor_name, monitor_type in zip(
monitor_names, monitor_types, strict=False
)
]
def draw_props__sim_data( # Field Data
self, _: bpy.types.Context, col: bpy.types.UILayout if self.active_socket_set == 'Field Data' and self.inputs['Sim Data'].is_linked:
) -> None: return [
col.prop(self, 'sim_data__monitor_name', text='') ([('Ex', 'Ex', 'Ex')] if 'Ex' in self.field_data_components else [])
+ ([('Ey', 'Ey', 'Ey')] if 'Ey' in self.field_data_components else [])
+ ([('Ez', 'Ez', 'Ez')] if 'Ez' in self.field_data_components else [])
+ ([('Hx', 'Hx', 'Hx')] if 'Hx' in self.field_data_components else [])
+ ([('Hy', 'Hy', 'Hy')] if 'Hy' in self.field_data_components else [])
+ ([('Hz', 'Hz', 'Hz')] if 'Hz' in self.field_data_components else [])
]
def draw_info__sim_data( # Flux Data
self, _: bpy.types.Context, col: bpy.types.UILayout ## Nothing to extract.
) -> None:
if self.sim_data__monitor_name != 'NONE': # Fallback
return []
####################
# - UI
####################
def draw_props(self, _: bpy.types.Context, col: bpy.types.UILayout) -> None:
col.prop(self, 'extract_filter', text='')
def draw_info(self, _: bpy.types.Context, col: bpy.types.UILayout) -> None:
if self.active_socket_set == 'Sim Data' and self.inputs['Sim Data'].is_linked:
# Header # Header
row = col.row() row = col.row()
row.alignment = 'CENTER' row.alignment = 'CENTER'
row.label(text=f'{self.cache__num_monitors} Monitors') row.label(text=f'{len(self.sim_data_monitor_nametype)} Monitors')
# Monitor Info # Monitor Info
if int(self.cache__num_monitors) > 0: if len(self.sim_data_monitor_nametype) > 0:
for monitor_name, monitor_type in zip( for (
self.cache__monitor_names.split(','), monitor_name,
self.cache__monitor_types.split(','), monitor_type,
strict=False, ) in self.sim_data_monitor_nametype.items():
):
col.label(text=f'{monitor_name}: {monitor_type}') col.label(text=f'{monitor_name}: {monitor_type}')
#################### ####################
# - Events: Sim Data # - Events
#################### ####################
@events.on_value_changed( @events.on_value_changed(
socket_name='Sim Data', socket_name='Sim Data',
input_sockets={'Sim Data'},
input_sockets_optional={'Sim Data': True},
) )
def on_sim_data_changed(self): def on_sim_data_changed(self, input_sockets: dict):
# SimData Cache Hit and SimData Input Unlinked if input_sockets['Sim Data'] is not None:
## Delete Cache Entry self.sim_data_monitor_nametype = {
if ( monitor_name: monitor_data.type
CACHE_SIM_DATA.get(self.instance_id) is not None for monitor_name, monitor_data in input_sockets[
and not self.inputs['Sim Data'].is_linked 'Sim Data'
): ].monitor_data.items()
CACHE_SIM_DATA.pop(self.instance_id, None) ## Both member-check
self.cache__num_monitors = ''
self.cache__monitor_names = ''
self.cache__monitor_types = ''
# SimData Cache Miss and Linked SimData
if (
CACHE_SIM_DATA.get(self.instance_id) is None
and self.inputs['Sim Data'].is_linked
):
sim_data = self._compute_input('Sim Data')
## Create Cache Entry
CACHE_SIM_DATA[self.instance_id] = {
'sim_data': sim_data,
'monitor_names': list(sim_data.monitor_data.keys()),
'monitor_types': [
monitor_data.type for monitor_data in sim_data.monitor_data.values()
],
} }
cache = CACHE_SIM_DATA[self.instance_id]
self.cache__num_monitors = str(len(cache['monitor_names']))
self.cache__monitor_names = ','.join(cache['monitor_names'])
self.cache__monitor_types = ','.join(cache['monitor_types'])
####################
# - Properties: Field Data
####################
field_data__component: bpy.props.EnumProperty(
name='Field Data Component',
description='Field monitor component to extract from the attached Field Data',
items=lambda self, context: self.search_field_data_components(context),
update=lambda self, context: self.sync_prop('field_data__component', context),
)
cache__components: bpy.props.StringProperty(default='')
def search_field_data_components(
self, _: bpy.types.Context
) -> list[tuple[str, str, str]]:
if not self.inputs.get('Field Data') or not self.inputs['Field Data'].is_linked:
return [('NONE', 'None', 'No data')]
if not self.cache__components:
return [('NONE', 'Loading...', 'Loading data...')]
components = [
tuple(component_str.split(','))
for component_str in self.cache__components.split('|')
]
if len(components) == 0:
return [('NONE', 'None', 'No components')]
return components
def draw_props__field_data(
self, _: bpy.types.Context, col: bpy.types.UILayout
) -> None:
col.prop(self, 'field_data__component', text='')
def draw_info__field_data(
self, _: bpy.types.Context, col: bpy.types.UILayout
) -> None:
pass
####################
# - Events: Field Data
####################
@events.on_value_changed( @events.on_value_changed(
socket_name='Field Data', socket_name='Field Data',
)
def on_field_data_changed(self):
if self.inputs['Field Data'].is_linked and not self.cache__components:
field_data = self._compute_input('Field Data')
components = [
*([('Ex', 'Ex', 'Ex')] if field_data.Ex is not None else []),
*([('Ey', 'Ey', 'Ey')] if field_data.Ey is not None else []),
*([('Ez', 'Ez', 'Ez')] if field_data.Ez is not None else []),
*([('Hx', 'Hx', 'Hx')] if field_data.Hx is not None else []),
*([('Hy', 'Hy', 'Hy')] if field_data.Hy is not None else []),
*([('Hz', 'Hz', 'Hz')] if field_data.Hz is not None else []),
]
self.cache__components = '|'.join(
[','.join(component) for component in components]
)
elif not self.inputs['Field Data'].is_linked and self.cache__components:
self.cache__components = ''
####################
# - Flux Data
####################
def draw_props__flux_data(
self, _: bpy.types.Context, col: bpy.types.UILayout
) -> None:
pass
def draw_info__flux_data(
self, _: bpy.types.Context, col: bpy.types.UILayout
) -> None:
pass
####################
# - Global
####################
def draw_props(self, context: bpy.types.Context, col: bpy.types.UILayout) -> None:
if self.active_socket_set == 'Sim Data':
self.draw_props__sim_data(context, col)
if self.active_socket_set == 'Field Data':
self.draw_props__field_data(context, col)
if self.active_socket_set == 'Flux Data':
self.draw_props__flux_data(context, col)
def draw_info(self, context: bpy.types.Context, col: bpy.types.UILayout) -> None:
if self.active_socket_set == 'Sim Data':
self.draw_info__sim_data(context, col)
if self.active_socket_set == 'Field Data':
self.draw_info__field_data(context, col)
if self.active_socket_set == 'Flux Data':
self.draw_info__flux_data(context, col)
@events.computes_output_socket(
'Data',
props={'sim_data__monitor_name', 'field_data__component'},
input_sockets={'Field Data'}, input_sockets={'Field Data'},
input_sockets_optional={'Field Data': True}, input_sockets_optional={'Field Data': True},
) )
def on_field_data_changed(self, input_sockets: dict):
if input_sockets['Field Data'] is not None:
self.field_data_components = {
field_component
for field_component in ('Ex', 'Ey', 'Ez', 'Hx', 'Hy', 'Hz')
if getattr(input_sockets['Field Data'], field_component) is not None
}
####################
# - Output: Value
####################
@events.computes_output_socket(
'Data',
kind=ct.FlowKind.Value,
props={'active_socket_set', 'extract_filter'},
input_sockets={'Sim Data', 'Field Data', 'Flux Data'},
input_sockets_optional={
'Sim Data': True,
'Field Data': True,
'Flux Data': True,
},
)
def compute_extracted_data(self, props: dict, input_sockets: dict): def compute_extracted_data(self, props: dict, input_sockets: dict):
if self.active_socket_set == 'Sim Data': if props['active_socket_set'] == 'Sim Data':
if ( return input_sockets['Sim Data'].monitor_data[props['extract_filter']]
CACHE_SIM_DATA.get(self.instance_id) is None
and self.inputs['Sim Data'].is_linked
):
self.on_sim_data_changed()
sim_data = CACHE_SIM_DATA[self.instance_id]['sim_data'] if props['active_socket_set'] == 'Field Data':
return sim_data.monitor_data[props['sim_data__monitor_name']] return getattr(input_sockets['Field Data'], props['extract_filter'])
elif self.active_socket_set == 'Field Data': # noqa: RET505 if props['active_socket_set'] == 'Flux Data':
xarr = getattr(input_sockets['Field Data'], props['field_data__component']) return input_sockets['Flux Data']
#return jarray.JArray.from_xarray( msg = f'Tried to get a "FlowKind.Value" from socket set {props["active_socket_set"]} in "{self.bl_label}"'
# xarr, raise RuntimeError(msg)
# dim_units={
# 'x': spu.um,
# 'y': spu.um,
# 'z': spu.um,
# 'f': spu.hertz,
# },
#)
elif self.active_socket_set == 'Flux Data': ####################
flux_data = self._compute_input('Flux Data') # - Output: LazyValueFunc
return jnp.array(flux_data.flux) ####################
@events.computes_output_socket(
'Data',
kind=ct.FlowKind.LazyValueFunc,
props={'active_socket_set'},
output_sockets={'Data'},
output_socket_kinds={'Data': ct.FlowKind.Value},
)
def compute_extracted_data_lazy(self, props: dict, output_sockets: dict):
if props['active_socket_set'] in {'Field Data', 'Flux Data'}:
data = jnp.array(output_sockets['Data'].data)
return ct.LazyValueFuncFlow(func=lambda: data, supports_jax=True)
msg = f'Tried to get data from unknown output socket in "{self.bl_label}"' msg = f'Tried to get a "FlowKind.LazyValueFunc" from socket set {props["active_socket_set"]} in "{self.bl_label}"'
raise RuntimeError(msg)
####################
# - Output: Info
####################
@events.computes_output_socket(
'Data',
kind=ct.FlowKind.Info,
props={'active_socket_set'},
output_sockets={'Data'},
output_socket_kinds={'Data': ct.FlowKind.Value},
)
def compute_extracted_data_info(self, props: dict, output_sockets: dict):
if props['active_socket_set'] == 'Field Data':
xarr = output_sockets['Data']
return ct.InfoFlow(
dim_names=['x', 'y', 'z', 'f'],
dim_idx={
axis: ct.ArrayFlow(values=xarr.get_index(axis).values, unit=spu.um)
for axis in ['x', 'y', 'z']
}
| {
'f': ct.ArrayFlow(
values=xarr.get_index('f').values, unit=spu.hertz
),
},
)
if props['active_socket_set'] == 'Flux Data':
xarr = output_sockets['Data']
return ct.InfoFlow(
dim_names=['f'],
dim_idx={
'f': ct.ArrayFlow(
values=xarr.get_index('f').values, unit=spu.hertz
),
},
)
msg = f'Tried to get a "FlowKind.Info" from socket set {props["active_socket_set"]} in "{self.bl_label}"'
raise RuntimeError(msg) raise RuntimeError(msg)
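
The `dim_idx` construction above leans on xarray's coordinate indexes via `xarr.get_index(axis).values`. A small standalone sketch of that lookup, using a synthetic `DataArray` rather than real Tidy3D monitor data (the coordinate values here are illustrative):

import numpy as np
import xarray as xr

xarr = xr.DataArray(
    np.zeros((2, 3)),
    coords={'x': [0.0, 1.0], 'f': [1e14, 2e14, 3e14]},
    dims=['x', 'f'],
)

# One index array per dimension, exactly what the InfoFlow's dim_idx wraps per-axis.
dim_idx = {axis: xarr.get_index(axis).values for axis in ['x', 'f']}
print(dim_idx['f'])  # [1.e+14 2.e+14 3.e+14]
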

View File

@ -58,7 +58,7 @@ class FilterMathNode(base.MaxwellSimNode):
name='Op', name='Op',
description='Operation to reduce the input axis with', description='Operation to reduce the input axis with',
items=lambda self, _: self.search_operations(), items=lambda self, _: self.search_operations(),
update=lambda self, context: self.sync_prop('operation', context), update=lambda self, context: self.on_prop_changed('operation', context),
) )
def search_operations(self) -> list[tuple[str, str, str]]: def search_operations(self) -> list[tuple[str, str, str]]:

View File

@ -43,7 +43,7 @@ class MapMathNode(base.MaxwellSimNode):
name='Op', name='Op',
description='Operation to apply to the input', description='Operation to apply to the input',
items=lambda self, _: self.search_operations(), items=lambda self, _: self.search_operations(),
update=lambda self, context: self.sync_prop('operation', context), update=lambda self, context: self.on_prop_changed('operation', context),
) )
def search_operations(self) -> list[tuple[str, str, str]]: def search_operations(self) -> list[tuple[str, str, str]]:
@ -101,9 +101,13 @@ class MapMathNode(base.MaxwellSimNode):
#################### ####################
@events.computes_output_socket( @events.computes_output_socket(
'Data', 'Data',
kind=ct.FlowKind.LazyValueFunc,
props={'active_socket_set', 'operation'}, props={'active_socket_set', 'operation'},
input_sockets={'Data', 'Mapper'}, input_sockets={'Data', 'Mapper'},
input_socket_kinds={'Mapper': ct.FlowKind.LazyValue}, input_socket_kinds={
'Data': ct.FlowKind.LazyValueFunc,
'Mapper': ct.FlowKind.LazyValueFunc,
},
input_sockets_optional={'Mapper': True}, input_sockets_optional={'Mapper': True},
) )
def compute_data(self, props: dict, input_sockets: dict): def compute_data(self, props: dict, input_sockets: dict):
@ -150,8 +154,9 @@ class MapMathNode(base.MaxwellSimNode):
}[props['active_socket_set']][props['operation']] }[props['active_socket_set']][props['operation']]
# Compose w/Lazy Root Function Data # Compose w/Lazy Root Function Data
return input_sockets['Data'].compose( return input_sockets['Data'].compose_within(
function=mapping_func, mapping_func,
supports_jax=True,
) )
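
The `LazyValueFunc` flow used here boils down to deferring evaluation behind zero-argument functions that later math nodes wrap with their own operations. A minimal standalone sketch of that pattern with plain JAX, not the actual `LazyValueFuncFlow`/`compose_within` API (the variable names are hypothetical):

import jax
import jax.numpy as jnp

data = jnp.linspace(0.0, 1.0, 8)

lazy_root = lambda: data                    # ExtractData: wraps the loaded array
lazy_mapped = lambda: jnp.abs(lazy_root())  # MapMath: composes a mapping function around it

print(jax.jit(lazy_mapped)())  # nothing is evaluated (or jit-compiled) until this call
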

View File

@ -42,7 +42,7 @@ class OperateMathNode(base.MaxwellSimNode):
name='Op', name='Op',
description='Operation to apply to the two inputs', description='Operation to apply to the two inputs',
items=lambda self, _: self.search_operations(), items=lambda self, _: self.search_operations(),
update=lambda self, context: self.sync_prop('operation', context), update=lambda self, context: self.on_prop_changed('operation', context),
) )
def search_operations(self) -> list[tuple[str, str, str]]: def search_operations(self) -> list[tuple[str, str, str]]:

View File

@ -44,7 +44,7 @@ class ReduceMathNode(base.MaxwellSimNode):
name='Op', name='Op',
description='Operation to reduce the input axis with', description='Operation to reduce the input axis with',
items=lambda self, _: self.search_operations(), items=lambda self, _: self.search_operations(),
update=lambda self, context: self.sync_prop('operation', context), update=lambda self, context: self.on_prop_changed('operation', context),
) )
def search_operations(self) -> list[tuple[str, str, str]]: def search_operations(self) -> list[tuple[str, str, str]]:
@ -79,24 +79,14 @@ class ReduceMathNode(base.MaxwellSimNode):
#################### ####################
@events.computes_output_socket( @events.computes_output_socket(
'Data', 'Data',
props={'operation'}, props={'active_socket_set', 'operation'},
input_sockets={'Data', 'Axis', 'Reducer'}, input_sockets={'Data', 'Axis', 'Reducer'},
input_socket_kinds={'Reducer': ct.FlowKind.LazyValue}, input_socket_kinds={'Reducer': ct.FlowKind.LazyValueFunc},
input_sockets_optional={'Reducer': True}, input_sockets_optional={'Reducer': True},
) )
def compute_data(self, props: dict, input_sockets: dict): def compute_data(self, props: dict, input_sockets: dict):
if not hasattr(input_sockets['Data'], 'shape'): if props['active_socket_set'] == 'By Axis':
msg = 'Input socket "Data" must be an N-D Array (with a "shape" attribute)' # Simple Accumulation
raise ValueError(msg)
if self.active_socket_set == 'Axis Expr':
ufunc = jnp.ufunc(input_sockets['Reducer'], nin=2, nout=1)
return ufunc.reduce(input_sockets['Data'], axis=input_sockets['Axis'])
if self.active_socket_set == 'By Axis':
## Dimension Reduction
# ('SQUEEZE', 'Squeeze', '(*, 1, *) -> (*, *)'),
# Accumulation
if props['operation'] == 'SUM': if props['operation'] == 'SUM':
return jnp.sum(input_sockets['Data'], axis=input_sockets['Axis']) return jnp.sum(input_sockets['Data'], axis=input_sockets['Axis'])
if props['operation'] == 'PROD': if props['operation'] == 'PROD':
@ -122,6 +112,10 @@ class ReduceMathNode(base.MaxwellSimNode):
if props['operation'] == 'SQUEEZE': if props['operation'] == 'SQUEEZE':
return jnp.squeeze(input_sockets['Data'], axis=input_sockets['Axis']) return jnp.squeeze(input_sockets['Data'], axis=input_sockets['Axis'])
if props['active_socket_set'] == 'Expr':
ufunc = jnp.ufunc(input_sockets['Reducer'], nin=2, nout=1)
return ufunc.reduce(input_sockets['Data'], axis=input_sockets['Axis'])
msg = 'Operation invalid' msg = 'Operation invalid'
raise ValueError(msg) raise ValueError(msg)
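
For reference, the 'By Axis' branch above dispatches to ordinary `jax.numpy` axis reductions. A short standalone sketch with illustrative data; in the node, the array and axis arrive through the 'Data' and 'Axis' input sockets:

import jax.numpy as jnp

data = jnp.arange(24.0).reshape(2, 3, 4)
axis = 1

print(jnp.sum(data, axis=axis).shape)                 # (2, 4)
print(jnp.prod(data, axis=axis).shape)                # (2, 4)
print(jnp.squeeze(data[:, :1, :], axis=axis).shape)   # (2, 4) -- removes a length-1 axis
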

View File

@ -43,7 +43,7 @@ class VizNode(base.MaxwellSimNode):
('GRAYSCALE', 'Grayscale', 'Barebones'), ('GRAYSCALE', 'Grayscale', 'Barebones'),
], ],
default='VIRIDIS', default='VIRIDIS',
update=lambda self, context: self.sync_prop('colormap', context), update=lambda self, context: self.on_prop_changed('colormap', context),
) )
##################### #####################

View File

@ -10,13 +10,12 @@ from types import MappingProxyType
import bpy import bpy
import sympy as sp import sympy as sp
import typing_extensions as typx
from blender_maxwell.utils import logger from blender_maxwell.utils import bl_cache, logger
from .. import bl_cache, sockets
from .. import contracts as ct from .. import contracts as ct
from .. import managed_objs as _managed_objs from .. import managed_objs as _managed_objs
from .. import sockets
from . import events from . import events
from . import presets as _presets from . import presets as _presets
@ -102,12 +101,12 @@ class MaxwellSimNode(bpy.types.Node):
Parameters: Parameters:
name: The name of the property to set. name: The name of the property to set.
prop: The `bpy.types.Property` to instantiate and attach. prop: The `bpy.types.Property` to instantiate and attach.
no_update: Don't attach a `self.sync_prop()` callback to the property's `update`. no_update: Don't attach a `self.on_prop_changed()` callback to the property's `update`.
""" """
_update_with_name = prop_name if update_with_name is None else update_with_name _update_with_name = prop_name if update_with_name is None else update_with_name
extra_kwargs = ( extra_kwargs = (
{ {
'update': lambda self, context: self.sync_prop( 'update': lambda self, context: self.on_prop_changed(
_update_with_name, context _update_with_name, context
), ),
} }
@ -316,7 +315,7 @@ class MaxwellSimNode(bpy.types.Node):
# - Socket Accessors # - Socket Accessors
#################### ####################
def _bl_sockets( def _bl_sockets(
self, direc: typx.Literal['input', 'output'] self, direc: typ.Literal['input', 'output']
) -> bpy.types.NodeInputs: ) -> bpy.types.NodeInputs:
"""Retrieve currently visible Blender sockets on the node, by-direction. """Retrieve currently visible Blender sockets on the node, by-direction.
@ -335,7 +334,7 @@ class MaxwellSimNode(bpy.types.Node):
def _active_socket_set_socket_defs( def _active_socket_set_socket_defs(
self, self,
direc: typx.Literal['input', 'output'], direc: typ.Literal['input', 'output'],
) -> dict[ct.SocketName, sockets.base.SocketDef]: ) -> dict[ct.SocketName, sockets.base.SocketDef]:
"""Retrieve all socket definitions for sockets that should be defined, according to the `self.active_socket_set`. """Retrieve all socket definitions for sockets that should be defined, according to the `self.active_socket_set`.
@ -361,7 +360,7 @@ class MaxwellSimNode(bpy.types.Node):
return socket_sets.get(self.active_socket_set, {}) return socket_sets.get(self.active_socket_set, {})
def active_socket_defs( def active_socket_defs(
self, direc: typx.Literal['input', 'output'] self, direc: typ.Literal['input', 'output']
) -> dict[ct.SocketName, sockets.base.SocketDef]: ) -> dict[ct.SocketName, sockets.base.SocketDef]:
"""Retrieve all socket definitions for sockets that should be defined. """Retrieve all socket definitions for sockets that should be defined.
@ -664,6 +663,9 @@ class MaxwellSimNode(bpy.types.Node):
Notes: Notes:
This can be an unpredictably heavy function, depending on the node graph topology. This can be an unpredictably heavy function, depending on the node graph topology.
Doesn't currently accept `LinkChanged` (->Output) events; rather, these propagate as `DataChanged` events.
**This may change** if it becomes important for the node to differentiate between "change in data" and "change in link".
Parameters: Parameters:
event: The event to report forwards/backwards along the node tree. event: The event to report forwards/backwards along the node tree.
socket_name: The input socket that was altered, if any, in order to trigger this event. socket_name: The input socket that was altered, if any, in order to trigger this event.
@ -714,7 +716,7 @@ class MaxwellSimNode(bpy.types.Node):
#################### ####################
# - Property Event: On Update # - Property Event: On Update
#################### ####################
def sync_prop(self, prop_name: str, _: bpy.types.Context) -> None: def on_prop_changed(self, prop_name: str, _: bpy.types.Context) -> None:
"""Report that a particular property has changed, which may cause certain caches to regenerate. """Report that a particular property has changed, which may cause certain caches to regenerate.
Notes: Notes:

View File

@ -1,14 +1,16 @@
# from . import scientific_constant # from . import scientific_constant
# from . import physical_constant # from . import physical_constant
from . import blender_constant, number_constant, scientific_constant from . import blender_constant, expr_constant, number_constant, scientific_constant
BL_REGISTER = [ BL_REGISTER = [
*expr_constant.BL_REGISTER,
*scientific_constant.BL_REGISTER, *scientific_constant.BL_REGISTER,
*number_constant.BL_REGISTER, *number_constant.BL_REGISTER,
# *physical_constant.BL_REGISTER, # *physical_constant.BL_REGISTER,
*blender_constant.BL_REGISTER, *blender_constant.BL_REGISTER,
] ]
BL_NODES = { BL_NODES = {
**expr_constant.BL_NODES,
**scientific_constant.BL_NODES, **scientific_constant.BL_NODES,
**number_constant.BL_NODES, **number_constant.BL_NODES,
# **physical_constant.BL_NODES, # **physical_constant.BL_NODES,

View File

@ -0,0 +1,41 @@
import typing as typ
from .... import contracts as ct
from .... import sockets
from ... import base, events
class ExprConstantNode(base.MaxwellSimNode):
node_type = ct.NodeType.ExprConstant
bl_label = 'Expr Constant'
input_sockets: typ.ClassVar = {
'Expr': sockets.ExprSocketDef(),
}
output_sockets: typ.ClassVar = {
'Expr': sockets.ExprSocketDef(),
}
## TODO: Symbols (defined w/props?)
## - Currently the expr constant isn't especially useful, since there are no variables.
## - We'll define the #, type, name with props.
## - We'll add loose-socket inputs as int/real/complex/physical socket (based on type) for Param.
## - The output expr would then support `Value` (just the expression), `LazyValueFunc` (evaluate w/symbol support), and `Param` (example values for symbols).
####################
# - Callbacks
####################
@events.computes_output_socket(
'Expr', kind=ct.FlowKind.Value, input_sockets={'Expr'}
)
def compute_value(self, input_sockets: dict) -> typ.Any:
return input_sockets['Expr']
####################
# - Blender Registration
####################
BL_REGISTER = [
ExprConstantNode,
]
BL_NODES = {ct.NodeType.ExprConstant: (ct.NodeCategory.MAXWELLSIM_INPUTS_CONSTANTS)}

View File

@ -56,7 +56,7 @@ class ScientificConstantNode(base.MaxwellSimNode):
self.cache__units = '' self.cache__units = ''
self.cache__uncertainty = '' self.cache__uncertainty = ''
self.sync_prop('sci_constant', context) self.on_prop_changed('sci_constant', context)
#################### ####################
# - UI # - UI

View File

@ -74,7 +74,7 @@ class Tidy3DFileImporterNode(base.MaxwellSimNode):
), ),
], ],
default='SIMULATION_DATA', default='SIMULATION_DATA',
update=lambda self, context: self.sync_prop('tidy3d_type', context), update=lambda self, context: self.on_prop_changed('tidy3d_type', context),
) )
disp_fit__min_poles: bpy.props.IntProperty( disp_fit__min_poles: bpy.props.IntProperty(

View File

@ -28,7 +28,7 @@ class WaveConstantNode(base.MaxwellSimNode):
name='Range', name='Range',
description='Whether to use a wavelength/frequency range', description='Whether to use a wavelength/frequency range',
default=False, default=False,
update=lambda self, context: self.sync_prop('use_range', context), update=lambda self, context: self.on_prop_changed('use_range', context),
) )
def draw_props(self, _: bpy.types.Context, col: bpy.types.UILayout): def draw_props(self, _: bpy.types.Context, col: bpy.types.UILayout):
@ -74,7 +74,7 @@ class WaveConstantNode(base.MaxwellSimNode):
@events.computes_output_socket( @events.computes_output_socket(
'WL', 'WL',
kind=ct.FlowKind.LazyValueRange, kind=ct.FlowKind.LazyArrayRange,
# Data # Data
input_sockets={'WL', 'Freq'}, input_sockets={'WL', 'Freq'},
input_sockets_optional={'WL': True, 'Freq': True}, input_sockets_optional={'WL': True, 'Freq': True},
@ -93,12 +93,12 @@ class WaveConstantNode(base.MaxwellSimNode):
@events.computes_output_socket( @events.computes_output_socket(
'Freq', 'Freq',
kind=ct.FlowKind.LazyValueRange, kind=ct.FlowKind.LazyArrayRange,
# Data # Data
input_sockets={'WL', 'Freq'}, input_sockets={'WL', 'Freq'},
input_socket_kinds={ input_socket_kinds={
'WL': ct.FlowKind.LazyValueRange, 'WL': ct.FlowKind.LazyArrayRange,
'Freq': ct.FlowKind.LazyValueRange, 'Freq': ct.FlowKind.LazyArrayRange,
}, },
input_sockets_optional={'WL': True, 'Freq': True}, input_sockets_optional={'WL': True, 'Freq': True},
) )

View File

@ -3,7 +3,6 @@ from pathlib import Path
from blender_maxwell.utils import logger from blender_maxwell.utils import logger
from ...... import info
from ......services import tdcloud from ......services import tdcloud
from .... import contracts as ct from .... import contracts as ct
from .... import sockets from .... import sockets
@ -18,8 +17,8 @@ def _sim_data_cache_path(task_id: str) -> Path:
Arguments: Arguments:
task_id: The ID of the Tidy3D cloud task. task_id: The ID of the Tidy3D cloud task.
""" """
(info.ADDON_CACHE / task_id).mkdir(exist_ok=True) (ct.addon.ADDON_CACHE / task_id).mkdir(exist_ok=True)
return info.ADDON_CACHE / task_id / 'sim_data.hdf5' return ct.addon.ADDON_CACHE / task_id / 'sim_data.hdf5'
#################### ####################

View File

@ -54,7 +54,7 @@ class LibraryMediumNode(base.MaxwellSimNode):
if mat_key != 'graphene' ## For some reason, it's unique... if mat_key != 'graphene' ## For some reason, it's unique...
], ],
default='Au', default='Au',
update=(lambda self, context: self.sync_prop('material', context)), update=(lambda self, context: self.on_prop_changed('material', context)),
) )
@property @property

View File

@ -68,7 +68,7 @@ class EHFieldMonitorNode(base.MaxwellSimNode):
'Freqs', 'Freqs',
}, },
input_socket_kinds={ input_socket_kinds={
'Freqs': ct.FlowKind.LazyValueRange, 'Freqs': ct.FlowKind.LazyArrayRange,
}, },
unit_systems={'Tidy3DUnits': ct.UNITS_TIDY3D}, unit_systems={'Tidy3DUnits': ct.UNITS_TIDY3D},
scale_input_sockets={ scale_input_sockets={

View File

@ -68,7 +68,7 @@ class PowerFluxMonitorNode(base.MaxwellSimNode):
'Direction', 'Direction',
}, },
input_socket_kinds={ input_socket_kinds={
'Freqs': ct.FlowKind.LazyValueRange, 'Freqs': ct.FlowKind.LazyArrayRange,
}, },
unit_systems={'Tidy3DUnits': ct.UNITS_TIDY3D}, unit_systems={'Tidy3DUnits': ct.UNITS_TIDY3D},
scale_input_sockets={ scale_input_sockets={

View File

@ -60,14 +60,14 @@ class ViewerNode(base.MaxwellSimNode):
name='Auto-Plot', name='Auto-Plot',
description='Whether to auto-plot anything plugged into the viewer node', description='Whether to auto-plot anything plugged into the viewer node',
default=False, default=False,
update=lambda self, context: self.sync_prop('auto_plot', context), update=lambda self, context: self.on_prop_changed('auto_plot', context),
) )
auto_3d_preview: bpy.props.BoolProperty( auto_3d_preview: bpy.props.BoolProperty(
name='Auto 3D Preview', name='Auto 3D Preview',
description="Whether to auto-preview anything 3D, that's plugged into the viewer node", description="Whether to auto-preview anything 3D, that's plugged into the viewer node",
default=True, default=True,
update=lambda self, context: self.sync_prop('auto_3d_preview', context), update=lambda self, context: self.on_prop_changed('auto_3d_preview', context),
) )
#################### ####################

View File

@ -190,7 +190,7 @@ class Tidy3DWebExporterNode(base.MaxwellSimNode):
else: else:
self.trigger_event(ct.FlowEvent.DisableLock) self.trigger_event(ct.FlowEvent.DisableLock)
self.sync_prop('lock_tree', context) self.on_prop_changed('lock_tree', context)
def sync_tracked_task_id(self, context): def sync_tracked_task_id(self, context):
# Select Tracked Task # Select Tracked Task
@ -212,7 +212,7 @@ class Tidy3DWebExporterNode(base.MaxwellSimNode):
self.inputs['Cloud Task'].sync_prepare_new_task() self.inputs['Cloud Task'].sync_prepare_new_task()
self.inputs['Cloud Task'].locked = False self.inputs['Cloud Task'].locked = False
self.sync_prop('tracked_task_id', context) self.on_prop_changed('tracked_task_id', context)
#################### ####################
# - Output Socket Callbacks # - Output Socket Callbacks

View File

@ -42,7 +42,7 @@ class PointDipoleSourceNode(base.MaxwellSimNode):
('EZ', 'Ez', 'Electric field in z-dir'), ('EZ', 'Ez', 'Electric field in z-dir'),
], ],
default='EX', default='EX',
update=(lambda self, context: self.sync_prop('pol_axis', context)), update=(lambda self, context: self.on_prop_changed('pol_axis', context)),
) )
#################### ####################

View File

@ -52,13 +52,13 @@ class GaussianPulseTemporalShapeNode(base.MaxwellSimNode):
name='Plot Time Start (ps)', name='Plot Time Start (ps)',
description='The instance ID of a particular MaxwellSimNode instance, used to index caches', description='The instance ID of a particular MaxwellSimNode instance, used to index caches',
default=0.0, default=0.0,
update=(lambda self, context: self.sync_prop('plot_time_start', context)), update=(lambda self, context: self.on_prop_changed('plot_time_start', context)),
) )
plot_time_end: bpy.props.FloatProperty( plot_time_end: bpy.props.FloatProperty(
name='Plot Time End (ps)', name='Plot Time End (ps)',
description='The instance ID of a particular MaxwellSimNode instance, used to index caches', description='The instance ID of a particular MaxwellSimNode instance, used to index caches',
default=5, default=5,
update=(lambda self, context: self.sync_prop('plot_time_start', context)), update=(lambda self, context: self.on_prop_changed('plot_time_end', context)),
) )
#################### ####################

View File

@ -69,7 +69,7 @@ class CombineNode(base.MaxwellSimNode):
default=1, default=1,
min=1, min=1,
# max=MAX_AMOUNT, # max=MAX_AMOUNT,
update=lambda self, context: self.sync_prop('amount', context), update=lambda self, context: self.on_prop_changed('amount', context),
) )
#################### ####################

View File

@ -5,8 +5,8 @@ import typing as typ
import bpy import bpy
import pydantic as pyd import pydantic as pyd
import sympy as sp import sympy as sp
import typing_extensions as typx
from blender_maxwell.utils import extra_sympy_units as spux
from blender_maxwell.utils import logger, serialize from blender_maxwell.utils import logger, serialize
from .. import contracts as ct from .. import contracts as ct
@ -100,7 +100,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
bl_label: str bl_label: str
# Style # Style
display_shape: typx.Literal[ display_shape: typ.Literal[
'CIRCLE', 'CIRCLE',
'SQUARE', 'SQUARE',
'DIAMOND', 'DIAMOND',
@ -144,12 +144,12 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
Parameters: Parameters:
name: The name of the property to set. name: The name of the property to set.
prop: The `bpy.types.Property` to instantiate and attach. prop: The `bpy.types.Property` to instantiate and attach.
no_update: Don't attach a `self.sync_prop()` callback to the property's `update`. no_update: Don't attach a `self.on_prop_changed()` callback to the property's `update`.
""" """
_update_with_name = prop_name if update_with_name is None else update_with_name _update_with_name = prop_name if update_with_name is None else update_with_name
extra_kwargs = ( extra_kwargs = (
{ {
'update': lambda self, context: self.sync_prop( 'update': lambda self, context: self.on_prop_changed(
_update_with_name, context _update_with_name, context
), ),
} }
@ -185,7 +185,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
# Configure Use of Units # Configure Use of Units
if cls.use_units: if cls.use_units:
if not (socket_units := ct.SOCKET_UNITS.get(cls.socket_type)): if not (socket_units := ct.SOCKET_UNITS.get(cls.socket_type)):
msg = f'Tried to define "use_units" on socket {cls.bl_label} socket, but there is no unit for {cls.socket_type} defined in "contracts.SOCKET_UNITS"' msg = f'{cls.socket_type}: Tried to define "use_units", but there is no unit for {cls.socket_type} defined in "contracts.SOCKET_UNITS"'
raise RuntimeError(msg) raise RuntimeError(msg)
cls.set_prop( cls.set_prop(
@ -193,7 +193,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
bpy.props.EnumProperty, bpy.props.EnumProperty,
name='Unit', name='Unit',
items=[ items=[
(unit_name, str(unit_value), str(unit_value)) (unit_name, spux.sp_to_str(unit_value), sp.srepr(unit_value))
for unit_name, unit_value in socket_units['values'].items() for unit_name, unit_value in socket_units['values'].items()
], ],
default=socket_units['default'], default=socket_units['default'],
@ -204,6 +204,49 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
default=socket_units['default'], default=socket_units['default'],
) )
####################
# - Units
####################
# TODO: Refactor
@functools.cached_property
def possible_units(self) -> dict[str, sp.Expr]:
if not self.use_units:
msg = f"Tried to get possible units for socket {self}, but socket doesn't `use_units`"
raise ValueError(msg)
return ct.SOCKET_UNITS[self.socket_type]['values']
@property
def unit(self) -> sp.Expr:
return self.possible_units[self.active_unit]
@property
def prev_unit(self) -> sp.Expr:
return self.possible_units[self.prev_active_unit]
@unit.setter
def unit(self, value: str | sp.Expr) -> None:
# Retrieve Unit by String
if isinstance(value, str) and value in self.possible_units:
self.active_unit = value
return
# Retrieve =1 Matching Unit Name
matching_unit_names = [
unit_name
for unit_name, unit_sympy in self.possible_units.items()
if value == unit_sympy
]
if len(matching_unit_names) == 0:
msg = f"Tried to set unit for socket {self} with value {value}, but it is not one of possible units {', '.join(str(unit) for unit in self.possible_units.values())} for this socket (as defined in `contracts.SOCKET_UNITS`)"
raise ValueError(msg)
if len(matching_unit_names) > 1:
msg = f"Tried to set unit for socket {self} with value {value}, but multiple possible matching units {', '.join(str(unit) for unit in self.possible_units.values())} for this socket (as defined in `contracts.SOCKET_UNITS`); there may only be one"
raise RuntimeError(msg)
self.active_unit = matching_unit_names[0]
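
The unit setter above resolves either a unit name or a sympy unit expression against the socket's unit table. A standalone sketch of that lookup, with an illustrative `possible_units` mapping and helper name rather than the addon's `contracts.SOCKET_UNITS`:

import sympy.physics.units as spu

possible_units = {'UM': spu.micrometer, 'NM': spu.nanometer}

def resolve_unit_name(value) -> str:
    # Direct hit on a unit name.
    if isinstance(value, str) and value in possible_units:
        return value
    # Otherwise, find exactly one unit expression that matches.
    matches = [name for name, unit in possible_units.items() if value == unit]
    if len(matches) != 1:
        msg = f'No unique unit name matches {value}'
        raise ValueError(msg)
    return matches[0]

print(resolve_unit_name('UM'))           # 'UM'
print(resolve_unit_name(spu.nanometer))  # 'NM'
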
#################### ####################
# - Property Event: On Update # - Property Event: On Update
#################### ####################
@ -215,7 +258,8 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
""" """
self.display_shape = ( self.display_shape = (
'SQUARE' 'SQUARE'
if self.active_kind in {ct.FlowKind.LazyValue, ct.FlowKind.LazyValueRange} if self.active_kind
in {ct.FlowKind.LazyValueFunc, ct.FlowKind.LazyValueRange}
else 'CIRCLE' else 'CIRCLE'
) + ('_DOT' if self.use_units else '') ) + ('_DOT' if self.use_units else '')
@ -241,12 +285,12 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
self.prev_unit self.prev_unit
).rescale_to_unit(self.unit) ).rescale_to_unit(self.unit)
else: else:
msg = f'Active kind {self.active_kind} has no way of scaling units (from {self.prev_active_unit} to {self.active_unit}). Please check the node definition' msg = f'Socket {self.bl_label} ({self.socket_type}): Active kind {self.active_kind} declares no method of scaling units from {self.prev_active_unit} to {self.active_unit})'
raise RuntimeError(msg) raise RuntimeError(msg)
self.prev_active_unit = self.active_unit self.prev_active_unit = self.active_unit
def sync_prop(self, prop_name: str, _: bpy.types.Context) -> None: def on_prop_changed(self, prop_name: str, _: bpy.types.Context) -> None:
"""Called when a property has been updated. """Called when a property has been updated.
Contrary to `node.on_prop_changed()`, socket-specific callbacks are baked into this function: Contrary to `node.on_prop_changed()`, socket-specific callbacks are baked into this function:
@ -269,7 +313,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
# Undefined Properties # Undefined Properties
else: else:
msg = f'Property {prop_name} not defined on socket {self}' msg = f'Property {prop_name} not defined on socket {self.bl_label} ({self.socket_type})'
raise RuntimeError(msg) raise RuntimeError(msg)
#################### ####################
@ -298,7 +342,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
""" """
# Output Socket Check # Output Socket Check
if self.is_output: if self.is_output:
msg = 'Tried to ask output socket for consent to add link' msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to ask output socket for consent to add link'
raise RuntimeError(msg) raise RuntimeError(msg)
# Lock Check # Lock Check
@ -361,7 +405,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
""" """
# Output Socket Check # Output Socket Check
if self.is_output: if self.is_output:
msg = "Tried to sync 'link add' on output socket" msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to ask output socket for consent to remove link'
raise RuntimeError(msg) raise RuntimeError(msg)
# Lock Check # Lock Check
@ -389,29 +433,38 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
self, self,
event: ct.FlowEvent, event: ct.FlowEvent,
) -> None: ) -> None:
"""Recursively triggers an event along the node tree, depending on whether the socket is an input or output socket. """Responds to and triggers subsequent events along the node tree.
- **Locking**: `EnableLock` or `DisableLock` will always affect this socket's lock.
- **Input Socket -> Input**: Trigger event on `from_socket`s along input links.
- **Input Socket -> Output**: Trigger event on node (w/`socket_name`).
- **Output Socket -> Input**: Trigger event on node (w/`socket_name`).
- **Output Socket -> Output**: Trigger event on `to_socket`s along output links.
Notes: Notes:
This can be an unpredictably heavy function, depending on the node graph topology. This can be an unpredictably heavy function, depending on the node graph topology.
A `LinkChanged` (->Output) event will trigger a `DataChanged` event on the node.
**This may change** if it becomes important for the node to differentiate between "change in data" and "change in link".
Parameters: Parameters:
event: The event to report along the node tree. event: The event to report along the node tree.
The value of `ct.FlowEvent.flow_direction[event]` must match either `input` or `output`, depending on whether the socket is input/output. The value of `ct.FlowEvent.flow_direction[event]` (`input` or `output`) determines the direction that an event flows.
""" """
flow_direction = ct.FlowEvent.flow_direction[event] flow_direction = ct.FlowEvent.flow_direction[event]
# Locking
if event in [ct.FlowEvent.EnableLock, ct.FlowEvent.DisableLock]:
self.locked = event == ct.FlowEvent.EnableLock
# Input Socket | Input Flow # Input Socket | Input Flow
if not self.is_output and flow_direction == 'input': if not self.is_output and flow_direction == 'input':
if event in [ct.FlowEvent.EnableLock, ct.FlowEvent.DisableLock]:
self.locked = event == ct.FlowEvent.EnableLock
for link in self.links: for link in self.links:
link.from_socket.trigger_event(event) link.from_socket.trigger_event(event)
# Input Socket | Output Flow # Input Socket | Output Flow
if not self.is_output and flow_direction == 'output': if not self.is_output and flow_direction == 'output':
## THIS IS A WORKAROUND (bc Node only understands DataChanged)
## TODO: Handle LinkChanged on the node.
if event == ct.FlowEvent.LinkChanged: if event == ct.FlowEvent.LinkChanged:
self.node.trigger_event(ct.FlowEvent.DataChanged, socket_name=self.name) self.node.trigger_event(ct.FlowEvent.DataChanged, socket_name=self.name)
@ -419,9 +472,6 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
# Output Socket | Input Flow # Output Socket | Input Flow
if self.is_output and flow_direction == 'input': if self.is_output and flow_direction == 'input':
if event in [ct.FlowEvent.EnableLock, ct.FlowEvent.DisableLock]:
self.locked = event == ct.FlowEvent.EnableLock
self.node.trigger_event(event, socket_name=self.name) self.node.trigger_event(event, socket_name=self.name)
# Output Socket | Output Flow # Output Socket | Output Flow
@ -435,6 +485,11 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
# Capabilities # Capabilities
@property @property
def capabilities(self) -> None: def capabilities(self) -> None:
"""By default, the socket is linkeable with any other socket of the same type and active kind.
Notes:
See `ct.FlowKind` for more information.
"""
return ct.DataCapabilities( return ct.DataCapabilities(
socket_type=self.socket_type, socket_type=self.socket_type,
active_kind=self.active_kind, active_kind=self.active_kind,
@ -443,57 +498,164 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
# Value # Value
@property @property
def value(self) -> ct.ValueFlow: def value(self) -> ct.ValueFlow:
raise NotImplementedError """Throws a descriptive error.
Notes:
See `ct.FlowKind` for more information.
Raises:
NotImplementedError: When used without being overridden.
"""
msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to get "ct.FlowKind.Value", but socket does not define it'
raise NotImplementedError(msg)
@value.setter @value.setter
def value(self, value: ct.ValueFlow) -> None: def value(self, value: ct.ValueFlow) -> None:
raise NotImplementedError """Throws a descriptive error.
Notes:
See `ct.FlowKind` for more information.
Raises:
NotImplementedError: When used without being overridden.
"""
msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to set "ct.FlowKind.Value", but socket does not define it'
raise NotImplementedError(msg)
# ValueArray # ValueArray
@property @property
def array(self) -> ct.ArrayFlow: def array(self) -> ct.ArrayFlow:
## TODO: Single-element list when value exists. """Throws a descriptive error.
raise NotImplementedError
Notes:
See `ct.FlowKind` for more information.
Raises:
NotImplementedError: When used without being overridden.
"""
msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to get "ct.FlowKind.Array", but socket does not define it'
raise NotImplementedError(msg)
@array.setter @array.setter
def array(self, value: ct.ArrayFlow) -> None: def array(self, value: ct.ArrayFlow) -> None:
raise NotImplementedError """Throws a descriptive error.
# LazyValue Notes:
See `ct.FlowKind` for more information.
Raises:
NotImplementedError: When used without being overridden.
"""
msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to set "ct.FlowKind.Array", but socket does not define it'
raise NotImplementedError(msg)
# LazyValueFunc
@property @property
def lazy_value(self) -> ct.LazyValueFlow: def lazy_value_func(self) -> ct.LazyValueFuncFlow:
raise NotImplementedError """Throws a descriptive error.
@lazy_value.setter Notes:
def lazy_value(self, lazy_value: ct.LazyValueFlow) -> None: See `ct.FlowKind` for more information.
raise NotImplementedError
Raises:
NotImplementedError: When used without being overridden.
"""
msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to get "ct.FlowKind.LazyValueFunc", but socket does not define it'
raise NotImplementedError(msg)
@lazy_value_func.setter
def lazy_value_func(self, lazy_value_func: ct.LazyValueFuncFlow) -> None:
"""Throws a descriptive error.
Notes:
See `ct.FlowKind` for more information.
Raises:
NotImplementedError: When used without being overridden.
"""
msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to set "ct.FlowKind.LazyValueFunc", but socket does not define it'
raise NotImplementedError(msg)
# LazyArrayRange # LazyArrayRange
@property @property
def lazy_array_range(self) -> ct.LazyArrayRangeFlow: def lazy_array_range(self) -> ct.LazyArrayRangeFlow:
raise NotImplementedError """Throws a descriptive error.
Notes:
See `ct.FlowKind` for more information.
Raises:
NotImplementedError: When used without being overridden.
"""
msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to get "ct.FlowKind.LazyArrayRange", but socket does not define it'
raise NotImplementedError(msg)
@lazy_array_range.setter @lazy_array_range.setter
def lazy_array_range(self, value: tuple[ct.DataValue, ct.DataValue, int]) -> None: def lazy_array_range(self, value: tuple[ct.DataValue, ct.DataValue, int]) -> None:
raise NotImplementedError """Throws a descriptive error.
Notes:
See `ct.FlowKind` for more information.
Raises:
NotImplementedError: When used without being overridden.
"""
msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to set "ct.FlowKind.LazyArrayRange", but socket does not define it'
raise NotImplementedError(msg)
# Param # Param
@property @property
def param(self) -> ct.ParamsFlow: def param(self) -> ct.ParamsFlow:
raise NotImplementedError """Throws a descriptive error.
Notes:
See `ct.FlowKind` for more information.
Raises:
NotImplementedError: When used without being overridden.
"""
msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to get "ct.FlowKind.Param", but socket does not define it'
raise NotImplementedError(msg)
@param.setter @param.setter
def param(self, value: tuple[ct.DataValue, ct.DataValue, int]) -> None: def param(self, value: tuple[ct.DataValue, ct.DataValue, int]) -> None:
raise NotImplementedError """Throws a descriptive error.
Notes:
See `ct.FlowKind` for more information.
Raises:
NotImplementedError: When used without being overridden.
"""
msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to set "ct.FlowKind.Param", but socket does not define it'
raise NotImplementedError(msg)
# Info # Info
@property @property
def info(self) -> ct.ParamsFlow: def info(self) -> ct.ParamsFlow:
raise NotImplementedError """Throws a descriptive error.
Notes:
See `ct.FlowKind` for more information.
Raises:
NotImplementedError: When used without being overridden.
"""
msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to get "ct.FlowKind.Info", but socket does not define it'
raise NotImplementedError(msg)
@info.setter @info.setter
def info(self, value: tuple[ct.DataValue, ct.DataValue, int]) -> None: def info(self, value: tuple[ct.DataValue, ct.DataValue, int]) -> None:
raise NotImplementedError """Throws a descriptive error.
Notes:
See `ct.FlowKind` for more information.
Raises:
NotImplementedError: When used without being overridden.
"""
msg = f'Socket {self.bl_label} ({self.socket_type}): Tried to set "ct.FlowKind.Info", but socket does not define it'
raise NotImplementedError(msg)
#################### ####################
# - Data Chain Computation # - Data Chain Computation
@ -502,32 +664,50 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
self, self,
kind: ct.FlowKind = ct.FlowKind.Value, kind: ct.FlowKind = ct.FlowKind.Value,
) -> typ.Any: ) -> typ.Any:
"""Computes the internal data of this socket, ONLY. """Low-level method to computes the data contained within this socket, for a particular `ct.FlowKind`.
**NOTE**: Low-level method. Use `compute_data` instead. Notes:
Not all `ct.FlowKind`s are meant to be computed; namely, `Capabilities` should be directly referenced.
Raises:
ValueError: When asked to compute a `ct.FlowKind` that is meant to be directly referenced instead.
""" """
return { kind_data_map = {
ct.FlowKind.Value: lambda: self.value, ct.FlowKind.Value: lambda: self.value,
ct.FlowKind.ValueArray: lambda: self.value_array, ct.FlowKind.ValueArray: lambda: self.value_array,
ct.FlowKind.LazyValue: lambda: self.lazy_value, ct.FlowKind.LazyValueFunc: lambda: self.lazy_value_func,
ct.FlowKind.LazyArrayRange: lambda: self.lazy_array_range, ct.FlowKind.LazyArrayRange: lambda: self.lazy_array_range,
ct.FlowKind.Params: lambda: self.params, ct.FlowKind.Params: lambda: self.params,
ct.FlowKind.Info: lambda: self.info, ct.FlowKind.Info: lambda: self.info,
}[kind]() }
if kind in kind_data_map:
return kind_data_map[kind]()
msg = f'socket._compute_data was called with invalid kind "{kind}"' ## TODO: Reflect this constraint in the type
raise RuntimeError(msg) msg = f'Socket {self.bl_label} ({self.socket_type}): Kind {kind} cannot be computed within a socket "compute_data", as it is meant to be referenced directly'
raise ValueError(msg)
def compute_data( def compute_data(
self, self,
kind: ct.FlowKind = ct.FlowKind.Value, kind: ct.FlowKind = ct.FlowKind.Value,
): ) -> typ.Any:
"""Computes the value of this socket, including all relevant factors. """Computes internal or link-sourced data represented by this socket.
- **Input Socket | Unlinked**: Use socket's own data, by calling `_compute_data`.
- **Input Socket | Linked**: Call `compute_data` on the linked `from_socket`.
- **Output Socket**: Use the node's output data, by calling `node.compute_output()`.
Notes: Notes:
- If input socket, and unlinked, compute internal data. This can be an unpredictably heavy function, depending on the node graph topology.
- If input socket, and linked, compute linked socket data.
- If output socket, ask node for data. Parameters:
kind: The `ct.FlowKind` to reference when retrieving the data.
Returns:
The computed data, wherever it came from.
Raises:
NotImplementedError: If multi-input sockets are used (no support yet as of Blender 4.1).
""" """
# Compute Output Socket # Compute Output Socket
if self.is_output: if self.is_output:
@ -538,62 +718,17 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
if not self.is_linked: if not self.is_linked:
return self._compute_data(kind) return self._compute_data(kind)
## Linked: Check Capabilities ## Linked: Compute Data on Linked Socket
for link in self.links: ## -> Capabilities are guaranteed compatible by 'allow_link_add'.
if not link.from_socket.capabilities.is_compatible_with(self.capabilities): ## -> There is no point in rechecking every time data flows.
msg = f'Output socket "{link.from_socket.bl_label}" is linked to input socket "{self.bl_label}" with incompatible capabilities (caps_out="{link.from_socket.capabilities}", caps_in="{self.capabilities}")'
raise ValueError(msg)
## ...and Compute Data on Linked Socket
linked_values = [link.from_socket.compute_data(kind) for link in self.links] linked_values = [link.from_socket.compute_data(kind) for link in self.links]
# Return Single Value / List of Values # Return Single Value / List of Values
## Preparation for multi-input sockets.
if len(linked_values) == 1: if len(linked_values) == 1:
return linked_values[0] return linked_values[0]
return linked_values
#################### msg = f'Socket {self.bl_label} ({self.socket_type}): Multi-input sockets are not yet supported'
# - Unit Properties raise NotImplementedError(msg)
####################
@functools.cached_property
def possible_units(self) -> dict[str, sp.Expr]:
if not self.use_units:
msg = "Tried to get possible units for socket {self}, but socket doesn't `use_units`"
raise ValueError(msg)
return ct.SOCKET_UNITS[self.socket_type]['values']
@property
def unit(self) -> sp.Expr:
return self.possible_units[self.active_unit]
@property
def prev_unit(self) -> sp.Expr:
return self.possible_units[self.prev_active_unit]
@unit.setter
def unit(self, value: str | sp.Expr) -> None:
# Retrieve Unit by String
if isinstance(value, str) and value in self.possible_units:
self.active_unit = self.possible_units[value]
return
# Retrieve =1 Matching Unit Name
matching_unit_names = [
unit_name
for unit_name, unit_sympy in self.possible_units.items()
if value == unit_sympy
]
if len(matching_unit_names) == 0:
msg = f"Tried to set unit for socket {self} with value {value}, but it is not one of possible units {''.join(self.possible_units.values())} for this socket (as defined in `contracts.SOCKET_UNITS`)"
raise ValueError(msg)
if len(matching_unit_names) > 1:
msg = f"Tried to set unit for socket {self} with value {value}, but multiple possible matching units {''.join(self.possible_units.values())} for this socket (as defined in `contracts.SOCKET_UNITS`); there may only be one"
raise RuntimeError(msg)
self.active_unit = matching_unit_names[0]
#################### ####################
# - Theme # - Theme
@ -611,7 +746,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
return cls.socket_color return cls.socket_color
#################### ####################
# - UI Methods # - UI
#################### ####################
def draw( def draw(
self, self,
@ -724,7 +859,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
{ {
ct.FlowKind.Value: self.draw_value, ct.FlowKind.Value: self.draw_value,
ct.FlowKind.Array: self.draw_value_array, ct.FlowKind.Array: self.draw_value_array,
ct.FlowKind.LazyValue: self.draw_lazy_value, ct.FlowKind.LazyValueFunc: self.draw_lazy_value,
ct.FlowKind.LazyValueRange: self.draw_lazy_value_range, ct.FlowKind.LazyValueRange: self.draw_lazy_value_range,
}[self.active_kind](col) }[self.active_kind](col)
@ -791,7 +926,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
"""Draws the socket lazy value on its own line. """Draws the socket lazy value on its own line.
Notes: Notes:
Should be overridden by individual socket classes, if they have an editable `FlowKind.LazyValue`. Should be overridden by individual socket classes, if they have an editable `FlowKind.LazyValueFunc`.
Parameters: Parameters:
col: Target for defining UI elements. col: Target for defining UI elements.

View File

@ -18,7 +18,7 @@ class BoolBLSocket(base.MaxwellSimSocket):
name='Boolean', name='Boolean',
description='Represents a boolean value', description='Represents a boolean value',
default=False, default=False,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
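The `sync_prop` → `on_prop_changed` rename above is the same update-callback pattern used by every socket: the Blender property only forwards the event, and all reaction logic lives in one handler. A minimal, hypothetical `bpy` sketch of that idiom (class and handler body are illustrative, not the addon's):

```python
import bpy

class ExamplePropertyGroup(bpy.types.PropertyGroup):
    """Hypothetical property group showing the update-callback forwarding idiom."""

    raw_value: bpy.props.BoolProperty(
        name='Boolean',
        default=False,
        # The property itself only forwards; all reaction logic lives in one method.
        update=(lambda self, context: self.on_prop_changed('raw_value', context)),
    )

    def on_prop_changed(self, prop_name: str, context: bpy.types.Context) -> None:
        # Centralized reaction point: invalidate caches, trigger recomputes, etc.
        print(f'{prop_name} changed to {getattr(self, prop_name)}')

# The class would still need to be registered (bpy.utils.register_class) before use.
```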
#################### ####################

View File

@ -1,10 +1,12 @@
import typing as typ
import bpy import bpy
import pydantic as pyd
import sympy as sp import sympy as sp
from blender_maxwell.utils import bl_cache
from blender_maxwell.utils import extra_sympy_units as spux from blender_maxwell.utils import extra_sympy_units as spux
from blender_maxwell.utils.pydantic_sympy import SympyExpr
from ... import bl_cache
from ... import contracts as ct from ... import contracts as ct
from .. import base from .. import base
@ -20,12 +22,26 @@ class ExprBLSocket(base.MaxwellSimSocket):
name='Expr', name='Expr',
description='Represents a symbolic expression', description='Represents a symbolic expression',
default='', default='',
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
symbols: list[sp.Symbol] = bl_cache.BLField([]) int_symbols: set[spux.IntSymbol] = bl_cache.BLField([])
## TODO: Way of assigning assumptions to symbols. real_symbols: set[spux.RealSymbol] = bl_cache.BLField([])
## TODO: Dynamic add/remove of symbols complex_symbols: set[spux.ComplexSymbol] = bl_cache.BLField([])
@property
def symbols(self) -> list[spux.Symbol]:
"""Retrieves all symbols by concatenating int, real, and complex symbols, and sorting them by name.
The order is guaranteed to be **deterministic**.
Returns:
All symbols valid for use in the expression.
"""
return sorted(
self.int_symbols | self.real_symbols | self.complex_symbols,
key=lambda sym: sym.name,
)
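A quick illustration of the deterministic ordering, using plain `sympy` symbols as stand-ins for the `spux.IntSymbol`/`RealSymbol`/`ComplexSymbol` sets:

```python
import sympy as sp

# Hypothetical stand-ins for the socket's int/real/complex symbol sets.
int_symbols = {sp.Symbol('n', integer=True)}
real_symbols = {sp.Symbol('x', real=True), sp.Symbol('a', real=True)}
complex_symbols = {sp.Symbol('z', complex=True)}

# Deterministic ordering: union the sets, then sort by symbol name.
symbols = sorted(int_symbols | real_symbols | complex_symbols, key=lambda sym: sym.name)
print([sym.name for sym in symbols])  # ['a', 'n', 'x', 'z']
```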
#################### ####################
# - Socket UI # - Socket UI
@ -38,21 +54,30 @@ class ExprBLSocket(base.MaxwellSimSocket):
#################### ####################
@property @property
def value(self) -> sp.Expr: def value(self) -> sp.Expr:
return sp.sympify( expr = sp.sympify(
self.raw_value, self.raw_value,
locals={sym.name: sym for sym in self.symbols},
strict=False, strict=False,
convert_xor=True, convert_xor=True,
).subs(spux.ALL_UNIT_SYMBOLS) ).subs(spux.ALL_UNIT_SYMBOLS)
if not expr.free_symbols.issubset(self.symbols):
msg = f'Expression "{expr}" (symbols={self.expr.free_symbols}) has invalid symbols (valid symbols: {self.symbols})'
raise ValueError(msg)
return expr
@value.setter @value.setter
def value(self, value: str) -> None: def value(self, value: str) -> None:
self.raw_value = str(value) self.raw_value = sp.sstr(value)
@property @property
def lazy_value(self) -> sp.Expr: def lazy_value_func(self) -> ct.LazyValueFuncFlow:
return ct.LazyDataValue.from_function( return ct.LazyValueFuncFlow(
sp.lambdify(self.symbols, self.value, 'jax'), func=sp.lambdify(self.symbols, self.value, 'jax'),
free_args=(tuple(str(sym) for sym in self.symbols), frozenset()), func_args=[
(sym.name, spux.sympy_to_python_type(sym)) for sym in self.symbols
],
supports_jax=True, supports_jax=True,
) )
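For context, the `value`/`lazy_value_func` pair above amounts to parsing a user-entered string into a `sympy` expression and compiling it into a JAX-compatible function. A self-contained sketch (assuming a SymPy version whose `lambdify` supports the `'jax'` backend, as the code above does):

```python
import jax.numpy as jnp
import sympy as sp

x = sp.Symbol('x', real=True)

# Parse a user-entered string, restricted to known symbols.
expr = sp.sympify('x^2 + 1', locals={'x': x}, strict=False, convert_xor=True)
assert expr.free_symbols.issubset({x})

# Compile to a JAX-compatible callable; arguments follow the symbol ordering.
func = sp.lambdify([x], expr, 'jax')
print(func(jnp.linspace(0.0, 1.0, 5)))  # [1. 1.0625 1.25 1.5625 2.]
```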
@ -64,8 +89,35 @@ class ExprSocketDef(base.SocketDef):
socket_type: ct.SocketType = ct.SocketType.Expr socket_type: ct.SocketType = ct.SocketType.Expr
_x = sp.Symbol('x', real=True) _x = sp.Symbol('x', real=True)
symbols: list[SympyExpr] = [_x] int_symbols: list[spux.IntSymbol] = []
default_expr: SympyExpr = _x real_symbols: list[spux.RealSymbol] = [_x]
complex_symbols: list[spux.ComplexSymbol] = []
# Expression
default_expr: spux.SympyExpr = _x
allow_units: bool = True
@pyd.model_validator(mode='after')
def check_default_expr_follows_unit_allowance(self) -> typ.Self:
"""Checks that `self.default_expr` only uses units if `self.allow_units` is `True`.
Raises:
ValueError: If the expression uses units despite `self.allow_units` being `False`.
"""
if not self.allow_units and spux.uses_units(self.default_expr):
msg = f'Expression "{self.default_expr}" uses units, but "allow_units" is False'
raise ValueError(msg)
return self

@pyd.model_validator(mode='after')
def check_default_expr_uses_allowed_symbols(self) -> typ.Self:
"""Checks that `self.default_expr` only uses symbols defined in `self.symbols`.
Raises:
ValueError: If the expression uses symbols not defined in `self.symbols`.
"""
if not self.default_expr.free_symbols.issubset(self.symbols):
msg = f'Expression symbols ({self.default_expr.free_symbols}) are not a strict subset of defined symbols ({self.symbols})'
raise ValueError(msg)
return self
def init(self, bl_socket: ExprBLSocket) -> None: def init(self, bl_socket: ExprBLSocket) -> None:
bl_socket.value = self.default_expr bl_socket.value = self.default_expr

View File

@ -20,7 +20,7 @@ class FilePathBLSocket(base.MaxwellSimSocket):
name='File Path', name='File Path',
description='Represents the path to a file', description='Represents the path to a file',
subtype='FILE_PATH', subtype='FILE_PATH',
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -18,7 +18,7 @@ class StringBLSocket(base.MaxwellSimSocket):
name='String', name='String',
description='Represents a string', description='Represents a string',
default='', default='',
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -18,7 +18,7 @@ class BlenderCollectionBLSocket(base.MaxwellSimSocket):
name='Blender Collection', name='Blender Collection',
description='A Blender collection', description='A Blender collection',
type=bpy.types.Collection, type=bpy.types.Collection,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -21,7 +21,7 @@ class BlenderMaxwellResetGeoNodesSocket(bpy.types.Operator):
socket = node.inputs[self.socket_name] socket = node.inputs[self.socket_name]
# Report as though the GeoNodes Tree Changed # Report as though the GeoNodes Tree Changed
socket.sync_prop('raw_value', context) socket.on_prop_changed('raw_value', context)
return {'FINISHED'} return {'FINISHED'}
@ -41,7 +41,7 @@ class BlenderGeoNodesBLSocket(base.MaxwellSimSocket):
description='Represents a Blender GeoNodes Tree', description='Represents a Blender GeoNodes Tree',
type=bpy.types.NodeTree, type=bpy.types.NodeTree,
poll=(lambda self, obj: obj.bl_idname == 'GeometryNodeTree'), poll=(lambda self, obj: obj.bl_idname == 'GeometryNodeTree'),
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -18,7 +18,7 @@ class BlenderImageBLSocket(base.MaxwellSimSocket):
name='Blender Image', name='Blender Image',
description='Represents a Blender Image', description='Represents a Blender Image',
type=bpy.types.Image, type=bpy.types.Image,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -15,7 +15,7 @@ class BlenderMaterialBLSocket(base.MaxwellSimSocket):
name='Blender Material', name='Blender Material',
description='Represents a Blender material', description='Represents a Blender material',
type=bpy.types.Material, type=bpy.types.Material,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -39,7 +39,7 @@ class BlenderObjectBLSocket(base.MaxwellSimSocket):
name='Blender Object', name='Blender Object',
description='Represents a Blender object', description='Represents a Blender object',
type=bpy.types.Object, type=bpy.types.Object,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -18,7 +18,7 @@ class BlenderTextBLSocket(base.MaxwellSimSocket):
name='Blender Text', name='Blender Text',
description='Represents a Blender text datablock', description='Represents a Blender text datablock',
type=bpy.types.Text, type=bpy.types.Text,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -1,6 +1,5 @@
import bpy import bpy
import tidy3d as td import tidy3d as td
import typing_extensions as typx
from ... import contracts as ct from ... import contracts as ct
from .. import base from .. import base
@ -23,7 +22,7 @@ class MaxwellBoundCondBLSocket(base.MaxwellSimSocket):
('PERIODIC', 'Periodic', 'Infinitely periodic layer'), ('PERIODIC', 'Periodic', 'Infinitely periodic layer'),
], ],
default='PML', default='PML',
update=(lambda self, context: self.sync_prop('default_choice', context)), update=(lambda self, context: self.on_prop_changed('default_choice', context)),
) )
#################### ####################
@ -45,7 +44,7 @@ class MaxwellBoundCondBLSocket(base.MaxwellSimSocket):
}[self.default_choice] }[self.default_choice]
@value.setter @value.setter
def value(self, value: typx.Literal['PML', 'PEC', 'PMC', 'PERIODIC']) -> None: def value(self, value: typ.Literal['PML', 'PEC', 'PMC', 'PERIODIC']) -> None:
self.default_choice = value self.default_choice = value
@ -55,7 +54,7 @@ class MaxwellBoundCondBLSocket(base.MaxwellSimSocket):
class MaxwellBoundCondSocketDef(base.SocketDef): class MaxwellBoundCondSocketDef(base.SocketDef):
socket_type: ct.SocketType = ct.SocketType.MaxwellBoundCond socket_type: ct.SocketType = ct.SocketType.MaxwellBoundCond
default_choice: typx.Literal['PML', 'PEC', 'PMC', 'PERIODIC'] = 'PML' default_choice: typ.Literal['PML', 'PEC', 'PMC', 'PERIODIC'] = 'PML'
def init(self, bl_socket: MaxwellBoundCondBLSocket) -> None: def init(self, bl_socket: MaxwellBoundCondBLSocket) -> None:
bl_socket.value = self.default_choice bl_socket.value = self.default_choice

View File

@ -29,7 +29,7 @@ class MaxwellBoundCondsBLSocket(base.MaxwellSimSocket):
name='Show Bounds Definition', name='Show Bounds Definition',
description='Toggle to show bound faces', description='Toggle to show bound faces',
default=False, default=False,
update=(lambda self, context: self.sync_prop('show_definition', context)), update=(lambda self, context: self.on_prop_changed('show_definition', context)),
) )
x_pos: bpy.props.EnumProperty( x_pos: bpy.props.EnumProperty(
@ -37,42 +37,42 @@ class MaxwellBoundCondsBLSocket(base.MaxwellSimSocket):
description='+x choice of default boundary face', description='+x choice of default boundary face',
items=BOUND_FACE_ITEMS, items=BOUND_FACE_ITEMS,
default='PML', default='PML',
update=(lambda self, context: self.sync_prop('x_pos', context)), update=(lambda self, context: self.on_prop_changed('x_pos', context)),
) )
x_neg: bpy.props.EnumProperty( x_neg: bpy.props.EnumProperty(
name='-x Bound Face', name='-x Bound Face',
description='-x choice of default boundary face', description='-x choice of default boundary face',
items=BOUND_FACE_ITEMS, items=BOUND_FACE_ITEMS,
default='PML', default='PML',
update=(lambda self, context: self.sync_prop('x_neg', context)), update=(lambda self, context: self.on_prop_changed('x_neg', context)),
) )
y_pos: bpy.props.EnumProperty( y_pos: bpy.props.EnumProperty(
name='+y Bound Face', name='+y Bound Face',
description='+y choice of default boundary face', description='+y choice of default boundary face',
items=BOUND_FACE_ITEMS, items=BOUND_FACE_ITEMS,
default='PML', default='PML',
update=(lambda self, context: self.sync_prop('y_pos', context)), update=(lambda self, context: self.on_prop_changed('y_pos', context)),
) )
y_neg: bpy.props.EnumProperty( y_neg: bpy.props.EnumProperty(
name='-y Bound Face', name='-y Bound Face',
description='-y choice of default boundary face', description='-y choice of default boundary face',
items=BOUND_FACE_ITEMS, items=BOUND_FACE_ITEMS,
default='PML', default='PML',
update=(lambda self, context: self.sync_prop('y_neg', context)), update=(lambda self, context: self.on_prop_changed('y_neg', context)),
) )
z_pos: bpy.props.EnumProperty( z_pos: bpy.props.EnumProperty(
name='+z Bound Face', name='+z Bound Face',
description='+z choice of default boundary face', description='+z choice of default boundary face',
items=BOUND_FACE_ITEMS, items=BOUND_FACE_ITEMS,
default='PML', default='PML',
update=(lambda self, context: self.sync_prop('z_pos', context)), update=(lambda self, context: self.on_prop_changed('z_pos', context)),
) )
z_neg: bpy.props.EnumProperty( z_neg: bpy.props.EnumProperty(
name='-z Bound Face', name='-z Bound Face',
description='-z choice of default boundary face', description='-z choice of default boundary face',
items=BOUND_FACE_ITEMS, items=BOUND_FACE_ITEMS,
default='PML', default='PML',
update=(lambda self, context: self.sync_prop('z_neg', context)), update=(lambda self, context: self.on_prop_changed('z_neg', context)),
) )
#################### ####################

View File

@ -3,7 +3,7 @@ import scipy as sc
import sympy.physics.units as spu import sympy.physics.units as spu
import tidy3d as td import tidy3d as td
from blender_maxwell.utils.pydantic_sympy import ConstrSympyExpr from blender_maxwell.utils import extra_sympy_units as spux
from ... import contracts as ct from ... import contracts as ct
from .. import base from .. import base
@ -25,7 +25,7 @@ class MaxwellMediumBLSocket(base.MaxwellSimSocket):
default=500.0, default=500.0,
precision=4, precision=4,
step=50, step=50,
update=(lambda self, context: self.sync_prop('wl', context)), update=(lambda self, context: self.on_prop_changed('wl', context)),
) )
rel_permittivity: bpy.props.FloatVectorProperty( rel_permittivity: bpy.props.FloatVectorProperty(
@ -34,7 +34,9 @@ class MaxwellMediumBLSocket(base.MaxwellSimSocket):
size=2, size=2,
default=(1.0, 0.0), default=(1.0, 0.0),
precision=2, precision=2,
update=(lambda self, context: self.sync_prop('rel_permittivity', context)), update=(
lambda self, context: self.on_prop_changed('rel_permittivity', context)
),
) )
#################### ####################
@ -72,7 +74,7 @@ class MaxwellMediumBLSocket(base.MaxwellSimSocket):
@value.setter @value.setter
def value( def value(
self, value: tuple[ConstrSympyExpr(allow_variables=False), complex] self, value: tuple[spux.ConstrSympyExpr(allow_variables=False), complex]
) -> None: ) -> None:
_wl, rel_permittivity = value _wl, rel_permittivity = value

View File

@ -19,7 +19,7 @@ class MaxwellSimGridBLSocket(base.MaxwellSimSocket):
min=0.01, min=0.01,
# step=10, # step=10,
precision=2, precision=2,
update=(lambda self, context: self.sync_prop('min_steps_per_wl', context)), update=(lambda self, context: self.on_prop_changed('min_steps_per_wl', context)),
) )
#################### ####################

View File

@ -3,7 +3,7 @@ import typing as typ
import bpy import bpy
import sympy as sp import sympy as sp
from blender_maxwell.utils.pydantic_sympy import SympyExpr from blender_maxwell.utils import extra_sympy_units as spux
from ... import contracts as ct from ... import contracts as ct
from .. import base from .. import base
@ -24,8 +24,7 @@ class ComplexNumberBLSocket(base.MaxwellSimSocket):
description='Represents a complex number (real, imaginary)', description='Represents a complex number (real, imaginary)',
size=2, size=2,
default=(0.0, 0.0), default=(0.0, 0.0),
subtype='NONE', update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
) )
coord_sys: bpy.props.EnumProperty( coord_sys: bpy.props.EnumProperty(
name='Coordinate System', name='Coordinate System',
@ -47,58 +46,20 @@ class ComplexNumberBLSocket(base.MaxwellSimSocket):
), ),
], ],
default='CARTESIAN', default='CARTESIAN',
update=lambda self, context: self._sync_coord_sys(context), update=lambda self, context: self.on_coord_sys_changed(context),
) )
#################### ####################
# - Socket UI # - Event Methods
#################### ####################
def draw_value(self, col: bpy.types.UILayout) -> None: def on_coord_sys_changed(self, context: bpy.types.Context):
"""Draw the value of the complex number, including a toggle for r"""Transforms values when the coordinate system changes.
specifying the active coordinate system.
Notes:
Cartesian coordinates with $y=0$ have no corresponding $\theta$.
Therefore, we manually set $\theta=0$.
""" """
col_row = col.row()
col_row.prop(self, 'raw_value', text='')
col.prop(self, 'coord_sys', text='')
####################
# - Computation of Default Value
####################
@property
def value(self) -> SympyExpr:
"""Return the complex number as a sympy expression, of a form
determined by the coordinate system.
- Cartesian: a,b -> a + ib
- Polar: r,t -> re^(it)
Returns:
The sympy expression representing the complex number.
"""
v1, v2 = self.raw_value
return {
'CARTESIAN': v1 + sp.I * v2,
'POLAR': v1 * sp.exp(sp.I * v2),
}[self.coord_sys]
@value.setter
def value(self, value: SympyExpr) -> None:
"""Set the complex number from a sympy expression, using an internal
representation determined by the coordinate system.
- Cartesian: a,b -> a + ib
- Polar: r,t -> re^(it)
"""
self.raw_value = {
'CARTESIAN': (sp.re(value), sp.im(value)),
'POLAR': (sp.Abs(value), sp.arg(value)),
}[self.coord_sys]
####################
# - Internal Update Methods
####################
def _sync_coord_sys(self, context: bpy.types.Context):
if self.coord_sys == 'CARTESIAN': if self.coord_sys == 'CARTESIAN':
r, theta_rad = self.raw_value r, theta_rad = self.raw_value
self.raw_value = ( self.raw_value = (
@ -109,11 +70,58 @@ class ComplexNumberBLSocket(base.MaxwellSimSocket):
x, y = self.raw_value x, y = self.raw_value
cart_value = x + sp.I * y cart_value = x + sp.I * y
self.raw_value = ( self.raw_value = (
sp.Abs(cart_value), float(sp.Abs(cart_value)),
sp.arg(cart_value) if y != 0 else 0, float(sp.arg(cart_value)) if y != 0 else float(0),
) )
self.sync_prop('coord_sys', context) self.on_prop_changed('coord_sys', context)
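A small worked example of the conversion performed when the coordinate system is switched, including the $y=0 \Rightarrow \theta=0$ special case noted in the docstring (plain `sympy`, independent of the socket class):

```python
import sympy as sp

# Cartesian -> polar, as done when switching to 'POLAR'.
x, y = 3.0, 4.0
cart = x + sp.I * y
r = float(sp.Abs(cart))
theta = float(sp.arg(cart)) if y != 0 else 0.0
print(r, theta)  # 5.0 0.9272952180016122

# Polar -> cartesian, as done when switching back to 'CARTESIAN'.
back = r * sp.exp(sp.I * theta)
print(float(sp.re(back)), float(sp.im(back)))  # 3.0 4.0 (up to rounding)
```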
####################
# - Socket UI
####################
def draw_value(self, col: bpy.types.UILayout) -> None:
"""Draw the value of the complex number, including a toggle for specifying the active coordinate system."""
# Value Row
row = col.row()
row.prop(self, 'raw_value', text='')
# Coordinate System Dropdown
col.prop(self, 'coord_sys', text='')
####################
# - Computation of Default Value
####################
@property
def value(self) -> spux.Complex:
"""Return the complex number as a sympy expression, of a form determined by the coordinate system.
- **Cartesian**: $(a, b) \to a + ib$
- **Polar**: $(r, \theta) \to r e^{i\theta}$
Returns:
The complex number as a `sympy` type.
"""
v1, v2 = self.raw_value
return {
'CARTESIAN': v1 + sp.I * v2,
'POLAR': v1 * sp.exp(sp.I * v2),
}[self.coord_sys]
@value.setter
def value(self, value: spux.Complex) -> None:
"""Set the complex number from a sympy expression, by numerically simplifying it into coordinate-system determined components.
- **Cartesian**: $(a, b) \to a + ib$
- **Polar**: $(r, \theta) \to r e^{i\theta}$
Parameters:
value: The complex number as a `sympy` type.
"""
self.raw_value = {
'CARTESIAN': (float(sp.re(value)), float(sp.im(value))),
'POLAR': (float(sp.Abs(value)), float(sp.arg(value))),
}[self.coord_sys]
#################### ####################
@ -122,7 +130,7 @@ class ComplexNumberBLSocket(base.MaxwellSimSocket):
class ComplexNumberSocketDef(base.SocketDef): class ComplexNumberSocketDef(base.SocketDef):
socket_type: ct.SocketType = ct.SocketType.ComplexNumber socket_type: ct.SocketType = ct.SocketType.ComplexNumber
default_value: SympyExpr = sp.S(0 + 0j) default_value: spux.Complex = sp.S(0)
coord_sys: typ.Literal['CARTESIAN', 'POLAR'] = 'CARTESIAN' coord_sys: typ.Literal['CARTESIAN', 'POLAR'] = 'CARTESIAN'
def init(self, bl_socket: ComplexNumberBLSocket) -> None: def init(self, bl_socket: ComplexNumberBLSocket) -> None:

View File

@ -18,7 +18,7 @@ class IntegerNumberBLSocket(base.MaxwellSimSocket):
name='Integer', name='Integer',
description='Represents an integer', description='Represents an integer',
default=0, default=0,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -23,7 +23,7 @@ class RationalNumberBLSocket(base.MaxwellSimSocket):
size=2, size=2,
default=(1, 1), default=(1, 1),
subtype='NONE', subtype='NONE',
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -21,7 +21,7 @@ class RealNumberBLSocket(base.MaxwellSimSocket):
description='Represents a real number', description='Represents a real number',
default=0.0, default=0.0,
precision=6, precision=6,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -23,7 +23,7 @@ class PhysicalAccelScalarBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the acceleration', description='Represents the unitless part of the acceleration',
default=0.0, default=0.0,
precision=6, precision=6,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -23,7 +23,7 @@ class PhysicalAngleBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the angle', description='Represents the unitless part of the angle',
default=0.0, default=0.0,
precision=4, precision=4,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -20,7 +20,7 @@ class PhysicalAreaBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the area', description='Represents the unitless part of the area',
default=0.0, default=0.0,
precision=6, precision=6,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -23,7 +23,7 @@ class PhysicalForceScalarBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the force', description='Represents the unitless part of the force',
default=0.0, default=0.0,
precision=6, precision=6,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -27,7 +27,7 @@ class PhysicalFreqBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the frequency', description='Represents the unitless part of the frequency',
default=0.0, default=0.0,
precision=6, precision=6,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
min_freq: bpy.props.FloatProperty( min_freq: bpy.props.FloatProperty(
@ -35,20 +35,20 @@ class PhysicalFreqBLSocket(base.MaxwellSimSocket):
description='Lowest frequency', description='Lowest frequency',
default=0.0, default=0.0,
precision=4, precision=4,
update=(lambda self, context: self.sync_prop('min_freq', context)), update=(lambda self, context: self.on_prop_changed('min_freq', context)),
) )
max_freq: bpy.props.FloatProperty( max_freq: bpy.props.FloatProperty(
name='Max Frequency', name='Max Frequency',
description='Highest frequency', description='Highest frequency',
default=0.0, default=0.0,
precision=4, precision=4,
update=(lambda self, context: self.sync_prop('max_freq', context)), update=(lambda self, context: self.on_prop_changed('max_freq', context)),
) )
steps: bpy.props.IntProperty( steps: bpy.props.IntProperty(
name='Frequency Steps', name='Frequency Steps',
description='# of steps between min and max', description='# of steps between min and max',
default=2, default=2,
update=(lambda self, context: self.sync_prop('steps', context)), update=(lambda self, context: self.on_prop_changed('steps', context)),
) )
#################### ####################
@ -74,10 +74,9 @@ class PhysicalFreqBLSocket(base.MaxwellSimSocket):
self.raw_value = spux.sympy_to_python(spux.scale_to_unit(value, self.unit)) self.raw_value = spux.sympy_to_python(spux.scale_to_unit(value, self.unit))
@property @property
def lazy_value_range(self) -> ct.LazyDataValueRange: def lazy_array_range(self) -> ct.LazyArrayRange:
return ct.LazyDataValueRange( return ct.LazyArrayRange(
symbols=set(), symbols=set(),
has_unit=True,
unit=self.unit, unit=self.unit,
start=sp.S(self.min_freq) * self.unit, start=sp.S(self.min_freq) * self.unit,
stop=sp.S(self.max_freq) * self.unit, stop=sp.S(self.max_freq) * self.unit,
@ -85,9 +84,8 @@ class PhysicalFreqBLSocket(base.MaxwellSimSocket):
scaling='lin', scaling='lin',
) )
@lazy_value_range.setter @lazy_array_range.setter
def lazy_value_range(self, value: tuple[sp.Expr, sp.Expr, int]) -> None: def lazy_array_range(self, value: ct.LazyArrayRangeFlow) -> None:
log.debug('Lazy Value Range: %s', str(value))
self.min_freq = spux.sympy_to_python(spux.scale_to_unit(value[0], self.unit)) self.min_freq = spux.sympy_to_python(spux.scale_to_unit(value[0], self.unit))
self.max_freq = spux.sympy_to_python(spux.scale_to_unit(value[1], self.unit)) self.max_freq = spux.sympy_to_python(spux.scale_to_unit(value[1], self.unit))
self.steps = value[2] self.steps = value[2]
@ -112,7 +110,7 @@ class PhysicalFreqSocketDef(base.SocketDef):
bl_socket.value = self.default_value bl_socket.value = self.default_value
if self.is_array: if self.is_array:
bl_socket.active_kind = ct.FlowKind.LazyValueRange bl_socket.active_kind = ct.FlowKind.LazyArrayRange
bl_socket.lazy_value_range = (self.min_freq, self.max_freq, self.steps) bl_socket.lazy_array_range = (self.min_freq, self.max_freq, self.steps)
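The lazy array range above only stores `(start, stop, steps)` together with a unit and a scaling mode; concrete values are produced on demand. A hedged sketch of what "realizing" a linear frequency range might look like, using plain `numpy` and `sympy` units (the real `ct.LazyArrayRange` API may differ):

```python
import numpy as np
import sympy.physics.units as spu

# Stored lazily: only bounds, step count, a unit, and a scaling mode.
start, stop, steps = 2.0e14, 6.0e14, 5  # unitless parts, here in hertz
unit = spu.hertz

# Realization: the concrete array is only produced when something needs it.
values = np.linspace(start, stop, steps)             # 'lin' scaling
with_units = [float(v) * unit for v in values]       # reattach the unit symbolically
print(values)
print(with_units[0], with_units[-1])
```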

View File

@ -28,7 +28,7 @@ class PhysicalLengthBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the length', description='Represents the unitless part of the length',
default=0.0, default=0.0,
precision=6, precision=6,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
min_len: bpy.props.FloatProperty( min_len: bpy.props.FloatProperty(
@ -36,20 +36,20 @@ class PhysicalLengthBLSocket(base.MaxwellSimSocket):
description='Lowest length', description='Lowest length',
default=0.0, default=0.0,
precision=4, precision=4,
update=(lambda self, context: self.sync_prop('min_len', context)), update=(lambda self, context: self.on_prop_changed('min_len', context)),
) )
max_len: bpy.props.FloatProperty( max_len: bpy.props.FloatProperty(
name='Max Length', name='Max Length',
description='Highest length', description='Highest length',
default=0.0, default=0.0,
precision=4, precision=4,
update=(lambda self, context: self.sync_prop('max_len', context)), update=(lambda self, context: self.on_prop_changed('max_len', context)),
) )
steps: bpy.props.IntProperty( steps: bpy.props.IntProperty(
name='Length Steps', name='Length Steps',
description='# of steps between min and max', description='# of steps between min and max',
default=2, default=2,
update=(lambda self, context: self.sync_prop('steps', context)), update=(lambda self, context: self.on_prop_changed('steps', context)),
) )
#################### ####################
@ -75,10 +75,9 @@ class PhysicalLengthBLSocket(base.MaxwellSimSocket):
self.raw_value = spux.sympy_to_python(spux.scale_to_unit(value, self.unit)) self.raw_value = spux.sympy_to_python(spux.scale_to_unit(value, self.unit))
@property @property
def lazy_value_range(self) -> ct.LazyDataValueRange: def lazy_array_range(self) -> ct.LazyArrayRange:
return ct.LazyDataValueRange( return ct.LazyArrayRange(
symbols=set(), symbols=set(),
has_unit=True,
unit=self.unit, unit=self.unit,
start=sp.S(self.min_len) * self.unit, start=sp.S(self.min_len) * self.unit,
stop=sp.S(self.max_len) * self.unit, stop=sp.S(self.max_len) * self.unit,
@ -86,7 +85,7 @@ class PhysicalLengthBLSocket(base.MaxwellSimSocket):
scaling='lin', scaling='lin',
) )
@lazy_value_range.setter @lazy_array_range.setter
def lazy_value_range(self, value: tuple[sp.Expr, sp.Expr, int]) -> None: def lazy_value_range(self, value: tuple[sp.Expr, sp.Expr, int]) -> None:
self.min_len = spux.sympy_to_python(spux.scale_to_unit(value[0], self.unit)) self.min_len = spux.sympy_to_python(spux.scale_to_unit(value[0], self.unit))
self.max_len = spux.sympy_to_python(spux.scale_to_unit(value[1], self.unit)) self.max_len = spux.sympy_to_python(spux.scale_to_unit(value[1], self.unit))
@ -113,7 +112,7 @@ class PhysicalLengthSocketDef(base.SocketDef):
bl_socket.value = self.default_value bl_socket.value = self.default_value
if self.is_array: if self.is_array:
bl_socket.active_kind = ct.FlowKind.LazyValueRange bl_socket.active_kind = ct.FlowKind.LazyArrayRange
bl_socket.lazy_value_range = (self.min_len, self.max_len, self.steps) bl_socket.lazy_value_range = (self.min_len, self.max_len, self.steps)

View File

@ -23,7 +23,7 @@ class PhysicalMassBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of mass', description='Represents the unitless part of mass',
default=0.0, default=0.0,
precision=6, precision=6,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -24,7 +24,7 @@ class PhysicalPoint3DBLSocket(base.MaxwellSimSocket):
size=3, size=3,
default=(0.0, 0.0, 0.0), default=(0.0, 0.0, 0.0),
precision=4, precision=4,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -39,7 +39,7 @@ class PhysicalPolBLSocket(base.MaxwellSimSocket):
('STOKES', 'Stokes', 'Linear x-pol of field'), ('STOKES', 'Stokes', 'Linear x-pol of field'),
], ],
default='UNPOL', default='UNPOL',
update=(lambda self, context: self.sync_prop('model', context)), update=(lambda self, context: self.on_prop_changed('model', context)),
) )
## Lin Ang ## Lin Ang
@ -47,7 +47,7 @@ class PhysicalPolBLSocket(base.MaxwellSimSocket):
name='Pol. Angle', name='Pol. Angle',
description='Angle to polarize linearly along', description='Angle to polarize linearly along',
default=0.0, default=0.0,
update=(lambda self, context: self.sync_prop('lin_ang', context)), update=(lambda self, context: self.on_prop_changed('lin_ang', context)),
) )
## Circ ## Circ
circ: bpy.props.EnumProperty( circ: bpy.props.EnumProperty(
@ -58,7 +58,7 @@ class PhysicalPolBLSocket(base.MaxwellSimSocket):
('RCP', 'RCP', "'Right Circular Polarization'"), ('RCP', 'RCP', "'Right Circular Polarization'"),
], ],
default='LCP', default='LCP',
update=(lambda self, context: self.sync_prop('circ', context)), update=(lambda self, context: self.on_prop_changed('circ', context)),
) )
## Jones ## Jones
jones_psi: bpy.props.FloatProperty( jones_psi: bpy.props.FloatProperty(
@ -66,14 +66,14 @@ class PhysicalPolBLSocket(base.MaxwellSimSocket):
description='Angle of the ellipse to the x-axis', description='Angle of the ellipse to the x-axis',
default=0.0, default=0.0,
precision=2, precision=2,
update=(lambda self, context: self.sync_prop('jones_psi', context)), update=(lambda self, context: self.on_prop_changed('jones_psi', context)),
) )
jones_chi: bpy.props.FloatProperty( jones_chi: bpy.props.FloatProperty(
name='Jones Major-Axis-Adjacent Angle', name='Jones Major-Axis-Adjacent Angle',
description='Angle of adjacent to the ellipse major axis', description='Angle of adjacent to the ellipse major axis',
default=0.0, default=0.0,
precision=2, precision=2,
update=(lambda self, context: self.sync_prop('jones_chi', context)), update=(lambda self, context: self.on_prop_changed('jones_chi', context)),
) )
## Stokes ## Stokes
@ -82,28 +82,28 @@ class PhysicalPolBLSocket(base.MaxwellSimSocket):
description='Angle of the ellipse to the x-axis', description='Angle of the ellipse to the x-axis',
default=0.0, default=0.0,
precision=2, precision=2,
update=(lambda self, context: self.sync_prop('stokes_psi', context)), update=(lambda self, context: self.on_prop_changed('stokes_psi', context)),
) )
stokes_chi: bpy.props.FloatProperty( stokes_chi: bpy.props.FloatProperty(
name='Stokes Major-Axis-Adjacent Angle', name='Stokes Major-Axis-Adjacent Angle',
description='Angle of adjacent to the ellipse major axis', description='Angle of adjacent to the ellipse major axis',
default=0.0, default=0.0,
precision=2, precision=2,
update=(lambda self, context: self.sync_prop('stokes_chi', context)), update=(lambda self, context: self.on_prop_changed('stokes_chi', context)),
) )
stokes_p: bpy.props.FloatProperty( stokes_p: bpy.props.FloatProperty(
name='Stokes Polarization Degree', name='Stokes Polarization Degree',
description='The degree of polarization', description='The degree of polarization',
default=0.0, default=0.0,
precision=2, precision=2,
update=(lambda self, context: self.sync_prop('stokes_p', context)), update=(lambda self, context: self.on_prop_changed('stokes_p', context)),
) )
stokes_I: bpy.props.FloatProperty( stokes_I: bpy.props.FloatProperty(
name='Stokes Field Intensity', name='Stokes Field Intensity',
description='The intensity of the polarized field', description='The intensity of the polarized field',
default=0.0, default=0.0,
precision=2, precision=2,
update=(lambda self, context: self.sync_prop('stokes_I', context)), update=(lambda self, context: self.on_prop_changed('stokes_I', context)),
) ## TODO: Units? ) ## TODO: Units?
#################### ####################

View File

@ -22,7 +22,7 @@ class PhysicalSize3DBLSocket(base.MaxwellSimSocket):
size=3, size=3,
default=(1.0, 1.0, 1.0), default=(1.0, 1.0, 1.0),
precision=4, precision=4,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -23,7 +23,7 @@ class PhysicalSpeedBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the speed', description='Represents the unitless part of the speed',
default=0.0, default=0.0,
precision=6, precision=6,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -25,7 +25,7 @@ class PhysicalTimeBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of time', description='Represents the unitless part of time',
default=0.0, default=0.0,
precision=4, precision=4,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -46,7 +46,7 @@ class PhysicalUnitSystemBLSocket(base.MaxwellSimSocket):
name='Show Unit System Definition', name='Show Unit System Definition',
description='Toggle to show unit system definition', description='Toggle to show unit system definition',
default=False, default=False,
update=(lambda self, context: self.sync_prop('show_definition', context)), update=(lambda self, context: self.on_prop_changed('show_definition', context)),
) )
unit_time: bpy.props.EnumProperty( unit_time: bpy.props.EnumProperty(
@ -54,7 +54,7 @@ class PhysicalUnitSystemBLSocket(base.MaxwellSimSocket):
description='Unit of time', description='Unit of time',
items=contract_units_to_items(ST.PhysicalTime), items=contract_units_to_items(ST.PhysicalTime),
default=default_unit_key_for(ST.PhysicalTime), default=default_unit_key_for(ST.PhysicalTime),
update=(lambda self, context: self.sync_prop('unit_time', context)), update=(lambda self, context: self.on_prop_changed('unit_time', context)),
) )
unit_angle: bpy.props.EnumProperty( unit_angle: bpy.props.EnumProperty(
@ -62,7 +62,7 @@ class PhysicalUnitSystemBLSocket(base.MaxwellSimSocket):
description='Unit of angle', description='Unit of angle',
items=contract_units_to_items(ST.PhysicalAngle), items=contract_units_to_items(ST.PhysicalAngle),
default=default_unit_key_for(ST.PhysicalAngle), default=default_unit_key_for(ST.PhysicalAngle),
update=(lambda self, context: self.sync_prop('unit_angle', context)), update=(lambda self, context: self.on_prop_changed('unit_angle', context)),
) )
unit_length: bpy.props.EnumProperty( unit_length: bpy.props.EnumProperty(
@ -70,21 +70,21 @@ class PhysicalUnitSystemBLSocket(base.MaxwellSimSocket):
description='Unit of length', description='Unit of length',
items=contract_units_to_items(ST.PhysicalLength), items=contract_units_to_items(ST.PhysicalLength),
default=default_unit_key_for(ST.PhysicalLength), default=default_unit_key_for(ST.PhysicalLength),
update=(lambda self, context: self.sync_prop('unit_length', context)), update=(lambda self, context: self.on_prop_changed('unit_length', context)),
) )
unit_area: bpy.props.EnumProperty( unit_area: bpy.props.EnumProperty(
name='Area Unit', name='Area Unit',
description='Unit of area', description='Unit of area',
items=contract_units_to_items(ST.PhysicalArea), items=contract_units_to_items(ST.PhysicalArea),
default=default_unit_key_for(ST.PhysicalArea), default=default_unit_key_for(ST.PhysicalArea),
update=(lambda self, context: self.sync_prop('unit_area', context)), update=(lambda self, context: self.on_prop_changed('unit_area', context)),
) )
unit_volume: bpy.props.EnumProperty( unit_volume: bpy.props.EnumProperty(
name='Volume Unit', name='Volume Unit',
description='Unit of volume', description='Unit of volume',
items=contract_units_to_items(ST.PhysicalVolume), items=contract_units_to_items(ST.PhysicalVolume),
default=default_unit_key_for(ST.PhysicalVolume), default=default_unit_key_for(ST.PhysicalVolume),
update=(lambda self, context: self.sync_prop('unit_volume', context)), update=(lambda self, context: self.on_prop_changed('unit_volume', context)),
) )
unit_point_2d: bpy.props.EnumProperty( unit_point_2d: bpy.props.EnumProperty(
@ -92,14 +92,14 @@ class PhysicalUnitSystemBLSocket(base.MaxwellSimSocket):
description='Unit of 2D points', description='Unit of 2D points',
items=contract_units_to_items(ST.PhysicalPoint2D), items=contract_units_to_items(ST.PhysicalPoint2D),
default=default_unit_key_for(ST.PhysicalPoint2D), default=default_unit_key_for(ST.PhysicalPoint2D),
update=(lambda self, context: self.sync_prop('unit_point_2d', context)), update=(lambda self, context: self.on_prop_changed('unit_point_2d', context)),
) )
unit_point_3d: bpy.props.EnumProperty( unit_point_3d: bpy.props.EnumProperty(
name='Point3D Unit', name='Point3D Unit',
description='Unit of 3D points', description='Unit of 3D points',
items=contract_units_to_items(ST.PhysicalPoint3D), items=contract_units_to_items(ST.PhysicalPoint3D),
default=default_unit_key_for(ST.PhysicalPoint3D), default=default_unit_key_for(ST.PhysicalPoint3D),
update=(lambda self, context: self.sync_prop('unit_point_3d', context)), update=(lambda self, context: self.on_prop_changed('unit_point_3d', context)),
) )
unit_size_2d: bpy.props.EnumProperty( unit_size_2d: bpy.props.EnumProperty(
@ -107,14 +107,14 @@ class PhysicalUnitSystemBLSocket(base.MaxwellSimSocket):
description='Unit of 2D sizes', description='Unit of 2D sizes',
items=contract_units_to_items(ST.PhysicalSize2D), items=contract_units_to_items(ST.PhysicalSize2D),
default=default_unit_key_for(ST.PhysicalSize2D), default=default_unit_key_for(ST.PhysicalSize2D),
update=(lambda self, context: self.sync_prop('unit_size_2d', context)), update=(lambda self, context: self.on_prop_changed('unit_size_2d', context)),
) )
unit_size_3d: bpy.props.EnumProperty( unit_size_3d: bpy.props.EnumProperty(
name='Size3D Unit', name='Size3D Unit',
description='Unit of 3D sizes', description='Unit of 3D sizes',
items=contract_units_to_items(ST.PhysicalSize3D), items=contract_units_to_items(ST.PhysicalSize3D),
default=default_unit_key_for(ST.PhysicalSize3D), default=default_unit_key_for(ST.PhysicalSize3D),
update=(lambda self, context: self.sync_prop('unit_size_3d', context)), update=(lambda self, context: self.on_prop_changed('unit_size_3d', context)),
) )
unit_mass: bpy.props.EnumProperty( unit_mass: bpy.props.EnumProperty(
@ -122,7 +122,7 @@ class PhysicalUnitSystemBLSocket(base.MaxwellSimSocket):
description='Unit of mass', description='Unit of mass',
items=contract_units_to_items(ST.PhysicalMass), items=contract_units_to_items(ST.PhysicalMass),
default=default_unit_key_for(ST.PhysicalMass), default=default_unit_key_for(ST.PhysicalMass),
update=(lambda self, context: self.sync_prop('unit_mass', context)), update=(lambda self, context: self.on_prop_changed('unit_mass', context)),
) )
unit_speed: bpy.props.EnumProperty( unit_speed: bpy.props.EnumProperty(
@ -130,35 +130,35 @@ class PhysicalUnitSystemBLSocket(base.MaxwellSimSocket):
description='Unit of speed', description='Unit of speed',
items=contract_units_to_items(ST.PhysicalSpeed), items=contract_units_to_items(ST.PhysicalSpeed),
default=default_unit_key_for(ST.PhysicalSpeed), default=default_unit_key_for(ST.PhysicalSpeed),
update=(lambda self, context: self.sync_prop('unit_speed', context)), update=(lambda self, context: self.on_prop_changed('unit_speed', context)),
) )
unit_accel_scalar: bpy.props.EnumProperty( unit_accel_scalar: bpy.props.EnumProperty(
name='Accel Unit', name='Accel Unit',
description='Unit of acceleration', description='Unit of acceleration',
items=contract_units_to_items(ST.PhysicalAccelScalar), items=contract_units_to_items(ST.PhysicalAccelScalar),
default=default_unit_key_for(ST.PhysicalAccelScalar), default=default_unit_key_for(ST.PhysicalAccelScalar),
update=(lambda self, context: self.sync_prop('unit_accel_scalar', context)), update=(lambda self, context: self.on_prop_changed('unit_accel_scalar', context)),
) )
unit_force_scalar: bpy.props.EnumProperty( unit_force_scalar: bpy.props.EnumProperty(
name='Force Scalar Unit', name='Force Scalar Unit',
description='Unit of scalar force', description='Unit of scalar force',
items=contract_units_to_items(ST.PhysicalForceScalar), items=contract_units_to_items(ST.PhysicalForceScalar),
default=default_unit_key_for(ST.PhysicalForceScalar), default=default_unit_key_for(ST.PhysicalForceScalar),
update=(lambda self, context: self.sync_prop('unit_force_scalar', context)), update=(lambda self, context: self.on_prop_changed('unit_force_scalar', context)),
) )
unit_accel_3d: bpy.props.EnumProperty( unit_accel_3d: bpy.props.EnumProperty(
name='Accel3D Unit', name='Accel3D Unit',
description='Unit of 3D vector acceleration', description='Unit of 3D vector acceleration',
items=contract_units_to_items(ST.PhysicalAccel3D), items=contract_units_to_items(ST.PhysicalAccel3D),
default=default_unit_key_for(ST.PhysicalAccel3D), default=default_unit_key_for(ST.PhysicalAccel3D),
update=(lambda self, context: self.sync_prop('unit_accel_3d', context)), update=(lambda self, context: self.on_prop_changed('unit_accel_3d', context)),
) )
unit_force_3d: bpy.props.EnumProperty( unit_force_3d: bpy.props.EnumProperty(
name='Force3D Unit', name='Force3D Unit',
description='Unit of 3D vector force', description='Unit of 3D vector force',
items=contract_units_to_items(ST.PhysicalForce3D), items=contract_units_to_items(ST.PhysicalForce3D),
default=default_unit_key_for(ST.PhysicalForce3D), default=default_unit_key_for(ST.PhysicalForce3D),
update=(lambda self, context: self.sync_prop('unit_force_3d', context)), update=(lambda self, context: self.on_prop_changed('unit_force_3d', context)),
) )
unit_freq: bpy.props.EnumProperty( unit_freq: bpy.props.EnumProperty(
@ -166,7 +166,7 @@ class PhysicalUnitSystemBLSocket(base.MaxwellSimSocket):
description='Unit of frequency', description='Unit of frequency',
items=contract_units_to_items(ST.PhysicalFreq), items=contract_units_to_items(ST.PhysicalFreq),
default=default_unit_key_for(ST.PhysicalFreq), default=default_unit_key_for(ST.PhysicalFreq),
update=(lambda self, context: self.sync_prop('unit_freq', context)), update=(lambda self, context: self.on_prop_changed('unit_freq', context)),
) )
#################### ####################

View File

@ -20,7 +20,7 @@ class PhysicalVolumeBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the volume', description='Represents the unitless part of the volume',
default=0.0, default=0.0,
precision=6, precision=6,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -88,13 +88,13 @@ class Tidy3DCloudTaskBLSocket(base.MaxwellSimSocket):
name='Folder of Cloud Tasks', name='Folder of Cloud Tasks',
description='An existing folder on the Tidy3D Cloud', description='An existing folder on the Tidy3D Cloud',
items=lambda self, _: self.retrieve_folders(), items=lambda self, _: self.retrieve_folders(),
update=(lambda self, context: self.sync_prop('existing_folder_id', context)), update=(lambda self, context: self.on_prop_changed('existing_folder_id', context)),
) )
existing_task_id: bpy.props.EnumProperty( existing_task_id: bpy.props.EnumProperty(
name='Existing Cloud Task', name='Existing Cloud Task',
description='An existing task on the Tidy3D Cloud, within the given folder', description='An existing task on the Tidy3D Cloud, within the given folder',
items=lambda self, _: self.retrieve_tasks(), items=lambda self, _: self.retrieve_tasks(),
update=(lambda self, context: self.sync_prop('existing_task_id', context)), update=(lambda self, context: self.on_prop_changed('existing_task_id', context)),
) )
# (Potential) New Task # (Potential) New Task
@ -102,7 +102,7 @@ class Tidy3DCloudTaskBLSocket(base.MaxwellSimSocket):
name='New Cloud Task Name', name='New Cloud Task Name',
description='Name of a new task to submit to the Tidy3D Cloud', description='Name of a new task to submit to the Tidy3D Cloud',
default='', default='',
update=(lambda self, context: self.sync_prop('new_task_name', context)), update=(lambda self, context: self.on_prop_changed('new_task_name', context)),
) )
#################### ####################
@ -114,7 +114,7 @@ class Tidy3DCloudTaskBLSocket(base.MaxwellSimSocket):
self.existing_task_id = folder_task_ids[0][0] self.existing_task_id = folder_task_ids[0][0]
## There's guaranteed to be at least one element, even if it's "NONE". ## There's guaranteed to be at least one element, even if it's "NONE".
self.sync_prop('existing_folder_id', context) self.on_prop_changed('existing_folder_id', context)
def retrieve_folders(self) -> list[tuple]: def retrieve_folders(self) -> list[tuple]:
folders = tdcloud.TidyCloudFolders.folders() folders = tdcloud.TidyCloudFolders.folders()

View File

@ -30,7 +30,7 @@ class Integer3DVectorBLSocket(base.MaxwellSimSocket):
description='Represents an integer 3D (coordinate) vector', description='Represents an integer 3D (coordinate) vector',
size=3, size=3,
default=(0, 0, 0), default=(0, 0, 0),
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -31,7 +31,7 @@ class Real2DVectorBLSocket(base.MaxwellSimSocket):
size=2, size=2,
default=(0.0, 0.0), default=(0.0, 0.0),
precision=4, precision=4,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################

View File

@ -1,19 +1,11 @@
import bpy import bpy
import sympy as sp import sympy as sp
from blender_maxwell.utils.pydantic_sympy import ConstrSympyExpr import blender_maxwell.utils.extra_sympy_units as spux
from ... import contracts as ct from ... import contracts as ct
from .. import base from .. import base
Real3DVector = ConstrSympyExpr(
allow_variables=False,
allow_units=False,
allowed_sets={'integer', 'rational', 'real'},
allowed_structures={'matrix'},
allowed_matrix_shapes={(3, 1)},
)
#################### ####################
# - Blender Socket # - Blender Socket
@ -31,7 +23,7 @@ class Real3DVectorBLSocket(base.MaxwellSimSocket):
size=3, size=3,
default=(0.0, 0.0, 0.0), default=(0.0, 0.0, 0.0),
precision=4, precision=4,
update=(lambda self, context: self.sync_prop('raw_value', context)), update=(lambda self, context: self.on_prop_changed('raw_value', context)),
) )
#################### ####################
@ -44,11 +36,11 @@ class Real3DVectorBLSocket(base.MaxwellSimSocket):
# - Computation of Default Value # - Computation of Default Value
#################### ####################
@property @property
def value(self) -> Real3DVector: def value(self) -> spux.Real3DVector:
return sp.Matrix(tuple(self.raw_value)) return sp.Matrix(tuple(self.raw_value))
@value.setter @value.setter
def value(self, value: Real3DVector) -> None: def value(self, value: spux.Real3DVector) -> None:
self.raw_value = tuple(value) self.raw_value = tuple(value)
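The `value` property above is just a tuple ↔ `sympy.Matrix` round trip; for illustration:

```python
import sympy as sp

raw_value = (0.0, 1.5, -2.0)   # what Blender stores (a FloatVectorProperty)
vec = sp.Matrix(raw_value)     # what the node graph sees: a (3, 1) column vector
assert vec.shape == (3, 1)

round_tripped = tuple(vec)     # setter direction: back to a flat tuple
print(round_tripped)           # (0.0, 1.50000000000000, -2.00000000000000)
```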
@ -58,7 +50,7 @@ class Real3DVectorBLSocket(base.MaxwellSimSocket):
class Real3DVectorSocketDef(base.SocketDef): class Real3DVectorSocketDef(base.SocketDef):
socket_type: ct.SocketType = ct.SocketType.Real3DVector socket_type: ct.SocketType = ct.SocketType.Real3DVector
default_value: Real3DVector = sp.Matrix([0.0, 0.0, 0.0]) default_value: spux.Real3DVector = sp.Matrix([0.0, 0.0, 0.0])
def init(self, bl_socket: Real3DVectorBLSocket) -> None: def init(self, bl_socket: Real3DVectorBLSocket) -> None:
bl_socket.value = self.default_value bl_socket.value = self.default_value

View File

@ -1,3 +1 @@
from . import operators, utils
__all__ = ['operators', 'utils'] __all__ = ['operators', 'utils']

View File

@ -1,14 +1,13 @@
from . import install_deps, uninstall_deps from . import install_deps, uninstall_deps, manage_pydeps
BL_REGISTER = [ BL_REGISTER = [
*install_deps.BL_REGISTER, *install_deps.BL_REGISTER,
*uninstall_deps.BL_REGISTER, *uninstall_deps.BL_REGISTER,
*manage_pydeps.BL_REGISTER,
] ]
BL_KEYMAP_ITEM_DEFS = [ BL_HOTKEYS = [
*install_deps.BL_KEYMAP_ITEM_DEFS, *install_deps.BL_HOTKEYS,
*uninstall_deps.BL_KEYMAP_ITEM_DEFS, *uninstall_deps.BL_HOTKEYS,
*manage_pydeps.BL_HOTKEYS,
] ]
__all__ = []
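For orientation, the aggregated `BL_REGISTER`/`BL_HOTKEYS` lists are typically consumed by the addon's top-level `register()`/`unregister()`. A hypothetical sketch of that consumption (not the addon's actual `registration` module):

```python
import bpy

# Hypothetical flattened lists, as assembled above from each operator submodule.
BL_REGISTER: list[type] = []   # e.g. [*install_deps.BL_REGISTER, *uninstall_deps.BL_REGISTER, ...]
BL_HOTKEYS: list[dict] = []    # keymap item definitions, registered alongside the classes

def register() -> None:
    for bl_class in BL_REGISTER:
        bpy.utils.register_class(bl_class)

def unregister() -> None:
    for bl_class in reversed(BL_REGISTER):
        bpy.utils.unregister_class(bl_class)
```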

View File

@ -4,46 +4,64 @@ from pathlib import Path
import bpy import bpy
from blender_maxwell.utils import pydeps, simple_logger from ... import contracts as ct
from ... import registration from ... import registration
from ..utils import pydeps, simple_logger
log = simple_logger.get(__name__) log = simple_logger.get(__name__)
class InstallPyDeps(bpy.types.Operator): class InstallPyDeps(bpy.types.Operator):
bl_idname = 'blender_maxwell.nodeps__install_py_deps' bl_idname = ct.OperatorType.InstallPyDeps
bl_label = 'Install BLMaxwell Python Deps' bl_label = 'Install BLMaxwell Python Deps'
path_addon_pydeps: bpy.props.StringProperty(
name='Path to Addon Python Dependencies',
default='',
)
path_addon_reqs: bpy.props.StringProperty(
name='Path to Addon Python Dependencies',
default='',
)
@classmethod @classmethod
def poll(cls, _: bpy.types.Context): def poll(cls, _: bpy.types.Context):
return not pydeps.DEPS_OK return not pydeps.DEPS_OK
def execute(self, _: bpy.types.Context): ####################
if self.path_addon_pydeps == '' or self.path_addon_reqs == '': # - Property: PyDeps Path
msg = f"A path for operator {self.bl_idname} isn't set" ####################
raise ValueError(msg) bl__pydeps_path: bpy.props.StringProperty(
default='',
)
path_addon_pydeps = Path(self.path_addon_pydeps) @property
path_addon_reqs = Path(self.path_addon_reqs) def pydeps_path(self):
return Path(bpy.path.abspath(self.bl__pydeps_path))
@pydeps_path.setter
def pydeps_path(self, path: Path) -> None:
self.bl__pydeps_path = str(path.resolve())
####################
# - Property: requirements.lock
####################
bl__pydeps_reqlock_path: bpy.props.StringProperty(
default='',
)
@property
def pydeps_reqlock_path(self):
return Path(bpy.path.abspath(self.bl__pydeps_reqlock_path))
@pydeps_reqlock_path.setter
def pydeps_reqlock_path(self, path: Path) -> None:
self.bl__pydeps_reqlock_path = str(path.resolve())
####################
# - Execution
####################
def execute(self, _: bpy.types.Context):
log.info( log.info(
'Running Install PyDeps w/requirements.txt (%s) to path: %s', 'Running Install PyDeps w/requirements.txt (%s) to path: %s',
path_addon_reqs, self.pydeps_reqlock_path,
path_addon_pydeps, self.pydeps_path,
) )
# Create the Addon-Specific Folder (if Needed) # Create the Addon-Specific Folder (if Needed)
## It MUST, however, have a parent already ## It MUST, however, have a parent already
path_addon_pydeps.mkdir(parents=False, exist_ok=True) self.pydeps_path.mkdir(parents=False, exist_ok=True)
# Determine Path to Blender's Bundled Python # Determine Path to Blender's Bundled Python
## bpy.app.binary_path_python was deprecated in 2.91. ## bpy.app.binary_path_python was deprecated in 2.91.
@ -59,9 +77,9 @@ class InstallPyDeps(bpy.types.Operator):
'pip', 'pip',
'install', 'install',
'-r', '-r',
str(path_addon_reqs), str(self.pydeps_reqlock_path),
'--target', '--target',
str(path_addon_pydeps), str(self.pydeps_path),
] ]
log.info( log.info(
'Running pip w/cmdline: %s', 'Running pip w/cmdline: %s',
@ -72,10 +90,8 @@ class InstallPyDeps(bpy.types.Operator):
log.exception('Failed to install PyDeps') log.exception('Failed to install PyDeps')
return {'CANCELLED'} return {'CANCELLED'}
registration.run_delayed_registration( # Report PyDeps Changed
registration.EVENT__DEPS_SATISFIED, ct.addon.prefs().on_addon_pydeps_changed()
path_addon_pydeps,
)
return {'FINISHED'} return {'FINISHED'}
@ -85,4 +101,4 @@ class InstallPyDeps(bpy.types.Operator):
BL_REGISTER = [ BL_REGISTER = [
InstallPyDeps, InstallPyDeps,
] ]
BL_KEYMAP_ITEM_DEFS = [] BL_HOTKEYS = []
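For reference, the install step above reduces to calling the bundled interpreter's pip with `--target`; a minimal standalone sketch (paths are illustrative, not the addon's defaults):
import subprocess
import sys
from pathlib import Path

def install_locked_deps(reqlock: Path, target: Path) -> None:
    """Install a locked requirements file into an addon-local directory via pip --target."""
    target.mkdir(parents=False, exist_ok=True)  # the parent directory must already exist
    subprocess.check_call([
        sys.executable, '-m', 'pip', 'install',  # sys.executable is Blender's bundled Python
        '-r', str(reqlock),
        '--target', str(target),
    ])

# install_locked_deps(Path('requirements.lock'), Path('.addon_pydeps'))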


@ -0,0 +1,129 @@
from pathlib import Path
import bpy
from blender_maxwell import contracts as ct
from ..utils import pydeps, simple_logger
log = simple_logger.get(__name__)
class ManagePyDeps(bpy.types.Operator):
bl_idname = ct.OperatorType.ManagePyDeps
bl_label = 'Blender Maxwell Python Dependency Manager'
bl_options = {'REGISTER'}
show_pydeps_conflicts: bpy.props.BoolProperty(
name='Show Conflicts',
description='Show the conflicts between installed and required packages.',
default=False,
)
####################
# - Property: PyDeps Path
####################
bl__pydeps_path: bpy.props.StringProperty(
default='',
)
@property
def pydeps_path(self):
return Path(bpy.path.abspath(self.bl__pydeps_path))
@pydeps_path.setter
def pydeps_path(self, path: Path) -> None:
self.bl__pydeps_path = str(path.resolve())
####################
# - Property: requirements.lock
####################
bl__pydeps_reqlock_path: bpy.props.StringProperty(
default='',
)
@property
def pydeps_reqlock_path(self):
return Path(bpy.path.abspath(self.bl__pydeps_reqlock_path))
@pydeps_reqlock_path.setter
def pydeps_reqlock_path(self, path: Path) -> None:
self.bl__pydeps_reqlock_path = str(path.resolve())
####################
# - UI
####################
def draw(self, _: bpy.types.Context) -> None:
layout = self.layout
## Row: Toggle Default PyDeps Path
row = layout.row()
row.alignment = 'CENTER'
row.label(
text="Blender Maxwell relies on Python dependencies that aren't currently satisfied."
)
row.prop(
self,
'show_pydeps_conflicts',
text=f'Show Conflicts ({len(pydeps.DEPS_ISSUES)})',
toggle=True,
)
## Grid: Issues Panel
if self.show_pydeps_conflicts:
grid = layout.grid_flow()
grid.alignment = 'CENTER'
for issue in pydeps.DEPS_ISSUES:
grid.label(text=issue)
# Install Deps
row = layout.row(align=True)
op = row.operator(
ct.OperatorType.InstallPyDeps,
text='Install Python Dependencies (requires internet)',
)
op.bl__pydeps_path = str(self.pydeps_path)
op.bl__pydeps_reqlock_path = str(self.bl__pydeps_reqlock_path)
## Row: Toggle Default PyDeps Path
row = layout.row()
row.alignment = 'CENTER'
row.label(
text='After installation, the addon is ready to use. For more details, please refer to the addon preferences.'
)
####################
# - Execute
####################
def invoke(self, context: bpy.types.Context, event: bpy.types.Event):
if not bpy.app.background:
# Force-Move Mouse Cursor to Window Center
## This forces the popup dialog to spawn in the center of the screen.
context.window.cursor_warp(
context.window.width // 2,
context.window.height // 2 + 2 * bpy.context.preferences.system.dpi,
)
# Spawn Popup Dialogue
return context.window_manager.invoke_props_dialog(
self, width=8 * bpy.context.preferences.system.dpi
)
log.info('Skipping ManagePyDeps popup, since Blender is running without a GUI')
return {'INTERFACE'}
def execute(self, _: bpy.types.Context):
if not pydeps.DEPS_OK:
self.report(
{'ERROR'},
f'Python Dependencies for "{ct.addon.NAME}" were not installed. Please refer to the addon preferences.',
)
return {'FINISHED'}
####################
# - Blender Registration
####################
BL_REGISTER = [
ManagePyDeps,
]
BL_HOTKEYS = []
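Both operators above use the same wrapper pattern: a hidden `bpy.props.StringProperty` holds the raw string that Blender can store and pass between operators, while a Python `property` exposes it as a `pathlib.Path`. A minimal sketch of the pattern on a hypothetical operator (all names are illustrative):
from pathlib import Path

import bpy

class ExamplePathOperator(bpy.types.Operator):
    bl_idname = 'wm.example_path_operator'
    bl_label = 'Example Path Operator'

    # Blender-facing storage: a plain string property.
    bl__example_path: bpy.props.StringProperty(default='')

    # Python-facing view: always a resolved, absolute pathlib.Path.
    @property
    def example_path(self) -> Path:
        return Path(bpy.path.abspath(self.bl__example_path))

    @example_path.setter
    def example_path(self, path: Path) -> None:
        self.bl__example_path = str(path.resolve())

    def execute(self, context: bpy.types.Context):
        self.report({'INFO'}, f'Path: {self.example_path}')
        return {'FINISHED'}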


@ -1,86 +0,0 @@
import subprocess
import sys
from pathlib import Path
import bpy
from blender_maxwell.utils import logger as _logger
from .. import registration
log = _logger.get(__name__)
class InstallPyDeps(bpy.types.Operator):
bl_idname = 'blender_maxwell.nodeps__addon_install_popup'
bl_label = 'Popup to Install BLMaxwell Python Deps'
path_addon_pydeps: bpy.props.StringProperty(
name='Path to Addon Python Dependencies',
default='',
)
path_addon_reqs: bpy.props.StringProperty(
name='Path to Addon Python Dependencies',
default='',
)
# TODO: poll()
def execute(self, _: bpy.types.Context):
if self.path_addon_pydeps == '' or self.path_addon_reqs == '':
msg = f"A path for operator {self.bl_idname} isn't set"
raise ValueError(msg)
path_addon_pydeps = Path(self.path_addon_pydeps)
path_addon_reqs = Path(self.path_addon_reqs)
log.info(
'Running Install PyDeps w/requirements.txt (%s) to path: %s',
path_addon_reqs,
path_addon_pydeps,
)
# Create the Addon-Specific Folder (if Needed)
## It MUST, however, have a parent already
path_addon_pydeps.mkdir(parents=False, exist_ok=True)
# Determine Path to Blender's Bundled Python
## bpy.app.binary_path_python was deprecated in 2.91.
## sys.executable points to the correct bundled Python.
## See <https://developer.blender.org/docs/release_notes/2.91/python_api/>
python_exec = Path(sys.executable)
# Install Deps w/Bundled pip
try:
cmdline = [
str(python_exec),
'-m',
'pip',
'install',
'-r',
str(path_addon_reqs),
'--target',
str(path_addon_pydeps),
]
log.info(
'Running pip w/cmdline: %s',
' '.join(cmdline),
)
subprocess.check_call(cmdline)
except subprocess.CalledProcessError:
log.exception('Failed to install PyDeps')
return {'CANCELLED'}
registration.run_delayed_registration(
registration.EVENT__DEPS_SATISFIED,
path_addon_pydeps,
)
return {'FINISHED'}
####################
# - Blender Registration
####################
BL_REGISTER = [
InstallPyDeps,
]
BL_KEYMAP_ITEM_DEFS = []


@ -3,30 +3,47 @@ from pathlib import Path
import bpy import bpy
from blender_maxwell.utils import pydeps from blender_maxwell import contracts as ct
from ..utils import pydeps
class UninstallPyDeps(bpy.types.Operator): class UninstallPyDeps(bpy.types.Operator):
bl_idname = 'blender_maxwell.nodeps__uninstall_py_deps' bl_idname = ct.OperatorType.UninstallPyDeps
bl_label = 'Uninstall BLMaxwell Python Deps' bl_label = 'Uninstall BLMaxwell Python Deps'
path_addon_pydeps: bpy.props.StringProperty(
name='Path to Addon Python Dependencies'
)
@classmethod @classmethod
def poll(cls, _: bpy.types.Context): def poll(cls, _: bpy.types.Context):
return pydeps.DEPS_OK return pydeps.DEPS_OK
####################
# - Property: PyDeps Path
####################
bl__pydeps_path: bpy.props.StringProperty(
default='',
)
@property
def pydeps_path(self):
return Path(bpy.path.abspath(self.bl__pydeps_path))
@pydeps_path.setter
def pydeps_path(self, path: Path) -> None:
self.bl__pydeps_path = str(path.resolve())
####################
# - Execution
####################
def execute(self, _: bpy.types.Context): def execute(self, _: bpy.types.Context):
path_addon_pydeps = Path(self.path_addon_pydeps) path_addon_pydeps = Path(self.pydeps_path)
if ( if (
pydeps.check_pydeps() pydeps.check_pydeps()
and self.path_addon_pydeps.exists() and path_addon_pydeps.exists()
and self.path_addon_pydeps.is_dir() and path_addon_pydeps.is_dir()
): ):
# CAREFUL!! raise NotImplementedError
shutil.rmtree(self.path_addon_pydeps) # TODO: CAREFUL!!
# shutil.rmtree(self.path_addon_pydeps)
else: else:
msg = "Can't uninstall pydeps" msg = "Can't uninstall pydeps"
raise RuntimeError(msg) raise RuntimeError(msg)
@ -40,4 +57,4 @@ class UninstallPyDeps(bpy.types.Operator):
BL_REGISTER = [ BL_REGISTER = [
UninstallPyDeps, UninstallPyDeps,
] ]
BL_KEYMAP_ITEM_DEFS = [] BL_HOTKEYS = []


@ -0,0 +1,73 @@
import enum
####################
# - StrEnum
####################
def prefix_values_with(prefix: str) -> type[enum.Enum]:
"""`StrEnum` class decorator that prepends `prefix` to all class member values.
Parameters:
prefix: The string to prepend to all `StrEnum` member values.
Returns:
A new StrEnum class with altered member values.
"""
def _decorator(cls: enum.StrEnum):
new_members = {
member_name: prefix + member_value
for member_name, member_value in cls.__members__.items()
}
new_cls = enum.StrEnum(cls.__name__, new_members)
new_cls.__doc__ = cls.__doc__
new_cls.__module__ = cls.__module__
return new_cls
return _decorator
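A usage sketch of the decorator above (the prefix and member names are illustrative; with `enum.auto()`, `StrEnum` values default to the lowercased member name, and Python 3.11+ is assumed for `enum.StrEnum`):
import enum

@prefix_values_with('blender_maxwell.')
class ExampleOperatorType(enum.StrEnum):
    InstallPyDeps = enum.auto()  # value becomes 'blender_maxwell.installpydeps'
    ManagePyDeps = enum.auto()   # value becomes 'blender_maxwell.managepydeps'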
####################
# - BlenderTypeEnum
####################
## TODO: Migrate everyone to simple StrEnums
class BlenderTypeEnum(str, enum.Enum):
"""Homegrown `str` enum for Blender types."""
def _generate_next_value_(name, *_):
return name
def append_cls_name_to_values(cls) -> type[enum.Enum]:
"""Enum class decorator that appends the class name to all values."""
# Construct Set w/Modified Member Names
new_members = {
name: f'{name}{cls.__name__}' for name, member in cls.__members__.items()
}
# Dynamically Declare New Enum Class w/Modified Members
new_cls = enum.Enum(cls.__name__, new_members, type=BlenderTypeEnum)
new_cls.__doc__ = cls.__doc__
new_cls.__module__ = cls.__module__
# Return New (Replacing) Enum Class
return new_cls
def wrap_values_in_MT(cls) -> type[enum.Enum]:
"""Enum class decorator that prepends "BLENDER_MAXWELL_MT_" to all values."""
# Construct Set w/Modified Member Names
new_members = {
name: f'BLENDER_MAXWELL_MT_{name}' for name, member in cls.__members__.items()
}
# Dynamically Declare New Enum Class w/Modified Members
new_cls = enum.Enum(cls.__name__, new_members, type=BlenderTypeEnum)
new_cls.__doc__ = cls.__doc__
new_cls.__module__ = cls.__module__
new_cls.get_tree = cls.get_tree ## TODO: This is wildly specific...
# Return New (Replacing) Enum Class
return new_cls


@ -1,3 +1,5 @@
"""Tools for fearless managemenet of addon-specific Python dependencies."""
import contextlib import contextlib
import importlib.metadata import importlib.metadata
import os import os
@ -13,8 +15,8 @@ log = simple_logger.get(__name__)
#################### ####################
# - Globals # - Globals
#################### ####################
DEPS_OK: bool | None = None DEPS_OK: bool = False ## Presume no (but we don't know yet)
DEPS_ISSUES: list[str] | None = None DEPS_ISSUES: list[str] = [] ## No known issues (yet)
#################### ####################
@ -22,6 +24,15 @@ DEPS_ISSUES: list[str] | None = None
#################### ####################
@contextlib.contextmanager @contextlib.contextmanager
def importable_addon_deps(path_deps: Path): def importable_addon_deps(path_deps: Path):
"""Temporarily modifies `sys.path` with a light touch and minimum of side-effects.
Warnings:
There are a lot of gotchas with the import system, and this is an enormously imperfect "solution".
Parameters:
path_deps:
Corresponds to the directory into which `pip install --target` was used to install packages.
"""
os_path = os.fspath(path_deps) os_path = os.fspath(path_deps)
if os_path not in sys.path: if os_path not in sys.path:
@ -30,9 +41,10 @@ def importable_addon_deps(path_deps: Path):
try: try:
yield yield
finally: finally:
pass # TODO: Re-add
# log.info('Removing Path from sys.path: %s', str(os_path)) # log.info('Removing Path from sys.path: %s', str(os_path))
# sys.path.remove(os_path) # sys.path.remove(os_path)
pass
else: else:
try: try:
yield yield
@ -42,38 +54,63 @@ def importable_addon_deps(path_deps: Path):
@contextlib.contextmanager @contextlib.contextmanager
def syspath_from_bpy_prefs() -> bool: def syspath_from_bpy_prefs() -> bool:
import bpy """Temporarily modifies `sys.path` using the dependencies found in addon preferences.
addon_prefs = bpy.context.preferences.addons[ct.addon.NAME].preferences Warnings:
if hasattr(addon_prefs, 'path_addon_pydeps'): There are a lot of gotchas with the import system, and this is an enormously imperfect "solution".
"""
with importable_addon_deps(ct.addon.prefs().pydeps_path):
log.info('Retrieved PyDeps Path from Addon Prefs') log.info('Retrieved PyDeps Path from Addon Prefs')
path_pydeps = addon_prefs.path_addon_pydeps yield True
with importable_addon_deps(path_pydeps):
yield True
else:
log.info("Couldn't PyDeps Path from Addon Prefs")
yield False
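A minimal usage sketch of the two context managers above, assuming a hypothetical package installed into the PyDeps directory via `pip install --target`:
from pathlib import Path

# Make the vendored directory importable only for the duration of the block.
with importable_addon_deps(Path('.addon_pydeps')):
    import some_vendored_package  # hypothetical package name

# Or resolve the directory from the addon preferences instead:
# with syspath_from_bpy_prefs():
#     import some_vendored_package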
#################### ####################
# - Check PyDeps # - Passive PyDeps Checkers
#################### ####################
def _check_pydeps( def conform_pypi_package_deplock(deplock: str) -> str:
"""Conforms a "deplock" string (`<package>==<version>`) so that comparing it with other "deplock" strings will conform to PyPi's matching rules.
- **Case Sensitivity**: PyPi considers packages with non-matching cases to be the same. _Therefore, we cast all deplocks to lowercase._
- **Special Characters**: PyPi considers `-` and `_` to be the same character. _Therefore, we replace `_` with `-`_.
See <https://peps.python.org/pep-0426/#name> for the specification.
Parameters:
deplock: The string formatted like `<package>==<version>`.
Returns:
The conformed deplock string.
"""
return deplock.lower().replace('_', '-')
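For example, two spellings that PyPI treats as the same package conform to one canonical deplock string (package name and version are illustrative):
assert conform_pypi_package_deplock('Some_Package==1.0.0') == 'some-package==1.0.0'
assert conform_pypi_package_deplock('some-package==1.0.0') == 'some-package==1.0.0'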
def deplock_conflicts(
path_requirementslock: Path, path_requirementslock: Path,
path_deps: Path, path_deps: Path,
) -> dict[str, tuple[str, str]]: ) -> list[str]:
"""Check if packages defined in a 'requirements.lock' file are currently installed. """Check if packages defined in a 'requirements.lock' file are **strictly** realized by a particular dependency path.
Returns a list of any issues (if empty, then all dependencies are correctly satisfied). **Strict** means not only that everything is satisfied, but that _the exact versions_ are satisfied, and that _no extra packages_ are installed either.
Parameters:
path_requirementslock: Path to the `requirements.lock` file.
Generally, one would use `ct.addon.PATH_REQS` to use the `requirements.lock` file shipped with the addon.
path_deps: Path to the directory where Python modules can be found.
Corresponds to the directory into which `pip install --target` was used to install packages.
Returns:
A list of messages explaining mismatches between the currently installed dependencies, and the given `requirements.lock` file.
There are three kinds of conflicts:
- **Version**: The wrong version of something is installed.
- **Missing**: Something should be installed that isn't.
- **Superfluous**: Something is installed that shouldn't be.
""" """
# DepLocks: Required
def conform_pypi_package_deplock(deplock: str):
"""Conforms a <package>==<version> de-lock to match if pypi considers them the same (PyPi is case-insensitive and considers -/_ to be the same).
See <https://peps.python.org/pep-0426/#name>
"""
return deplock.lower().replace('_', '-')
with path_requirementslock.open('r') as file: with path_requirementslock.open('r') as file:
required_depslock = { required_depslock = {
conform_pypi_package_deplock(line) conform_pypi_package_deplock(line)
@ -81,18 +118,15 @@ def _check_pydeps(
if (line := raw_line.strip()) and not line.startswith('#') if (line := raw_line.strip()) and not line.startswith('#')
} }
# Investigate Issues # DepLocks: Installed
installed_deps = importlib.metadata.distributions(
path=[str(path_deps.resolve())] ## resolve() is just-in-case
)
installed_depslock = { installed_depslock = {
conform_pypi_package_deplock( conform_pypi_package_deplock(
f'{dep.metadata["Name"]}=={dep.metadata["Version"]}' f'{dep.metadata["Name"]}=={dep.metadata["Version"]}'
) )
for dep in installed_deps for dep in importlib.metadata.distributions(path=[str(path_deps.resolve())])
} }
# Determine Missing/Superfluous/Conflicting # Determine Diff of Required vs. Installed
req_not_inst = required_depslock - installed_depslock req_not_inst = required_depslock - installed_depslock
inst_not_req = installed_depslock - required_depslock inst_not_req = installed_depslock - required_depslock
conflicts = { conflicts = {
@ -102,7 +136,6 @@ def _check_pydeps(
if req.split('==')[0] == inst.split('==')[0] if req.split('==')[0] == inst.split('==')[0]
} }
# Assemble and Return Issues
return ( return (
[ [
f'{name}: Have {inst_ver}, Need {req_ver}' f'{name}: Have {inst_ver}, Need {req_ver}'
@ -122,20 +155,48 @@ def _check_pydeps(
#################### ####################
# - Refresh PyDeps # - Passive PyDeps Checker
#################### ####################
def check_pydeps(path_deps: Path): def check_pydeps(path_requirementslock: Path, path_deps: Path):
"""Check if all dependencies are satisfied without `deplock_conflicts()` conflicts, and update globals in response.
Notes:
Use of the globals `DEPS_OK` and `DEPS_ISSUES` should be preferred in general, since they are very fast to access.
**Only** use `check_pydeps()` after an operation that might have changed the dependency status, both to check the result and to update the globals.
Parameters:
path_requirementslock: Path to the `requirements.lock` file.
Generally, one would use `ct.addon.PATH_REQS` to use the `requirements.lock` file shipped with the addon.
path_deps: Path to the directory where Python modules can be found.
Corresponds to the directory into which `pip install --target` was used to install packages.
Returns:
Whether the dependencies found at `path_deps` strictly satisfy `path_requirementslock`; the globals `DEPS_OK` and `DEPS_ISSUES` are updated to match.
"""
global DEPS_OK # noqa: PLW0603 global DEPS_OK # noqa: PLW0603
global DEPS_ISSUES # noqa: PLW0603 global DEPS_ISSUES # noqa: PLW0603
if len(issues := _check_pydeps(ct.addon.PATH_REQS, path_deps)) > 0: log.info(
log.info('PyDeps Check Failed') 'Analyzing PyDeps at: %s',
str(path_deps),
)
if len(issues := deplock_conflicts(path_requirementslock, path_deps)) > 0:
log.info(
'PyDeps Check Failed - adjust Addon Preferences for: %s', ct.addon.NAME
)
log.debug('%s', ', '.join(issues)) log.debug('%s', ', '.join(issues))
log.debug('PyDeps Conflicts: %s', ', '.join(issues))
DEPS_OK = False DEPS_OK = False
DEPS_ISSUES = issues DEPS_ISSUES = issues
else: else:
log.info('PyDeps Check Succeeded') log.info('PyDeps Check Succeeded - DEPS_OK and DEPS_ISSUES have been updated')
DEPS_OK = True DEPS_OK = True
DEPS_ISSUES = [] DEPS_ISSUES = []
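A usage sketch tying the two checkers together (paths and the printed conflict are illustrative):
from pathlib import Path

reqlock = Path('requirements.lock')
pydeps_dir = Path('.addon_pydeps')

# One-off diagnosis: list every version/missing/superfluous conflict.
for issue in deplock_conflicts(reqlock, pydeps_dir):
    print(issue)  # e.g. 'numpy: Have 1.26.0, Need 1.26.4'

# After any (un)install, refresh the module-level globals.
check_pydeps(reqlock, pydeps_dir)
print(DEPS_OK, DEPS_ISSUES)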


@ -179,7 +179,7 @@ def sync_bootstrap_logging(
file_path=file_path, file_path=file_path,
file_level=file_level, file_level=file_level,
) )
logger_logger.info('Bootstrapped Logging w/Settings %s', str(CACHE)) logger_logger.info('Bootstrapped Simple Logging w/Settings %s', str(CACHE))
def sync_all_loggers( def sync_all_loggers(


@ -3,6 +3,6 @@ from . import connect_viewer
BL_REGISTER = [ BL_REGISTER = [
*connect_viewer.BL_REGISTER, *connect_viewer.BL_REGISTER,
] ]
BL_KEYMAP_ITEM_DEFS = [ BL_HOTKEYS = [
*connect_viewer.BL_KEYMAP_ITEM_DEFS, *connect_viewer.BL_HOTKEYS,
] ]


@ -59,7 +59,7 @@ BL_REGISTER = [
ConnectViewerNode, ConnectViewerNode,
] ]
BL_KEYMAP_ITEM_DEFS = [ BL_HOTKEYS = [
{ {
'_': ( '_': (
ConnectViewerNode.bl_idname, ConnectViewerNode.bl_idname,


@ -3,13 +3,11 @@ from pathlib import Path
import bpy import bpy
from . import info, registration from . import contracts as ct
from . import registration
from .nodeps.operators import install_deps, uninstall_deps from .nodeps.operators import install_deps, uninstall_deps
from .nodeps.utils import pydeps, simple_logger from .nodeps.utils import pydeps, simple_logger
####################
# - Constants
####################
log = simple_logger.get(__name__) log = simple_logger.get(__name__)
@ -17,46 +15,66 @@ log = simple_logger.get(__name__)
# - Preferences # - Preferences
#################### ####################
class BLMaxwellAddonPrefs(bpy.types.AddonPreferences): class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
"""Manages user preferences and settings for the Blender Maxwell addon.""" """Manages user preferences and settings for the Blender Maxwell addon.
bl_idname = info.ADDON_NAME ## MUST match addon package name Unfortunately, many of the niceties based on dependencies (ex. `bl_cache.BLField`) aren't available here.
Attributes:
bl_idname: Matches `ct.addon.NAME`.
use_default_pydeps_path: Whether to use the default PyDeps path
"""
bl_idname = ct.addon.NAME
#################### ####################
# - Properties # - Properties
#################### ####################
# Use of Default PyDeps Path # PyDeps Default Path
use_default_pydeps_path: bpy.props.BoolProperty( use_default_pydeps_path: bpy.props.BoolProperty(
name='Use Default PyDeps Path', name='Use Default PyDeps Path',
description='Whether to use the default PyDeps path', description='Whether to use the default PyDeps path',
default=True, default=True,
update=lambda self, context: self.sync_use_default_pydeps_path(context), update=lambda self, context: self.on_addon_pydeps_changed(context),
)
cache__pydeps_path_while_using_default: bpy.props.StringProperty(
name='Cached Addon PyDeps Path',
default=(_default_pydeps_path := str(info.DEFAULT_PATH_DEPS)),
) )
# Custom PyDeps Path # PyDeps Path
bl__pydeps_path: bpy.props.StringProperty( bl__pydeps_path: bpy.props.StringProperty(
name='Addon PyDeps Path', name='Addon PyDeps Path',
description='Path to Addon Python Dependencies', description='Path to Addon Python Dependencies',
subtype='FILE_PATH', subtype='FILE_PATH',
default=_default_pydeps_path, default=str(ct.addon.DEFAULT_PATH_DEPS),
update=lambda self, _: self.sync_pydeps_path(), update=lambda self, _: self.on_addon_pydeps_changed(),
)
cache__backup_pydeps_path: bpy.props.StringProperty(
name='Previous Addon PyDeps Path',
default=_default_pydeps_path,
) )
# Log Settings cache__backup_pydeps_path: bpy.props.StringProperty(
default=str(ct.addon.DEFAULT_PATH_DEPS),
)
@property
def pydeps_path(self) -> Path:
if self.use_default_pydeps_path:
return ct.addon.DEFAULT_PATH_DEPS
return Path(bpy.path.abspath(self.bl__pydeps_path))
@pydeps_path.setter
def pydeps_path(self, path: Path) -> None:
if not self.use_default_pydeps_path:
self.bl__pydeps_path = str(path.resolve())
else:
msg = f'Can\'t set "pydeps_path" to {path} while "use_default_pydeps_path" is "True"'
raise ValueError(msg)
# Logging
## Console Logging
use_log_console: bpy.props.BoolProperty( use_log_console: bpy.props.BoolProperty(
name='Log to Console', name='Log to Console',
description='Whether to use the console for addon logging', description='Whether to use the console for addon logging',
default=True, default=True,
update=lambda self, _: self.sync_addon_logging(), update=lambda self, _: self.on_addon_logging_changed(),
) )
bl__log_level_console: bpy.props.EnumProperty( log_level_console: bpy.props.EnumProperty(
name='Console Log Level', name='Console Log Level',
description='Level of addon logging to expose in the console', description='Level of addon logging to expose in the console',
items=[ items=[
@ -66,24 +84,18 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
('ERROR', 'Error', 'Error'), ('ERROR', 'Error', 'Error'),
('CRITICAL', 'Critical', 'Critical'), ('CRITICAL', 'Critical', 'Critical'),
], ],
default='DEBUG', default='INFO',
update=lambda self, _: self.sync_addon_logging(), update=lambda self, _: self.on_addon_logging_changed(),
) )
## File Logging
use_log_file: bpy.props.BoolProperty( use_log_file: bpy.props.BoolProperty(
name='Log to File', name='Log to File',
description='Whether to use a file for addon logging', description='Whether to use a file for addon logging',
default=True, default=True,
update=lambda self, _: self.sync_addon_logging(), update=lambda self, _: self.on_addon_logging_changed(),
) )
bl__log_file_path: bpy.props.StringProperty( log_level_file: bpy.props.EnumProperty(
name='Log Path',
description='Path to the Addon Log File',
subtype='FILE_PATH',
default=str(info.DEFAULT_LOG_PATH),
update=lambda self, _: self.sync_addon_logging(),
)
bl__log_level_file: bpy.props.EnumProperty(
name='File Log Level', name='File Log Level',
description='Level of addon logging to expose in the file', description='Level of addon logging to expose in the file',
items=[ items=[
@ -93,61 +105,60 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
('ERROR', 'Error', 'Error'), ('ERROR', 'Error', 'Error'),
('CRITICAL', 'Critical', 'Critical'), ('CRITICAL', 'Critical', 'Critical'),
], ],
default='DEBUG', default='INFO',
update=lambda self, _: self.sync_addon_logging(), update=lambda self, _: self.on_addon_logging_changed(),
) )
# TODO: LOGGING SETTINGS bl__log_file_path: bpy.props.StringProperty(
name='Log Path',
#################### description='Path to the Addon Log File',
# - Property Methods subtype='FILE_PATH',
#################### default=str(ct.addon.DEFAULT_LOG_PATH),
@property update=lambda self, _: self.on_addon_logging_changed(),
def pydeps_path(self) -> Path: )
return Path(bpy.path.abspath(self.bl__pydeps_path))
@pydeps_path.setter
def pydeps_path(self, value: Path) -> None:
self.bl__pydeps_path = str(value.resolve())
@property @property
def log_path(self) -> Path: def log_file_path(self) -> Path:
return Path(bpy.path.abspath(self.bl__log_file_path)) return Path(bpy.path.abspath(self.bl__log_file_path))
@pydeps_path.setter
def log_file_path(self, path: Path) -> None:
self.bl__log_file_path = str(path.resolve())
#################### ####################
# - Property Sync # - Events: Properties Changed
#################### ####################
def sync_addon_logging(self, logger_to_setup: logging.Logger | None = None) -> None: def on_addon_logging_changed(
self, single_logger_to_setup: logging.Logger | None = None
) -> None:
"""Configure one, or all, active addon logger(s). """Configure one, or all, active addon logger(s).
Parameters: Parameters:
logger_to_setup: single_logger_to_setup: When set, only this logger will be setup.
When set to None, all addon loggers will be configured Otherwise, **all addon loggers will be setup**.
""" """
if pydeps.DEPS_OK: if pydeps.DEPS_OK:
log.info('Getting Logger (DEPS_OK = %s)', str(pydeps.DEPS_OK))
with pydeps.importable_addon_deps(self.pydeps_path): with pydeps.importable_addon_deps(self.pydeps_path):
from blender_maxwell.utils import logger from blender_maxwell.utils import logger
else: else:
log.info('Getting Simple Logger (DEPS_OK = %s)', str(pydeps.DEPS_OK))
logger = simple_logger logger = simple_logger
# Retrieve Configured Log Levels # Retrieve Configured Log Levels
log_level_console = logger.LOG_LEVEL_MAP[self.bl__log_level_console] log_level_console = logger.LOG_LEVEL_MAP[self.log_level_console]
log_level_file = logger.LOG_LEVEL_MAP[self.bl__log_level_file] log_level_file = logger.LOG_LEVEL_MAP[self.log_level_file]
log_setup_kwargs = { log_setup_kwargs = {
'console_level': log_level_console if self.use_log_console else None, 'console_level': log_level_console if self.use_log_console else None,
'file_path': self.log_path if self.use_log_file else None, 'file_path': self.log_file_path if self.use_log_file else None,
'file_level': log_level_file, 'file_level': log_level_file,
} }
# Sync Single Logger / All Loggers # Sync Single Logger / All Loggers
if logger_to_setup is not None: if single_logger_to_setup is not None:
logger.setup_logger( logger.setup_logger(
logger.console_handler, logger.console_handler,
logger.file_handler, logger.file_handler,
logger_to_setup, single_logger_to_setup,
**log_setup_kwargs, **log_setup_kwargs,
) )
else: else:
@ -158,77 +169,55 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
**log_setup_kwargs, **log_setup_kwargs,
) )
def sync_use_default_pydeps_path(self, _: bpy.types.Context): def on_addon_pydeps_changed(self, show_popup_if_deps_invalid: bool = False) -> None:
# Switch to Default """Checks if the Python dependencies are valid, and runs any delayed setup (inclusing `ct.BLClass` registrations) in response.
if self.use_default_pydeps_path:
log.info(
'Switching to Default PyDeps Path %s',
str(info.DEFAULT_PATH_DEPS.resolve()),
)
self.cache__pydeps_path_while_using_default = self.bl__pydeps_path
self.bl__pydeps_path = str(info.DEFAULT_PATH_DEPS.resolve())
# Switch from Default Notes:
else: **The addon does not load until this method allows it**.
log.info(
'Switching from Default PyDeps Path %s to Cached PyDeps Path %s',
str(info.DEFAULT_PATH_DEPS.resolve()),
self.cache__pydeps_path_while_using_default,
)
self.bl__pydeps_path = self.cache__pydeps_path_while_using_default
self.cache__pydeps_path_while_using_default = ''
def sync_pydeps_path(self): Parameters:
if self.cache__backup_pydeps_path != self.bl__pydeps_path: show_popup_if_deps_invalid: If True, a failed dependency check will `invoke()` the operator `ct.OperatorType.ManagePyDeps`, which is a popup that guides the user through installing the missing dependencies.
log.info( **NOTE**: Must be called after addon registration.
'Syncing PyDeps Path from/to: %s => %s',
self.cache__backup_pydeps_path,
self.bl__pydeps_path,
)
else:
log.info(
'Syncing PyDeps Path In-Place @ %s',
str(self.bl__pydeps_path),
)
# Error: Default Path in Use Notes:
if self.use_default_pydeps_path: Run by `__init__.py` after registering a barebones addon (including this class), and after queueing a delayed registration.
self.bl__pydeps_path = self.cache__backup_pydeps_path """
msg = "Can't update pydeps path while default path is being used" if pydeps.check_pydeps(ct.addon.PATH_REQS, self.pydeps_path):
raise ValueError(msg)
# Error: PyDeps Already Installed
if pydeps.DEPS_OK:
self.bl__pydeps_path = self.cache__backup_pydeps_path
msg = "Can't update pydeps path while dependencies are installed"
raise ValueError(msg)
# Re-Check PyDeps
log.info(
'Checking PyDeps of New Path %s',
str(self.pydeps_path),
)
if pydeps.check_pydeps(self.pydeps_path):
# Re-Sync Loggers # Re-Sync Loggers
## We can now upgrade to the fancier loggers. ## We can now upgrade all loggers to the fancier loggers.
self.sync_addon_logging() for _log in simple_logger.simple_loggers:
log.debug('Upgrading Logger (%s)', str(_log))
self.on_addon_logging_changed(single_logger_to_setup=_log)
# Run Delayed Registrations # Run Registrations Waiting on DEPS_SATISFIED
## Since the deps are OK, we can now register the whole addon. ## Since the deps are OK, we can now register the whole addon.
registration.run_delayed_registration( if (
registration.EVENT__DEPS_SATISFIED, registration.BLRegisterEvent.DepsSatisfied
self.pydeps_path, in registration.DELAYED_REGISTRATIONS
) ):
registration.run_delayed_registration(
registration.BLRegisterEvent.DepsSatisfied,
self.pydeps_path,
)
# Backup New PyDeps Path elif show_popup_if_deps_invalid:
self.cache__backup_pydeps_path = self.bl__pydeps_path ct.addon.operator(
ct.OperatorType.ManagePyDeps,
'INVOKE_DEFAULT',
bl__pydeps_path=str(self.pydeps_path),
bl__pydeps_reqlock_path=str(ct.addon.PATH_REQS),
)
## TODO: else:
## TODO: Can we 'downgrade' the loggers back to simple loggers?
## TODO: Can we undo the delayed registration?
## TODO: Do we need the fancy pants sys.modules handling for all this?
#################### ####################
# - UI # - UI
#################### ####################
def draw(self, _: bpy.types.Context) -> None: def draw(self, _: bpy.types.Context) -> None:
layout = self.layout layout = self.layout
num_pydeps_issues = len(pydeps.DEPS_ISSUES) if pydeps.DEPS_ISSUES else 0 num_pydeps_issues = len(pydeps.DEPS_ISSUES)
# Box w/Split: Log Level # Box w/Split: Log Level
box = layout.box() box = layout.box()
@ -244,7 +233,7 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
row = col.row() row = col.row()
row.enabled = self.use_log_console row.enabled = self.use_log_console
row.prop(self, 'bl__log_level_console') row.prop(self, 'log_level_console')
## Split Col: File Logging ## Split Col: File Logging
col = split.column() col = split.column()
@ -257,7 +246,7 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
row = col.row() row = col.row()
row.enabled = self.use_log_file row.enabled = self.use_log_file
row.prop(self, 'bl__log_level_file') row.prop(self, 'log_level_file')
# Box: Dependency Status # Box: Dependency Status
box = layout.box() box = layout.box()
@ -296,8 +285,8 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
install_deps.InstallPyDeps.bl_idname, install_deps.InstallPyDeps.bl_idname,
text='Install PyDeps', text='Install PyDeps',
) )
op.path_addon_pydeps = str(self.pydeps_path) op.bl__pydeps_path = str(self.pydeps_path)
op.path_addon_reqs = str(info.PATH_REQS) op.bl__pydeps_reqlock_path = str(ct.addon.PATH_REQS)
## Row: Uninstall ## Row: Uninstall
row = box.row(align=True) row = box.row(align=True)
@ -305,7 +294,7 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
uninstall_deps.UninstallPyDeps.bl_idname, uninstall_deps.UninstallPyDeps.bl_idname,
text='Uninstall PyDeps', text='Uninstall PyDeps',
) )
op.path_addon_pydeps = str(self.pydeps_path) op.bl__pydeps_path = str(self.pydeps_path)
#################### ####################


@ -1,12 +1,14 @@
"""Manages the registration of Blender classes, including delayed registrations that require access to Python dependencies. """Manages the registration of Blender classes, including delayed registrations that require access to Python dependencies.
Attributes: Attributes:
BL_KEYMAP: Addon-specific keymap used to register operator hotkeys. REG__CLASSES: Currently registered Blender classes. _ADDON_KEYMAP: Addon-specific keymap used to register operator hotkeys.
REG__KEYMAP_ITEMS: Currently registered Blender keymap items.
DELAYED_REGISTRATIONS: Currently pending registration operations, which can be realized with `run_delayed_registration()`. DELAYED_REGISTRATIONS: Currently pending registration operations, which can be realized with `run_delayed_registration()`.
EVENT__DEPS_SATISFIED: A constant representing a semantic choice of key for `DELAYED_REGISTRATIONS`.
REG__CLASSES: Currently registered Blender classes.
_REGISTERED_HOTKEYS: Currently registered Blender keymap items.
""" """
import enum
import typing as typ import typing as typ
from pathlib import Path from pathlib import Path
@ -17,38 +19,36 @@ from .nodeps.utils import simple_logger
log = simple_logger.get(__name__) log = simple_logger.get(__name__)
DelayedRegKey: typ.TypeAlias = str
#################### ####################
# - Globals # - Globals
#################### ####################
BL_KEYMAP: bpy.types.KeyMap | None = None _REGISTERED_CLASSES: list[ct.BLClass] = []
_ADDON_KEYMAP: bpy.types.KeyMap | None = None
_REGISTERED_HOTKEYS: list[ct.BLKeymapItem] = []
REG__CLASSES: list[ct.BLClass] = []
REG__KEYMAP_ITEMS: list[ct.BLKeymapItem] = []
DELAYED_REGISTRATIONS: dict[DelayedRegKey, typ.Callable[[Path], None]] = {}
#################### ####################
# - Delayed Registration Keys # - Delayed Registration
#################### ####################
EVENT__DEPS_SATISFIED: DelayedRegKey = 'on_deps_satisfied' class BLRegisterEvent(enum.StrEnum):
DepsSatisfied = enum.auto()
DELAYED_REGISTRATIONS: dict[BLRegisterEvent, typ.Callable[[Path], None]] = {}
#################### ####################
# - Class Registration # - Class Registration
#################### ####################
def register_classes(bl_register: list[ct.BLClass]) -> None: def register_classes(bl_register: list[ct.BLClass]) -> None:
"""Registers a Blender class, allowing it to hook into relevant Blender features. """Registers a list of Blender classes.
Caches registered classes in the module global `REG__CLASSES`.
Parameters: Parameters:
bl_register: List of Blender classes to register. bl_register: List of Blender classes to register.
""" """
log.info('Registering %s Classes', len(bl_register)) log.info('Registering %s Classes', len(bl_register))
for cls in bl_register: for cls in bl_register:
if cls.bl_idname in REG__CLASSES: if cls.bl_idname in _REGISTERED_CLASSES:
msg = f'Skipping register of {cls.bl_idname}' msg = f'Skipping register of {cls.bl_idname}'
log.info(msg) log.info(msg)
continue continue
@ -58,45 +58,46 @@ def register_classes(bl_register: list[ct.BLClass]) -> None:
repr(cls), repr(cls),
) )
bpy.utils.register_class(cls) bpy.utils.register_class(cls)
REG__CLASSES.append(cls) _REGISTERED_CLASSES.append(cls)
def unregister_classes() -> None: def unregister_classes() -> None:
"""Unregisters all previously registered Blender classes. """Unregisters all previously registered Blender classes."""
log.info('Unregistering %s Classes', len(_REGISTERED_CLASSES))
All previously registered Blender classes can be found in the module global variable `REG__CLASSES`. for cls in reversed(_REGISTERED_CLASSES):
"""
log.info('Unregistering %s Classes', len(REG__CLASSES))
for cls in reversed(REG__CLASSES):
log.debug( log.debug(
'Unregistering Class %s', 'Unregistering Class %s',
repr(cls), repr(cls),
) )
bpy.utils.unregister_class(cls) bpy.utils.unregister_class(cls)
REG__CLASSES.clear() _REGISTERED_CLASSES.clear()
#################### ####################
# - Keymap Registration # - Keymap Registration
#################### ####################
def register_keymap_items(keymap_item_defs: list[dict]): def register_hotkeys(hotkey_defs: list[dict]):
"""Registers a list of Blender hotkey definitions.
Parameters:
hotkey_defs: List of Blender hotkey definitions to register.
"""
# Lazy-Load BL_NODE_KEYMAP # Lazy-Load BL_NODE_KEYMAP
global BL_KEYMAP # noqa: PLW0603 global _ADDON_KEYMAP # noqa: PLW0603
if BL_KEYMAP is None: if _ADDON_KEYMAP is None:
BL_KEYMAP = bpy.context.window_manager.keyconfigs.addon.keymaps.new( _ADDON_KEYMAP = bpy.context.window_manager.keyconfigs.addon.keymaps.new(
name='Node Editor', name=f'{ct.addon.NAME} Keymap',
space_type='NODE_EDITOR',
) )
log.info( log.info(
'Registered Keymap %s', 'Registered Addon Keymap (Base for Keymap Items): %s',
str(BL_KEYMAP), str(_ADDON_KEYMAP),
) )
# Register Keymaps # Register Keymaps
log.info('Registering %s Keymap Items', len(keymap_item_defs)) log.info('Registering %s Keymap Items', len(hotkey_defs))
for keymap_item_def in keymap_item_defs: for keymap_item_def in hotkey_defs:
keymap_item = BL_KEYMAP.keymap_items.new( keymap_item = _ADDON_KEYMAP.keymap_items.new(
*keymap_item_def['_'], *keymap_item_def['_'],
ctrl=keymap_item_def['ctrl'], ctrl=keymap_item_def['ctrl'],
shift=keymap_item_def['shift'], shift=keymap_item_def['shift'],
@ -107,38 +108,39 @@ def register_keymap_items(keymap_item_defs: list[dict]):
repr(keymap_item), repr(keymap_item),
keymap_item_def, keymap_item_def,
) )
REG__KEYMAP_ITEMS.append(keymap_item) _REGISTERED_HOTKEYS.append(keymap_item)
def unregister_keymap_items(): def unregister_hotkeys():
global BL_KEYMAP # noqa: PLW0603 """Unregisters all Blender hotkeys associated with the addon."""
global _ADDON_KEYMAP # noqa: PLW0603
# Unregister Keymaps # Unregister Keymaps
log.info('Unregistering %s Keymap Items', len(REG__KEYMAP_ITEMS)) log.info('Unregistering %s Keymap Items', len(_REGISTERED_HOTKEYS))
for keymap_item in reversed(REG__KEYMAP_ITEMS): for keymap_item in reversed(_REGISTERED_HOTKEYS):
log.debug( log.debug(
'Unregistered Keymap Item %s', 'Unregistered Keymap Item %s',
repr(keymap_item), repr(keymap_item),
) )
BL_KEYMAP.keymap_items.remove(keymap_item) _ADDON_KEYMAP.keymap_items.remove(keymap_item)
# Lazy-Unload BL_NODE_KEYMAP # Lazy-Unload BL_NODE_KEYMAP
if BL_KEYMAP is not None: if _ADDON_KEYMAP is not None:
log.info( log.info(
'Unregistered Keymap %s', 'Unregistered Keymap %s',
repr(BL_KEYMAP), repr(_ADDON_KEYMAP),
) )
REG__KEYMAP_ITEMS.clear() _REGISTERED_HOTKEYS.clear()
BL_KEYMAP = None _ADDON_KEYMAP = None
#################### ####################
# - Delayed Registration Semantics # - Delayed Registration Semantics
#################### ####################
def delay_registration( def delay_registration_until(
delayed_reg_key: DelayedRegKey, delayed_reg_key: BLRegisterEvent,
classes_cb: typ.Callable[[Path], list[ct.BLClass]], then_register_classes: typ.Callable[[Path], list[ct.BLClass]],
keymap_item_defs_cb: typ.Callable[[Path], list[ct.KeymapItemDef]], then_register_hotkeys: typ.Callable[[Path], list[ct.KeymapItemDef]],
) -> None: ) -> None:
"""Delays the registration of Blender classes that depend on certain Python dependencies, for which neither the location nor validity is yet known. """Delays the registration of Blender classes that depend on certain Python dependencies, for which neither the location nor validity is yet known.
@ -147,10 +149,9 @@ def delay_registration(
Parameters: Parameters:
delayed_reg_key: The identifier with which to index the registration callback. delayed_reg_key: The identifier with which to index the registration callback.
Module-level constants like `EVENT__DEPS_SATISFIED` are a good choice.
classes_cb: A function that takes a `sys.path`-compatible path to Python dependencies needed by the Blender classes in question, and returns a list of Blender classes to import. classes_cb: A function that takes a `sys.path`-compatible path to Python dependencies needed by the Blender classes in question, and returns a list of Blender classes to import.
`register_classes()` will be used to actually register the returned Blender classes. `register_classes()` will be used to actually register the returned Blender classes.
keymap_item_defs_cb: Similar, except for addon keymap items. hotkey_defs_cb: Similar, except for addon keymap items.
Returns: Returns:
A function that takes a `sys.path`-compatible path to the Python dependencies needed to import the given Blender classes. A function that takes a `sys.path`-compatible path to the Python dependencies needed to import the given Blender classes.
@ -161,17 +162,19 @@ def delay_registration(
def register_cb(path_pydeps: Path): def register_cb(path_pydeps: Path):
log.info( log.info(
'Running Delayed Registration (key %s) with PyDeps: %s', 'Delayed Registration (key %s) with PyDeps Path: %s',
delayed_reg_key, delayed_reg_key,
path_pydeps, path_pydeps,
) )
register_classes(classes_cb(path_pydeps)) register_classes(then_register_classes(path_pydeps))
register_keymap_items(keymap_item_defs_cb(path_pydeps)) register_hotkeys(then_register_hotkeys(path_pydeps))
DELAYED_REGISTRATIONS[delayed_reg_key] = register_cb DELAYED_REGISTRATIONS[delayed_reg_key] = register_cb
def run_delayed_registration(delayed_reg_key: DelayedRegKey, path_pydeps: Path) -> None: def run_delayed_registration(
delayed_reg_key: BLRegisterEvent, path_pydeps: Path
) -> None:
"""Run a delayed registration, by using `delayed_reg_key` to lookup the correct path, passing `path_pydeps` to the registration. """Run a delayed registration, by using `delayed_reg_key` to lookup the correct path, passing `path_pydeps` to the registration.
Parameters: Parameters:
@ -179,5 +182,9 @@ def run_delayed_registration(delayed_reg_key: DelayedRegKey, path_pydeps: Path)
Must match the parameter with which the delayed registration was first declared. Must match the parameter with which the delayed registration was first declared.
path_pydeps: The `sys.path`-compatible path to the Python dependencies that the classes need to have available in order to register. path_pydeps: The `sys.path`-compatible path to the Python dependencies that the classes need to have available in order to register.
""" """
register_cb = DELAYED_REGISTRATIONS.pop(delayed_reg_key) DELAYED_REGISTRATIONS.pop(delayed_reg_key)(path_pydeps)
register_cb(path_pydeps)
def clear_delayed_registrations() -> None:
"""Dequeue all queued delayed registrations."""
DELAYED_REGISTRATIONS.clear()
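A wiring sketch of the delayed-registration flow (the callback body and the `node_trees` module are hypothetical):
from pathlib import Path

def _classes_needing_deps(path_pydeps: Path) -> list:
    # Deferred import: only runs once the PyDeps path is known to be usable.
    from . import node_trees  # hypothetical dependency-heavy module
    return node_trees.BL_REGISTER

delay_registration_until(
    BLRegisterEvent.DepsSatisfied,
    then_register_classes=_classes_needing_deps,
    then_register_hotkeys=lambda _: [],
)

# Later, once the dependency check passes (ex. from the addon preferences):
# run_delayed_registration(BLRegisterEvent.DepsSatisfied, path_pydeps)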

View File

@ -1,7 +1,6 @@
from ..nodeps.utils import pydeps from ..nodeps.utils import blender_type_enum, pydeps
from . import ( from . import (
analyze_geonodes, analyze_geonodes,
blender_type_enum,
extra_sympy_units, extra_sympy_units,
logger, logger,
pydantic_sympy, pydantic_sympy,


@ -1,5 +1,6 @@
import typing as typ
import bpy import bpy
import typing_extensions as typx
INVALID_BL_SOCKET_TYPES = { INVALID_BL_SOCKET_TYPES = {
'NodeSocketGeometry', 'NodeSocketGeometry',
@ -8,7 +9,7 @@ INVALID_BL_SOCKET_TYPES = {
def interface( def interface(
geonodes: bpy.types.GeometryNodeTree, ## TODO: bpy type geonodes: bpy.types.GeometryNodeTree, ## TODO: bpy type
direc: typx.Literal['INPUT', 'OUTPUT'], direc: typ.Literal['INPUT', 'OUTPUT'],
): ):
"""Returns 'valid' GeoNodes interface sockets. """Returns 'valid' GeoNodes interface sockets.


@ -430,7 +430,7 @@ class BLField:
Parameters: Parameters:
default_value: The default value to use if the value is read before it's set. default_value: The default value to use if the value is read before it's set.
triggers_prop_update: Whether to run `bl_instance.sync_prop(attr_name)` whenever value is set. triggers_prop_update: Whether to run `bl_instance.on_prop_changed(attr_name)` whenever value is set.
""" """
log.debug( log.debug(


@ -1,35 +0,0 @@
import enum
class BlenderTypeEnum(str, enum.Enum):
def _generate_next_value_(name, *_):
return name
def append_cls_name_to_values(cls):
# Construct Set w/Modified Member Names
new_members = {
name: f'{name}{cls.__name__}' for name, member in cls.__members__.items()
}
# Dynamically Declare New Enum Class w/Modified Members
new_cls = enum.Enum(cls.__name__, new_members, type=BlenderTypeEnum)
new_cls.__module__ = cls.__module__
# Return New (Replacing) Enum Class
return new_cls
def wrap_values_in_MT(cls):
# Construct Set w/Modified Member Names
new_members = {
name: f'BLENDER_MAXWELL_MT_{name}' for name, member in cls.__members__.items()
}
# Dynamically Declare New Enum Class w/Modified Members
new_cls = enum.Enum(cls.__name__, new_members, type=BlenderTypeEnum)
new_cls.__module__ = cls.__module__
new_cls.get_tree = cls.get_tree ## TODO: This is wildly specific...
# Return New (Replacing) Enum Class
return new_cls


@ -1,75 +1,53 @@
import functools """Declares useful sympy units and functions, to make it easier to work with `sympy` as the basis for a unit-aware system.
Attributes:
ALL_UNIT_SYMBOLS: Maps all abbreviated Sympy symbols to their corresponding Sympy unit.
This is essential for parsing string expressions that use units, since a pure parse of ex. `a*m + m` would not otherwise be able to differentiate between `sp.Symbol(m)` and `spu.meter`.
SympyType: A simple union of valid `sympy` types, used to check whether arbitrary objects should be handled using `sympy` functions.
For simple `isinstance` checks, this should be preferred, as it is most performant.
For general use, `SympyExpr` should be preferred.
SympyExpr: A `SympyType` that is compatible with `pydantic`, including serialization/deserialization.
Should be used via the `ConstrSympyExpr`, which also adds expression validation.
"""
import itertools import itertools
import typing as typ import typing as typ
import pydantic as pyd
import sympy as sp import sympy as sp
import sympy.physics.units as spu import sympy.physics.units as spu
import typing_extensions as typx
from pydantic_core import core_schema as pyd_core_schema
SympyType = sp.Basic | sp.Expr | sp.MatrixBase | spu.Quantity SympyType = sp.Basic | sp.Expr | sp.MatrixBase | sp.MutableDenseMatrix | spu.Quantity
#################### ####################
# - Useful Methods # - Units
####################
def uses_units(expression: sp.Expr) -> bool:
## TODO: An LFU cache could do better than an LRU.
"""Checks if an expression uses any units (`Quantity`)."""
for arg in sp.preorder_traversal(expression):
if isinstance(arg, spu.Quantity):
return True
return False
# Function to return a set containing all units used in the expression
def get_units(expression: sp.Expr):
## TODO: An LFU cache could do better than an LRU.
"""Gets all the units of an expression (as `Quantity`)."""
return {
arg
for arg in sp.preorder_traversal(expression)
if isinstance(arg, spu.Quantity)
}
####################
# - Time
#################### ####################
femtosecond = fs = spu.Quantity('femtosecond', abbrev='fs') femtosecond = fs = spu.Quantity('femtosecond', abbrev='fs')
femtosecond.set_global_relative_scale_factor(spu.femto, spu.second) femtosecond.set_global_relative_scale_factor(spu.femto, spu.second)
# Length
####################
# - Length
####################
femtometer = fm = spu.Quantity('femtometer', abbrev='fm') femtometer = fm = spu.Quantity('femtometer', abbrev='fm')
femtometer.set_global_relative_scale_factor(spu.femto, spu.meter) femtometer.set_global_relative_scale_factor(spu.femto, spu.meter)
# Lum Flux
####################
# - Lum Flux
####################
lumen = lm = spu.Quantity('lumen', abbrev='lm') lumen = lm = spu.Quantity('lumen', abbrev='lm')
lumen.set_global_relative_scale_factor(1, spu.candela * spu.steradian) lumen.set_global_relative_scale_factor(1, spu.candela * spu.steradian)
# Force
#################### nanonewton = nN = spu.Quantity('nanonewton', abbrev='nN') # noqa: N816
# - Force
####################
# Newton
nanonewton = nN = spu.Quantity('nanonewton', abbrev='nN')
nanonewton.set_global_relative_scale_factor(spu.nano, spu.newton) nanonewton.set_global_relative_scale_factor(spu.nano, spu.newton)
micronewton = uN = spu.Quantity('micronewton', abbrev='μN') micronewton = uN = spu.Quantity('micronewton', abbrev='μN') # noqa: N816
micronewton.set_global_relative_scale_factor(spu.micro, spu.newton) micronewton.set_global_relative_scale_factor(spu.micro, spu.newton)
millinewton = mN = spu.Quantity('millinewton', abbrev='mN') millinewton = mN = spu.Quantity('millinewton', abbrev='mN') # noqa: N816
millinewton.set_global_relative_scale_factor(spu.milli, spu.newton) millinewton.set_global_relative_scale_factor(spu.milli, spu.newton)
#################### # Frequency
# - Frequency kilohertz = KHz = spu.Quantity('kilohertz', abbrev='KHz')
####################
# Hertz
kilohertz = kHz = spu.Quantity('kilohertz', abbrev='kHz')
kilohertz.set_global_relative_scale_factor(spu.kilo, spu.hertz) kilohertz.set_global_relative_scale_factor(spu.kilo, spu.hertz)
megahertz = MHz = spu.Quantity('megahertz', abbrev='MHz') megahertz = MHz = spu.Quantity('megahertz', abbrev='MHz')
@ -87,30 +65,120 @@ petahertz.set_global_relative_scale_factor(spu.peta, spu.hertz)
exahertz = EHz = spu.Quantity('exahertz', abbrev='EHz') exahertz = EHz = spu.Quantity('exahertz', abbrev='EHz')
exahertz.set_global_relative_scale_factor(spu.exa, spu.hertz) exahertz.set_global_relative_scale_factor(spu.exa, spu.hertz)
####################
# - Sympy Printer
####################
_SYMPY_EXPR_PRINTER_STR = sp.printing.str.StrPrinter(
settings={
'abbrev': True,
}
)
def sp_to_str(sp_obj: SympyType) -> str:
"""Converts a sympy object to an output-oriented string (w/abbreviated units), using a dedicated StrPrinter.
This should be used whenever a **string for UI use** is needed from a `sympy` object.
Notes:
This should **NOT** be used in cases where the string will be `sp.sympify()`ed back into a sympy expression.
For such cases, rely on `sp.srepr()`, which uses an _explicit_ representation.
Parameters:
sp_obj: The `sympy` object to convert to a string.
Returns:
A string representing the expression for human use.
_The string is not re-encodable to the expression._
"""
return _SYMPY_EXPR_PRINTER_STR.doprint(sp_obj)
####################
# - Expr Analysis: Units
####################
## TODO: Caching w/srepr'ed expression.
## TODO: An LFU cache could do better than an LRU.
def uses_units(expr: sp.Expr) -> bool:
"""Determines if an expression uses any units.
Notes:
The expression graph is traversed depth-first with `sp.postorder_traversal`, to search for `sp.Quantity` elements.
Depth-first was chosen since `sp.Quantity`s are likelier to be found among individual symbols, rather than complete subexpressions.
The **worst-case** runtime is when there are no units, in which case the **entire expression graph will be traversed**.
Parameters:
expr: The sympy expression that may contain units.
Returns:
Whether or not there are units used within the expression.
"""
return any(
isinstance(subexpr, spu.Quantity) for subexpr in sp.postorder_traversal(expr)
)
## TODO: Caching w/srepr'ed expression.
## TODO: An LFU cache could do better than an LRU.
def get_units(expr: sp.Expr) -> set[spu.Quantity]:
"""Finds all units used by the expression, and returns them as a set.
No information about _the relationship between units_ is exposed.
For example, compound units like `spu.meter / spu.second` would be mapped to `{spu.meter, spu.second}`.
Notes:
The expression graph is traversed depth-first with `sp.postorder_traversal`, to search for `sp.Quantity` elements.
The performance is comparable to the performance of `sp.postorder_traversal`, since the **entire expression graph will always be traversed**, with the added overhead of one `isinstance` call per expression-graph-node.
Parameters:
expr: The sympy expression that may contain units.
Returns:
All units (`spu.Quantity`) used within the expression.
"""
return {
subexpr
for subexpr in sp.postorder_traversal(expr)
if isinstance(subexpr, spu.Quantity)
}
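Illustrative behavior of the two traversal helpers above:
import sympy as sp
import sympy.physics.units as spu

expr = sp.Symbol('x', real=True) * spu.meter / spu.second
assert uses_units(expr)
assert get_units(expr) == {spu.meter, spu.second}
assert not uses_units(sp.Symbol('y') ** 2 + 1)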
#################### ####################
# - Sympy Expression Typing # - Sympy Expression Typing
#################### ####################
ALL_UNIT_SYMBOLS = { ALL_UNIT_SYMBOLS: dict[sp.Symbol, spu.Quantity] = {
unit.abbrev: unit unit.name: unit for unit in spu.__dict__.values() if isinstance(unit, spu.Quantity)
for unit in spu.__dict__.values() } | {unit.name: unit for unit in globals().values() if isinstance(unit, spu.Quantity)}
if isinstance(unit, spu.Quantity)
} | {unit.abbrev: unit for unit in globals().values() if isinstance(unit, spu.Quantity)}
@functools.lru_cache(maxsize=4096)
def parse_abbrev_symbols_to_units(expr: sp.Basic) -> sp.Basic:
return expr.subs(ALL_UNIT_SYMBOLS)
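For instance, the name-keyed mapping above is what lets a freshly parsed string expression recover real units from bare symbols (a sketch of the substitution; the expression is illustrative):
import sympy as sp

parsed = sp.sympify('x*meter + 2*meter')    # here 'meter' is only sp.Symbol('meter')
with_units = parsed.subs(ALL_UNIT_SYMBOLS)  # now it is sympy.physics.units.meter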
#################### ####################
# - Units <-> Scalars # - Units <-> Scalars
#################### ####################
def scaling_factor(unit_from: spu.Quantity, unit_to: spu.Quantity) -> sp.Basic: def scale_to_unit(expr: sp.Expr, unit: spu.Quantity) -> sp.Expr:
if unit_from.dimension == unit_to.dimension: """Convert an expression that uses units to a different unit, then strip all units.
return spu.convert_to(unit_from, unit_to) / unit_to
This is used whenever the unitless part of an expression is needed, but guaranteed expressed in a particular unit, aka. **unit system normalization**.
def scale_to_unit(expr: sp.Expr, unit: spu.Quantity) -> typ.Any: Notes:
The unitless output is still an `sp.Expr`, which may contain ex. symbols.
If you know that the output **should** work as a corresponding Python type (ex. `sp.Integer` vs. `int`), but it doesn't, you can use `sympy_to_python()` to produce a pure-Python type.
In this way, with a little care, broad compatibility can be bridged between the `sympy.physics.units` unit system and the wider Python ecosystem.
Parameters:
expr: The unit-containing expression to convert.
unit: The unit to scale the expression to.
Returns:
The unitless part of `expr`, after scaling the entire expression to `unit`.
Raises:
ValueError: If the result of unit-conversion and -stripping still has units, as determined by `uses_units()`.
"""
## TODO: An LFU cache could do better than an LRU. ## TODO: An LFU cache could do better than an LRU.
unitless_expr = spu.convert_to(expr, unit) / unit unitless_expr = spu.convert_to(expr, unit) / unit
if not uses_units(unitless_expr): if not uses_units(unitless_expr):
@ -120,9 +188,50 @@ def scale_to_unit(expr: sp.Expr, unit: spu.Quantity) -> typ.Any:
raise ValueError(msg) raise ValueError(msg)
def scaling_factor(unit_from: spu.Quantity, unit_to: spu.Quantity) -> sp.Number:
"""Compute the numerical scaling factor imposed on the unitless part of the expression when converting from one unit to another.
Parameters:
unit_from: The unit that is converted from.
unit_to: The unit that is converted to.
Returns:
The numerical scaling factor between the two units.
Raises:
ValueError: If the two units don't share a common dimension.
"""
if unit_from.dimension == unit_to.dimension:
return scale_to_unit(unit_from, unit_to)
msg = f"Dimension of unit_from={unit_from} ({unit_from.dimension}) doesn't match the dimension of unit_to={unit_to} ({unit_to.dimension}); therefore, there is no scaling factor between them"
raise ValueError(msg)
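Illustrative values for the two helpers above (sympy returns exact rationals here):
import sympy.physics.units as spu

scale_to_unit(1500 * spu.meter, spu.kilometer)  # -> 3/2, with the kilometer stripped
scaling_factor(spu.meter, spu.kilometer)        # -> 1/1000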
#################### ####################
# - Sympy <-> Scalars # - Sympy -> Python
#################### ####################
## TODO: Integrate SympyExpr for constraining to the output types.
def sympy_to_python_type(sym: sp.Symbol) -> type:
"""Retrieve the Python type that is implied by a scalar `sympy` symbol.
Arguments:
sym: A scalar sympy symbol.
Returns:
A pure Python type.
"""
if sym.is_integer:
return int
if sym.is_rational or sym.is_real:
return float
if sym.is_complex:
return complex
msg = f'Cannot find Python type for sympy symbol "{sym}". Check the assumptions on the expr (current expr assumptions: "{sym._assumptions}")' # noqa: SLF001
raise ValueError(msg)
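For example, the implied Python type follows the symbol's assumptions:
import sympy as sp

assert sympy_to_python_type(sp.Symbol('n', integer=True)) is int
assert sympy_to_python_type(sp.Symbol('x', real=True)) is float
assert sympy_to_python_type(sp.Symbol('z', complex=True)) is complex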
def sympy_to_python(scalar: sp.Basic) -> int | float | complex | tuple | list:
"""Convert a scalar sympy expression to the directly corresponding Python type.
@@ -133,9 +242,6 @@ def sympy_to_python(scalar: sp.Basic) -> int | float | complex | tuple | list:
Returns:
A pure Python type that directly corresponds to the input scalar expression.
"""
- ## TODO: If there are symbols, we could simplify.
- ## - Someone has to do it somewhere, might as well be here.
- ## - ...Since we have all the information we need.
if isinstance(scalar, sp.MatrixBase):
list_2d = [[sympy_to_python(el) for el in row] for row in scalar.tolist()]
@@ -154,3 +260,278 @@ def sympy_to_python(scalar: sp.Basic) -> int | float | complex | tuple | list:
msg = f'Cannot convert sympy scalar expression "{scalar}" to a Python type. Check the assumptions on the expr (current expr assumptions: "{scalar._assumptions}")'  # noqa: SLF001
raise ValueError(msg)
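## Illustrative usage (a sketch; matrix inputs recurse element-wise via the branch above):
## >>> sympy_to_python(sp.Integer(5))
## 5
## >>> sympy_to_python(sp.Matrix([[1, 2], [3, 4]]))  ## -> nested pure-Python containers of scalars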
####################
# - Pydantic-Validated SympyExpr
####################
class _SympyExpr:
"""Low-level `pydantic`, schema describing how to serialize/deserialize fields that have a `SympyType` (like `sp.Expr`), so we can cleanly use `sympy` types in `pyd.BaseModel`.
Notes:
You probably want to use `SympyExpr`.
Examples:
To be usable as a type annotation on `pyd.BaseModel`, attach this to `SympyType` using `typx.Annotated`:
```python
SympyExpr = typx.Annotated[SympyType, _SympyExpr]
class Spam(pyd.BaseModel):
line: SympyExpr = sp.Eq(sp.Symbol('y'), 2 * sp.Symbol('x', real=True) - 3)
```
"""
@classmethod
def __get_pydantic_core_schema__(
cls,
_source_type: SympyType,
_handler: pyd.GetCoreSchemaHandler,
) -> pyd_core_schema.CoreSchema:
"""Compute a schema that allows `pydantic` to validate a `sympy` type."""
def validate_from_str(sp_str: str | typ.Any) -> SympyType | typ.Any:
"""Parse and validate a string expression.
Parameters:
sp_str: A stringified `sympy` object, which will be parsed to a `sympy` type.
Before use, `isinstance(expr_str, str)` is checked.
If the object isn't a string, then the validation will be skipped.
Returns:
Either a `sympy` object, if the input is parseable, or the same untouched object.
Raises:
ValueError: If `sp_str` is a string, but can't be parsed into a `sympy` expression.
"""
# Constrain to String
if not isinstance(sp_str, str):
return sp_str
# Parse String -> Sympy
try:
expr = sp.sympify(sp_str)
except ValueError as ex:
msg = f'String {sp_str} is not a valid sympy expression'
raise ValueError(msg) from ex
# Substitute Symbol -> Quantity
return expr.subs(ALL_UNIT_SYMBOLS)
# def validate_from_expr(sp_obj: SympyType) -> SympyType:
# """Validate that a `sympy` object is a `SympyType`.
# In the static sense, this is a dummy function.
# Parameters:
# sp_obj: A `sympy` object.
# Returns:
# The `sympy` object.
# Raises:
# ValueError: If `sp_obj` is not a `sympy` object.
# """
# if not (isinstance(sp_obj, SympyType)):
# msg = f'Value {sp_obj} is not a `sympy` expression'
# raise ValueError(msg)
# return sp_obj
sympy_expr_schema = pyd_core_schema.chain_schema(
[
pyd_core_schema.no_info_plain_validator_function(validate_from_str),
# pyd_core_schema.no_info_plain_validator_function(validate_from_expr),
pyd_core_schema.is_instance_schema(SympyType),
]
)
return pyd_core_schema.json_or_python_schema(
json_schema=sympy_expr_schema,
python_schema=sympy_expr_schema,
serialization=pyd_core_schema.plain_serializer_function_ser_schema(
lambda sp_obj: sp.srepr(sp_obj)
),
)
SympyExpr = typx.Annotated[
SympyType,
_SympyExpr,
]
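## Illustrative round-trip through a `pyd.BaseModel` (a sketch; `Wave` is a hypothetical model):
## serialization uses `sp.srepr()`, and validation re-parses the string via `sp.sympify()`.
## >>> class Wave(pyd.BaseModel):
## ...     freq: SympyExpr
## >>> wave = Wave(freq=2 * sp.pi * sp.Symbol('t', real=True))
## >>> Wave.model_validate_json(wave.model_dump_json()).freq == wave.freq
## True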
def ConstrSympyExpr( # noqa: N802, PLR0913
# Features
allow_variables: bool = True,
allow_units: bool = True,
# Structures
allowed_sets: set[typ.Literal['integer', 'rational', 'real', 'complex']]
| None = None,
allowed_structures: set[typ.Literal['scalar', 'matrix']] | None = None,
# Element Class
max_symbols: int | None = None,
allowed_symbols: set[sp.Symbol] | None = None,
allowed_units: set[spu.Quantity] | None = None,
# Shape Class
allowed_matrix_shapes: set[tuple[int, int]] | None = None,
) -> SympyType:
"""Constructs a `SympyExpr` type, which will validate `sympy` types when used in a `pyd.BaseModel`.
Relies on the `sympy` assumptions system.
See <https://docs.sympy.org/latest/guides/assumptions.html#predicates>
Parameters (TBD):
Returns:
A type that represents a constrained `sympy` expression.
"""
def validate_expr(expr: SympyType):
if not isinstance(expr, SympyType):
msg = f"expr '{expr}' is not an allowed Sympy expression ({SympyType})"
raise ValueError(msg)
msgs = set()
# Validate Feature Class
if (not allow_variables) and (len(expr.free_symbols) > 0):
msgs.add(
f'allow_variables={allow_variables} does not match expression {expr}.'
)
if (not allow_units) and uses_units(expr):
msgs.add(f'allow_units={allow_units} does not match expression {expr}.')
# Validate Structure Class
if (
allowed_sets
and isinstance(expr, sp.Expr)
and not any(
{
'integer': expr.is_integer,
'rational': expr.is_rational,
'real': expr.is_real,
'complex': expr.is_complex,
}[allowed_set]
for allowed_set in allowed_sets
)
):
msgs.add(
f"allowed_sets={allowed_sets} does not match expression {expr} (remember to add assumptions to symbols, ex. `x = sp.Symbol('x', real=True))"
)
if allowed_structures and not any(
{
'scalar': not isinstance(expr, sp.MatrixBase),
'matrix': isinstance(expr, sp.MatrixBase),
}[allowed_set]
for allowed_set in allowed_structures
):
msgs.add(
f'allowed_structures={allowed_structures} does not match expression {expr}'
)
# Validate Element Class
if max_symbols and len(expr.free_symbols) > max_symbols:
msgs.add(f'max_symbols={max_symbols} does not match expression {expr}')
if allowed_symbols and not expr.free_symbols.issubset(allowed_symbols):
msgs.add(
f'allowed_symbols={allowed_symbols} does not match expression {expr}'
)
if allowed_units and not get_units(expr).issubset(allowed_units):
msgs.add(f'allowed_units={allowed_units} does not match expression {expr}')
# Validate Shape Class
if (
allowed_matrix_shapes and isinstance(expr, sp.MatrixBase)
) and expr.shape not in allowed_matrix_shapes:
msgs.add(
f'allowed_matrix_shapes={allowed_matrix_shapes} does not match expression {expr} with shape {expr.shape}'
)
# Error or Return
if msgs:
raise ValueError(str(msgs))
return expr
return typx.Annotated[
SympyType,
_SympyExpr,
pyd.AfterValidator(validate_expr),
]
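## Illustrative usage (a sketch; `UnitlessRealScalar` and `Param` are hypothetical names):
## >>> UnitlessRealScalar = ConstrSympyExpr(
## ...     allow_units=False,
## ...     allowed_sets={'integer', 'rational', 'real'},
## ...     allowed_structures={'scalar'},
## ...     max_symbols=1,
## ... )
## >>> class Param(pyd.BaseModel):
## ...     value: UnitlessRealScalar
## >>> Param(value=sp.Symbol('t', real=True) + 1).value
## t + 1
## >>> Param(value=sp.Symbol('t', real=True) * spu.second)  ## units present -> pydantic ValidationError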
####################
# - Common ConstrSympyExpr
####################
# Expression
ScalarUnitlessRealExpr: typ.TypeAlias = ConstrSympyExpr(
allow_variables=False,
allow_units=False,
allowed_structures={'scalar'},
allowed_sets={'integer', 'rational', 'real'},
)
ScalarUnitlessComplexExpr: typ.TypeAlias = ConstrSympyExpr(
allow_variables=False,
allow_units=False,
allowed_structures={'scalar'},
allowed_sets={'integer', 'rational', 'real', 'complex'},
)
# Symbol
IntSymbol: typ.TypeAlias = ConstrSympyExpr(
allow_variables=True,
allow_units=False,
allowed_sets={'integer'},
max_symbols=1,
)
RealSymbol: typ.TypeAlias = ConstrSympyExpr(
allow_variables=True,
allow_units=False,
allowed_sets={'integer', 'rational', 'real'},
max_symbols=1,
)
ComplexSymbol: typ.TypeAlias = ConstrSympyExpr(
allow_variables=True,
allow_units=False,
allowed_sets={'integer', 'rational', 'real', 'complex'},
max_symbols=1,
)
Symbol: typ.TypeAlias = IntSymbol | RealSymbol | ComplexSymbol
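## Illustrative (a sketch): a lone symbol passes only if its assumptions land in the allowed set.
## >>> sp.Symbol('n', integer=True)  ## accepted by IntSymbol, RealSymbol, and ComplexSymbol
## >>> sp.Symbol('x')                ## no assumptions -> rejected by all three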
# Unit
## Technically a "unit expression", which includes compound types.
## Support for this is the killer feature compared to spu.Quantity.
Unit: typ.TypeAlias = ConstrSympyExpr(
allow_variables=False,
allow_units=True,
allowed_structures={'scalar'},
)
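## Illustrative (a sketch): compound unit expressions like `spu.meter / spu.second`
## or `spu.watt / spu.meter**2` validate as `Unit`, which a bare `spu.Quantity`
## annotation could not express.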
# Number
IntNumber: typ.TypeAlias = ConstrSympyExpr(
allow_variables=False,
allow_units=False,
allowed_sets={'integer'},
allowed_structures={'scalar'},
)
RealNumber: typ.TypeAlias = ConstrSympyExpr(
allow_variables=False,
allow_units=False,
allowed_sets={'integer', 'rational', 'real'},
allowed_structures={'scalar'},
)
ComplexNumber: typ.TypeAlias = ConstrSympyExpr(
allow_variables=False,
allow_units=False,
allowed_sets={'integer', 'rational', 'real', 'complex'},
allowed_structures={'scalar'},
)
Number: typ.TypeAlias = IntNumber | RealNumber | ComplexNumber
# Vector
Real3DVector: typ.TypeAlias = ConstrSympyExpr(
allow_variables=False,
allow_units=False,
allowed_sets={'integer', 'rational', 'real'},
allowed_structures={'matrix'},
allowed_matrix_shapes={(3, 1)},
)
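## Illustrative usage of the common aliases (a sketch; `Source` is a hypothetical model):
## >>> class Source(pyd.BaseModel):
## ...     center: Real3DVector
## ...     amplitude: RealNumber
## >>> Source(center=sp.Matrix([0, 0, 1]), amplitude=sp.Rational(1, 2)).amplitude
## 1/2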

View File

@@ -5,7 +5,8 @@ import rich.console
import rich.logging
import rich.traceback
- from .. import info
+ from blender_maxwell import contracts as ct
from ..nodeps.utils import simple_logger
from ..nodeps.utils.simple_logger import (
LOG_LEVEL_MAP,  # noqa: F401
@@ -56,10 +57,7 @@ def get(module_name):
logger = logging.getLogger(module_name)
# Setup Logger from Addon Preferences
- if (addon_prefs := info.addon_prefs()) is None:
-     msg = 'Addon preferences not defined'
-     raise RuntimeError(msg)
- addon_prefs.sync_addon_logging(logger_to_setup=logger)
+ ct.addon.prefs().on_addon_logging_changed(single_logger_to_setup=logger)
return logger

View File

@@ -60,7 +60,7 @@ class _SympyExpr:
json_schema=sympy_expr_schema,
python_schema=sympy_expr_schema,
serialization=pyd_core_schema.plain_serializer_function_ser_schema(
- lambda instance: str(instance)
+ lambda instance: sp.srepr(instance)
),
)
@@ -79,9 +79,9 @@ def ConstrSympyExpr(
allow_variables: bool = True,
allow_units: bool = True,
# Structure Class
- allowed_sets: set[typx.Literal['integer', 'rational', 'real', 'complex']]
+ allowed_sets: set[typ.Literal['integer', 'rational', 'real', 'complex']]
| None = None,
- allowed_structures: set[typx.Literal['scalar', 'matrix']] | None = None,
+ allowed_structures: set[typ.Literal['scalar', 'matrix']] | None = None,
# Element Class
allowed_symbols: set[sp.Symbol] | None = None,
allowed_units: set[spu.Quantity] | None = None,

View File

@@ -78,7 +78,7 @@ if __name__ == '__main__':
print(f'\tBlender: Install & Enable "{info.ADDON_NAME}"')
else:
print(f'\tBlender: "{info.ADDON_NAME}" Not Installed')
- print(output)
+ print(*output, sep='')
sys.exit(1)
# Run Addon