feat: High-performance math system and depsflow.
Enormously important changes to the data flow semantics and invalidation rules. Especially significant is the way in which the node graph produces a deeply composed function, compiles it to optimized machine code with `jax`, and uses a separately cached data flow to insert values into the function from anywhere along the node graph without recompiling it. A critical portion of the math system, namely the unit-aware dimensional representation, is also finished. The `Data` node socket type now dynamically reports the dimensional properties of the object flowing through it, courtesy of a separate data flow for information. This allows for very high-performance, unit-aware nearest-value indexing built on binary search. Also, dependency management is completely ironed out: the `pip install` process now runs concurrently, and the installation log is parsed in the background to update a progress bar. This is the foundational work for a similar concurrent process w.r.t. Tidy3D progress reporting.
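To illustrate the concurrent `pip install` flow described above, here is a minimal, self-contained sketch. It is not the addon's actual implementation: the command construction, log handling, and the "Collecting"-based progress heuristic are illustrative assumptions.

```python
# Hedged sketch only: the real addon drives this from Blender operators and its
# own pydeps utilities; paths, arguments, and the progress heuristic here are
# assumptions for illustration.
import re
import subprocess
import sys
import threading
from pathlib import Path


def install_deps_concurrently(reqs: Path, target: Path, log_path: Path, on_progress) -> subprocess.Popen:
    """Start `pip install` without blocking, parsing its log in the background."""
    process = subprocess.Popen(
        [sys.executable, '-m', 'pip', 'install', '-r', str(reqs), '--target', str(target)],
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
    )

    def _pump() -> None:
        # Tee pip's output to a log file and estimate progress from the number
        # of "Collecting <pkg>" lines seen vs. the number of requirements.
        total = sum(1 for line in reqs.read_text().splitlines() if line.strip())
        seen = 0
        with log_path.open('w') as log_file:
            for line in process.stdout:
                log_file.write(line)
                if re.match(r'^Collecting ', line):
                    seen += 1
                    on_progress(min(seen / max(total, 1), 1.0))

    threading.Thread(target=_pump, daemon=True).start()
    return process
```

A modal operator or timer can then poll the returned process and redraw the progress bar as `on_progress` updates shared state; the same pattern generalizes to streaming Tidy3D progress reports.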
parent 9960cd3480
commit b4d6eae036

TODO.md | 13
@@ -3,6 +3,7 @@
- [x] Implement Robust DataFlowKind for list-like / spectral-like composite types
- [x] Unify random node/socket caches.
- [x] Revalidate cache logic
- [x] Finish math system
- [ ] Finish the "Low-Hanging Fruit" Nodes
- [ ] Move preview GN trees to the asset library.

@@ -10,8 +11,12 @@
# Nodes
## Analysis
- [ ] Extract
- [ ] Viz
- [x] Extract
- [x] Viz
- [x] Math / Map Math
- [x] Math / Filter Math
- [ ] Math / Reduce Math
- [ ] Math / Operate Math

## Inputs
- [x] Wave Constant

@@ -462,6 +467,10 @@ We're trying to do our part by reporting bugs we find!
This is where we keep track of them for now.

## Blender Maxwell Bugs
- [ ] Detaching data chained into Viz node makes for a very laggy error, as non-implemented LazyValueFunc suddenly can't propagate live into the Viz node.
- [ ] Need to clear invalid searched StrProperties on copy
- [ ] Enabled 3D preview is really slow for some reason when working with the math nodes.

- [ ] BUG: CTRL+SHIFT+CLICK not on a node shows an error; should just do nothing.
- [ ] Slow changing of socket sets / range on wave constant.
- [ ] API auth shouldn't show if everything is fine in Cloud Task socket
@@ -28,15 +28,23 @@ Attributes:
from pathlib import Path

import bpy

from . import contracts as ct
from .nodeps.utils import simple_logger

simple_logger.sync_bootstrap_logging(
console_level=ct.addon.BOOTSTRAP_LOG_LEVEL,
)
# Initialize Logging Defaults
## Initial logger settings (ex. log level) must be set somehow.
## The Addon ZIP-packer makes this decision, and packs it into files.
## AddonPreferences will, once loaded, override this.
_PATH_ADDON_ROOT = Path(__file__).resolve().parent
_PATH_BOOTSTRAP_LOG_LEVEL = _PATH_ADDON_ROOT / '.bootstrap_log_level'
with _PATH_BOOTSTRAP_LOG_LEVEL.open('r') as f:
_BOOTSTRAP_LOG_LEVEL = int(f.read().strip())

simple_logger.init_simple_logger_defaults(console_level=_BOOTSTRAP_LOG_LEVEL)

# Import Statements
import bpy # noqa: E402

from . import contracts as ct # noqa: E402
from . import preferences, registration # noqa: E402
from .nodeps import operators as nodeps_operators # noqa: E402
from .nodeps.utils import pydeps # noqa: E402
@@ -183,7 +183,7 @@ def import_geonodes(
# - GeoNodes Asset Shelf Panel for MaxwellSimTree
####################
class NodeAssetPanel(bpy.types.Panel):
bl_idname = 'blender_maxwell.panel__node_asset_panel'
bl_idname = ct.PanelType.NodeAssetPanel
bl_label = 'Node GeoNodes Asset Panel'
bl_space_type = 'NODE_EDITOR'
bl_region_type = 'UI'
@@ -1,4 +1,4 @@
import random
import sys
import tomllib
from pathlib import Path

@@ -23,15 +23,20 @@ PATH_ASSETS = PATH_ADDON_ROOT / 'assets'
####################
PATH_REQS = PATH_ADDON_ROOT / 'requirements.lock'
DEFAULT_PATH_DEPS = PATH_ADDON_ROOT / '.addon_dependencies'
DEFAULT_PATH_DEPS.mkdir(exist_ok=True)
## requirements.lock is written when packing the .zip.
## By default, the addon pydeps are kept in the addon dir.

ORIGINAL_SYS_PATH = sys.path.copy()

####################
# - Local Addon Cache
####################
ADDON_CACHE = PATH_ADDON_ROOT / '.addon_cache'
ADDON_CACHE.mkdir(exist_ok=True)

PIP_INSTALL_LOG = ADDON_CACHE / 'pip_install.log'


####################
# - Dynamic Addon Information

@@ -83,11 +88,6 @@ def prefs() -> bpy.types.AddonPreferences | None:
# - Logging Info
####################
DEFAULT_LOG_PATH = PATH_ADDON_ROOT / 'addon.log'
DEFAULT_LOG_PATH.touch(exist_ok=True)
## By default, the addon file log writes to the addon dir.
## The initial .log_level contents are written when packing the .zip.
## Subsequent changes are managed by nodeps.utils.simple_logger.py.

PATH_BOOTSTRAP_LOG_LEVEL = PATH_ADDON_ROOT / '.bootstrap_log_level'
with PATH_BOOTSTRAP_LOG_LEVEL.open('r') as f:
BOOTSTRAP_LOG_LEVEL = int(f.read().strip())
@@ -13,3 +13,5 @@ class OperatorType(enum.StrEnum):
InstallPyDeps = enum.auto()
UninstallPyDeps = enum.auto()
ManagePyDeps = enum.auto()

ConnectViewerNode = enum.auto()
@@ -10,3 +10,5 @@ from .addon import NAME as ADDON_NAME
@blender_type_enum.prefix_values_with(f'{ADDON_NAME.upper()}_PT_')
class PanelType(enum.StrEnum):
"""Identifiers for addon-defined `bpy.types.Panel`."""

NodeAssetPanel = enum.auto()
@@ -18,8 +18,8 @@ from blender_maxwell.contracts import (
addon,
)

from .bl_socket_desc_map import BL_SOCKET_DESCR_TYPE_MAP
from .bl_socket_types import BL_SOCKET_DESCR_ANNOT_STRING, BL_SOCKET_DIRECT_TYPE_MAP
from .bl_socket_desc_map import BL_SOCKET_DESCR_ANNOT_STRING, BL_SOCKET_DESCR_TYPE_MAP
from .bl_socket_types import BL_SOCKET_DIRECT_TYPE_MAP
from .category_labels import NODE_CAT_LABELS
from .category_types import NodeCategory
from .flow_events import FlowEvent

@@ -37,9 +37,8 @@ from .icons import Icon
from .mobj_types import ManagedObjType
from .node_types import NodeType
from .socket_colors import SOCKET_COLORS
from .socket_shapes import SOCKET_SHAPES
from .socket_types import SocketType
from .socket_units import SOCKET_UNITS
from .socket_units import SOCKET_UNITS, unit_to_socket_type
from .tree_types import TreeType
from .unit_systems import UNITS_BLENDER, UNITS_TIDY3D

@@ -65,6 +64,7 @@ __all__ = [
'TreeType',
'SocketType',
'SOCKET_UNITS',
'unit_to_socket_type',
'SOCKET_COLORS',
'SOCKET_SHAPES',
'UNITS_BLENDER',
@@ -1,4 +1,4 @@
from .node_cats import NodeCategory as NC
from .category_types import NodeCategory as NC

NODE_CAT_LABELS = {
# Analysis/
@@ -1,10 +1,10 @@
import enum

from blender_maxwell.utils.blender_type_enum import BlenderTypeEnum, wrap_values_in_MT
from blender_maxwell.utils import blender_type_enum


@wrap_values_in_MT
class NodeCategory(BlenderTypeEnum):
@blender_type_enum.wrap_values_in_MT
class NodeCategory(blender_type_enum.BlenderTypeEnum):
MAXWELLSIM = enum.auto()

# Analysis/
@@ -1,4 +1,5 @@
import enum
import typing as typ

from blender_maxwell.utils.staticproperty import staticproperty
@@ -8,6 +8,7 @@ import jax
import jax.numpy as jnp
import jaxtyping as jtyp
import numba
import numpy as np
import sympy as sp
import sympy.physics.units as spu
@@ -80,8 +81,18 @@ class CapabilitiesFlow:
active_kind: FlowKind

is_universal: bool = False
must_match: dict[str, typ.Any] = dataclasses.field(default_factory=dict)

def is_compatible_with(self, other: typ.Self) -> bool:
return other.is_universal or (
self.socket_type == other.socket_type
and self.active_kind == other.active_kind
and all(
name in other.must_match
and self.must_match[name] == other.must_match[name]
for name in self.must_match
)
)
return (
self.socket_type == other.socket_type
and self.active_kind == other.active_kind
@@ -108,11 +119,55 @@ class ArrayFlow:
"""

values: jtyp.Shaped[jtyp.Array, '...']
unit: spu.Quantity | None = None
unit: spux.Unit | None = None

is_sorted: bool = False

def __len__(self) -> int:
return len(self.values)

def nearest_idx_of(self, value: spux.SympyType, require_sorted: bool = True) -> int:
"""Find the index of the value that is closest to the given value.

Units are taken into account; the given value will be scaled to the internal unit before direct use.

Parameters:
require_sorted: Require that `self.values` be sorted, so that use of the faster binary-search algorithm is guaranteed.

Returns:
The index of `self.values` that is closest to the value `value`.
"""
if not require_sorted:
raise NotImplementedError

# Scale Given Value to Internal Unit
scaled_value = spux.sympy_to_python(spux.scale_to_unit(value, self.unit))

# BinSearch for "Right IDX"
## >>> self.values[right_idx] > scaled_value
## >>> self.values[right_idx - 1] < scaled_value
right_idx = np.searchsorted(self.values, scaled_value, side='left')

# Case: Right IDX is Boundary
if right_idx == 0:
return right_idx
if right_idx == len(self.values):
return right_idx - 1

# Find Closest of [Right IDX - 1, Right IDX]
left_val = self.values[right_idx - 1]
right_val = self.values[right_idx]

if (scaled_value - left_val) <= (right_val - scaled_value):
return right_idx - 1

return right_idx

def correct_unit(self, corrected_unit: spu.Quantity) -> typ.Self:
if self.unit is not None:
return ArrayFlow(values=self.values, unit=corrected_unit)
return ArrayFlow(
values=self.values, unit=corrected_unit, is_sorted=self.is_sorted
)

msg = f'Tried to correct unit of unitless LazyDataValueRange "{corrected_unit}"'
raise ValueError(msg)
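For intuition, here is a small stand-alone sketch of the unit-aware nearest-value lookup that `nearest_idx_of` implements, using plain `sympy.physics.units` for the unit scaling instead of the addon's `spux` helpers (an assumed but equivalent conversion):

```python
import numpy as np
import sympy.physics.units as spu

wavelengths_um = np.array([0.5, 0.75, 1.0, 1.31, 1.55])  # sorted axis, in µm


def nearest_idx(values: np.ndarray, value, array_unit) -> int:
    # Scale the query into the array's unit, then binary-search (O(log n)).
    scaled = float(spu.convert_to(value, array_unit) / array_unit)
    right = int(np.searchsorted(values, scaled, side='left'))
    if right == 0:
        return 0
    if right == len(values):
        return len(values) - 1
    # Pick whichever neighbor is closer to the scaled query value.
    return right - 1 if scaled - values[right - 1] <= values[right] - scaled else right


# A query given in nanometers against a µm-indexed axis:
assert nearest_idx(wavelengths_um, 1300 * spu.nanometer, spu.micrometer) == 3
```

Because the index arrays are kept sorted (`is_sorted=True`), the lookup stays logarithmic even for large monitor axes.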
@@ -122,6 +177,7 @@ class ArrayFlow:
return ArrayFlow(
values=float(spux.scaling_factor(self.unit, unit)) * self.values,
unit=unit,
is_sorted=self.is_sorted, ## TODO: Can we really say that?
)
## TODO: Is this scaling numerically stable?
@@ -257,8 +313,8 @@ class LazyValueFuncFlow:
"""

func: LazyFunction
func_args: list[tuple[str, type]] = MappingProxyType({})
func_kwargs: dict[str, type] = MappingProxyType({})
func_args: list[type] = dataclasses.field(default_factory=list)
func_kwargs: dict[str, type] = dataclasses.field(default_factory=dict)
supports_jax: bool = False
supports_numba: bool = False
@@ -266,21 +322,22 @@ class LazyValueFuncFlow:
def compose_within(
self,
enclosing_func: LazyFunction,
enclosing_func_args: list[tuple[str, type]] = (),
enclosing_func_args: list[type] = (),
enclosing_func_kwargs: dict[str, type] = MappingProxyType({}),
supports_jax: bool = False,
supports_numba: bool = False,
) -> typ.Self:
return LazyValueFuncFlow(
function=lambda *args, **kwargs: enclosing_func(
func=lambda *args, **kwargs: enclosing_func(
self.func(
*list(args[len(self.func_args) :]),
*list(args[: len(self.func_args)]),
**{k: v for k, v in kwargs.items() if k in self.func_kwargs},
),
**kwargs,
*args[len(self.func_args) :],
**{k: v for k, v in kwargs.items() if k not in self.func_kwargs},
),
func_args=self.func_args + enclosing_func_args,
func_kwargs=self.func_kwargs | enclosing_func_kwargs,
func_args=self.func_args + list(enclosing_func_args),
func_kwargs=self.func_kwargs | dict(enclosing_func_kwargs),
supports_jax=self.supports_jax and supports_jax,
supports_numba=self.supports_numba and supports_numba,
)
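The payoff of this composition is that the whole node chain collapses into one callable that `jax.jit` compiles once, while the concrete parameters keep flowing in separately. A hedged, self-contained sketch of that idea (the function names are illustrative, not the addon's API):

```python
import jax
import jax.numpy as jnp


# Two "node" functions; composition nests them so the whole chain becomes a
# single callable over the leaf arguments (the role of compose_within above).
def fix_coordinate(data: jax.Array, idx: jax.Array) -> jax.Array:
    return jnp.take(data, idx, axis=0)  # e.g. FilterMath: index one coordinate


def mapper(arr: jax.Array) -> jax.Array:
    return jnp.abs(arr) ** 2  # e.g. MapMath: |v|² per element


@jax.jit
def composed(data: jax.Array, idx: jax.Array) -> jax.Array:
    return mapper(fix_coordinate(data, idx))


data = jnp.linspace(0.0, 1.0, 12).reshape(4, 3)
composed(data, 1)  # first call traces and compiles to machine code
composed(data, 2)  # same shapes/dtypes: parameters change, no recompile
```

In the addon, `compose_within` builds the nested callable incrementally across the node graph, while the separately cached `ParamsFlow` carries the concrete `func_args`/`func_kwargs` to the final call site, so editing an upstream value never forces recompilation.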
@@ -380,6 +437,9 @@ class LazyArrayRangeFlow:
key=lambda sym: sym.name,
)

def __len__(self):
return self.steps

####################
# - Units
####################
@@ -590,7 +650,7 @@ class LazyArrayRangeFlow:
return self.array_generator(realized_start, realized_stop, self.steps)

if kind == FlowKind.Array:
return ArrayFlow(values=gen_array(), unit=self.unit)
return ArrayFlow(values=gen_array(), unit=self.unit, is_sorted=True)
if kind == FlowKind.LazyValueFunc:
return LazyValueFuncFlow(func=gen_array, supports_jax=True)
@@ -601,7 +661,20 @@ class LazyArrayRangeFlow:
####################
# - Params
####################
ParamsFlow: typ.TypeAlias = dict[str, typ.Any]
@dataclasses.dataclass(frozen=True, kw_only=True)
class ParamsFlow:
func_args: list[typ.Any] = dataclasses.field(default_factory=list)
func_kwargs: dict[str, typ.Any] = dataclasses.field(default_factory=dict)

def compose_within(
self,
enclosing_func_args: list[tuple[type]] = (),
enclosing_func_kwargs: dict[str, type] = MappingProxyType({}),
) -> typ.Self:
return ParamsFlow(
func_args=self.func_args + list(enclosing_func_args),
func_kwargs=self.func_kwargs | dict(enclosing_func_kwargs),
)


####################
@@ -609,33 +682,10 @@ ParamsFlow: typ.TypeAlias = dict[str, typ.Any]
####################
@dataclasses.dataclass(frozen=True, kw_only=True)
class InfoFlow:
func_args: list[tuple[str, type]] = MappingProxyType({})
func_kwargs: dict[str, type] = MappingProxyType({})

# Dimension Information
has_ndims: bool = False
dim_names: list[str] = ()
dim_idx: dict[str, ArrayFlow | LazyArrayRangeFlow] = MappingProxyType({})
dim_names: list[str] = dataclasses.field(default_factory=list)
dim_idx: dict[str, ArrayFlow | LazyArrayRangeFlow] = dataclasses.field(
default_factory=dict
) ## TODO: Rename to dim_idxs

## TODO: Validation, esp. length of dims. Pydantic?

def compose_within(
self,
enclosing_func_args: list[tuple[str, type]] = (),
enclosing_func_kwargs: dict[str, type] = MappingProxyType({}),
) -> typ.Self:
return InfoFlow(
func_args=self.func_args + enclosing_func_args,
func_kwargs=self.func_kwargs | enclosing_func_kwargs,
)

def call_lazy_value_func(
self,
lazy_value_func: LazyValueFuncFlow,
*args: list[typ.Any],
**kwargs: dict[str, typ.Any],
) -> tuple[list[typ.Any], dict[str, typ.Any]]:
if lazy_value_func.supports_jax:
lazy_value_func.func_jax(*args, **kwargs)

lazy_value_func.func(*args, **kwargs)
@@ -1,5 +1,5 @@
from blender_maxwell.utils.blender_type_enum import BlenderTypeEnum
import enum


class Icon(BlenderTypeEnum):
class Icon(enum.StrEnum):
SimNodeEditor = 'MOD_SIMPLEDEFORM'
@@ -1,9 +1,9 @@
import enum

from blender_maxwell.utils.blender_type_enum import BlenderTypeEnum
from blender_maxwell.utils import blender_type_enum


class ManagedObjType(BlenderTypeEnum):
class ManagedObjType(blender_type_enum.BlenderTypeEnum):
ManagedBLImage = enum.auto()

ManagedBLCollection = enum.auto()
@@ -1,13 +1,10 @@
import enum

from blender_maxwell.utils.blender_type_enum import (
BlenderTypeEnum,
append_cls_name_to_values,
)
from blender_maxwell.utils import blender_type_enum


@append_cls_name_to_values
class NodeType(BlenderTypeEnum):
@blender_type_enum.append_cls_name_to_values
class NodeType(blender_type_enum.BlenderTypeEnum):
# KitchenSink = enum.auto()

# Analysis
@@ -3,7 +3,8 @@ from .socket_types import SocketType as ST
## TODO: Don't just presume sRGB.
SOCKET_COLORS = {
# Basic
ST.Any: (0.8, 0.8, 0.8, 1.0), # Light Grey
ST.Any: (0.9, 0.9, 0.9, 1.0), # Light Grey
ST.Data: (0.8, 0.8, 0.8, 1.0), # Light Grey
ST.Bool: (0.7, 0.7, 0.7, 1.0), # Medium Light Grey
ST.String: (0.7, 0.7, 0.7, 1.0), # Medium Light Grey
ST.FilePath: (0.6, 0.6, 0.6, 1.0), # Medium Grey
@@ -1,15 +1,13 @@
import enum

from blender_maxwell.utils.blender_type_enum import (
BlenderTypeEnum,
append_cls_name_to_values,
)
from blender_maxwell.utils import blender_type_enum


@append_cls_name_to_values
class SocketType(BlenderTypeEnum):
@blender_type_enum.append_cls_name_to_values
class SocketType(blender_type_enum.BlenderTypeEnum):
# Base
Any = enum.auto()
Data = enum.auto()
Bool = enum.auto()
String = enum.auto()
FilePath = enum.auto()
@@ -258,3 +258,20 @@ SOCKET_UNITS = {
},
},
}


def unit_to_socket_type(unit: spux.Unit) -> ST:
"""Returns a SocketType that accepts the given unit.

Only unit-compatibility is taken into account; in the case of overlap, the ordering of `SOCKET_UNITS` determines which is returned.
This isn't super clean, but it's good enough for our needs right now.

Returns:
The first `SocketType` in `SOCKET_UNITS` that contains the given unit as a valid possibility.
"""
for socket_type, _units in SOCKET_UNITS.items():
if unit in _units['values'].values():
return socket_type

msg = f"Unit {unit} doesn't have an obvious SocketType."
raise ValueError(msg)
@@ -1,11 +1,8 @@
import enum

from blender_maxwell.utils.blender_type_enum import (
BlenderTypeEnum,
append_cls_name_to_values,
)
from blender_maxwell.utils import blender_type_enum


@append_cls_name_to_values
class TreeType(BlenderTypeEnum):
@blender_type_enum.append_cls_name_to_values
class TreeType(blender_type_enum.BlenderTypeEnum):
MaxwellSim = enum.auto()
@@ -196,7 +196,7 @@ class NodeLinkCache:
class MaxwellSimTree(bpy.types.NodeTree):
bl_idname = ct.TreeType.MaxwellSim.value
bl_label = 'Maxwell Sim Editor'
bl_icon = ct.Icon.SimNodeEditor.value
bl_icon = ct.Icon.SimNodeEditor

####################
# - Lock Methods
@@ -1,6 +1,7 @@
import typing as typ

import bpy
import jax
import jax.numpy as jnp
import sympy.physics.units as spu
@ -21,17 +22,17 @@ class ExtractDataNode(base.MaxwellSimNode):
|
|||
|
||||
input_socket_sets: typ.ClassVar = {
|
||||
'Sim Data': {'Sim Data': sockets.MaxwellFDTDSimDataSocketDef()},
|
||||
'Field Data': {'Field Data': sockets.AnySocketDef()},
|
||||
'Flux Data': {'Flux Data': sockets.AnySocketDef()},
|
||||
'Monitor Data': {'Monitor Data': sockets.DataSocketDef(format='monitor_data')},
|
||||
}
|
||||
output_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
output_socket_sets: typ.ClassVar = {
|
||||
'Sim Data': {'Monitor Data': sockets.DataSocketDef(format='monitor_data')},
|
||||
'Monitor Data': {'Data': sockets.DataSocketDef(format='jax')},
|
||||
}
|
||||
|
||||
####################
|
||||
# - Properties
|
||||
####################
|
||||
extract_filter: bpy.props.EnumProperty(
|
||||
extract_filter: bpy.props.StringProperty(
|
||||
name='Extract Filter',
|
||||
description='Data to extract from the input',
|
||||
search=lambda self, _, edit_text: self.search_extract_filters(edit_text),
|
||||
|
@ -41,38 +42,50 @@ class ExtractDataNode(base.MaxwellSimNode):
|
|||
# Sim Data
|
||||
sim_data_monitor_nametype: dict[str, str] = bl_cache.BLField({})
|
||||
|
||||
# Field Data
|
||||
field_data_components: set[str] = bl_cache.BLField(set())
|
||||
# Monitor Data
|
||||
monitor_data_type: str = bl_cache.BLField('')
|
||||
monitor_data_components: list[str] = bl_cache.BLField([])
|
||||
|
||||
def search_extract_filters(
|
||||
self, _: bpy.types.Context
|
||||
) -> list[tuple[str, str, str]]:
|
||||
# Sim Data
|
||||
if self.active_socket_set == 'Sim Data' and self.inputs['Sim Data'].is_linked:
|
||||
####################
|
||||
# - Computed Properties
|
||||
####################
|
||||
@bl_cache.cached_bl_property(persist=False)
|
||||
def has_sim_data(self) -> bool:
|
||||
return (
|
||||
self.active_socket_set == 'Sim Data'
|
||||
and self.inputs['Sim Data'].is_linked
|
||||
and self.sim_data_monitor_nametype
|
||||
)
|
||||
|
||||
@bl_cache.cached_bl_property(persist=False)
|
||||
def has_monitor_data(self) -> bool:
|
||||
return (
|
||||
self.active_socket_set == 'Monitor Data'
|
||||
and self.inputs['Monitor Data'].is_linked
|
||||
and self.monitor_data_type
|
||||
)
|
||||
|
||||
####################
|
||||
# - Extraction Filter Search
|
||||
####################
|
||||
def search_extract_filters(self, edit_text: str) -> list[tuple[str, str, str]]:
|
||||
if self.has_sim_data:
|
||||
return [
|
||||
(
|
||||
monitor_name,
|
||||
f'{monitor_name}',
|
||||
f'Monitor "{monitor_name}" ({monitor_type}) recorded by the Sim',
|
||||
monitor_type.removesuffix('Data'),
|
||||
)
|
||||
for monitor_name, monitor_type in self.sim_data_monitor_nametype.items()
|
||||
if edit_text == '' or edit_text.lower() in monitor_name.lower()
|
||||
]
|
||||
|
||||
# Field Data
|
||||
if self.active_socket_set == 'Field Data' and self.inputs['Sim Data'].is_linked:
|
||||
if self.has_monitor_data:
|
||||
return [
|
||||
([('Ex', 'Ex', 'Ex')] if 'Ex' in self.field_data_components else [])
|
||||
+ ([('Ey', 'Ey', 'Ey')] if 'Ey' in self.field_data_components else [])
|
||||
+ ([('Ez', 'Ez', 'Ez')] if 'Ez' in self.field_data_components else [])
|
||||
+ ([('Hx', 'Hx', 'Hx')] if 'Hx' in self.field_data_components else [])
|
||||
+ ([('Hy', 'Hy', 'Hy')] if 'Hy' in self.field_data_components else [])
|
||||
+ ([('Hz', 'Hz', 'Hz')] if 'Hz' in self.field_data_components else [])
|
||||
(component_name, f'ℂ {component_name[1]}-Pol')
|
||||
for component_name in self.monitor_data_components
|
||||
if (edit_text == '' or edit_text.lower() in component_name.lower())
|
||||
]
|
||||
|
||||
# Flux Data
|
||||
## Nothing to extract.
|
||||
|
||||
# Fallback
|
||||
return []
|
||||
|
||||
####################
|
||||
|
@ -82,30 +95,35 @@ class ExtractDataNode(base.MaxwellSimNode):
|
|||
col.prop(self, 'extract_filter', text='')
|
||||
|
||||
def draw_info(self, _: bpy.types.Context, col: bpy.types.UILayout) -> None:
|
||||
if self.active_socket_set == 'Sim Data' and self.inputs['Sim Data'].is_linked:
|
||||
if self.has_sim_data or self.has_monitor_data:
|
||||
# Header
|
||||
row = col.row()
|
||||
row.alignment = 'CENTER'
|
||||
row.label(text=f'{self.cache__num_monitors} Monitors')
|
||||
if self.has_sim_data:
|
||||
row.label(text=f'{len(self.sim_data_monitor_nametype)} Monitors')
|
||||
elif self.has_monitor_data:
|
||||
row.label(text=f'{self.monitor_data_type} Monitor Data')
|
||||
|
||||
# Monitor Info
|
||||
if len(self.sim_data_monitor_nametype) > 0:
|
||||
for (
|
||||
monitor_name,
|
||||
monitor_type,
|
||||
) in self.sim_data_monitor_nametype.items():
|
||||
col.label(text=f'{monitor_name}: {monitor_type}')
|
||||
# Monitor Data Contents
|
||||
row = col.row()
|
||||
box = row.box()
|
||||
grid = box.grid_flow(row_major=True, columns=2, even_columns=True)
|
||||
for name, desc in self.search_extract_filters(edit_text=''):
|
||||
grid.label(text=name)
|
||||
grid.label(text=desc if desc else '')
|
||||
|
||||
####################
|
||||
# - Events
|
||||
####################
|
||||
@events.on_value_changed(
|
||||
socket_name='Sim Data',
|
||||
input_sockets={'Sim Data'},
|
||||
input_sockets_optional={'Sim Data': True},
|
||||
socket_name={'Sim Data', 'Monitor Data'},
|
||||
prop_name='active_socket_set',
|
||||
input_sockets={'Sim Data', 'Monitor Data'},
|
||||
input_sockets_optional={'Sim Data': True, 'Monitor Data': True},
|
||||
)
|
||||
def on_sim_data_changed(self, input_sockets: dict):
|
||||
if input_sockets['Sim Data'] is not None:
|
||||
# Sim Data Monitors: Set Name -> Type
|
||||
self.sim_data_monitor_nametype = {
|
||||
monitor_name: monitor_data.type
|
||||
for monitor_name, monitor_data in input_sockets[
|
||||
|
@ -113,55 +131,77 @@ class ExtractDataNode(base.MaxwellSimNode):
|
|||
].monitor_data.items()
|
||||
}
|
||||
|
||||
@events.on_value_changed(
|
||||
socket_name='Field Data',
|
||||
input_sockets={'Field Data'},
|
||||
input_sockets_optional={'Field Data': True},
|
||||
)
|
||||
def on_field_data_changed(self, input_sockets: dict):
|
||||
if input_sockets['Field Data'] is not None:
|
||||
self.field_data_components = (
|
||||
{'Ex'}
|
||||
if input_sockets['Field Data'].Ex is not None
|
||||
else set() | {'Ey'}
|
||||
if input_sockets['Field Data'].Ey is not None
|
||||
else set() | {'Ez'}
|
||||
if input_sockets['Field Data'].Ez is not None
|
||||
else set() | {'Hx'}
|
||||
if input_sockets['Field Data'].Hx is not None
|
||||
else set() | {'Hy'}
|
||||
if input_sockets['Field Data'].Hy is not None
|
||||
else set() | {'Hz'}
|
||||
if input_sockets['Field Data'].Hz is not None
|
||||
else set()
|
||||
if input_sockets['Monitor Data'] is not None:
|
||||
# Monitor Data Type
|
||||
self.monitor_data_type = input_sockets['Monitor Data'].type.removesuffix(
|
||||
'Data'
|
||||
)
|
||||
|
||||
# Field/FieldTime
|
||||
if self.monitor_data_type in ['Field', 'FieldTime']:
|
||||
self.monitor_data_components = [
|
||||
field_component
|
||||
for field_component in ['Ex', 'Ey', 'Ez', 'Hx', 'Hy', 'Hz']
|
||||
if hasattr(input_sockets['Monitor Data'], field_component)
|
||||
]
|
||||
|
||||
# Permittivity
|
||||
if self.monitor_data_type == 'Permittivity':
|
||||
self.monitor_data_components = ['xx', 'yy', 'zz']
|
||||
|
||||
# Flux/FluxTime
|
||||
if self.monitor_data_type in ['Flux', 'FluxTime']:
|
||||
self.monitor_data_components = ['flux']
|
||||
|
||||
# FieldProjection(Angle/Cartesian/KSpace)/Diffraction
|
||||
if self.monitor_data_type in [
|
||||
'FieldProjectionAngle',
|
||||
'FieldProjectionCartesian',
|
||||
'FieldProjectionKSpace',
|
||||
'Diffraction',
|
||||
]:
|
||||
self.monitor_data_components = [
|
||||
'Er',
|
||||
'Etheta',
|
||||
'Ephi',
|
||||
'Hr',
|
||||
'Htheta',
|
||||
'Hphi',
|
||||
]
|
||||
|
||||
# Invalidate Computed Property Caches
|
||||
self.has_sim_data = bl_cache.Signal.InvalidateCache
|
||||
self.has_monitor_data = bl_cache.Signal.InvalidateCache
|
||||
|
||||
# Reset Extraction Filter
|
||||
## The extraction filter that was set before may not be valid anymore.
|
||||
## If so, simply remove it.
|
||||
if self.extract_filter not in [
|
||||
el[0] for el in self.search_extract_filters(edit_text='')
|
||||
]:
|
||||
self.extract_filter = ''
|
||||
|
||||
####################
|
||||
# - Output: Value
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
'Monitor Data',
|
||||
kind=ct.FlowKind.Value,
|
||||
props={'active_socket_set', 'extract_filter'},
|
||||
input_sockets={'Sim Data', 'Field Data', 'Flux Data'},
|
||||
input_sockets_optional={
|
||||
'Sim Data': True,
|
||||
'Field Data': True,
|
||||
'Flux Data': True,
|
||||
},
|
||||
props={'extract_filter'},
|
||||
input_sockets={'Sim Data'},
|
||||
)
|
||||
def compute_extracted_data(self, props: dict, input_sockets: dict):
|
||||
if props['active_socket_set'] == 'Sim Data':
|
||||
def compute_monitor_data(self, props: dict, input_sockets: dict):
|
||||
return input_sockets['Sim Data'].monitor_data[props['extract_filter']]
|
||||
|
||||
if props['active_socket_set'] == 'Field Data':
|
||||
return getattr(input_sockets['Field Data'], props['extract_filter'])
|
||||
|
||||
if props['active_socket_set'] == 'Flux Data':
|
||||
return input_sockets['Flux Data']
|
||||
|
||||
msg = f'Tried to get a "FlowKind.Value" from socket set {props["active_socket_set"]} in "{self.bl_label}"'
|
||||
raise RuntimeError(msg)
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.Value,
|
||||
props={'extract_filter'},
|
||||
input_sockets={'Monitor Data'},
|
||||
)
|
||||
def compute_data(self, props: dict, input_sockets: dict) -> jax.Array:
|
||||
xarray_data = getattr(input_sockets['Monitor Data'], props['extract_filter'])
|
||||
return jnp.array(xarray_data.data) ## TODO: Can it be done without a copy?
|
||||
|
||||
####################
|
||||
# - Output: LazyValueFunc
|
||||
|
@ -169,17 +209,13 @@ class ExtractDataNode(base.MaxwellSimNode):
|
|||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.LazyValueFunc,
|
||||
props={'active_socket_set'},
|
||||
output_sockets={'Data'},
|
||||
output_socket_kinds={'Data': ct.FlowKind.Value},
|
||||
)
|
||||
def compute_extracted_data_lazy(self, props: dict, output_sockets: dict):
|
||||
if self.active_socket_set in {'Field Data', 'Flux Data'}:
|
||||
data = jnp.array(output_sockets['Data'].data)
|
||||
return ct.LazyValueFuncFlow(func=lambda: data, supports_jax=True)
|
||||
|
||||
msg = f'Tried to get a "FlowKind.LazyValueFunc" from socket set {props["active_socket_set"]} in "{self.bl_label}"'
|
||||
raise RuntimeError(msg)
|
||||
def compute_extracted_data_lazy(self, output_sockets: dict) -> ct.LazyValueFuncFlow:
|
||||
return ct.LazyValueFuncFlow(
|
||||
func=lambda: output_sockets['Data'], supports_jax=True
|
||||
)
|
||||
|
||||
####################
|
||||
# - Output: Info
|
||||
|
@ -187,38 +223,157 @@ class ExtractDataNode(base.MaxwellSimNode):
|
|||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.Info,
|
||||
props={'active_socket_set'},
|
||||
output_sockets={'Data'},
|
||||
output_socket_kinds={'Data': ct.FlowKind.Value},
|
||||
props={'monitor_data_type', 'extract_filter'},
|
||||
input_sockets={'Monitor Data'},
|
||||
)
|
||||
def compute_extracted_data_info(self, props: dict, output_sockets: dict):
|
||||
if props['active_socket_set'] == 'Field Data':
|
||||
xarr = output_sockets['Data']
|
||||
def compute_extracted_data_info(
|
||||
self, props: dict, input_sockets: dict
|
||||
) -> ct.InfoFlow: # noqa: PLR0911
|
||||
if input_sockets['Monitor Data'] is None or not props['extract_filter']:
|
||||
return ct.InfoFlow()
|
||||
|
||||
xarr = getattr(input_sockets['Monitor Data'], props['extract_filter'])
|
||||
|
||||
# XYZF: Field / Permittivity / FieldProjectionCartesian
|
||||
if props['monitor_data_type'] in {
|
||||
'Field',
|
||||
'Permittivity',
|
||||
'FieldProjectionCartesian',
|
||||
}:
|
||||
return ct.InfoFlow(
|
||||
dim_names=['x', 'y', 'z', 'f'],
|
||||
dim_idx={
|
||||
axis: ct.ArrayFlow(values=xarr.get_index(axis).values, unit=spu.um)
|
||||
axis: ct.ArrayFlow(
|
||||
values=xarr.get_index(axis).values, unit=spu.um, is_sorted=True
|
||||
)
|
||||
for axis in ['x', 'y', 'z']
|
||||
}
|
||||
| {
|
||||
'f': ct.ArrayFlow(
|
||||
values=xarr.get_index('f').values, unit=spu.hertz
|
||||
values=xarr.get_index('f').values,
|
||||
unit=spu.hertz,
|
||||
is_sorted=True,
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
if props['active_socket_set'] == 'Flux Data':
|
||||
xarr = output_sockets['Data']
|
||||
# XYZT: FieldTime
|
||||
if props['monitor_data_type'] == 'FieldTime':
|
||||
return ct.InfoFlow(
|
||||
dim_names=['x', 'y', 'z', 't'],
|
||||
dim_idx={
|
||||
axis: ct.ArrayFlow(
|
||||
values=xarr.get_index(axis).values, unit=spu.um, is_sorted=True
|
||||
)
|
||||
for axis in ['x', 'y', 'z']
|
||||
}
|
||||
| {
|
||||
't': ct.ArrayFlow(
|
||||
values=xarr.get_index('t').values,
|
||||
unit=spu.second,
|
||||
is_sorted=True,
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
# F: Flux
|
||||
if props['monitor_data_type'] == 'Flux':
|
||||
return ct.InfoFlow(
|
||||
dim_names=['f'],
|
||||
dim_idx={
|
||||
'f': ct.ArrayFlow(
|
||||
values=xarr.get_index('f').values, unit=spu.hertz
|
||||
values=xarr.get_index('f').values,
|
||||
unit=spu.hertz,
|
||||
is_sorted=True,
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
msg = f'Tried to get a "FlowKind.Info" from socket set {props["active_socket_set"]} in "{self.bl_label}"'
|
||||
# T: FluxTime
|
||||
if props['monitor_data_type'] == 'FluxTime':
|
||||
return ct.InfoFlow(
|
||||
dim_names=['t'],
|
||||
dim_idx={
|
||||
't': ct.ArrayFlow(
|
||||
values=xarr.get_index('t').values,
|
||||
unit=spu.hertz,
|
||||
is_sorted=True,
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
# RThetaPhiF: FieldProjectionAngle
|
||||
if props['monitor_data_type'] == 'FieldProjectionAngle':
|
||||
return ct.InfoFlow(
|
||||
dim_names=['r', 'theta', 'phi', 'f'],
|
||||
dim_idx={
|
||||
'r': ct.ArrayFlow(
|
||||
values=xarr.get_index('r').values,
|
||||
unit=spu.micrometer,
|
||||
is_sorted=True,
|
||||
),
|
||||
}
|
||||
| {
|
||||
c: ct.ArrayFlow(
|
||||
values=xarr.get_index(c).values, unit=spu.radian, is_sorted=True
|
||||
)
|
||||
for c in ['r', 'theta', 'phi']
|
||||
}
|
||||
| {
|
||||
'f': ct.ArrayFlow(
|
||||
values=xarr.get_index('f').values,
|
||||
unit=spu.hertz,
|
||||
is_sorted=True,
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
# UxUyRF: FieldProjectionKSpace
|
||||
if props['monitor_data_type'] == 'FieldProjectionKSpace':
|
||||
return ct.InfoFlow(
|
||||
dim_names=['ux', 'uy', 'r', 'f'],
|
||||
dim_idx={
|
||||
c: ct.ArrayFlow(
|
||||
values=xarr.get_index(c).values, unit=None, is_sorted=True
|
||||
)
|
||||
for c in ['ux', 'uy']
|
||||
}
|
||||
| {
|
||||
'r': ct.ArrayFlow(
|
||||
values=xarr.get_index('r').values,
|
||||
unit=spu.micrometer,
|
||||
is_sorted=True,
|
||||
),
|
||||
'f': ct.ArrayFlow(
|
||||
values=xarr.get_index('f').values,
|
||||
unit=spu.hertz,
|
||||
is_sorted=True,
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
# OrderxOrderyF: Diffraction
|
||||
if props['monitor_data_type'] == 'Diffraction':
|
||||
return ct.InfoFlow(
|
||||
dim_names=['orders_x', 'orders_y', 'f'],
|
||||
dim_idx={
|
||||
f'orders_{c}': ct.ArrayFlow(
|
||||
values=xarr.get_index(f'orders_{c}').values,
|
||||
unit=None,
|
||||
is_sorted=True,
|
||||
)
|
||||
for c in ['x', 'y']
|
||||
}
|
||||
| {
|
||||
'f': ct.ArrayFlow(
|
||||
values=xarr.get_index('f').values,
|
||||
unit=spu.hertz,
|
||||
is_sorted=True,
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
msg = f'Unsupported Monitor Data Type {props["monitor_data_type"]} in "FlowKind.Info" of "{self.bl_label}"'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
import typing as typ
|
||||
|
||||
import bpy
|
||||
import jax
|
||||
import jax.numpy as jnp
|
||||
|
||||
from blender_maxwell.utils import logger
|
||||
from blender_maxwell.utils import bl_cache, logger
|
||||
|
||||
from .... import contracts as ct
|
||||
from .... import sockets
|
||||
|
@ -12,43 +13,19 @@ from ... import base, events
|
|||
log = logger.get(__name__)
|
||||
|
||||
|
||||
# @functools.partial(jax.jit, static_argnames=('fixed_axis', 'fixed_axis_value'))
|
||||
# jax.jit
|
||||
def fix_axis(data, fixed_axis: int, fixed_axis_value: float):
|
||||
log.critical(data.shape)
|
||||
# Select Values of Fixed Axis
|
||||
fixed_axis_values = data[
|
||||
tuple(slice(None) if i == fixed_axis else 0 for i in range(data.ndim))
|
||||
]
|
||||
log.critical(fixed_axis_values)
|
||||
|
||||
# Compute Nearest Index on Fixed Axis
|
||||
idx_of_nearest = jnp.argmin(jnp.abs(fixed_axis_values - fixed_axis_value))
|
||||
log.critical(idx_of_nearest)
|
||||
|
||||
# Select Values along Fixed Axis Value
|
||||
return jnp.take(data, idx_of_nearest, axis=fixed_axis)
|
||||
|
||||
|
||||
class FilterMathNode(base.MaxwellSimNode):
|
||||
node_type = ct.NodeType.FilterMath
|
||||
bl_label = 'Filter Math'
|
||||
|
||||
input_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
'Data': sockets.DataSocketDef(format='jax'),
|
||||
}
|
||||
input_socket_sets: typ.ClassVar = {
|
||||
'By Axis Value': {
|
||||
'Axis': sockets.IntegerNumberSocketDef(),
|
||||
'Value': sockets.RealNumberSocketDef(),
|
||||
},
|
||||
'By Axis': {
|
||||
'Axis': sockets.IntegerNumberSocketDef(),
|
||||
},
|
||||
## TODO: bool arrays for comparison/switching/sparse 0-setting/etc. .
|
||||
'By Dim': {},
|
||||
'By Dim Value': {},
|
||||
}
|
||||
output_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
'Data': sockets.DataSocketDef(format='jax'),
|
||||
}
|
||||
|
||||
####################
|
||||
|
@ -56,58 +33,233 @@ class FilterMathNode(base.MaxwellSimNode):
|
|||
####################
|
||||
operation: bpy.props.EnumProperty(
|
||||
name='Op',
|
||||
description='Operation to reduce the input axis with',
|
||||
description='Operation to filter with',
|
||||
items=lambda self, _: self.search_operations(),
|
||||
update=lambda self, context: self.on_prop_changed('operation', context),
|
||||
)
|
||||
|
||||
dim: bpy.props.StringProperty(
|
||||
name='Dim',
|
||||
description='Dims to use when filtering data',
|
||||
default='',
|
||||
search=lambda self, _, edit_text: self.search_dims(edit_text),
|
||||
update=lambda self, context: self.on_prop_changed('dim', context),
|
||||
)
|
||||
|
||||
dim_names: list[str] = bl_cache.BLField([])
|
||||
dim_lens: dict[str, int] = bl_cache.BLField({})
|
||||
|
||||
@property
|
||||
def has_dim(self) -> bool:
|
||||
return (
|
||||
self.active_socket_set in ['By Dim', 'By Dim Value']
|
||||
and self.inputs['Data'].is_linked
|
||||
and self.dim_names
|
||||
)
|
||||
|
||||
####################
|
||||
# - Operation Search
|
||||
####################
|
||||
def search_operations(self) -> list[tuple[str, str, str]]:
|
||||
items = []
|
||||
if self.active_socket_set == 'By Axis Value':
|
||||
if self.active_socket_set == 'By Dim':
|
||||
items += [
|
||||
('FIX', 'Fix Coordinate', '(*, N, *) -> (*, *)'),
|
||||
('SQUEEZE', 'del a | #=1', 'Squeeze'),
|
||||
]
|
||||
if self.active_socket_set == 'By Axis':
|
||||
if self.active_socket_set == 'By Dim Value':
|
||||
items += [
|
||||
('SQUEEZE', 'Squeeze', '(*, 1, *) -> (*, *)'),
|
||||
('FIX', 'del a | i≈v', 'Fix Coordinate'),
|
||||
]
|
||||
else:
|
||||
items += [('NONE', 'None', 'No operations...')]
|
||||
|
||||
return items
|
||||
|
||||
def draw_props(self, _: bpy.types.Context, layout: bpy.types.UILayout) -> None:
|
||||
if self.active_socket_set != 'Axis Expr':
|
||||
layout.prop(self, 'operation')
|
||||
####################
|
||||
# - Dim Search
|
||||
####################
|
||||
def search_dims(self, edit_text: str) -> list[tuple[str, str, str]]:
|
||||
if self.has_dim:
|
||||
dims = [
|
||||
(dim_name, dim_name)
|
||||
for dim_name in self.dim_names
|
||||
if edit_text == '' or edit_text.lower() in dim_name.lower()
|
||||
]
|
||||
|
||||
# Squeeze: Dimension Must Have Length=1
|
||||
if self.operation == 'SQUEEZE':
|
||||
return [dim for dim in dims if self.dim_lens[dim[0]] == 1]
|
||||
return dims
|
||||
return []
|
||||
|
||||
####################
|
||||
# - Compute
|
||||
# - UI
|
||||
####################
|
||||
def draw_props(self, _: bpy.types.Context, layout: bpy.types.UILayout) -> None:
|
||||
layout.prop(self, 'operation', text='')
|
||||
if self.has_dim:
|
||||
layout.prop(self, 'dim', text='')
|
||||
|
||||
####################
|
||||
# - Events
|
||||
####################
|
||||
@events.on_value_changed(
|
||||
socket_name={'Data'},
|
||||
prop_name={'active_socket_set', 'dim'},
|
||||
props={'active_socket_set', 'dim'},
|
||||
input_sockets={'Data'},
|
||||
input_socket_kinds={'Data': ct.FlowKind.Info},
|
||||
input_sockets_optional={'Data': True},
|
||||
)
|
||||
def on_any_change(self, props: dict, input_sockets: dict):
|
||||
# Set Dimension Names from InfoFlow
|
||||
if input_sockets['Data'].dim_names:
|
||||
self.dim_names = input_sockets['Data'].dim_names
|
||||
self.dim_lens = {
|
||||
dim_name: len(dim_idx)
|
||||
for dim_name, dim_idx in input_sockets['Data'].dim_idx.items()
|
||||
}
|
||||
else:
|
||||
self.dim_names = []
|
||||
self.dim_lens = {}
|
||||
|
||||
# Add Input Value w/Unit from InfoFlow
|
||||
## Socket Type is determined from the Unit
|
||||
if (
|
||||
props['active_socket_set'] == 'By Dim Value'
|
||||
and props['dim'] != ''
|
||||
and props['dim'] in input_sockets['Data'].dim_names
|
||||
):
|
||||
socket_def = sockets.SOCKET_DEFS[
|
||||
ct.unit_to_socket_type(input_sockets['Data'].dim_idx[props['dim']].unit)
|
||||
]
|
||||
if (
|
||||
_val_socket_def := self.loose_input_sockets.get('Value')
|
||||
) is None or _val_socket_def != socket_def:
|
||||
self.loose_input_sockets = {
|
||||
'Value': socket_def(),
|
||||
}
|
||||
elif self.loose_input_sockets:
|
||||
self.loose_input_sockets = {}
|
||||
|
||||
####################
|
||||
# - Compute: LazyValueFunc / Array
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
props={'operation', 'active_socket_set'},
|
||||
input_sockets={'Data', 'Axis', 'Value'},
|
||||
input_sockets_optional={'Axis': True, 'Value': True},
|
||||
kind=ct.FlowKind.LazyValueFunc,
|
||||
props={'active_socket_set', 'operation', 'dim'},
|
||||
input_sockets={'Data'},
|
||||
input_socket_kinds={'Data': {ct.FlowKind.LazyValueFunc, ct.FlowKind.Info}},
|
||||
)
|
||||
def compute_data(self, props: dict, input_sockets: dict):
|
||||
if not hasattr(input_sockets['Data'], 'shape'):
|
||||
msg = 'Input socket "Data" must be an N-D Array (with a "shape" attribute)'
|
||||
lazy_value_func = input_sockets['Data'][ct.FlowKind.LazyValueFunc]
|
||||
info = input_sockets['Data'][ct.FlowKind.Info]
|
||||
|
||||
# Determine Bound/Free Parameters
|
||||
if props['dim'] in info.dim_names:
|
||||
axis = info.dim_names.index(props['dim'])
|
||||
else:
|
||||
msg = 'Dimension invalid'
|
||||
raise ValueError(msg)
|
||||
|
||||
# By Axis Value
|
||||
if props['active_socket_set'] == 'By Axis Value':
|
||||
if props['operation'] == 'FIX':
|
||||
return fix_axis(
|
||||
input_sockets['Data'], input_sockets['Axis'], input_sockets['Value']
|
||||
func_args = [int] if props['active_socket_set'] == 'By Dim Value' else []
|
||||
|
||||
# Select Function
|
||||
filter_func: typ.Callable[[jax.Array], jax.Array] = {
|
||||
'By Dim': {'SQUEEZE': lambda data: jnp.squeeze(data, axis)},
|
||||
'By Dim Value': {
|
||||
'FIX': lambda data, fixed_axis_idx: jnp.take(
|
||||
data, fixed_axis_idx, axis=axis
|
||||
)
|
||||
},
|
||||
}[props['active_socket_set']][props['operation']]
|
||||
|
||||
# Compose Function for Output
|
||||
return lazy_value_func.compose_within(
|
||||
filter_func,
|
||||
enclosing_func_args=func_args,
|
||||
supports_jax=True,
|
||||
)
|
||||
|
||||
# By Axis
|
||||
if props['active_socket_set'] == 'By Axis':
|
||||
if props['operation'] == 'SQUEEZE':
|
||||
return jnp.squeeze(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.Array,
|
||||
output_sockets={'Data'},
|
||||
output_socket_kinds={
|
||||
'Data': {ct.FlowKind.LazyValueFunc, ct.FlowKind.Params},
|
||||
},
|
||||
)
|
||||
def compute_array(self, output_sockets: dict) -> ct.ArrayFlow:
|
||||
lazy_value_func = output_sockets['Data'][ct.FlowKind.LazyValueFunc]
|
||||
params = output_sockets['Data'][ct.FlowKind.Params]
|
||||
return ct.ArrayFlow(
|
||||
values=lazy_value_func.func_jax(*params.func_args, **params.func_kwargs),
|
||||
unit=None, ## TODO: Unit Propagation
|
||||
)
|
||||
|
||||
msg = 'Operation invalid'
|
||||
raise ValueError(msg)
|
||||
####################
|
||||
# - Compute Auxiliary: Info / Params
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.Info,
|
||||
props={'active_socket_set', 'dim', 'operation'},
|
||||
input_sockets={'Data'},
|
||||
input_socket_kinds={'Data': ct.FlowKind.Info},
|
||||
)
|
||||
def compute_data_info(self, props: dict, input_sockets: dict) -> ct.InfoFlow:
|
||||
info = input_sockets['Data']
|
||||
|
||||
if props['dim'] in info.dim_names:
|
||||
axis = info.dim_names.index(props['dim'])
|
||||
else:
|
||||
return ct.InfoFlow()
|
||||
|
||||
# Compute Axis
|
||||
if (props['active_socket_set'], props['operation']) in [
|
||||
('By Dim', 'SQUEEZE'),
|
||||
('By Dim Value', 'FIX'),
|
||||
] and info.dim_names:
|
||||
return ct.InfoFlow(
|
||||
dim_names=info.dim_names[:axis] + info.dim_names[axis + 1 :],
|
||||
dim_idx={
|
||||
dim_name: dim_idx
|
||||
for dim_name, dim_idx in info.dim_idx.items()
|
||||
if dim_name != props['dim']
|
||||
},
|
||||
)
|
||||
|
||||
return ct.InfoFlow()
|
||||
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.Params,
|
||||
props={'active_socket_set', 'dim', 'operation'},
|
||||
input_sockets={'Data', 'Value'},
|
||||
input_socket_kinds={'Data': {ct.FlowKind.Info, ct.FlowKind.Params}},
|
||||
input_sockets_optional={'Value': True},
|
||||
)
|
||||
def compute_data_params(self, props: dict, input_sockets: dict) -> ct.ParamsFlow:
|
||||
info = input_sockets['Data'][ct.FlowKind.Info]
|
||||
params = input_sockets['Data'][ct.FlowKind.Params]
|
||||
|
||||
if (
|
||||
(props['active_socket_set'], props['operation'])
|
||||
in [
|
||||
('By Dim Value', 'FIX'),
|
||||
]
|
||||
and props['dim'] in info.dim_names
|
||||
and input_sockets['Value'] is not None
|
||||
):
|
||||
# Compute IDX Corresponding to Value
|
||||
## Aka. "indexing by a float"
|
||||
nearest_idx_to_value = info.dim_idx[props['dim']].nearest_idx_of(
|
||||
input_sockets['Value'], require_sorted=True
|
||||
)
|
||||
|
||||
# Compose Parameters
|
||||
return params.compose_within(enclosing_func_args=[nearest_idx_to_value])
|
||||
|
||||
return params
|
||||
|
||||
|
||||
####################
|
||||
|
@ -117,3 +269,6 @@ BL_REGISTER = [
|
|||
FilterMathNode,
|
||||
]
|
||||
BL_NODES = {ct.NodeType.FilterMath: (ct.NodeCategory.MAXWELLSIM_ANALYSIS_MATH)}
|
||||
|
||||
|
||||
## TODO: Value needs to be a Loose socket, events need to be able to handle sets of kinds, and the invalidator needs to handle sets of kinds too. Given all that, we only need to propagate the output array unit; given all of that, we are 100% ready to fix that coordinate.
|
||||
|
|
|
@ -13,13 +13,15 @@ from ... import base, events
|
|||
|
||||
log = logger.get(__name__)
|
||||
|
||||
X_COMPLEX = sp.Symbol('x', complex=True)
|
||||
|
||||
|
||||
class MapMathNode(base.MaxwellSimNode):
|
||||
node_type = ct.NodeType.MapMath
|
||||
bl_label = 'Map Math'
|
||||
|
||||
input_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
'Data': sockets.DataSocketDef(format='jax'),
|
||||
}
|
||||
input_socket_sets: typ.ClassVar = {
|
||||
'By Element': {},
|
||||
|
@ -27,13 +29,13 @@ class MapMathNode(base.MaxwellSimNode):
|
|||
'By Matrix': {},
|
||||
'Expr': {
|
||||
'Mapper': sockets.ExprSocketDef(
|
||||
symbols=[sp.Symbol('x')],
|
||||
default_expr=sp.Symbol('x'),
|
||||
complex_symbols=[X_COMPLEX],
|
||||
default_expr=X_COMPLEX,
|
||||
),
|
||||
},
|
||||
}
|
||||
output_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
'Data': sockets.DataSocketDef(format='jax'),
|
||||
}
|
||||
|
||||
####################
|
||||
|
@ -51,53 +53,56 @@ class MapMathNode(base.MaxwellSimNode):
|
|||
if self.active_socket_set == 'By Element':
|
||||
items += [
|
||||
# General
|
||||
('REAL', 'real', 'ℝ(L) (by el)'),
|
||||
('IMAG', 'imag', 'Im(L) (by el)'),
|
||||
('ABS', 'abs', '|L| (by el)'),
|
||||
('SQ', 'square', 'L^2 (by el)'),
|
||||
('SQRT', 'sqrt', 'sqrt(L) (by el)'),
|
||||
('INV_SQRT', '1/sqrt', '1/sqrt(L) (by el)'),
|
||||
('REAL', 'ℝ(v)', 'real(v) (by el)'),
|
||||
('IMAG', 'Im(v)', 'imag(v) (by el)'),
|
||||
('ABS', '|v|', 'abs(v) (by el)'),
|
||||
('SQ', 'v²', 'v^2 (by el)'),
|
||||
('SQRT', '√v', 'sqrt(v) (by el)'),
|
||||
('INV_SQRT', '1/√v', '1/sqrt(v) (by el)'),
|
||||
# Trigonometry
|
||||
('COS', 'cos', 'cos(L) (by el)'),
|
||||
('SIN', 'sin', 'sin(L) (by el)'),
|
||||
('TAN', 'tan', 'tan(L) (by el)'),
|
||||
('ACOS', 'acos', 'acos(L) (by el)'),
|
||||
('ASIN', 'asin', 'asin(L) (by el)'),
|
||||
('ATAN', 'atan', 'atan(L) (by el)'),
|
||||
('COS', 'cos v', 'cos(v) (by el)'),
|
||||
('SIN', 'sin v', 'sin(v) (by el)'),
|
||||
('TAN', 'tan v', 'tan(v) (by el)'),
|
||||
('ACOS', 'acos v', 'acos(v) (by el)'),
|
||||
('ASIN', 'asin v', 'asin(v) (by el)'),
|
||||
('ATAN', 'atan v', 'atan(v) (by el)'),
|
||||
]
|
||||
elif self.active_socket_set in 'By Vector':
|
||||
items += [
|
||||
('NORM_2', '2-Norm', '||L||_2 (by Vec)'),
|
||||
('NORM_2', '||v||₂', 'norm(v, 2) (by Vec)'),
|
||||
]
|
||||
elif self.active_socket_set == 'By Matrix':
|
||||
items += [
|
||||
# Matrix -> Number
|
||||
('DET', 'Determinant', 'det(L) (by Mat)'),
|
||||
('COND', 'Condition', 'κ(L) (by Mat)'),
|
||||
('NORM_FRO', 'Frobenius Norm', '||L||_F (by Mat)'),
|
||||
('RANK', 'Rank', 'rank(L) (by Mat)'),
|
||||
('DET', 'det V', 'det(V) (by Mat)'),
|
||||
('COND', 'κ(V)', 'cond(V) (by Mat)'),
|
||||
('NORM_FRO', '||V||_F', 'norm(V, frobenius) (by Mat)'),
|
||||
('RANK', 'rank V', 'rank(V) (by Mat)'),
|
||||
# Matrix -> Array
|
||||
('DIAG', 'Diagonal', 'diag(L) (by Mat)'),
|
||||
('EIG_VALS', 'Eigenvalues', 'eigvals(L) (by Mat)'),
|
||||
('SVD_VALS', 'SVD', 'svd(L) -> diag(Σ) (by Mat)'),
|
||||
('DIAG', 'diag V', 'diag(V) (by Mat)'),
|
||||
('EIG_VALS', 'eigvals V', 'eigvals(V) (by Mat)'),
|
||||
('SVD_VALS', 'svdvals V', 'diag(svd(V)) (by Mat)'),
|
||||
# Matrix -> Matrix
|
||||
('INV', 'Invert', 'L^(-1) (by Mat)'),
|
||||
('TRA', 'Transpose', 'L^T (by Mat)'),
|
||||
('INV', 'V⁻¹', 'V^(-1) (by Mat)'),
|
||||
('TRA', 'Vt', 'V^T (by Mat)'),
|
||||
# Matrix -> Matrices
|
||||
('QR', 'QR', 'L -> Q·R (by Mat)'),
|
||||
('CHOL', 'Cholesky', 'L -> L·Lh (by Mat)'),
|
||||
('SVD', 'SVD', 'L -> U·Σ·Vh (by Mat)'),
|
||||
('QR', 'qr V', 'qr(V) -> Q·R (by Mat)'),
|
||||
('CHOL', 'chol V', 'cholesky(V) -> V·V† (by Mat)'),
|
||||
('SVD', 'svd V', 'svd(V) -> U·Σ·V† (by Mat)'),
|
||||
]
|
||||
elif self.active_socket_set == 'Expr':
|
||||
items += [('EXPR_EL', 'By Element', 'Expression-defined (by el)')]
|
||||
else:
|
||||
items += ['EXPR_EL', 'Expr (by el)', 'Expression-defined (by el)']
|
||||
msg = f'Invalid socket set {self.active_socket_set}'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
return items
|
||||
|
||||
def draw_props(self, _: bpy.types.Context, layout: bpy.types.UILayout) -> None:
|
||||
if self.active_socket_set not in {'Expr (Element)'}:
|
||||
layout.prop(self, 'operation')
|
||||
layout.prop(self, 'operation', text='')
|
||||
|
||||
####################
|
||||
# - Compute
|
||||
# - Compute: LazyValueFunc / Array
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
|
@ -148,8 +153,8 @@ class MapMathNode(base.MaxwellSimNode):
|
|||
'CHOL': lambda data: jnp.linalg.cholesky(data),
|
||||
'SVD': lambda data: jnp.linalg.svd(data),
|
||||
},
|
||||
'By El (Expr)': {
|
||||
'EXPR_EL': lambda data: input_sockets['Mapper'](data),
|
||||
'Expr': {
|
||||
'EXPR_EL': lambda data: input_sockets['Mapper'].func(data),
|
||||
},
|
||||
}[props['active_socket_set']][props['operation']]
|
||||
|
||||
|
@ -159,6 +164,43 @@ class MapMathNode(base.MaxwellSimNode):
|
|||
supports_jax=True,
|
||||
)
|
||||
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.Array,
|
||||
output_sockets={'Data'},
|
||||
output_socket_kinds={
|
||||
'Data': {ct.FlowKind.LazyValueFunc, ct.FlowKind.Params},
|
||||
},
|
||||
)
|
||||
def compute_array(self, output_sockets: dict) -> ct.ArrayFlow:
|
||||
lazy_value_func = output_sockets['Data'][ct.FlowKind.LazyValueFunc]
|
||||
params = output_sockets['Data'][ct.FlowKind.Params]
|
||||
return ct.ArrayFlow(
|
||||
values=lazy_value_func.func_jax(*params.func_args, **params.func_kwargs),
|
||||
unit=None, ## TODO: Unit Propagation
|
||||
)
|
||||
|
||||
####################
|
||||
# - Compute Auxiliary: Info / Params
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.Info,
|
||||
input_sockets={'Data'},
|
||||
input_socket_kinds={'Data': ct.FlowKind.Info},
|
||||
)
|
||||
def compute_data_info(self, input_sockets: dict) -> ct.InfoFlow:
|
||||
return input_sockets['Data']
|
||||
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.Params,
|
||||
input_sockets={'Data'},
|
||||
input_socket_kinds={'Data': ct.FlowKind.Params},
|
||||
)
|
||||
def compute_data_params(self, input_sockets: dict) -> ct.ParamsFlow:
|
||||
return input_sockets['Data']
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
|
|
|
@@ -21,8 +21,7 @@ class VizNode(base.MaxwellSimNode):
# - Sockets
####################
input_sockets: typ.ClassVar = {
'Data': sockets.AnySocketDef(),
'Freq': sockets.PhysicalFreqSocketDef(),
'Data': sockets.DataSocketDef(format='jax'),
}
output_sockets: typ.ClassVar = {
'Preview': sockets.AnySocketDef(),

@@ -57,12 +56,9 @@ class VizNode(base.MaxwellSimNode):
#####################
@events.on_show_plot(
managed_objs={'plot'},
input_sockets={'Data', 'Freq'},
input_sockets={'Data'},
input_socket_kinds={'Data': ct.FlowKind.Array},
props={'colormap'},
unit_systems={'Tidy3DUnits': ct.UNITS_TIDY3D},
scale_input_sockets={
'Freq': 'Tidy3DUnits',
},
stop_propagation=True,
)
def on_show_plot(

@@ -70,10 +66,9 @@ class VizNode(base.MaxwellSimNode):
managed_objs: dict,
input_sockets: dict,
props: dict,
unit_systems: dict,
):
managed_objs['plot'].map_2d_to_image(
input_sockets['Data'].as_bound_jax_func(),
input_sockets['Data'].values,
colormap=props['colormap'],
bl_select=True,
)
@@ -4,8 +4,11 @@ Attributes:
MANDATORY_PROPS: Properties that must be defined on the `MaxwellSimNode`.
"""

## TODO: Check whether input_socket_sets and output_socket_sets have the right shape? Or just use a type checker...

import typing as typ
import uuid
from collections import defaultdict
from types import MappingProxyType

import bpy

@@ -66,6 +69,9 @@ class MaxwellSimNode(bpy.types.Node):
dict[ct.ManagedObjName, type[_managed_objs.ManagedObj]]
] = MappingProxyType({})

def reset_instance_id(self) -> None:
self.instance_id = str(uuid.uuid4())

####################
# - Class Methods
####################

@@ -431,17 +437,17 @@ class MaxwellSimNode(bpy.types.Node):
# Create BL Socket from Socket
## Set 'display_shape' from 'socket_shape'
bl_socket = all_bl_sockets.new(
all_bl_sockets.new(
str(socket_def.socket_type.value),
socket_name,
)
bl_socket.display_shape = bl_socket.socket_shape

# Record Socket Creation
created_sockets[socket_name] = socket_def

# Initialize Just-Created BL Sockets
for socket_name, socket_def in created_sockets.items():
socket_def.preinit(all_bl_sockets[socket_name])
socket_def.init(all_bl_sockets[socket_name])

def _sync_sockets(self) -> None:

@@ -625,7 +631,17 @@ class MaxwellSimNode(bpy.types.Node):
return output_socket_methods[0](self)

msg = f'No output method for ({output_socket_name}, {kind.value!s}'
# Auxiliary Fallbacks
if kind == ct.FlowKind.Info:
return ct.InfoFlow()

if kind == ct.FlowKind.Params:
return ct.ParamsFlow()

if optional:
return None

msg = f'No output method for ({output_socket_name}, {kind})'
raise ValueError(msg)

####################

@@ -634,8 +650,9 @@ class MaxwellSimNode(bpy.types.Node):
def _should_recompute_output_socket(
self,
method_info: events.InfoOutputRequested,
input_socket_name: ct.SocketName,
prop_name: str,
input_socket_name: ct.SocketName | None,
input_socket_kinds: set[ct.FlowKind] | None,
prop_name: str | None,
) -> bool:
return (
prop_name is not None

@@ -643,6 +660,20 @@ class MaxwellSimNode(bpy.types.Node):
or input_socket_name is not None
and (
input_socket_name in method_info.depon_input_sockets
and (
input_socket_kinds is None
or (
isinstance(
_kind := method_info.depon_input_socket_kinds.get(
input_socket_name, ct.FlowKind.Value
),
set,
)
and input_socket_kinds.intersection(_kind)
)
or _kind == ct.FlowKind.Value
or _kind in input_socket_kinds
)
or (
method_info.depon_all_loose_input_sockets
and input_socket_name in self.loose_input_sockets

@@ -650,10 +681,56 @@ class MaxwellSimNode(bpy.types.Node):
)
)
@bl_cache.cached_bl_property(persist=False)
|
||||
def _dependent_outputs(
|
||||
self,
|
||||
) -> dict[
|
||||
tuple[ct.SocketName, ct.FlowKind], set[tuple[ct.SocketName, ct.FlowKind]]
|
||||
]:
|
||||
## TODO: Cleanup
|
||||
## TODO: Detect cycles?
|
||||
## TODO: Networkx?
|
||||
altered_to_invalidated = defaultdict(set)
|
||||
output_requested_methods = self.event_methods_by_event[
|
||||
ct.FlowEvent.OutputRequested
|
||||
]
|
||||
|
||||
for altered_method in output_requested_methods:
|
||||
altered_info = altered_method.callback_info
|
||||
altered_key = (altered_info.output_socket_name, altered_info.kind)
|
||||
|
||||
for invalidated_method in output_requested_methods:
|
||||
invalidated_info = invalidated_method.callback_info
|
||||
|
||||
if (
|
||||
altered_info.output_socket_name
|
||||
in invalidated_info.depon_output_sockets
|
||||
):
|
||||
is_same_kind = (
|
||||
altered_info.kind
|
||||
== (
|
||||
_kind := invalidated_info.depon_output_socket_kinds.get(
|
||||
altered_info.output_socket_name
|
||||
)
|
||||
)
|
||||
or (isinstance(_kind, set) and altered_info.kind in _kind)
|
||||
or altered_info.kind == ct.FlowKind.Value
|
||||
)
|
||||
|
||||
if is_same_kind:
|
||||
invalidated_key = (
|
||||
invalidated_info.output_socket_name,
|
||||
invalidated_info.kind,
|
||||
)
|
||||
altered_to_invalidated[altered_key].add(invalidated_key)
|
||||
|
||||
return altered_to_invalidated
|
||||
|
||||
def trigger_event(
|
||||
self,
|
||||
event: ct.FlowEvent,
|
||||
socket_name: ct.SocketName | None = None,
|
||||
socket_kinds: set[ct.FlowKind] | None = None,
|
||||
prop_name: ct.SocketName | None = None,
|
||||
) -> None:
|
||||
"""Recursively triggers events forwards or backwards along the node tree, allowing nodes in the update path to react.
|
||||
|
@ -671,16 +748,32 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
socket_name: The input socket that was altered, if any, in order to trigger this event.
|
||||
prop_name: The property that was altered, if any, in order to trigger this event.
|
||||
"""
|
||||
# Outflow Socket Kinds
|
||||
## Something has happened, that much is for sure.
|
||||
## Output methods might require invalidation of (outsck, FlowKind)s.
|
||||
## Whichever FlowKinds we do happen to invalidate, we should mark.
|
||||
## This way, each FlowKind gets its own invalidation chain.
|
||||
altered_socket_kinds = set()
|
||||
|
||||
# Invalidate Caches on DataChanged
|
||||
if event == ct.FlowEvent.DataChanged:
|
||||
input_socket_name = socket_name ## Trigger direction is forwards
|
||||
|
||||
# Invalidate Input Socket Cache
|
||||
if input_socket_name is not None:
|
||||
if socket_kinds is None:
|
||||
self._compute_input.invalidate(
|
||||
input_socket_name=input_socket_name,
|
||||
kind=...,
|
||||
unit_system=...,
|
||||
)
|
||||
else:
|
||||
for socket_kind in socket_kinds:
|
||||
self._compute_input.invalidate(
|
||||
input_socket_name=input_socket_name,
|
||||
kind=socket_kind,
|
||||
unit_system=...,
|
||||
)
|
||||
|
||||
# Invalidate Output Socket Cache
|
||||
for output_socket_method in self.event_methods_by_event[
|
||||
|
@ -688,11 +781,39 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
]:
|
||||
method_info = output_socket_method.callback_info
|
||||
if self._should_recompute_output_socket(
|
||||
method_info, socket_name, prop_name
|
||||
method_info, socket_name, socket_kinds, prop_name
|
||||
):
|
||||
out_sckname = method_info.output_socket_name
|
||||
kind = method_info.kind
|
||||
|
||||
# Invalidate Output Directly
|
||||
# log.critical(
|
||||
# '[%s] Invalidating: (%s, %s)',
|
||||
# self.sim_node_name,
|
||||
# out_sckname,
|
||||
# str(kind),
|
||||
# )
|
||||
altered_socket_kinds.add(kind)
|
||||
self.compute_output.invalidate(
|
||||
output_socket_name=method_info.output_socket_name,
|
||||
kind=method_info.kind,
|
||||
output_socket_name=out_sckname,
|
||||
kind=kind,
|
||||
)
|
||||
|
||||
# Invalidate Any Dependent Outputs
|
||||
if (
|
||||
dep_outs := self._dependent_outputs.get((out_sckname, kind))
|
||||
) is not None:
|
||||
for dep_out in dep_outs:
|
||||
# log.critical(
|
||||
# '![%s] Invalidating: (%s, %s)',
|
||||
# self.sim_node_name,
|
||||
# dep_out[0],
|
||||
# dep_out[1],
|
||||
# )
|
||||
altered_socket_kinds.add(dep_out[1])
|
||||
self.compute_output.invalidate(
|
||||
output_socket_name=dep_out[0],
|
||||
kind=dep_out[1],
|
||||
)
|
||||
|
||||
# Run Triggered Event Methods
|
||||
|
@ -711,7 +832,13 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
direc=ct.FlowEvent.flow_direction[event]
|
||||
)
|
||||
for bl_socket in triggered_sockets:
|
||||
bl_socket.trigger_event(event)
|
||||
# log.critical(
|
||||
# '![%s] Propagating: (%s, %s)',
|
||||
# self.sim_node_name,
|
||||
# event,
|
||||
# altered_socket_kinds,
|
||||
# )
|
||||
bl_socket.trigger_event(event, socket_kinds=altered_socket_kinds)
|
||||
|
||||
####################
|
||||
# - Property Event: On Update
|
||||
|
@ -838,7 +965,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
"""
|
||||
# Initialize Instance ID
|
||||
## This is used by various caches from 'bl_cache'.
|
||||
self.instance_id = str(uuid.uuid4())
|
||||
self.reset_instance_id()
|
||||
|
||||
# Initialize Name
|
||||
## This is used whenever a unique name pointing to this node is needed.
|
||||
|
@ -881,7 +1008,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
Blender runs this when instantiating this node from an existing node.
|
||||
"""
|
||||
# Generate New Instance ID
|
||||
self.instance_id = str(uuid.uuid4())
|
||||
self.reset_instance_id()
|
||||
|
||||
# Generate New Sim Node Name
|
||||
## Blender will automatically add .001 so that `self.name` is unique.
|
||||
|
|
|
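The `MaxwellSimNode` changes above cache, per node, which (output socket, FlowKind) pairs depend on which others, so that a `DataChanged` event only invalidates the FlowKinds actually affected. A minimal standalone sketch of that bookkeeping follows; the `FlowKind` members and the single dependency edge are illustrative, not the addon's real graph.

from collections import defaultdict
from enum import Enum, auto


class FlowKind(Enum):
    # Illustrative subset of the addon's FlowKind enum.
    Value = auto()
    Info = auto()
    Params = auto()


# (output socket, kind) -> set of (output socket, kind) pairs to invalidate with it,
# mirroring the shape of MaxwellSimNode._dependent_outputs.
DEPENDENT_OUTPUTS: dict[tuple[str, FlowKind], set[tuple[str, FlowKind]]] = defaultdict(set)
DEPENDENT_OUTPUTS[('Data', FlowKind.Info)].add(('Preview', FlowKind.Value))  # hypothetical edge


def invalidated_by(output_socket: str, kind: FlowKind) -> set[tuple[str, FlowKind]]:
    """One level of dependent invalidation, as trigger_event() applies it."""
    return set(DEPENDENT_OUTPUTS.get((output_socket, kind), set()))


assert invalidated_by('Data', FlowKind.Info) == {('Preview', FlowKind.Value)}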
@ -32,11 +32,11 @@ class InfoOutputRequested:
|
|||
depon_props: set[str]
|
||||
|
||||
depon_input_sockets: set[ct.SocketName]
|
||||
depon_input_socket_kinds: dict[ct.SocketName, ct.FlowKind]
|
||||
depon_input_socket_kinds: dict[ct.SocketName, ct.FlowKind | set[ct.FlowKind]]
|
||||
depon_all_loose_input_sockets: bool
|
||||
|
||||
depon_output_sockets: set[ct.SocketName]
|
||||
depon_output_socket_kinds: dict[ct.SocketName, ct.FlowKind]
|
||||
depon_output_socket_kinds: dict[ct.SocketName, ct.FlowKind | set[ct.FlowKind]]
|
||||
depon_all_loose_output_sockets: bool
|
||||
|
||||
|
||||
|
@ -59,10 +59,14 @@ def event_decorator(
|
|||
props: set[PropName] = frozenset(),
|
||||
input_sockets: set[ct.SocketName] = frozenset(),
|
||||
input_sockets_optional: dict[ct.SocketName, bool] = MappingProxyType({}),
|
||||
input_socket_kinds: dict[ct.SocketName, ct.FlowKind] = MappingProxyType({}),
|
||||
input_socket_kinds: dict[
|
||||
ct.SocketName, ct.FlowKind | set[ct.FlowKind]
|
||||
] = MappingProxyType({}),
|
||||
output_sockets: set[ct.SocketName] = frozenset(),
|
||||
output_sockets_optional: dict[ct.SocketName, bool] = MappingProxyType({}),
|
||||
output_socket_kinds: dict[ct.SocketName, ct.FlowKind] = MappingProxyType({}),
|
||||
output_socket_kinds: dict[
|
||||
ct.SocketName, ct.FlowKind | set[ct.FlowKind]
|
||||
] = MappingProxyType({}),
|
||||
all_loose_input_sockets: bool = False,
|
||||
all_loose_output_sockets: bool = False,
|
||||
# Request Unit System Scaling
|
||||
|
@ -157,11 +161,9 @@ def event_decorator(
|
|||
'input_sockets': {
|
||||
input_socket_name: node._compute_input(
|
||||
input_socket_name,
|
||||
kind=input_socket_kinds.get(
|
||||
input_socket_name, ct.FlowKind.Value
|
||||
),
|
||||
kind=_kind,
|
||||
unit_system=(
|
||||
unit_system := unit_systems.get(
|
||||
unit_systems.get(
|
||||
scale_input_sockets.get(input_socket_name)
|
||||
)
|
||||
),
|
||||
|
@ -169,6 +171,25 @@ def event_decorator(
|
|||
input_socket_name, False
|
||||
),
|
||||
)
|
||||
if not isinstance(
|
||||
_kind := input_socket_kinds.get(
|
||||
input_socket_name, ct.FlowKind.Value
|
||||
),
|
||||
set,
|
||||
)
|
||||
else {
|
||||
kind: node._compute_input(
|
||||
input_socket_name,
|
||||
kind=kind,
|
||||
unit_system=unit_systems.get(
|
||||
scale_input_sockets.get(input_socket_name)
|
||||
),
|
||||
optional=input_sockets_optional.get(
|
||||
input_socket_name, False
|
||||
),
|
||||
)
|
||||
for kind in _kind
|
||||
}
|
||||
for input_socket_name in input_sockets
|
||||
}
|
||||
}
|
||||
|
@ -177,37 +198,39 @@ def event_decorator(
|
|||
)
|
||||
|
||||
## Output Sockets
|
||||
def _g_output_socket(output_socket_name: ct.SocketName, kind: ct.FlowKind):
|
||||
if scale_output_sockets.get(output_socket_name) is None:
|
||||
return node.compute_output(
|
||||
output_socket_name,
|
||||
kind=kind,
|
||||
optional=output_sockets_optional.get(output_socket_name, False),
|
||||
)
|
||||
|
||||
return ct.FlowKind.scale_to_unit_system(
|
||||
kind,
|
||||
node.compute_output(
|
||||
output_socket_name,
|
||||
kind=kind,
|
||||
optional=output_sockets_optional.get(output_socket_name, False),
|
||||
),
|
||||
node.outputs[output_socket_name].socket_type,
|
||||
unit_systems.get(scale_output_sockets.get(output_socket_name)),
|
||||
)
|
||||
|
||||
method_kw_args |= (
|
||||
{
|
||||
'output_sockets': {
|
||||
output_socket_name: ct.FlowKind.scale_to_unit_system(
|
||||
(
|
||||
output_socket_kind := output_socket_kinds.get(
|
||||
output_socket_name, ct.FlowKind.Value
|
||||
)
|
||||
),
|
||||
node.compute_output(
|
||||
output_socket_name,
|
||||
kind=output_socket_kind,
|
||||
optional=output_sockets_optional.get(
|
||||
output_socket_name, False
|
||||
),
|
||||
),
|
||||
node.outputs[output_socket_name].socket_type,
|
||||
unit_systems.get(
|
||||
scale_output_sockets.get(output_socket_name)
|
||||
),
|
||||
)
|
||||
if scale_output_sockets.get(output_socket_name) is not None
|
||||
else node.compute_output(
|
||||
output_socket_name,
|
||||
kind=output_socket_kinds.get(
|
||||
output_socket_name: _g_output_socket(output_socket_name, _kind)
|
||||
if not isinstance(
|
||||
_kind := output_socket_kinds.get(
|
||||
output_socket_name, ct.FlowKind.Value
|
||||
),
|
||||
optional=output_sockets_optional.get(
|
||||
output_socket_name, False
|
||||
),
|
||||
set,
|
||||
)
|
||||
else {
|
||||
kind: _g_output_socket(output_socket_name, kind)
|
||||
for kind in _kind
|
||||
}
|
||||
for output_socket_name in output_sockets
|
||||
}
|
||||
}
|
||||
|
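With `input_socket_kinds` now accepting either a single `ct.FlowKind` or a set of them, an event method can request several flows of one socket at once; a set yields a dict keyed by `FlowKind`. A sketch of the intended call pattern, assuming a hypothetical node method (the decorator and `ct` names are the ones used in this commit):

@events.computes_output_socket(
    'Data',
    kind=ct.FlowKind.Value,
    input_sockets={'Data'},
    # A plain FlowKind returns the flow object directly; a set returns a dict keyed by kind.
    input_socket_kinds={'Data': {ct.FlowKind.Info, ct.FlowKind.Params}},
)
def compute_data(self, input_sockets: dict):
    info = input_sockets['Data'][ct.FlowKind.Info]      # ct.InfoFlow
    params = input_sockets['Data'][ct.FlowKind.Params]  # ct.ParamsFlow
    ...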
@ -327,12 +350,12 @@ def computes_output_socket(
|
|||
kind=kind,
|
||||
depon_props=kwargs.get('props', set()),
|
||||
depon_input_sockets=kwargs.get('input_sockets', set()),
|
||||
depon_input_socket_kinds=kwargs.get('input_socket_kinds', set()),
|
||||
depon_input_socket_kinds=kwargs.get('input_socket_kinds', {}),
|
||||
depon_output_sockets=kwargs.get('output_sockets', set()),
|
||||
depon_output_socket_kinds=kwargs.get('output_socket_kinds', set()),
|
||||
depon_all_loose_input_sockets=kwargs.get('all_loose_input_sockets', set()),
|
||||
depon_output_socket_kinds=kwargs.get('output_socket_kinds', {}),
|
||||
depon_all_loose_input_sockets=kwargs.get('all_loose_input_sockets', False),
|
||||
depon_all_loose_output_sockets=kwargs.get(
|
||||
'all_loose_output_sockets', set()
|
||||
'all_loose_output_sockets', False
|
||||
),
|
||||
),
|
||||
**kwargs, ## stop_propagation has no effect.
|
||||
|
|
|
@ -62,7 +62,6 @@ class WaveConstantNode(base.MaxwellSimNode):
|
|||
input_sockets_optional={'WL': True, 'Freq': True},
|
||||
)
|
||||
def compute_freq_value(self, input_sockets: dict) -> sp.Expr:
|
||||
log.critical(input_sockets)
|
||||
if input_sockets['Freq'] is not None:
|
||||
return input_sockets['Freq']
|
||||
|
||||
|
|
|
@ -3,6 +3,7 @@ import typing as typ
|
|||
import bpy
|
||||
import sympy as sp
|
||||
|
||||
from blender_maxwell.utils import extra_sympy_units as spux
|
||||
from blender_maxwell.utils import logger
|
||||
|
||||
from ... import contracts as ct
|
||||
|
@ -18,7 +19,7 @@ class ConsoleViewOperator(bpy.types.Operator):
|
|||
bl_label = 'View Plots'
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
def poll(cls, _: bpy.types.Context):
|
||||
return True
|
||||
|
||||
def execute(self, context):
|
||||
|
@ -33,7 +34,7 @@ class RefreshPlotViewOperator(bpy.types.Operator):
|
|||
bl_label = 'Refresh Plots'
|
||||
|
||||
@classmethod
|
||||
def poll(cls, context):
|
||||
def poll(cls, _: bpy.types.Context):
|
||||
return True
|
||||
|
||||
def execute(self, context):
|
||||
|
@ -50,12 +51,20 @@ class ViewerNode(base.MaxwellSimNode):
|
|||
bl_label = 'Viewer'
|
||||
|
||||
input_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
'Any': sockets.AnySocketDef(),
|
||||
}
|
||||
|
||||
####################
|
||||
# - Properties
|
||||
####################
|
||||
print_kind: bpy.props.EnumProperty(
|
||||
name='Print Kind',
|
||||
description='FlowKind of the input socket to print',
|
||||
items=[(kind, kind.name, kind.name) for kind in list(ct.FlowKind)],
|
||||
default=ct.FlowKind.Value,
|
||||
update=lambda self, context: self.on_prop_changed('print_kind', context),
|
||||
)
|
||||
|
||||
auto_plot: bpy.props.BoolProperty(
|
||||
name='Auto-Plot',
|
||||
description='Whether to auto-plot anything plugged into the viewer node',
|
||||
|
@ -73,7 +82,10 @@ class ViewerNode(base.MaxwellSimNode):
|
|||
####################
|
||||
# - UI
|
||||
####################
|
||||
def draw_operators(self, context, layout):
|
||||
def draw_props(self, _: bpy.types.Context, layout: bpy.types.UILayout):
|
||||
layout.prop(self, 'print_kind', text='')
|
||||
|
||||
def draw_operators(self, _: bpy.types.Context, layout: bpy.types.UILayout):
|
||||
split = layout.split(factor=0.4)
|
||||
|
||||
# Split LHS
|
||||
|
@ -105,12 +117,13 @@ class ViewerNode(base.MaxwellSimNode):
|
|||
# - Methods
|
||||
####################
|
||||
def print_data_to_console(self):
|
||||
if not self.inputs['Data'].is_linked:
|
||||
if not self.inputs['Any'].is_linked:
|
||||
return
|
||||
|
||||
log.info('Printing Data to Console')
|
||||
data = self._compute_input('Data')
|
||||
if isinstance(data, sp.Basic):
|
||||
log.info('Printing to Console')
|
||||
data = self._compute_input('Any', kind=self.print_kind, optional=True)
|
||||
|
||||
if isinstance(data, spux.SympyType):
|
||||
console.print(sp.pretty(data, use_unicode=True))
|
||||
else:
|
||||
console.print(data)
|
||||
|
@ -119,16 +132,16 @@ class ViewerNode(base.MaxwellSimNode):
|
|||
# - Event Methods
|
||||
####################
|
||||
@events.on_value_changed(
|
||||
socket_name='Data',
|
||||
socket_name='Any',
|
||||
prop_name='auto_plot',
|
||||
props={'auto_plot'},
|
||||
)
|
||||
def on_changed_plot_preview(self, props):
|
||||
if self.inputs['Data'].is_linked and props['auto_plot']:
|
||||
if self.inputs['Any'].is_linked and props['auto_plot']:
|
||||
self.trigger_event(ct.FlowEvent.ShowPlot)
|
||||
|
||||
@events.on_value_changed(
|
||||
socket_name='Data',
|
||||
socket_name='Any',
|
||||
prop_name='auto_3d_preview',
|
||||
props={'auto_3d_preview'},
|
||||
)
|
||||
|
@ -137,7 +150,7 @@ class ViewerNode(base.MaxwellSimNode):
|
|||
|
||||
# Remove Non-Repreviewed Previews on Close
|
||||
with node_tree.repreview_all():
|
||||
if self.inputs['Data'].is_linked and props['auto_3d_preview']:
|
||||
if self.inputs['Any'].is_linked and props['auto_3d_preview']:
|
||||
self.trigger_event(ct.FlowEvent.ShowPreview)
|
||||
|
||||
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import abc
|
||||
import functools
|
||||
import typing as typ
|
||||
import uuid
|
||||
|
||||
import bpy
|
||||
import pydantic as pyd
|
||||
|
@ -31,6 +32,14 @@ class SocketDef(pyd.BaseModel, abc.ABC):
|
|||
|
||||
socket_type: ct.SocketType
|
||||
|
||||
def preinit(self, bl_socket: bpy.types.NodeSocket) -> None:
|
||||
"""Pre-initialize a real Blender node socket from this socket definition.
|
||||
|
||||
Parameters:
|
||||
bl_socket: The Blender node socket to alter using data from this SocketDef.
|
||||
"""
|
||||
bl_socket.reset_instance_id()
|
||||
|
||||
@abc.abstractmethod
|
||||
def init(self, bl_socket: bpy.types.NodeSocket) -> None:
|
||||
"""Initializes a real Blender node socket from this socket definition.
|
||||
|
@ -79,7 +88,8 @@ class SocketDef(pyd.BaseModel, abc.ABC):
|
|||
if not initialized_classes:
|
||||
msg = f'No "SocketDef" subclass found for name {obj[1]}. Please report this error'
|
||||
raise RuntimeError(msg)
|
||||
return next(initialized_classes)
|
||||
|
||||
return initialized_classes[0]
|
||||
|
||||
|
||||
####################
|
||||
|
@ -118,6 +128,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
# Options
|
||||
use_units: bool = False
|
||||
use_prelock: bool = False
|
||||
use_info_draw: bool = False
|
||||
|
||||
# Computed
|
||||
bl_idname: str
|
||||
|
@ -126,6 +137,9 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
# - Initialization
|
||||
####################
|
||||
## TODO: Common implementation of this for both sockets and nodes - perhaps a BLInstance base class?
|
||||
def reset_instance_id(self) -> None:
|
||||
self.instance_id = str(uuid.uuid4())
|
||||
|
||||
@classmethod
|
||||
def set_prop(
|
||||
cls,
|
||||
|
@ -432,6 +446,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
def trigger_event(
|
||||
self,
|
||||
event: ct.FlowEvent,
|
||||
socket_kinds: set[ct.FlowKind] | None = None,
|
||||
) -> None:
|
||||
"""Responds to and triggers subsequent events along the node tree.
|
||||
|
||||
|
@ -461,26 +476,34 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
# Input Socket | Input Flow
|
||||
if not self.is_output and flow_direction == 'input':
|
||||
for link in self.links:
|
||||
link.from_socket.trigger_event(event)
|
||||
link.from_socket.trigger_event(event, socket_kinds=socket_kinds)
|
||||
|
||||
# Input Socket | Output Flow
|
||||
if not self.is_output and flow_direction == 'output':
|
||||
if event == ct.FlowEvent.LinkChanged:
|
||||
self.node.trigger_event(ct.FlowEvent.DataChanged, socket_name=self.name)
|
||||
self.node.trigger_event(
|
||||
ct.FlowEvent.DataChanged,
|
||||
socket_name=self.name,
|
||||
socket_kinds=socket_kinds,
|
||||
)
|
||||
|
||||
self.node.trigger_event(event, socket_name=self.name)
|
||||
self.node.trigger_event(
|
||||
event, socket_name=self.name, socket_kinds=socket_kinds
|
||||
)
|
||||
|
||||
# Output Socket | Input Flow
|
||||
if self.is_output and flow_direction == 'input':
|
||||
self.node.trigger_event(event, socket_name=self.name)
|
||||
self.node.trigger_event(
|
||||
event, socket_name=self.name, socket_kinds=socket_kinds
|
||||
)
|
||||
|
||||
# Output Socket | Output Flow
|
||||
if self.is_output and flow_direction == 'output':
|
||||
for link in self.links:
|
||||
link.to_socket.trigger_event(event)
|
||||
link.to_socket.trigger_event(event, socket_kinds=socket_kinds)
|
||||
|
||||
####################
|
||||
# - Data Chain
|
||||
# - FlowKind: Auxiliary
|
||||
####################
|
||||
# Capabilities
|
||||
@property
|
||||
|
@ -490,11 +513,40 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
Notes:
|
||||
See `ct.FlowKind` for more information.
|
||||
"""
|
||||
return ct.DataCapabilities(
|
||||
return ct.CapabilitiesFlow(
|
||||
socket_type=self.socket_type,
|
||||
active_kind=self.active_kind,
|
||||
)
|
||||
|
||||
# Info
|
||||
@property
|
||||
def info(self) -> ct.InfoFlow:
|
||||
"""Signal that no information is declared by this socket.
|
||||
|
||||
Notes:
|
||||
See `ct.FlowKind` for more information.
|
||||
|
||||
Returns:
|
||||
An empty `ct.InfoFlow`.
|
||||
"""
|
||||
return ct.InfoFlow()
|
||||
|
||||
# Param
|
||||
@property
|
||||
def params(self) -> ct.ParamsFlow:
|
||||
"""Signal that no params are declared by this socket.
|
||||
|
||||
Notes:
|
||||
See `ct.FlowKind` for more information.
|
||||
|
||||
Returns:
|
||||
An empty `ct.ParamsFlow`.
|
||||
"""
|
||||
return ct.ParamsFlow()
|
||||
|
||||
####################
|
||||
# - FlowKind: Auxiliary
|
||||
####################
|
||||
# Value
|
||||
@property
|
||||
def value(self) -> ct.ValueFlow:
|
||||
|
@ -591,7 +643,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
raise NotImplementedError(msg)
|
||||
|
||||
@lazy_array_range.setter
|
||||
def lazy_array_range(self, value: tuple[ct.DataValue, ct.DataValue, int]) -> None:
|
||||
def lazy_array_range(self, value: ct.LazyArrayRangeFlow) -> None:
|
||||
"""Throws a descriptive error.
|
||||
|
||||
Notes:
|
||||
|
@ -603,60 +655,6 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
msg = f'Socket {self.bl_label} {self.socket_type}): Tried to set "ct.FlowKind.LazyArrayRange", but socket does not define it'
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
# Param
|
||||
@property
|
||||
def param(self) -> ct.ParamsFlow:
|
||||
"""Throws a descriptive error.
|
||||
|
||||
Notes:
|
||||
See `ct.FlowKind` for more information.
|
||||
|
||||
Raises:
|
||||
NotImplementedError: When used without being overridden.
|
||||
"""
|
||||
msg = f'Socket {self.bl_label} {self.socket_type}): Tried to get "ct.FlowKind.Param", but socket does not define it'
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
@param.setter
|
||||
def param(self, value: tuple[ct.DataValue, ct.DataValue, int]) -> None:
|
||||
"""Throws a descriptive error.
|
||||
|
||||
Notes:
|
||||
See `ct.FlowKind` for more information.
|
||||
|
||||
Raises:
|
||||
NotImplementedError: When used without being overridden.
|
||||
"""
|
||||
msg = f'Socket {self.bl_label} {self.socket_type}): Tried to set "ct.FlowKind.Param", but socket does not define it'
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
# Info
|
||||
@property
|
||||
def info(self) -> ct.ParamsFlow:
|
||||
"""Throws a descriptive error.
|
||||
|
||||
Notes:
|
||||
See `ct.FlowKind` for more information.
|
||||
|
||||
Raises:
|
||||
NotImplementedError: When used without being overridden.
|
||||
"""
|
||||
msg = f'Socket {self.bl_label} {self.socket_type}): Tried to get "ct.FlowKind.Info", but socket does not define it'
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
@info.setter
|
||||
def info(self, value: tuple[ct.DataValue, ct.DataValue, int]) -> None:
|
||||
"""Throws a descriptive error.
|
||||
|
||||
Notes:
|
||||
See `ct.FlowKind` for more information.
|
||||
|
||||
Raises:
|
||||
NotImplementedError: When used without being overridden.
|
||||
"""
|
||||
msg = f'Socket {self.bl_label} {self.socket_type}): Tried to set "ct.FlowKind.Info", but socket does not define it'
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
####################
|
||||
# - Data Chain Computation
|
||||
####################
|
||||
|
@ -674,8 +672,8 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
"""
|
||||
kind_data_map = {
|
||||
ct.FlowKind.Value: lambda: self.value,
|
||||
ct.FlowKind.ValueArray: lambda: self.value_array,
|
||||
ct.FlowKind.LazyValueFunc: lambda: self.lazy_value,
|
||||
ct.FlowKind.Array: lambda: self.array,
|
||||
ct.FlowKind.LazyValueFunc: lambda: self.lazy_value_func,
|
||||
ct.FlowKind.LazyArrayRange: lambda: self.lazy_array_range,
|
||||
ct.FlowKind.Params: lambda: self.params,
|
||||
ct.FlowKind.Info: lambda: self.info,
|
||||
|
@ -727,8 +725,16 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
if len(linked_values) == 1:
|
||||
return linked_values[0]
|
||||
|
||||
# Edge Case: While Dragging Link (but not yet removed)
|
||||
## While the user is dragging a link:
|
||||
## - self.is_linked = True, since the user hasn't confirmed anything.
|
||||
## - self.links will be empty, since the link object was freed.
|
||||
## When this particular condition is met, pretend that we're not linked.
|
||||
if len(linked_values) == 0:
|
||||
return self._compute_data(kind)
|
||||
|
||||
msg = f'Socket {self.bl_label} ({self.socket_type}): Multi-input sockets are not yet supported'
|
||||
return NotImplementedError(msg)
|
||||
raise NotImplementedError(msg)
|
||||
|
||||
####################
|
||||
# - Theme
|
||||
|
@ -819,17 +825,18 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
"""
|
||||
col = layout.column(align=False)
|
||||
|
||||
# Label Row
|
||||
# Row: Label
|
||||
row = col.row(align=False)
|
||||
|
||||
## Lock Check
|
||||
if self.locked:
|
||||
row.enabled = False
|
||||
|
||||
## Linked Label
|
||||
## Link Check
|
||||
if self.is_linked:
|
||||
row.label(text=text)
|
||||
return
|
||||
|
||||
## User Label Row (incl. Units)
|
||||
else:
|
||||
# User Label Row (incl. Units)
|
||||
if self.use_units:
|
||||
split = row.split(factor=0.6, align=True)
|
||||
|
||||
|
@ -841,7 +848,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
else:
|
||||
self.draw_label_row(row, text)
|
||||
|
||||
# Prelock Row
|
||||
# User Prelock Row
|
||||
row = col.row(align=False)
|
||||
if self.use_prelock:
|
||||
_col = row.column(align=False)
|
||||
|
@ -854,15 +861,20 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
elif self.locked:
|
||||
row.enabled = False
|
||||
|
||||
# FlowKind Column(s)
|
||||
# FlowKind Draw Row
|
||||
col = row.column(align=True)
|
||||
{
|
||||
ct.FlowKind.Value: self.draw_value,
|
||||
ct.FlowKind.Array: self.draw_value_array,
|
||||
ct.FlowKind.LazyValueFunc: self.draw_lazy_value,
|
||||
ct.FlowKind.LazyValueRange: self.draw_lazy_value_range,
|
||||
ct.FlowKind.Array: self.draw_array,
|
||||
ct.FlowKind.LazyValueFunc: self.draw_lazy_value_func,
|
||||
ct.FlowKind.LazyArrayRange: self.draw_lazy_array_range,
|
||||
}[self.active_kind](col)
|
||||
|
||||
# Info Drawing
|
||||
if self.use_info_draw:
|
||||
info = self.compute_data(kind=ct.FlowKind.Info)
|
||||
self.draw_info(info, col)
|
||||
|
||||
def draw_output(
|
||||
self,
|
||||
context: bpy.types.Context, # noqa: ARG002
|
||||
|
@ -881,10 +893,18 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
node: The node within which the socket is embedded.
|
||||
text: The socket's name in the UI.
|
||||
"""
|
||||
layout.label(text=text)
|
||||
col = layout.column()
|
||||
row = col.row()
|
||||
row.alignment = 'RIGHT'
|
||||
row.label(text=text)
|
||||
|
||||
# Draw FlowKind.Info related Information
|
||||
if self.use_info_draw:
|
||||
info = self.compute_data(kind=ct.FlowKind.Info)
|
||||
self.draw_info(info, col)
|
||||
|
||||
####################
|
||||
# - UI Methods
|
||||
# - UI Methods: Active FlowKind
|
||||
####################
|
||||
def draw_label_row(
|
||||
self,
|
||||
|
@ -922,7 +942,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
col: Target for defining UI elements.
|
||||
"""
|
||||
|
||||
def draw_lazy_value(self, col: bpy.types.UILayout) -> None:
|
||||
def draw_lazy_value_func(self, col: bpy.types.UILayout) -> None:
|
||||
"""Draws the socket lazy value on its own line.
|
||||
|
||||
Notes:
|
||||
|
@ -941,3 +961,16 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
Parameters:
|
||||
col: Target for defining UI elements.
|
||||
"""
|
||||
|
||||
####################
|
||||
# - UI Methods: Auxilliary
|
||||
####################
|
||||
def draw_info(self, info: ct.InfoFlow, col: bpy.types.UILayout) -> None:
|
||||
"""Draws the socket info on its own line.
|
||||
|
||||
Notes:
|
||||
Should be overriden by individual socket classes, if they might output a `FlowKind.Info`.
|
||||
|
||||
Parameters:
|
||||
col: Target for defining UI elements.
|
||||
"""
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
from . import any as any_socket
|
||||
from . import bool as bool_socket
|
||||
from . import expr, file_path, string
|
||||
from . import expr, file_path, string, data
|
||||
|
||||
AnySocketDef = any_socket.AnySocketDef
|
||||
DataSocketDef = data.DataSocketDef
|
||||
BoolSocketDef = bool_socket.BoolSocketDef
|
||||
StringSocketDef = string.StringSocketDef
|
||||
FilePathSocketDef = file_path.FilePathSocketDef
|
||||
|
@ -11,6 +12,7 @@ ExprSocketDef = expr.ExprSocketDef
|
|||
|
||||
BL_REGISTER = [
|
||||
*any_socket.BL_REGISTER,
|
||||
*data.BL_REGISTER,
|
||||
*bool_socket.BL_REGISTER,
|
||||
*string.BL_REGISTER,
|
||||
*file_path.BL_REGISTER,
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
|
||||
from ... import contracts as ct
|
||||
from .. import base
|
||||
|
||||
|
@ -12,7 +11,7 @@ class AnyBLSocket(base.MaxwellSimSocket):
|
|||
|
||||
@property
|
||||
def capabilities(self):
|
||||
return ct.DataCapabilities(
|
||||
return ct.CapabilitiesFlow(
|
||||
socket_type=self.socket_type,
|
||||
active_kind=self.active_kind,
|
||||
is_universal=True,
|
||||
|
|
|
@@ -0,0 +1,86 @@
import typing as typ

import bpy

from blender_maxwell.utils import bl_cache
from blender_maxwell.utils import extra_sympy_units as spux

from ... import contracts as ct
from .. import base


####################
# - Blender Socket
####################
class DataBLSocket(base.MaxwellSimSocket):
    socket_type = ct.SocketType.Data
    bl_label = 'Data'
    use_info_draw = True

    ####################
    # - Properties: Format
    ####################
    format: str = bl_cache.BLField('')
    ## TODO: typ.Literal['xarray', 'jax']

    ####################
    # - FlowKind
    ####################
    @property
    def capabilities(self) -> ct.CapabilitiesFlow:
        return ct.CapabilitiesFlow(
            socket_type=self.socket_type,
            active_kind=self.active_kind,
            must_match={'format': self.format},
        )

    @property
    def value(self):
        return None

    ####################
    # - UI
    ####################
    def draw_info(self, info: ct.InfoFlow, col: bpy.types.UILayout) -> None:
        if self.format == 'jax' and info.dim_names:
            row = col.row()
            box = row.box()
            grid = box.grid_flow(
                columns=3,
                row_major=True,
                even_columns=True,
                #even_rows=True,
                align=True,
            )

            # Grid Header
            #grid.label(text='Dim')
            #grid.label(text='Len')
            #grid.label(text='Unit')

            # Dimension Names
            for dim_name in info.dim_names:
                dim_idx = info.dim_idx[dim_name]
                grid.label(text=dim_name)
                grid.label(text=str(len(dim_idx)))
                grid.label(text=spux.sp_to_str(dim_idx.unit))


####################
# - Socket Configuration
####################
class DataSocketDef(base.SocketDef):
    socket_type: ct.SocketType = ct.SocketType.Data

    format: typ.Literal['xarray', 'jax', 'monitor_data']

    def init(self, bl_socket: DataBLSocket) -> None:
        bl_socket.format = self.format


####################
# - Blender Registration
####################
BL_REGISTER = [
    DataBLSocket,
]
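A node opts into the new socket by declaring a `DataSocketDef`; with `use_info_draw` enabled, the socket pulls `ct.FlowKind.Info` through the info flow and renders each dimension's name, length, and unit. A minimal sketch of the declaration side, assuming a hypothetical node and mirroring the pass-through `Info` method earlier in this commit:

import typing as typ

# Relative import layout assumed from the addon's node modules.
from ... import contracts as ct
from ... import sockets
from .. import base, events


class HypotheticalDataNode(base.MaxwellSimNode):
    # node_type / bl_label are placeholders; a real node defines its own.
    node_type = ct.NodeType.Viz
    bl_label = 'Hypothetical Data Node'

    input_sockets: typ.ClassVar = {
        'Data': sockets.DataSocketDef(format='jax'),
    }
    output_sockets: typ.ClassVar = {
        'Data': sockets.DataSocketDef(format='jax'),
    }

    @events.computes_output_socket(
        'Data',
        kind=ct.FlowKind.Info,
        input_sockets={'Data'},
        input_socket_kinds={'Data': ct.FlowKind.Info},
    )
    def compute_data_info(self, input_sockets: dict) -> ct.InfoFlow:
        # Pass the upstream InfoFlow straight through, so the socket can draw it.
        return input_sockets['Data']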
@ -4,12 +4,14 @@ import bpy
|
|||
import pydantic as pyd
|
||||
import sympy as sp
|
||||
|
||||
from blender_maxwell.utils import bl_cache
|
||||
from blender_maxwell.utils import bl_cache, logger
|
||||
from blender_maxwell.utils import extra_sympy_units as spux
|
||||
|
||||
from ... import contracts as ct
|
||||
from .. import base
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
||||
|
||||
class ExprBLSocket(base.MaxwellSimSocket):
|
||||
socket_type = ct.SocketType.Expr
|
||||
|
@ -25,11 +27,11 @@ class ExprBLSocket(base.MaxwellSimSocket):
|
|||
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
|
||||
)
|
||||
|
||||
int_symbols: set[spux.IntSymbol] = bl_cache.BLField([])
|
||||
real_symbols: set[spux.RealSymbol] = bl_cache.BLField([])
|
||||
complex_symbols: set[spux.ComplexSymbol] = bl_cache.BLField([])
|
||||
int_symbols: frozenset[spux.IntSymbol] = bl_cache.BLField(frozenset())
|
||||
real_symbols: frozenset[spux.RealSymbol] = bl_cache.BLField(frozenset())
|
||||
complex_symbols: frozenset[spux.ComplexSymbol] = bl_cache.BLField(frozenset())
|
||||
|
||||
@property
|
||||
@bl_cache.cached_bl_property(persist=False)
|
||||
def symbols(self) -> list[spux.Symbol]:
|
||||
"""Retrieves all symbols by concatenating int, real, and complex symbols, and sorting them by name.
|
||||
|
||||
|
@ -48,6 +50,19 @@ class ExprBLSocket(base.MaxwellSimSocket):
|
|||
####################
|
||||
def draw_value(self, col: bpy.types.UILayout) -> None:
|
||||
col.prop(self, 'raw_value', text='')
|
||||
if len(self.symbols) > 0:
|
||||
box = col.box()
|
||||
split = box.split(factor=0.3)
|
||||
|
||||
# Left Col
|
||||
col = split.column()
|
||||
col.label(text='Let:')
|
||||
|
||||
# Right Col
|
||||
col = split.column()
|
||||
col.alignment = 'RIGHT'
|
||||
for sym in self.symbols:
|
||||
col.label(text=spux.pretty_symbol(sym))
|
||||
|
||||
####################
|
||||
# - Computation of Default Value
|
||||
|
@ -61,10 +76,6 @@ class ExprBLSocket(base.MaxwellSimSocket):
|
|||
convert_xor=True,
|
||||
).subs(spux.ALL_UNIT_SYMBOLS)
|
||||
|
||||
if not expr.free_symbols.issubset(self.symbols):
|
||||
msg = f'Expression "{expr}" (symbols={self.expr.free_symbols}) has invalid symbols (valid symbols: {self.symbols})'
|
||||
raise ValueError(msg)
|
||||
|
||||
return expr
|
||||
|
||||
@value.setter
|
||||
|
@ -88,13 +99,26 @@ class ExprBLSocket(base.MaxwellSimSocket):
|
|||
class ExprSocketDef(base.SocketDef):
|
||||
socket_type: ct.SocketType = ct.SocketType.Expr
|
||||
|
||||
_x = sp.Symbol('x', real=True)
|
||||
int_symbols: list[spux.IntSymbol] = []
|
||||
real_symbols: list[spux.RealSymbol] = [_x]
|
||||
complex_symbols: list[spux.ComplexSymbol] = []
|
||||
int_symbols: frozenset[spux.IntSymbol] = frozenset()
|
||||
real_symbols: frozenset[spux.RealSymbol] = frozenset()
|
||||
complex_symbols: frozenset[spux.ComplexSymbol] = frozenset()
|
||||
|
||||
@property
|
||||
def symbols(self) -> list[spux.Symbol]:
|
||||
"""Retrieves all symbols by concatenating int, real, and complex symbols, and sorting them by name.
|
||||
|
||||
The order is guaranteed to be **deterministic**.
|
||||
|
||||
Returns:
|
||||
All symbols valid for use in the expression.
|
||||
"""
|
||||
return sorted(
|
||||
self.int_symbols | self.real_symbols | self.complex_symbols,
|
||||
key=lambda sym: sym.name,
|
||||
)
|
||||
|
||||
# Expression
|
||||
default_expr: spux.SympyExpr = _x
|
||||
default_expr: spux.SympyExpr = sp.S(1)
|
||||
allow_units: bool = True
|
||||
|
||||
@pyd.model_validator(mode='after')
|
||||
|
@ -104,24 +128,19 @@ class ExprSocketDef(base.SocketDef):
|
|||
Raises:
|
||||
ValueError: If the expression uses symbols not defined in `self.symbols`.
|
||||
"""
|
||||
if not spux.uses_units(self.default_expr):
|
||||
msg = f'Expression symbols ({self.default_expr.free_symbol}) are not a strict subset of defined symbols ({self.symbols})'
|
||||
if spux.uses_units(self.default_expr) and not self.allow_units:
|
||||
msg = f'Expression {self.default_expr} uses units, but "self.allow_units" is False'
|
||||
raise ValueError(msg)
|
||||
|
||||
@pyd.model_validator(mode='after')
|
||||
def check_default_expr_uses_allowed_symbols(self) -> typ.Self:
|
||||
"""Checks that `self.default_expr` only uses symbols defined in `self.symbols`.
|
||||
return self
|
||||
|
||||
Raises:
|
||||
ValueError: If the expression uses symbols not defined in `self.symbols`.
|
||||
"""
|
||||
if not self.default_expr.free_symbols.issubset(self.symbols):
|
||||
msg = f'Expression symbols ({self.default_expr.free_symbol}) are not a strict subset of defined symbols ({self.symbols})'
|
||||
raise ValueError(msg)
|
||||
## TODO: Validator for Symbol Usage
|
||||
|
||||
def init(self, bl_socket: ExprBLSocket) -> None:
|
||||
bl_socket.value = self.default_expr
|
||||
bl_socket.symbols = self.symbols
|
||||
bl_socket.int_symbols = self.int_symbols
|
||||
bl_socket.real_symbols = self.real_symbols
|
||||
bl_socket.complex_symbols = self.complex_symbols
|
||||
|
||||
|
||||
####################
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import typing as typ
|
||||
|
||||
import bpy
|
||||
import tidy3d as td
|
||||
|
||||
|
|
|
@ -99,6 +99,8 @@ class MaxwellBoundCondsBLSocket(base.MaxwellSimSocket):
|
|||
_col.prop(self, axis + '_neg', text='')
|
||||
_col.prop(self, axis + '_pos', text='')
|
||||
|
||||
draw_value_array = draw_value
|
||||
|
||||
####################
|
||||
# - Computation of Default Value
|
||||
####################
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
|
||||
from ... import contracts as ct
|
||||
from .. import base
|
||||
|
||||
|
|
|
@ -92,7 +92,7 @@ class ComplexNumberBLSocket(base.MaxwellSimSocket):
|
|||
# - Computation of Default Value
|
||||
####################
|
||||
@property
|
||||
def value(self) -> spux.Complex:
|
||||
def value(self) -> spux.ComplexNumber:
|
||||
"""Return the complex number as a sympy expression, of a form determined by the coordinate system.
|
||||
|
||||
- **Cartesian**: $(a,b) -> a + ib$
|
||||
|
@ -109,7 +109,7 @@ class ComplexNumberBLSocket(base.MaxwellSimSocket):
|
|||
}[self.coord_sys]
|
||||
|
||||
@value.setter
|
||||
def value(self, value: spux.Complex) -> None:
|
||||
def value(self, value: spux.ComplexNumber) -> None:
|
||||
"""Set the complex number from a sympy expression, by numerically simplifying it into coordinate-system determined components.
|
||||
|
||||
- **Cartesian**: $(a,b) -> a + ib$
|
||||
|
@ -130,7 +130,7 @@ class ComplexNumberBLSocket(base.MaxwellSimSocket):
|
|||
class ComplexNumberSocketDef(base.SocketDef):
|
||||
socket_type: ct.SocketType = ct.SocketType.ComplexNumber
|
||||
|
||||
default_value: spux.Complex = sp.S(0)
|
||||
default_value: spux.ComplexNumber = sp.S(0)
|
||||
coord_sys: typ.Literal['CARTESIAN', 'POLAR'] = 'CARTESIAN'
|
||||
|
||||
def init(self, bl_socket: ComplexNumberBLSocket) -> None:
|
||||
|
|
|
@ -74,8 +74,8 @@ class PhysicalFreqBLSocket(base.MaxwellSimSocket):
|
|||
self.raw_value = spux.sympy_to_python(spux.scale_to_unit(value, self.unit))
|
||||
|
||||
@property
|
||||
def lazy_array_range(self) -> ct.LazyArrayRange:
|
||||
return ct.LazyArrayRange(
|
||||
def lazy_array_range(self) -> ct.LazyArrayRangeFlow:
|
||||
return ct.LazyArrayRangeFlow(
|
||||
symbols=set(),
|
||||
unit=self.unit,
|
||||
start=sp.S(self.min_freq) * self.unit,
|
||||
|
@ -86,9 +86,13 @@ class PhysicalFreqBLSocket(base.MaxwellSimSocket):
|
|||
|
||||
@lazy_array_range.setter
|
||||
def lazy_array_range(self, value: ct.LazyArrayRangeFlow) -> None:
|
||||
self.min_freq = spux.sympy_to_python(spux.scale_to_unit(value[0], self.unit))
|
||||
self.max_freq = spux.sympy_to_python(spux.scale_to_unit(value[1], self.unit))
|
||||
self.steps = value[2]
|
||||
self.min_freq = spux.sympy_to_python(
|
||||
spux.scale_to_unit(value.start * value.unit, self.unit)
|
||||
)
|
||||
self.max_freq = spux.sympy_to_python(
|
||||
spux.scale_to_unit(value.stop * value.unit, self.unit)
|
||||
)
|
||||
self.steps = value.steps
|
||||
|
||||
|
||||
####################
|
||||
|
@ -103,7 +107,7 @@ class PhysicalFreqSocketDef(base.SocketDef):
|
|||
|
||||
min_freq: SympyExpr = 400.0 * spux.terahertz
|
||||
max_freq: SympyExpr = 600.0 * spux.terahertz
|
||||
steps: SympyExpr = 50
|
||||
steps: int = 50
|
||||
|
||||
def init(self, bl_socket: PhysicalFreqBLSocket) -> None:
|
||||
bl_socket.unit = self.default_unit
|
||||
|
|
|
@ -75,8 +75,8 @@ class PhysicalLengthBLSocket(base.MaxwellSimSocket):
|
|||
self.raw_value = spux.sympy_to_python(spux.scale_to_unit(value, self.unit))
|
||||
|
||||
@property
|
||||
def lazy_array_range(self) -> ct.LazyArrayRange:
|
||||
return ct.LazyArrayRange(
|
||||
def lazy_array_range(self) -> ct.LazyArrayRangeFlow:
|
||||
return ct.LazyArrayRangeFlow(
|
||||
symbols=set(),
|
||||
unit=self.unit,
|
||||
start=sp.S(self.min_len) * self.unit,
|
||||
|
@ -86,10 +86,14 @@ class PhysicalLengthBLSocket(base.MaxwellSimSocket):
|
|||
)
|
||||
|
||||
@lazy_array_range.setter
|
||||
def lazy_value_range(self, value: tuple[sp.Expr, sp.Expr, int]) -> None:
|
||||
self.min_len = spux.sympy_to_python(spux.scale_to_unit(value[0], self.unit))
|
||||
self.max_len = spux.sympy_to_python(spux.scale_to_unit(value[1], self.unit))
|
||||
self.steps = value[2]
|
||||
def lazy_array_range(self, value: ct.LazyArrayRangeFlow) -> None:
|
||||
self.min_len = spux.sympy_to_python(
|
||||
spux.scale_to_unit(value.start * value.unit, self.unit)
|
||||
)
|
||||
self.max_len = spux.sympy_to_python(
|
||||
spux.scale_to_unit(value.stop * value.unit, self.unit)
|
||||
)
|
||||
self.steps = value.steps
|
||||
|
||||
|
||||
####################
|
||||
|
|
|
@ -5,8 +5,7 @@ from pathlib import Path
|
|||
import bpy
|
||||
|
||||
from ... import contracts as ct
|
||||
from ... import registration
|
||||
from ..utils import pydeps, simple_logger
|
||||
from ..utils import pip_process, pydeps, simple_logger
|
||||
|
||||
log = simple_logger.get(__name__)
|
||||
|
||||
|
@ -17,11 +16,13 @@ class InstallPyDeps(bpy.types.Operator):
|
|||
|
||||
@classmethod
|
||||
def poll(cls, _: bpy.types.Context):
|
||||
return not pydeps.DEPS_OK
|
||||
return not pip_process.is_loaded() and not pydeps.DEPS_OK
|
||||
|
||||
####################
|
||||
# - Property: PyDeps Path
|
||||
####################
|
||||
_timer = None
|
||||
|
||||
bl__pydeps_path: bpy.props.StringProperty(
|
||||
default='',
|
||||
)
|
||||
|
@ -52,48 +53,79 @@ class InstallPyDeps(bpy.types.Operator):
|
|||
####################
|
||||
# - Execution
|
||||
####################
|
||||
def execute(self, _: bpy.types.Context):
|
||||
def execute(self, context: bpy.types.Context):
|
||||
if pip_process.is_loaded():
|
||||
self.report(
|
||||
{'ERROR'},
|
||||
'A PyDeps installation is already running. Please wait for it to complete.',
|
||||
)
|
||||
return {'FINISHED'}
|
||||
|
||||
log.info(
|
||||
'Running Install PyDeps w/requirements.txt (%s) to path: %s',
|
||||
self.pydeps_reqlock_path,
|
||||
self.pydeps_path,
|
||||
'Installing PyDeps to path: %s',
|
||||
str(self.pydeps_path),
|
||||
)
|
||||
|
||||
# Create the Addon-Specific Folder (if Needed)
|
||||
## It MUST, however, have a parent already
|
||||
self.pydeps_path.mkdir(parents=False, exist_ok=True)
|
||||
|
||||
# Determine Path to Blender's Bundled Python
|
||||
## bpy.app.binary_path_python was deprecated in 2.91.
|
||||
## sys.executable points to the correct bundled Python.
|
||||
## See <https://developer.blender.org/docs/release_notes/2.91/python_api/>
|
||||
python_exec = Path(sys.executable)
|
||||
# Run Pip Install
|
||||
pip_process.run(ct.addon.PATH_REQS, self.pydeps_path, ct.addon.PIP_INSTALL_LOG)
|
||||
|
||||
# Install Deps w/Bundled pip
|
||||
try:
|
||||
cmdline = [
|
||||
str(python_exec),
|
||||
'-m',
|
||||
'pip',
|
||||
'install',
|
||||
'-r',
|
||||
str(self.pydeps_reqlock_path),
|
||||
'--target',
|
||||
str(self.pydeps_path),
|
||||
]
|
||||
log.info(
|
||||
'Running pip w/cmdline: %s',
|
||||
' '.join(cmdline),
|
||||
# Set Timer
|
||||
self._timer = context.window_manager.event_timer_add(
|
||||
0.25, window=context.window
|
||||
)
|
||||
subprocess.check_call(cmdline)
|
||||
except subprocess.CalledProcessError:
|
||||
log.exception('Failed to install PyDeps')
|
||||
return {'CANCELLED'}
|
||||
context.window_manager.modal_handler_add(self)
|
||||
|
||||
# Report PyDeps Changed
|
||||
return {'RUNNING_MODAL'}
|
||||
|
||||
def modal(
|
||||
self, context: bpy.types.Context, event: bpy.types.Event
|
||||
) -> ct.BLOperatorStatus:
|
||||
# Non-Timer Event: Do Nothing
|
||||
if event.type != 'TIMER':
|
||||
return {'PASS_THROUGH'}
|
||||
|
||||
# No Process: Very Bad!
|
||||
if not pip_process.is_loaded():
|
||||
msg = 'Pip process was removed elsewhere than "install_deps" modal operator'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
# Not Running: Done!
|
||||
if not pip_process.is_running():
|
||||
# Report Result
|
||||
if pip_process.returncode() == 0:
|
||||
self.report({'INFO'}, 'PyDeps installation succeeded.')
|
||||
else:
|
||||
self.report(
|
||||
{'ERROR'},
|
||||
f'PyDeps installation returned status code: {pip_process.returncode()}. Please see the addon preferences, or the pip installation logs at: {ct.addon.PIP_INSTALL_LOG}',
|
||||
)
|
||||
|
||||
# Reset Process and Timer
|
||||
pip_process.reset()
|
||||
context.window_manager.event_timer_remove(self._timer)
|
||||
|
||||
# Mark PyDeps Changed
|
||||
ct.addon.prefs().on_addon_pydeps_changed()
|
||||
|
||||
return {'FINISHED'}
|
||||
|
||||
if ct.addon.PIP_INSTALL_LOG.is_file():
|
||||
pip_process.update_progress(ct.addon.PIP_INSTALL_LOG)
|
||||
context.area.tag_redraw()
|
||||
return {'PASS_THROUGH'}
|
||||
|
||||
def cancel(self, context: bpy.types.Context):
|
||||
# Kill / Reset Process and Delete Event Timer
|
||||
pip_process.kill()
|
||||
pip_process.reset()
|
||||
context.window_manager.event_timer_remove(self._timer)
|
||||
|
||||
return {'CANCELLED'}
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
|
|
|
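The install operator now returns `{'RUNNING_MODAL'}` and polls the pip subprocess from a window-manager timer, so Blender's UI stays responsive while the log parser updates the progress bar. For reference, a generic sketch of that modal-timer pattern (operator name and interval are illustrative, not part of the addon):

import bpy


class ModalTimerSkeleton(bpy.types.Operator):
    bl_idname = 'wm.modal_timer_skeleton'  # hypothetical idname
    bl_label = 'Modal Timer Skeleton'

    _timer = None

    def execute(self, context: bpy.types.Context):
        # Kick off the background work here (ex. pip_process.run(...)),
        # then hand control back to Blender and poll via a timer.
        self._timer = context.window_manager.event_timer_add(0.25, window=context.window)
        context.window_manager.modal_handler_add(self)
        return {'RUNNING_MODAL'}

    def modal(self, context: bpy.types.Context, event: bpy.types.Event):
        if event.type != 'TIMER':
            return {'PASS_THROUGH'}

        work_is_done = True  # ex. not pip_process.is_running()
        if work_is_done:
            context.window_manager.event_timer_remove(self._timer)
            return {'FINISHED'}

        # Otherwise: parse logs, update progress, redraw, and keep waiting.
        return {'PASS_THROUGH'}

    def cancel(self, context: bpy.types.Context):
        # Mirrors the addon's cancel(): clean up the timer and bail out.
        context.window_manager.event_timer_remove(self._timer)
        return {'CANCELLED'}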
@ -4,7 +4,7 @@ import bpy
|
|||
|
||||
from blender_maxwell import contracts as ct
|
||||
|
||||
from ..utils import pydeps, simple_logger
|
||||
from ..utils import pip_process, pydeps, simple_logger
|
||||
|
||||
log = simple_logger.get(__name__)
|
||||
|
||||
|
@ -34,21 +34,6 @@ class ManagePyDeps(bpy.types.Operator):
|
|||
def pydeps_path(self, path: Path) -> None:
|
||||
self.bl__pydeps_path = str(path.resolve())
|
||||
|
||||
####################
|
||||
# - Property: requirements.lock
|
||||
####################
|
||||
bl__pydeps_reqlock_path: bpy.props.StringProperty(
|
||||
default='',
|
||||
)
|
||||
|
||||
@property
|
||||
def pydeps_reqlock_path(self):
|
||||
return Path(bpy.path.abspath(self.bl__pydeps_reqlock_path))
|
||||
|
||||
@pydeps_reqlock_path.setter
|
||||
def pydeps_reqlock_path(self, path: Path) -> None:
|
||||
self.bl__pydeps_reqlock_path = str(path.resolve())
|
||||
|
||||
####################
|
||||
# - UI
|
||||
####################
|
||||
|
@ -75,14 +60,40 @@ class ManagePyDeps(bpy.types.Operator):
|
|||
for issue in pydeps.DEPS_ISSUES:
|
||||
grid.label(text=issue)
|
||||
|
||||
# Install Deps
|
||||
# Row: Install Deps
|
||||
row = layout.row(align=True)
|
||||
op = row.operator(
|
||||
ct.OperatorType.InstallPyDeps,
|
||||
text='Install Python Dependencies (requires internet)',
|
||||
)
|
||||
op.bl__pydeps_path = str(self.pydeps_path)
|
||||
op.bl__pydeps_reqlock_path = str(self.bl__pydeps_reqlock_path)
|
||||
|
||||
## Row: Uninstall Deps
|
||||
row = layout.row(align=True)
|
||||
op = row.operator(
|
||||
ct.OperatorType.UninstallPyDeps,
|
||||
text='Uninstall Python Dependencies',
|
||||
)
|
||||
op.bl__pydeps_path = str(self.pydeps_path)
|
||||
|
||||
## Row: Deps Install Progress
|
||||
row = layout.row()
|
||||
num_req_deplocks = len(pydeps.DEPS_REQ_DEPLOCKS)
|
||||
if pydeps.DEPS_OK:
|
||||
row.progress(
|
||||
text=f'{num_req_deplocks}/{num_req_deplocks} Installed',
|
||||
factor=1.0,
|
||||
)
|
||||
elif pip_process.PROGRESS is not None:
|
||||
row.progress(
|
||||
text='/'.join(pip_process.PROGRESS_FRAC) + ' Installed',
|
||||
factor=float(pip_process.PROGRESS),
|
||||
)
|
||||
else:
|
||||
row.progress(
|
||||
text=f'0/{num_req_deplocks} Installed',
|
||||
factor=0.0,
|
||||
)
|
||||
|
||||
## Row: Toggle Default PyDeps Path
|
||||
row = layout.row()
|
||||
|
|
|
@ -1,11 +1,14 @@
|
|||
import shutil
|
||||
import site
|
||||
from pathlib import Path
|
||||
|
||||
import bpy
|
||||
|
||||
from blender_maxwell import contracts as ct
|
||||
|
||||
from ..utils import pydeps
|
||||
from ..utils import pip_process, pydeps, simple_logger
|
||||
|
||||
log = simple_logger.get(__name__)
|
||||
|
||||
|
||||
class UninstallPyDeps(bpy.types.Operator):
|
||||
|
@ -14,7 +17,9 @@ class UninstallPyDeps(bpy.types.Operator):
|
|||
|
||||
@classmethod
|
||||
def poll(cls, _: bpy.types.Context):
|
||||
return pydeps.DEPS_OK
|
||||
return not pip_process.is_loaded() and (
|
||||
pydeps.DEPS_OK or (pydeps.DEPS_ISSUES and pydeps.DEPS_INST_DEPLOCKS)
|
||||
)
|
||||
|
||||
####################
|
||||
# - Property: PyDeps Path
|
||||
|
@ -35,19 +40,70 @@ class UninstallPyDeps(bpy.types.Operator):
|
|||
# - Execution
|
||||
####################
|
||||
def execute(self, _: bpy.types.Context):
|
||||
path_addon_pydeps = Path(self.pydeps_path)
|
||||
if (
|
||||
pydeps.check_pydeps()
|
||||
and path_addon_pydeps.exists()
|
||||
and path_addon_pydeps.is_dir()
|
||||
):
|
||||
raise NotImplementedError
|
||||
# TODO: CAREFUL!!
|
||||
# shutil.rmtree(self.path_addon_pydeps)
|
||||
else:
|
||||
msg = "Can't uninstall pydeps"
|
||||
raise RuntimeError(msg)
|
||||
# Reject Bad PyDeps Paths (to prevent unfortunate deletions)
|
||||
## Reject user site-packages
|
||||
if self.pydeps_path == Path(site.getusersitepackages()):
|
||||
msg = f"PyDeps path ({self.pydeps_path}) can't be the user site-packages"
|
||||
raise ValueError(msg)
|
||||
|
||||
## Reject any global site-packages
|
||||
if self.pydeps_path in {Path(p) for p in site.getsitepackages()}:
|
||||
msg = f"PyDeps path ({self.pydeps_path}) can't be a global site-packages"
|
||||
raise ValueError(msg)
|
||||
|
||||
## Reject any Reserved sys.path Entry (as of addon initialization)
|
||||
## -> At addon init, ORIGINAL_SYS_PATH is created as a sys.path copy.
|
||||
## -> Thus, ORIGINAL_SYS_PATH only includes Blender-set paths.
|
||||
## -> (possibly also other addon's manipulations, but that's good!)
|
||||
if self.pydeps_path in [
|
||||
Path(sys_path) for sys_path in ct.addon.ORIGINAL_SYS_PATH
|
||||
]:
|
||||
msg = f'PyDeps path ({self.pydeps_path}) can\'t be any package defined in "sys.path"'
|
||||
raise ValueError(msg)
|
||||
|
||||
## Reject non-existent PyDeps Path
|
||||
if not self.pydeps_path.exists():
|
||||
msg = f"PyDeps path ({self.pydeps_path}) doesn't exist"
|
||||
raise ValueError(msg)
|
||||
|
||||
## Reject non-directory PyDeps Path
|
||||
if not self.pydeps_path.is_dir():
|
||||
msg = f"PyDeps path ({self.pydeps_path}) isn't a directory"
|
||||
raise ValueError(msg)
|
||||
|
||||
## Reject PyDeps Path that is Home Dir (I hope nobody needs this)
|
||||
if self.pydeps_path == Path.home().resolve():
|
||||
msg = f"PyDeps path ({self.pydeps_path}) can't be the user home directory"
|
||||
raise ValueError(msg)
|
||||
|
||||
# Check for Empty Directory
|
||||
if len(pydeps.compute_installed_deplocks(self.pydeps_path)) == 0:
|
||||
## Reject Non-Empty Directories w/o Python Dependencies
|
||||
if any(Path(self.pydeps_path).iterdir()):
|
||||
msg = "PyDeps Path has no installed Python modules, but isn't empty: {self.pydeps_path)"
|
||||
raise ValueError(msg)
|
||||
|
||||
self.report(
|
||||
{'ERROR'},
|
||||
f"PyDeps Path is empty; uninstall can't run: {self.pydeps_path}",
|
||||
)
|
||||
return {'FINISHED'}
|
||||
|
||||
# Brutally Delete / Remake PyDeps Folder
|
||||
## The point isn't to protect against dedicated stupidity.
|
||||
## Just to nudge away a few of the obvious "bad ideas" users might have.
|
||||
## TODO: Handle rmtree.avoids_symlink_attacks
|
||||
## TODO: Handle audit events
|
||||
log.warning(
|
||||
'Deleting and Creating Folder at "%s": %s',
|
||||
'pydeps_path',
|
||||
str(self.pydeps_path),
|
||||
)
|
||||
shutil.rmtree(self.pydeps_path)
|
||||
self.pydeps_path.mkdir()
|
||||
|
||||
# Update Changed PyDeps
|
||||
ct.addon.prefs().on_addon_pydeps_changed()
|
||||
return {'FINISHED'}
|
||||
|
||||
|
||||
|
|
|
@@ -0,0 +1,115 @@
import os
import re
import subprocess
import sys
import time
from pathlib import Path

from . import pydeps, simple_logger

log = simple_logger.get(__name__)

PROCESS: subprocess.Popen | None = None
PROGRESS: float | None = None
PROGRESS_FRAC: tuple[str, str] | None = None


def run(reqs_path: Path, pydeps_path: Path, install_log: Path) -> None:
    global PROCESS  # noqa: PLW0603

    if PROCESS is not None:
        msg = 'A pip process is already loaded'
        raise ValueError(msg)

    # Path to Blender's Bundled Python
    ## bpy.app.binary_path_python was deprecated in 2.91.
    ## sys.executable points to the correct bundled Python.
    ## See <https://developer.blender.org/docs/release_notes/2.91/python_api/>
    cmdline = [
        sys.executable,
        '-m',
        'pip',
        'install',
        '-r',
        str(reqs_path),
        '--target',
        str(pydeps_path),
        '--log',
        str(install_log),
        '--disable-pip-version-check',
    ]

    log.debug(
        'pip cmdline: %s',
        ' '.join(cmdline),
    )

    PROCESS = subprocess.Popen(
        cmdline,
        env=os.environ.copy() | {'PYTHONUNBUFFERED': '1'},
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )


def is_loaded() -> bool:
    return PROCESS is not None


def is_running() -> bool:
    if PROCESS is None:
        msg = "Tried to check whether a process that doesn't exist is running"
        raise ValueError(msg)

    return PROCESS.poll() is None


def returncode() -> int:
    if not is_running() and PROCESS is not None:
        return PROCESS.returncode

    msg = "Can't get process return code of running/nonexistent process"
    raise ValueError(msg)


def kill() -> None:
    global PROCESS  # noqa: PLW0603

    if not is_running():
        msg = "Can't kill process that isn't running"
        raise ValueError(msg)

    PROCESS.kill()


def reset() -> None:
    global PROCESS  # noqa: PLW0603
    global PROGRESS  # noqa: PLW0603
    global PROGRESS_FRAC  # noqa: PLW0603

    PROCESS = None
    PROGRESS = None
    PROGRESS_FRAC = None


RE_COLLECTED_DEPLOCK = re.compile(r'Collecting (\w+==[\w\.]+)')


def update_progress(pip_install_log_path: Path):
    global PROGRESS  # noqa: PLW0603
    global PROGRESS_FRAC  # noqa: PLW0603

    if not pip_install_log_path.is_file():
        msg = "Can't parse progress from non-existent pip-install log"
        raise ValueError(msg)

    # start_time = time.perf_counter()
    with pip_install_log_path.open('r') as f:
        pip_install_log = f.read()
    # print('READ', time.perf_counter() - start_time)

    found_deplocks = set(RE_COLLECTED_DEPLOCK.findall(pip_install_log))
    # print('SETUP', time.perf_counter() - start_time)
    PROGRESS = len(found_deplocks) / len(pydeps.DEPS_REQ_DEPLOCKS)
    PROGRESS_FRAC = (str(len(found_deplocks)), str(len(pydeps.DEPS_REQ_DEPLOCKS)))
    # print('COMPUTED', time.perf_counter() - start_time)
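For reference, a sketch of how the module above is meant to be driven; the paths are hypothetical, the import path is assumed from the addon layout, and the real addon polls from the operator's 0.25 s window-manager timer rather than sleeping:

import time
from pathlib import Path

from blender_maxwell.nodeps.utils import pip_process, pydeps  # import path assumed

reqs = Path('requirements.lock')      # hypothetical
target = Path('.addon_dependencies')  # hypothetical
log_file = Path('.pip_install.log')   # hypothetical

pydeps.check_pydeps(reqs, target)  # populates DEPS_REQ_DEPLOCKS, used for the progress ratio
pip_process.run(reqs, target, log_file)

while pip_process.is_running():
    if log_file.is_file():
        pip_process.update_progress(log_file)
        print(f'progress: {pip_process.PROGRESS:.0%}')
    time.sleep(0.25)  # stand-in for the modal operator's TIMER events

ok = pip_process.returncode() == 0
pip_process.reset()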
@ -1,6 +1,7 @@
|
|||
"""Tools for fearless managemenet of addon-specific Python dependencies."""
|
||||
|
||||
import contextlib
|
||||
import functools
|
||||
import importlib.metadata
|
||||
import os
|
||||
import sys
|
||||
|
@ -17,6 +18,8 @@ log = simple_logger.get(__name__)
|
|||
####################
|
||||
DEPS_OK: bool = False ## Presume no (but we don't know yet)
|
||||
DEPS_ISSUES: list[str] = [] ## No known issues (yet)
|
||||
DEPS_REQ_DEPLOCKS: set[str] = set()
|
||||
DEPS_INST_DEPLOCKS: set[str] = set()
|
||||
|
||||
|
||||
####################
|
||||
|
@ -88,6 +91,28 @@ def conform_pypi_package_deplock(deplock: str) -> str:
|
|||
return deplock.lower().replace('_', '-')
|
||||
|
||||
|
||||
def compute_required_deplocks(
|
||||
path_requirementslock: Path,
|
||||
) -> set[str]:
|
||||
with path_requirementslock.open('r') as file:
|
||||
return {
|
||||
conform_pypi_package_deplock(line)
|
||||
for raw_line in file.readlines()
|
||||
if (line := raw_line.strip()) and not line.startswith('#')
|
||||
}
|
||||
|
||||
|
||||
def compute_installed_deplocks(
|
||||
path_deps: Path,
|
||||
) -> set[str]:
|
||||
return {
|
||||
conform_pypi_package_deplock(
|
||||
f'{dep.metadata["Name"]}=={dep.metadata["Version"]}'
|
||||
)
|
||||
for dep in importlib.metadata.distributions(path=[str(path_deps.resolve())])
|
||||
}
|
||||
|
||||
|
||||
def deplock_conflicts(
|
||||
path_requirementslock: Path,
|
||||
path_deps: Path,
|
||||
|
@ -110,25 +135,12 @@ def deplock_conflicts(
|
|||
- **Missing**: Something should be installed that isn't.
|
||||
- **Superfluous**: Something is installed that shouldn't be.
|
||||
"""
|
||||
# DepLocks: Required
|
||||
with path_requirementslock.open('r') as file:
|
||||
required_depslock = {
|
||||
conform_pypi_package_deplock(line)
|
||||
for raw_line in file.readlines()
|
||||
if (line := raw_line.strip()) and not line.startswith('#')
|
||||
}
|
||||
|
||||
# DepLocks: Installed
|
||||
installed_depslock = {
|
||||
conform_pypi_package_deplock(
|
||||
f'{dep.metadata["Name"]}=={dep.metadata["Version"]}'
|
||||
)
|
||||
for dep in importlib.metadata.distributions(path=[str(path_deps.resolve())])
|
||||
}
|
||||
required_deplocks = compute_required_deplocks(path_requirementslock)
|
||||
installed_deplocks = compute_installed_deplocks(path_deps)
|
||||
|
||||
# Determine Diff of Required vs. Installed
|
||||
req_not_inst = required_depslock - installed_depslock
|
||||
inst_not_req = installed_depslock - required_depslock
|
||||
req_not_inst = required_deplocks - installed_deplocks
|
||||
inst_not_req = installed_deplocks - required_deplocks
|
||||
conflicts = {
|
||||
req.split('==')[0]: (req.split('==')[1], inst.split('==')[1])
|
||||
for req in req_not_inst
|
||||
|
@ -181,6 +193,8 @@ def check_pydeps(path_requirementslock: Path, path_deps: Path):
|
|||
"""
|
||||
global DEPS_OK # noqa: PLW0603
|
||||
global DEPS_ISSUES # noqa: PLW0603
|
||||
global DEPS_REQ_DEPLOCKS # noqa: PLW0603
|
||||
global DEPS_INST_DEPLOCKS # noqa: PLW0603
|
||||
|
||||
log.info(
|
||||
'Analyzing PyDeps at: %s',
|
||||
|
@ -200,4 +214,6 @@ def check_pydeps(path_requirementslock: Path, path_deps: Path):
|
|||
DEPS_OK = True
|
||||
DEPS_ISSUES = []
|
||||
|
||||
DEPS_REQ_DEPLOCKS = compute_required_deplocks(path_requirementslock)
|
||||
DEPS_INST_DEPLOCKS = compute_installed_deplocks(path_deps)
|
||||
return DEPS_OK
|
||||
|
|
|
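
The Missing/Superfluous/conflict classification above boils down to two set differences over `name==version` strings; a small, purely hypothetical illustration (package pins invented for the example, not taken from requirements.lock):

required_deplocks = {'numpy==1.24.2', 'sympy==1.12', 'rich==13.7.0'}
installed_deplocks = {'numpy==1.23.0', 'rich==13.7.0'}

req_not_inst = required_deplocks - installed_deplocks  # {'numpy==1.24.2', 'sympy==1.12'}
inst_not_req = installed_deplocks - required_deplocks  # {'numpy==1.23.0'}

# 'numpy' appears on both sides => version conflict (1.24.2 required vs 1.23.0 installed).
# 'sympy' only appears as required => missing; nothing here is merely superfluous.
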
@@ -2,6 +2,7 @@ import logging
import typing as typ
from pathlib import Path

log = logging.getLogger(__name__)
## TODO: Hygiene; don't try to own all root loggers.

LogLevel: typ.TypeAlias = int

@@ -18,8 +19,6 @@ LOG_LEVEL_MAP: dict[str, LogLevel] = {
    'CRITICAL': logging.CRITICAL,
}

SIMPLE_LOGGER_PREFIX = 'simple::'

STREAM_LOG_FORMAT = 11 * ' ' + '%(levelname)-8s %(message)s (%(name)s)'
FILE_LOG_FORMAT = STREAM_LOG_FORMAT


@@ -27,6 +26,7 @@ FILE_LOG_FORMAT = STREAM_LOG_FORMAT
# - Globals
####################
CACHE = {
    'simple_loggers': set(),
    'console_level': None,
    'file_path': None,
    'file_level': logging.NOTSET,

@@ -72,7 +72,7 @@ def file_handler(path_log_file: Path, level: LogLevel) -> logging.FileHandler:
####################
# - Logger Setup
####################
def setup_logger(
def update_logger(
    cb_console_handler: typ.Callable[[LogLevel], LogHandler],
    cb_file_handler: typ.Callable[[Path, LogLevel], LogHandler],
    logger: logging.Logger,

@@ -117,37 +117,10 @@ def setup_logger(
        logger.addHandler(cb_file_handler(file_path, file_level))


def get(module_name) -> logging.Logger:
    """Get a simple logger from the module name.

    Should be used by calling ex. `LOG = simple_logger.get(__name__)` in the module wherein logging is desired.
    Should **only** be used if the dependencies aren't yet available for using `blender_maxwell.utils.logger`.

    Uses the global `CACHE` to store `console_level`, `file_path`, and `file_level`, since addon preferences aren't yet available.

    Parameters:
        module_name: The name of the module to create a logger for.
            Should be set to `__name__`.
    """
    logger = logging.getLogger(SIMPLE_LOGGER_PREFIX + module_name)

    # Reuse Cached Arguments from Last sync_*
    setup_logger(
        console_handler,
        file_handler,
        logger,
        console_level=CACHE['console_level'],
        file_path=CACHE['file_path'],
        file_level=CACHE['file_level'],
    )

    return logger


####################
# - Logger Sync
# - Logger Initialization
####################
def sync_bootstrap_logging(
def init_simple_logger_defaults(
    console_level: LogLevel | None = None,
    file_path: Path | None = None,
    file_level: LogLevel = logging.NOTSET,

@@ -164,25 +137,58 @@ def sync_bootstrap_logging(
        file_level: The file log level threshold to store in `CACHE`.
            Only needs to be set if `file_path` is not `None`.
    """
    CACHE['simple_loggers'].add(__name__)
    CACHE['console_level'] = console_level
    CACHE['file_path'] = file_path
    CACHE['file_level'] = file_level

    logger_logger = logging.getLogger(__name__)
    for name in logging.root.manager.loggerDict:
        logger = logging.getLogger(name)
        setup_logger(
    # Setup __name__ Logger
    update_logger(
        console_handler,
        file_handler,
        logger,
        log,
        console_level=console_level,
        file_path=file_path,
        file_level=file_level,
    )
    logger_logger.info('Bootstrapped Simple Logging w/Settings %s', str(CACHE))
    log.info('Initialized Simple Logging w/Settings %s', str(CACHE))


def sync_all_loggers(
####################
# - Logger Access
####################
def get(module_name) -> logging.Logger:
    """Get a simple logger from the module name.

    Should be used by calling ex. `LOG = simple_logger.get(__name__)` in the module wherein logging is desired.
    Should **only** be used if the dependencies aren't yet available for using `blender_maxwell.utils.logger`.

    Uses the global `CACHE` to store `console_level`, `file_path`, and `file_level`, since addon preferences aren't yet available.

    Parameters:
        module_name: The name of the module to create a logger for.
            Should be set to `__name__`.
    """
    logger = logging.getLogger(module_name)
    CACHE['simple_loggers'].add(module_name)

    # Reuse Cached Arguments from Last sync_*
    update_logger(
        console_handler,
        file_handler,
        logger,
        console_level=CACHE['console_level'],
        file_path=CACHE['file_path'],
        file_level=CACHE['file_level'],
    )

    return logger


####################
# - Logger Sync
####################
def update_all_loggers(
    cb_console_handler: typ.Callable[[LogLevel], LogHandler],
    cb_file_handler: typ.Callable[[Path, LogLevel], LogHandler],
    console_level: LogLevel | None,

@@ -191,8 +197,8 @@ def sync_all_loggers(
):
    """Update all loggers to conform to the given per-handler on/off state and log level.

    This runs the corresponding `setup_logger()` for all active loggers.
    Thus, all parameters are identical to `setup_logger()`.
    This runs the corresponding `update_logger()` for all active loggers.
    Thus, all parameters are identical to `update_logger()`.
    """
    CACHE['console_level'] = console_level
    CACHE['file_path'] = file_path

@@ -200,7 +206,7 @@ def sync_all_loggers(

    for name in logging.root.manager.loggerDict:
        logger = logging.getLogger(name)
        setup_logger(
        update_logger(
            cb_console_handler,
            cb_file_handler,
            logger,

@@ -221,5 +227,9 @@ def simple_loggers():
    return [
        logging.getLogger(name)
        for name in logging.root.manager.loggerDict
        if name.startswith(SIMPLE_LOGGER_PREFIX)
        if name in CACHE['simple_loggers']
    ]


def clear_simple_loggers():
    CACHE['simple_loggers'].clear()
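
Taken together, the renames above describe a small lifecycle; a hedged sketch of the intended call order, assuming the module is importable as `blender_maxwell.nodeps.utils.simple_logger` and that `update_all_loggers` also accepts the cached `file_path`/`file_level` values (only partially visible in these hunks):

import logging

from blender_maxwell.nodeps.utils import simple_logger

# 1. Bootstrap, before deps/preferences exist: seed CACHE and configure this module's logger.
simple_logger.init_simple_logger_defaults(console_level=logging.DEBUG)

# 2. Per-module loggers reuse the cached settings and register themselves in CACHE['simple_loggers'].
log = simple_logger.get(__name__)

# 3. Later (ex. when preferences change), reconfigure every known logger at once.
simple_logger.update_all_loggers(
    simple_logger.console_handler,
    simple_logger.file_handler,
    console_level=logging.INFO,
    file_path=None,
    file_level=logging.NOTSET,
)
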
@@ -1,12 +1,13 @@
import bpy

from blender_maxwell.utils import logger as logger
from blender_maxwell import contracts as ct
from blender_maxwell.utils import logger

log = logger.get(__name__)


class ConnectViewerNode(bpy.types.Operator):
    bl_idname = 'blender_maxwell.connect_viewer_node'
    bl_idname = ct.OperatorType.ConnectViewerNode
    bl_label = 'Connect Viewer to Active'
    bl_description = 'Connect active node to Viewer Node'
    bl_options = {'REGISTER', 'UNDO'}

@@ -62,7 +63,7 @@ BL_REGISTER = [
BL_HOTKEYS = [
    {
        '_': (
            ConnectViewerNode.bl_idname,
            ct.OperatorType.ConnectViewerNode,
            'LEFTMOUSE',
            'PRESS',
        ),
@@ -6,7 +6,7 @@ import bpy
from . import contracts as ct
from . import registration
from .nodeps.operators import install_deps, uninstall_deps
from .nodeps.utils import pydeps, simple_logger
from .nodeps.utils import pip_process, pydeps, simple_logger

log = simple_logger.get(__name__)


@@ -84,9 +84,10 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
            ('ERROR', 'Error', 'Error'),
            ('CRITICAL', 'Critical', 'Critical'),
        ],
        default='INFO',
        default='DEBUG',
        update=lambda self, _: self.on_addon_logging_changed(),
    )
    ## TODO: Derive default from BOOTSTRAP_LOG_LEVEL

    ## File Logging
    use_log_file: bpy.props.BoolProperty(

@@ -105,7 +106,7 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
            ('ERROR', 'Error', 'Error'),
            ('CRITICAL', 'Critical', 'Critical'),
        ],
        default='INFO',
        default='DEBUG',
        update=lambda self, _: self.on_addon_logging_changed(),
    )


@@ -121,7 +122,7 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
    def log_file_path(self) -> Path:
        return Path(bpy.path.abspath(self.bl__log_file_path))

    @pydeps_path.setter
    @log_file_path.setter
    def log_file_path(self, path: Path) -> None:
        self.bl__log_file_path = str(path.resolve())


@@ -155,7 +156,7 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):

        # Sync Single Logger / All Loggers
        if single_logger_to_setup is not None:
            logger.setup_logger(
            logger.update_logger(
                logger.console_handler,
                logger.file_handler,
                single_logger_to_setup,

@@ -163,7 +164,7 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
            )
        else:
            log.info('Re-Configuring All Loggers')
            logger.sync_all_loggers(
            logger.update_all_loggers(
                logger.console_handler,
                logger.file_handler,
                **log_setup_kwargs,

@@ -185,9 +186,10 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
        if pydeps.check_pydeps(ct.addon.PATH_REQS, self.pydeps_path):
            # Re-Sync Loggers
            ## We can now upgrade all loggers to the fancier loggers.
            for _log in simple_logger.simple_loggers:
            for _log in simple_logger.simple_loggers():
                log.debug('Upgrading Logger (%s)', str(_log))
                self.on_addon_logging_changed(single_logger_to_setup=_log)
            simple_logger.clear_simple_loggers()

            # Run Registrations Waiting on DEPS_SATISFIED
            ## Since the deps are OK, we can now register the whole addon.

@@ -205,7 +207,6 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
                ct.OperatorType.ManagePyDeps,
                'INVOKE_DEFAULT',
                bl__pydeps_path=str(self.pydeps_path),
                bl__pydeps_reqlock_path=str(ct.addon.PATH_REQS),
            )
            ## TODO: else:
            ## TODO: Can we 'downgrade' the loggers back to simple loggers?

@@ -219,6 +220,9 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
        layout = self.layout
        num_pydeps_issues = len(pydeps.DEPS_ISSUES)

        ####################
        # - Logging
        ####################
        # Box w/Split: Log Level
        box = layout.box()
        row = box.row()

@@ -248,9 +252,11 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
        row.enabled = self.use_log_file
        row.prop(self, 'log_level_file')

        ####################
        # - Dependencies
        ####################
        # Box: Dependency Status
        box = layout.box()
        ## Row: Header
        row = box.row(align=True)
        row.alignment = 'CENTER'
        row.label(text='Python Dependencies')

@@ -273,7 +279,7 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
        ## Row: More Information Panel
        col = box.column(align=True)
        header, panel = col.panel('pydeps_issues', default_closed=True)
        header.label(text=f'Install Mismatches ({num_pydeps_issues})')
        header.label(text=f'Show Conflicts ({num_pydeps_issues})')
        if panel is not None:
            grid = panel.grid_flow()
            for issue in pydeps.DEPS_ISSUES:

@@ -296,6 +302,25 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
        )
        op.bl__pydeps_path = str(self.pydeps_path)

        ## Row: Deps Install Progress
        row = box.row()
        num_req_deplocks = len(pydeps.DEPS_REQ_DEPLOCKS)
        if pydeps.DEPS_OK:
            row.progress(
                text=f'{num_req_deplocks}/{num_req_deplocks} Installed',
                factor=1.0,
            )
        elif pip_process.PROGRESS is not None:
            row.progress(
                text='/'.join(pip_process.PROGRESS_FRAC) + ' Installed',
                factor=float(pip_process.PROGRESS),
            )
        else:
            row.progress(
                text=f'0/{num_req_deplocks} Installed',
                factor=0.0,
            )


####################
# - Blender Registration
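
One way the background log-parsing could keep the progress bar above moving is a `bpy.app.timers` callback that re-parses the pip log and redraws open UI areas while the install runs. The sketch below is illustrative only: the function name, polling interval, absolute import path, and the idea of registering it right after the subprocess starts are assumptions, not part of this commit.

from pathlib import Path

import bpy

from blender_maxwell.nodeps.utils import pip_process


def poll_pip_progress(install_log_path: Path) -> float | None:
    """Hypothetical timer callback: re-parse the pip log, redraw, reschedule."""
    if pip_process.is_loaded() and pip_process.is_running():
        if install_log_path.is_file():
            pip_process.update_progress(install_log_path)

        # Redraw open areas so draw() re-reads pip_process.PROGRESS.
        for window in bpy.context.window_manager.windows:
            for area in window.screen.areas:
                area.tag_redraw()
        return 0.25  # poll again in 0.25s (arbitrary interval)
    return None  # returning None stops the timer once the process exits


# Registered once, right after the install subprocess is started:
# bpy.app.timers.register(lambda: poll_pip_progress(install_log), first_interval=0.25)
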
@@ -87,7 +87,8 @@ def register_hotkeys(hotkey_defs: list[dict]):
    global _ADDON_KEYMAP  # noqa: PLW0603
    if _ADDON_KEYMAP is None:
        _ADDON_KEYMAP = bpy.context.window_manager.keyconfigs.addon.keymaps.new(
            name=f'{ct.addon.NAME} Keymap',
            name='Node Editor',
            space_type='NODE_EDITOR',
        )
        log.info(
            'Registered Addon Keymap (Base for Keymap Items): %s',
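
For context, a hypothetical sketch of how a `BL_HOTKEYS` entry like the one shown earlier could become a keymap item on that 'Node Editor' keymap; the `ctrl`/`shift` flags and the unpacking of the `'_'` tuple are assumptions, since the consuming code isn't shown in this hunk:

import bpy

km = bpy.context.window_manager.keyconfigs.addon.keymaps.new(
    name='Node Editor',
    space_type='NODE_EDITOR',
)
kmi = km.keymap_items.new(
    ct.OperatorType.ConnectViewerNode,  # operator idname
    'LEFTMOUSE',
    'PRESS',
    ctrl=True,
    shift=True,
)
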
@@ -1,8 +1,12 @@
"""Implements various key caches on instances of Blender objects, especially nodes and sockets."""

## TODO: Note that persist=True on cached_bl_property may cause a draw method to try and write to a Blender property, which Blender disallows.

import enum
import functools
import inspect
import typing as typ
import uuid

import bpy


@@ -13,6 +17,24 @@ log = logger.get(__name__)
InstanceID: typ.TypeAlias = str  ## Stringified UUID4


class Signal(enum.StrEnum):
    """A value used to signal the descriptor via its `__set__`.

    Such a signal **must** be entirely unique: Even a well-thought-out string could conceivably produce a very nasty bug, where instead of setting a descriptor-managed attribute, the user would inadvertently signal the descriptor.

    To make it effectively impossible to confuse any other object whatsoever with a signal, the enum values are set to per-session `uuid.uuid4()`.

    Notes:
        **Do not** use this enum for anything other than directly signalling a `bl_cache` descriptor via its setter.

        **Do not** store this enum `Signal` in a variable or method binding that survives longer than the session.

        **Do not** persist this enum; the values will change whenever `bl_cache` is (re)loaded.
    """

    InvalidateCache: str = str(uuid.uuid4())  # ex. '1569c45a-7cf3-4307-beab-5729c2f8fa4b'


class BLInstance(typ.Protocol):
    """An instance of a blender object, ex. nodes/sockets.


@@ -22,6 +44,8 @@ class BLInstance(typ.Protocol):

    instance_id: InstanceID

    def reset_instance_id(self) -> None: ...

    @classmethod
    def set_prop(
        cls,

@@ -257,14 +281,20 @@ class CachedBLProperty:
        If `self._persist` is `True`, the persistent cache will be checked and filled after the non-persistent cache.

        Notes:
            - The persistent cache keeps the
            - The persistent cache is fast and has good compatibility (courtesy `msgspec` encoding), but isn't nearly as fast as
            - The non-persistent cache keeps the object in memory.
            - The persistent cache serializes the object and stores it as a string on the BLInstance. This is often fast enough and has decent compatibility (courtesy `msgspec`), but it isn't nearly as fast as the non-persistent cache, and there are gotchas.

        Parameters:
            bl_instance: The Blender object this prop
        """
        if bl_instance is None:
            return None
        if not bl_instance.instance_id:
            log.debug(
                "Can't Get CachedBLProperty: Instance ID not (yet) defined on BLInstance %s",
                str(bl_instance),
            )
            return

        # Create Non-Persistent Cache Entry
        ## Prefer explicit cache management to 'defaultdict'

@@ -307,6 +337,19 @@ class CachedBLProperty:
        Parameters:
            bl_instance: The Blender object this prop
        """
        if bl_instance is None:
            return
        if not bl_instance.instance_id:
            log.debug(
                "Can't Set CachedBLProperty: Instance ID not (yet) defined on BLInstance %s",
                str(bl_instance),
            )
            return

        if value == Signal.InvalidateCache:
            self._invalidate_cache(bl_instance)
            return

        if self._setter_method is None:
            msg = f'Tried to set "{value}" to "{self.prop_name}" on "{bl_instance.bl_label}", but a setter was not defined'
            raise NotImplementedError(msg)

@@ -363,14 +406,6 @@ class CachedBLProperty:

        Parameters:
            bl_instance: The instance of the Blender object that contains this property.

        Examples:
            It is discouraged to run this directly, as any use-pattern that requires manually invalidating a property cache is **likely an anti-pattern**.

            With that disclaimer, manual invocation looks like this:
            ```python
            bl_instance.attr._invalidate_cache()
            ```
        """
        # Invalidate Non-Persistent Cache
        if CACHE_NOPERSIST.get(bl_instance.instance_id) is not None:

@@ -494,11 +529,6 @@ class BLField:
        ## 2. Set bpy.props.StringProperty string.
        def setter(_self: BLInstance, value: AttrType) -> None:
            encoded_value = serialize.encode(value).decode('utf-8')
            log.debug(
                'Writing BLField attr "%s" w/encoded value: %s',
                bl_attr_name,
                encoded_value,
            )
            setattr(_self, bl_attr_name, encoded_value)

        # Initialize CachedBLProperty w/Getter and Setter
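
Because `CachedBLProperty` intercepts `__set__`, cache invalidation is expressed as a plain assignment of the sentinel above. A minimal, hypothetical usage sketch; `node`, `data_info`, and `some_recomputed_value` are invented names, and the module is assumed to be importable as `blender_maxwell.utils.bl_cache`:

from blender_maxwell.utils import bl_cache

# Invalidate the cached value: the descriptor recognizes the sentinel in __set__
# and calls _invalidate_cache() instead of storing anything.
node.data_info = bl_cache.Signal.InvalidateCache

# An ordinary assignment still routes through the property's setter (if one is defined).
node.data_info = some_recomputed_value
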
@@ -262,6 +262,14 @@ def sympy_to_python(scalar: sp.Basic) -> int | float | complex | tuple | list:
        raise ValueError(msg)

def pretty_symbol(sym: sp.Symbol) -> str:
    # Check the most specific domain first: under sympy's assumption system,
    # integer/real symbols also report is_complex=True.
    return f'{sym.name} ∈ ' + (
        'ℤ'
        if sym.is_integer
        else ('ℝ' if sym.is_real else ('ℂ' if sym.is_complex else '?'))
    )
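
A few illustrative calls (hypothetical symbols, assuming the most specific domain is reported, as in the check ordering above):

import sympy as sp

pretty_symbol(sp.Symbol('n', integer=True))  # 'n ∈ ℤ'
pretty_symbol(sp.Symbol('f', real=True))     # 'f ∈ ℝ'
pretty_symbol(sp.Symbol('z', complex=True))  # 'z ∈ ℂ'
pretty_symbol(sp.Symbol('x'))                # 'x ∈ ?'  (no assumptions declared)
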


####################
# - Pydantic-Validated SympyExpr
####################

@@ -355,7 +363,7 @@ class _SympyExpr:


SympyExpr = typx.Annotated[
    SympyType,
    sp.Basic,  ## Treat all sympy types as sp.Basic
    _SympyExpr,
]

@@ -453,7 +461,7 @@ def ConstrSympyExpr(  # noqa: N802, PLR0913
        return expr

    return typx.Annotated[
        SympyType,
        sp.Basic,
        _SympyExpr,
        pyd.AfterValidator(validate_expr),
    ]
@@ -12,9 +12,9 @@ from ..nodeps.utils.simple_logger import (
    LOG_LEVEL_MAP,  # noqa: F401
    LogLevel,
    loggers,  # noqa: F401
    setup_logger,  # noqa: F401
    simple_loggers,  # noqa: F401
    sync_all_loggers,  # noqa: F401
    update_all_loggers,  # noqa: F401
    update_logger,  # noqa: F401
)

OUTPUT_CONSOLE = rich.console.Console(

@@ -60,12 +60,3 @@ def get(module_name):
    ct.addon.prefs().on_addon_logging_changed(single_logger_to_setup=logger)

    return logger


####################
# - Logger Sync
####################
# def upgrade_simple_loggers():
#     """Upgrades simple loggers to rich-enabled loggers."""
#     for logger in simple_loggers():
#         setup_logger(console_handler, file_handler, logger)
@@ -1,3 +1,5 @@
import typing as typ

import pydantic as pyd
import sympy as sp
import sympy.physics.units as spu