From 480679a3c08cd7c5ced066fc6036b17ddb2740e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sofus=20Albert=20H=C3=B8gsbro=20Rose?= Date: Fri, 12 Apr 2024 15:39:13 +0200 Subject: [PATCH] refactor: Streamlined graph-update semantics. --- TODO.md | 12 +- pyproject.toml | 3 +- requirements-dev.lock | 3 + requirements.lock | 3 + .../node_trees/maxwell_sim_nodes/bl_cache.py | 529 ++++++++ .../maxwell_sim_nodes/bl_socket_map.py | 2 +- .../maxwell_sim_nodes/contracts/bl.py | 1 + .../contracts/data_flow_actions.py | 32 + .../maxwell_sim_nodes/contracts/data_flows.py | 16 +- .../managed_objs/__init__.py | 8 +- .../managed_objs/managed_bl_mesh.py | 7 + .../managed_objs/managed_bl_modifier.py | 1 + .../node_trees/maxwell_sim_nodes/node_tree.py | 340 +++-- .../nodes/analysis/extract_data.py | 27 +- .../maxwell_sim_nodes/nodes/base.py | 1141 ++++++++++------- .../maxwell_sim_nodes/nodes/events.py | 373 +++--- .../file_importers/tidy_3d_file_importer.py | 2 +- .../nodes/inputs/wave_constant.py | 97 +- .../web_importers/tidy_3d_web_importer.py | 17 +- .../nodes/monitors/eh_field_monitor.py | 3 - .../monitors/field_power_flux_monitor.py | 59 +- .../maxwell_sim_nodes/nodes/outputs/viewer.py | 35 +- .../nodes/simulations/sim_domain.py | 5 +- .../nodes/structures/geonodes_structure.py | 6 +- .../structures/primitives/box_structure.py | 5 +- .../structures/primitives/sphere_structure.py | 5 +- .../nodes/utilities/combine.py | 14 +- .../maxwell_sim_nodes/sockets/base.py | 17 +- src/blender_maxwell/services/tdcloud.py | 8 +- 29 files changed, 1905 insertions(+), 866 deletions(-) create mode 100644 src/blender_maxwell/node_trees/maxwell_sim_nodes/bl_cache.py diff --git a/TODO.md b/TODO.md index d6e354a..3790f69 100644 --- a/TODO.md +++ b/TODO.md @@ -1,12 +1,17 @@ # Acute Tasks - [x] Implement Material Import for Maxim Data - [x] Implement Robust DataFlowKind for list-like / spectral-like composite types +- [ ] Unify random node/socket caches. - [ ] Finish the "Low-Hanging Fruit" Nodes - [ ] Move preview GN trees to the asset library. # Nodes +## Analysis +- [ ] Extract +- [ ] Viz + ## Inputs - [x] Wave Constant - [x] Implement export of frequency / wavelength array/range. @@ -161,12 +166,7 @@ - [ ] Sim Grid Axes / Uniform Sim Grid Axis - [ ] Sim Grid Axes / Array Sim Grid Axis -## Converters -- [ ] Math - - [ ] Implement common operations w/secondary choice of socket type based on a custom internal data structure - - [ ] Implement angfreq/frequency/vacwl conversion. - - [ ] Implement spectral math on SDs - - [ ] Implement easy derivation of ex. transmission and reflection. 
+## Utilities - [ ] Separate - [x] Combine - [x] Implement concatenation of sim-critical socket types into their multi-type diff --git a/pyproject.toml b/pyproject.toml index db57e4f..1911238 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,7 @@ dependencies = [ "charset-normalizer==2.0.10", "certifi==2021.10.8", "jax[cpu]>=0.4.26", + "msgspec[toml]>=0.18.6", ] readme = "README.md" requires-python = "~= 3.11" @@ -136,4 +137,4 @@ max-args = 6 [tool.ruff.format] quote-style = "single" indent-style = "tab" -docstring-code-format = true +docstring-code-format = false diff --git a/requirements-dev.lock b/requirements-dev.lock index 28fae0b..dca0820 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -63,6 +63,7 @@ ml-dtypes==0.4.0 # via jaxlib mpmath==1.3.0 # via sympy +msgspec==0.18.6 networkx==3.2 numpy==1.24.3 # via contourpy @@ -135,6 +136,8 @@ sympy==1.12 tidy3d==2.6.3 toml==0.10.2 # via tidy3d +tomli-w==1.0.0 + # via msgspec toolz==0.12.1 # via dask # via partd diff --git a/requirements.lock b/requirements.lock index dad5d98..367c92c 100644 --- a/requirements.lock +++ b/requirements.lock @@ -62,6 +62,7 @@ ml-dtypes==0.4.0 # via jaxlib mpmath==1.3.0 # via sympy +msgspec==0.18.6 networkx==3.2 numpy==1.24.3 # via contourpy @@ -133,6 +134,8 @@ sympy==1.12 tidy3d==2.6.3 toml==0.10.2 # via tidy3d +tomli-w==1.0.0 + # via msgspec toolz==0.12.1 # via dask # via partd diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/bl_cache.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/bl_cache.py new file mode 100644 index 0000000..a45493c --- /dev/null +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/bl_cache.py @@ -0,0 +1,529 @@ +"""Implements various key caches on instances of Blender objects, especially nodes and sockets.""" + +import functools +import inspect +import typing as typ + +import bpy +import msgspec +import sympy as sp +import sympy.physics.units as spu + +from ...utils import extra_sympy_units as spux +from ...utils import logger +from . import contracts as ct +from . import managed_objs, sockets + +log = logger.get(__name__) + +InstanceID: typ.TypeAlias = str ## Stringified UUID4 + + +class BLInstance(typ.Protocol): + """An instance of a blender object, ex. nodes/sockets. + + Attributes: + instance_id: Stringified UUID4 that uniquely identifies an instance, among all active instances on all active classes. + """ + + instance_id: InstanceID + + +EncodableValue: typ.TypeAlias = typ.Any ## msgspec-compatible +PropGetMethod: typ.TypeAlias = typ.Callable[[BLInstance], EncodableValue] +PropSetMethod: typ.TypeAlias = typ.Callable[[BLInstance, EncodableValue], None] + +#################### +# - (De)Serialization +#################### +EncodedComplex: typ.TypeAlias = tuple[float, float] | list[float, float] +EncodedSympy: typ.TypeAlias = str +EncodedManagedObj: typ.TypeAlias = tuple[str, str] | list[str, str] +EncodedPydanticModel: typ.TypeAlias = tuple[str, str] | list[str, str] + + +def _enc_hook(obj: typ.Any) -> EncodableValue: + """Translates types not natively supported by `msgspec`, to an encodable form supported by `msgspec`. + + Parameters: + obj: The object of arbitrary type to transform into an encodable value. + + Returns: + A value encodable by `msgspec`. + + Raises: + NotImplementedError: When the type transformation hasn't been implemented. 
+ """ + if isinstance(obj, complex): + return (obj.real, obj.imag) + if isinstance(obj, sp.Basic | sp.MatrixBase | sp.Expr | spu.Quantity): + return sp.srepr(obj) + if isinstance(obj, managed_objs.ManagedObj): + return (obj.name, obj.__class__.__name__) + if isinstance(obj, ct.schemas.SocketDef): + return (obj.model_dump(), obj.__class__.__name__) + + msg = f'Can\'t encode "{obj}" of type {type(obj)}' + raise NotImplementedError(msg) + + +def _dec_hook(_type: type, obj: EncodableValue) -> typ.Any: + """Translates the `msgspec`-encoded form of an object back to its true form. + + Parameters: + _type: The type to transform the `msgspec`-encoded object back into. + obj: The encoded object of to transform back into an encodable value. + + Returns: + A value encodable by `msgspec`. + + Raises: + NotImplementedError: When the type transformation hasn't been implemented. + """ + if _type is complex and isinstance(obj, EncodedComplex): + return complex(obj[0], obj[1]) + if ( + _type is sp.Basic + and isinstance(obj, EncodedSympy) + or _type is sp.Expr + and isinstance(obj, EncodedSympy) + or _type is sp.MatrixBase + and isinstance(obj, EncodedSympy) + or _type is spu.Quantity + and isinstance(obj, EncodedSympy) + ): + return sp.sympify(obj).subs(spux.ALL_UNIT_SYMBOLS) + if ( + _type is managed_objs.ManagedBLMesh + and isinstance(obj, EncodedManagedObj) + or _type is managed_objs.ManagedBLImage + and isinstance(obj, EncodedManagedObj) + or _type is managed_objs.ManagedBLModifier + and isinstance(obj, EncodedManagedObj) + ): + return { + 'ManagedBLMesh': managed_objs.ManagedBLMesh, + 'ManagedBLImage': managed_objs.ManagedBLImage, + 'ManagedBLModifier': managed_objs.ManagedBLModifier, + }[obj[1]](obj[0]) + if _type is ct.schemas.SocketDef: + return getattr(sockets, obj[1])(**obj[0]) + + msg = f'Can\'t decode "{obj}" to type {type(obj)}' + raise NotImplementedError(msg) + + +ENCODER = msgspec.json.Encoder(enc_hook=_enc_hook, order='deterministic') + +_DECODERS: dict[type, msgspec.json.Decoder] = { + complex: msgspec.json.Decoder(type=complex, dec_hook=_dec_hook), + sp.Basic: msgspec.json.Decoder(type=sp.Basic, dec_hook=_dec_hook), + sp.Expr: msgspec.json.Decoder(type=sp.Expr, dec_hook=_dec_hook), + sp.MatrixBase: msgspec.json.Decoder(type=sp.MatrixBase, dec_hook=_dec_hook), + spu.Quantity: msgspec.json.Decoder(type=spu.Quantity, dec_hook=_dec_hook), + managed_objs.ManagedBLMesh: msgspec.json.Decoder( + type=managed_objs.ManagedBLMesh, + dec_hook=_dec_hook, + ), + managed_objs.ManagedBLImage: msgspec.json.Decoder( + type=managed_objs.ManagedBLImage, + dec_hook=_dec_hook, + ), + managed_objs.ManagedBLModifier: msgspec.json.Decoder( + type=managed_objs.ManagedBLModifier, + dec_hook=_dec_hook, + ), + # managed_objs.ManagedObj: msgspec.json.Decoder( + # type=managed_objs.ManagedObj, dec_hook=_dec_hook + # ), ## Doesn't work b/c unions are not explicit + ct.schemas.SocketDef: msgspec.json.Decoder( + type=ct.schemas.SocketDef, + dec_hook=_dec_hook, + ), +} +_DECODER_FALLBACK: msgspec.json.Decoder = msgspec.json.Decoder(dec_hook=_dec_hook) + + +@functools.cache +def DECODER(_type: type) -> msgspec.json.Decoder: # noqa: N802 + """Retrieve a suitable `msgspec.json.Decoder` by-type. + + Parameters: + _type: The type to retrieve a decoder for. + + Returns: + A suitable decoder. 
+ """ + if (decoder := _DECODERS.get(_type)) is not None: + return decoder + + return _DECODER_FALLBACK + + +def decode_any(_type: type, obj: str) -> typ.Any: + naive_decode = DECODER(_type).decode(obj) + if _type == dict[str, ct.schemas.SocketDef]: + return { + socket_name: getattr(sockets, socket_def_list[1])(**socket_def_list[0]) + for socket_name, socket_def_list in naive_decode.items() + } + + log.critical( + 'Naive Decode of "%s" to "%s" (%s)', str(obj), str(naive_decode), str(_type) + ) + return naive_decode + + +#################### +# - Cache: Non-Persistent +#################### +CACHE_NOPERSIST: dict[InstanceID, dict[typ.Any, typ.Any]] = {} + + +def invalidate_nonpersist_instance_id(instance_id: InstanceID) -> None: + """Invalidate any `instance_id` that might be utilizing cache space in `CACHE_NOPERSIST`. + + Note: + This should be run by the `instance_id` owner in its `free()` method. + + Parameters: + instance_id: The ID of the Blender object instance that's being freed. + """ + CACHE_NOPERSIST.pop(instance_id, None) + + +#################### +# - Property Descriptor +#################### +class CachedBLProperty: + """A descriptor that caches a computed attribute of a Blender node/socket/... instance (`bl_instance`), with optional cache persistence. + + Note: + **Accessing the internal `_*` attributes is likely an anti-pattern**. + + `CachedBLProperty` does not own the data; it only provides a convenient interface of running user-provided getter/setters. + This also applies to the `bpy.types.Property` entry created by `CachedBLProperty`, which should not be accessed directly. + + Attributes: + _getter_method: Method of `bl_instance` that computes the value. + _setter_method: Method of `bl_instance` that sets the value. + _persist: Whether to persist the value on a `bpy.types.Property` defined on `bl_instance`. + The name of this `bpy.types.Property` will be `cache__`. + _type: The type of the value, used by the persistent decoder. + """ + + def __init__(self, getter_method: PropGetMethod, persist: bool): + """Initialize the getter (and persistance) of the cached property. + + Notes: + - When `persist` is true, the return annotation of the getter mathod will be used to guide deserialization. + + Parameters: + getter_method: Method of `bl_instance` that computes the value. + persist: Whether to persist the value on a `bpy.types.Property` defined on `bl_instance`. + The name of this `bpy.types.Property` will be `cache__`. + """ + self._getter_method: PropGetMethod = getter_method + self._setter_method: PropSetMethod | None = None + + # Persistance + self._persist: bool = persist + self._type: type | None = ( + inspect.signature(getter_method).return_annotation if persist else None + ) + + # Check Non-Empty Type Annotation + ## For now, just presume that all types can be encoded/decoded. + + # Check Non-Empty Type Annotation + ## For now, just presume that all types can be encoded/decoded. + if self._type is not None and self._type is inspect.Signature.empty: + msg = f'A CachedBLProperty was instantiated with "persist={persist}", but its getter method "{self._getter_method}" has no return type annotation' + raise TypeError(msg) + + def __set_name__(self, owner: type[BLInstance], name: str) -> None: + """Generates the property name from the name of the attribute that this descriptor is assigned to. + + Notes: + - Run by Python when setting an instance of this class to an attribute. 
+ + Parameters: + owner: The class that contains an attribute assigned to an instance of this descriptor. + name: The name of the attribute that an instance of descriptor was assigned to. + """ + self.prop_name: str = name + self._bl_prop_name: str = f'blcache__{name}' + + # Define Blender Property (w/Update Sync) + owner.set_prop( + self._bl_prop_name, + bpy.props.StringProperty, + name=f'DO NOT USE: Cache for {self.prop_name}', + default='', + no_update=True, + ) + + def __get__( + self, bl_instance: BLInstance | None, owner: type[BLInstance] + ) -> typ.Any: + """Retrieves the property from a cache, or computes it and fills the cache(s). + + If `self._persist` is `True`, the persistent cache will be checked and filled after the non-persistent cache. + + Notes: + - The persistent cache keeps the + - The persistent cache is fast and has good compatibility (courtesy `msgspec` encoding), but isn't nearly as fast as + + Parameters: + bl_instance: The Blender object this prop + """ + if bl_instance is None: + return None + # Create Non-Persistent Cache Entry + ## Prefer explicit cache management to 'defaultdict' + if CACHE_NOPERSIST.get(bl_instance.instance_id) is None: + CACHE_NOPERSIST[bl_instance.instance_id] = {} + cache_nopersist = CACHE_NOPERSIST[bl_instance.instance_id] + + # Try Hit on Non-Persistent Cache + if (value := cache_nopersist.get(self._bl_prop_name)) is not None: + return value + + # Try Hit on Persistent Cache + ## Hit: Fill Non-Persistent Cache + if ( + self._persist + and (encoded_value := getattr(bl_instance, self._bl_prop_name)) != '' + ): + value = decode_any(self._type, encoded_value) + cache_nopersist[self._bl_prop_name] = value + return value + + # Compute Value + ## Fill Non-Persistent Cache + ## Fill Persistent Cache (maybe) + value = self._getter_method(bl_instance) + cache_nopersist[self._bl_prop_name] = value + if self._persist: + setattr( + bl_instance, self._bl_prop_name, ENCODER.encode(value).decode('utf-8') + ) + return value + + def __set__(self, bl_instance: BLInstance, value: typ.Any) -> None: + """Runs the user-provided setter, after invalidating the caches. + + Notes: + - This invalidates all caches without re-filling them. + - The caches will be re-filled on the first `__get__` invocation, which may be slow due to having to run the getter method. + + Parameters: + bl_instance: The Blender object this prop + """ + if self._setter_method is None: + msg = f'Tried to set "{value}" to "{self.prop_name}" on "{bl_instance.bl_label}", but a setter was not defined' + raise NotImplementedError(msg) + + # Invalidate Caches + self._invalidate_cache(bl_instance) + + # Set the Value + self._setter_method(bl_instance, value) + + def setter(self, setter_method: PropSetMethod) -> typ.Self: + """Decorator to add a setter to the cached property. + + Returns: + The same descriptor, so that use of the same method name for defining a setter won't change the semantics of the attribute. + + Examples: + Without the decor + ```python + class Test(bpy.types.Node): + bl_label = 'Default' + ... 
+ def method(self) -> str: return self.bl_label + attr = CachedBLProperty(getter_method=method, persist=False) + + @attr.setter + def attr(self, value: str) -> None: + self.bl_label = 'Altered' + ``` + """ + # Validate Setter Signature + setter_sig = inspect.signature(setter_method) + + ## Parameter Length + if (sig_len := len(setter_sig.parameters)) != 2: # noqa: PLR2004 + msg = f'Setter method for "{self.prop_name}" should have 2 parameters, not "{sig_len}"' + raise TypeError(msg) + + ## Parameter Value Type + if (sig_ret_type := setter_sig.return_annotation) is not None: + msg = f'Setter method for "{self.prop_name}" return value type "{sig_ret_type}", but it should be "None" (omitting an annotation does not imply "None")' + raise TypeError(msg) + + self._setter_method = setter_method + return self + + def _invalidate_cache(self, bl_instance: BLInstance) -> None: + """Invalidates all caches that might be storing the computed property value. + + This is invoked by `__set__`. + + Note: + Will not delete the `bpy.props.StringProperty`; instead, it will be set to ''. + + Parameters: + bl_instance: The instance of the Blender object that contains this property. + + Examples: + It is discouraged to run this directly, as any use-pattern that requires manually invalidating a property cache is **likely an anti-pattern**. + + With that disclaimer, manual invocation looks like this: + ```python + bl_instance.attr._invalidate_cache() + ``` + """ + # Invalidate Non-Persistent Cache + if CACHE_NOPERSIST.get(bl_instance.instance_id) is not None: + CACHE_NOPERSIST[bl_instance.instance_id].pop(self._bl_prop_name, None) + + # Invalidate Persistent Cache + if self._persist and getattr(bl_instance, self._bl_prop_name) != '': + setattr(bl_instance, self._bl_prop_name, '') + + +## TODO: How do we invalidate the data that the computed cached property depends on? +#################### +# - Property Decorators +#################### +def cached_bl_property(persist: bool = ...): + """Decorator creating a descriptor that caches a computed attribute of a Blender node/socket. + + Many such `bl_instance`s rely on fast access to computed, cached properties, for example to ensure that `draw()` remains effectively non-blocking. + It is also sometimes desired that this cache persist on `bl_instance`, ex. in the case of loose sockets or cached web data. + + Notes: + - Unfortunately, `functools.cached_property` doesn't work, and can't handle persistance. + - Use `cached_attribute` instead if merely persisting the value is desired. + + Parameters: + persist: Whether or not to persist the cache value in the Blender object. + This should be used when the **source(s) of the computed value also persists with the Blender object**. + For example, this is especially helpful when caching information for use in `draw()` methods, so that reloading the file won't alter the cache. + + Examples: + ```python + class CustomNode(bpy.types.Node): + @bl_cache.cached(persist=True|False) + def computed_prop(self) -> ...: return ... 
+ + print(bl_instance.prop) ## Computes first time + print(bl_instance.prop) ## Cached (maybe persistently in a property, maybe not) + ``` + + When + """ + + def decorator(getter_method: typ.Callable[[BLInstance], None]) -> type: + return CachedBLProperty(getter_method=getter_method, persist=persist) + + return decorator + + +#################### +# - Attribute Descriptor +#################### +class BLField: + """A descriptor that allows persisting arbitrary types in Blender objects, with cached reads.""" + + def __init__(self, default_value: typ.Any, triggers_prop_update: bool = True): + """Initializes and sets the attribute to a given default value. + + Parameters: + default_value: The default value to use if the value is read before it's set. + trigger_prop_update: Whether to run `bl_instance.sync_prop(attr_name)` whenever value is set. + + """ + log.debug( + 'Initializing BLField (default_value=%s, triggers_prop_update=%s)', + str(default_value), + str(triggers_prop_update), + ) + self._default_value: typ.Any = default_value + self._triggers_prop_update: bool = triggers_prop_update + + def __set_name__(self, owner: type[BLInstance], name: str) -> None: + """Sets up getters/setters for attribute access, and sets up a `CachedBLProperty` to internally utilize them. + + Our getter/setter essentially reads/writes to a `bpy.props.StringProperty`, with + + and use them as user-provided getter/setter to internally define a normal non-persistent `CachedBLProperty`. + As a result, we can reuse almost all of the logic in `CachedBLProperty` + + Note: + Run by Python when setting an instance of this class to an attribute. + + Parameters: + owner: The class that contains an attribute assigned to an instance of this descriptor. + name: The name of the attribute that an instance of descriptor was assigned to. + """ + # Compute Name and Type of Property + ## Also compute the internal + attr_name = name + bl_attr_name = f'blattr__{name}' + if (AttrType := inspect.get_annotations(owner).get(name)) is None: # noqa: N806 + msg = f'BLField "{self.prop_name}" must define a type annotation, but doesn\'t.' + raise TypeError(msg) + + # Define Blender Property (w/Update Sync) + encoded_default_value = ENCODER.encode(self._default_value).decode('utf-8') + log.debug( + '%s set to StringProperty w/default "%s" and no_update="%s"', + bl_attr_name, + encoded_default_value, + str(not self._triggers_prop_update), + ) + owner.set_prop( + bl_attr_name, + bpy.props.StringProperty, + name=f'Encoded Attribute for {attr_name}', + default=encoded_default_value, + no_update=not self._triggers_prop_update, + update_with_name=attr_name, + ) + + ## Getter: + ## 1. Initialize bpy.props.StringProperty to Default (if undefined). + ## 2. Retrieve bpy.props.StringProperty string. + ## 3. Decode using annotated type. + def getter(_self: BLInstance) -> AttrType: + return decode_any(AttrType, getattr(_self, bl_attr_name)) + + ## Setter: + ## 1. Initialize bpy.props.StringProperty to Default (if undefined). + ## 3. Encode value (implicitly using the annotated type). + ## 2. Set bpy.props.StringProperty string. + def setter(_self: BLInstance, value: AttrType) -> None: + encoded_value = ENCODER.encode(value).decode('utf-8') + log.debug( + 'Writing BLField attr "%s" w/encoded value: %s', + bl_attr_name, + encoded_value, + ) + setattr(_self, bl_attr_name, encoded_value) + + # Initialize CachedBLProperty w/Getter and Setter + ## This is the usual descriptor assignment procedure. 
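From the consumer's side, the descriptor wired up here is used roughly as follows. This is a hypothetical sketch, not part of the patch: the node class, the `node_type` stand-in, and the `sample_count` field are invented for illustration, and it assumes Blender's Python runtime plus the `set_prop()`/`sync_prop()` machinery that `MaxwellSimNode` defines later in this commit.

```python
from blender_maxwell.node_trees.maxwell_sim_nodes import bl_cache
from blender_maxwell.node_trees.maxwell_sim_nodes import contracts as ct
from blender_maxwell.node_trees.maxwell_sim_nodes.nodes import base


class IllustrativeNode(base.MaxwellSimNode):
    """Hypothetical node showing BLField usage (names are placeholders)."""

    node_type = ct.NodeType.ExtractData  # stand-in only; a real node defines its own member
    bl_label = 'Illustrative'

    # The type annotation is mandatory: __set_name__ reads it via
    # inspect.get_annotations() and uses it to decode the stored string.
    sample_count: int = bl_cache.BLField(100)


# Inside Blender, reading the attribute decodes the msgspec-encoded string in
# the backing 'blattr__sample_count' StringProperty (and caches the result);
# writing re-encodes it and, since triggers_prop_update defaults to True,
# runs the node's sync_prop('sample_count', ...) update chain.
```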
+ self._cached_bl_property = CachedBLProperty(getter_method=getter, persist=False) + self._cached_bl_property.__set_name__(owner, name) + self._cached_bl_property.setter(setter) + + def __get__( + self, bl_instance: BLInstance | None, owner: type[BLInstance] + ) -> typ.Any: + return self._cached_bl_property.__get__(bl_instance, owner) + + def __set__(self, bl_instance: BLInstance, value: typ.Any) -> None: + self._cached_bl_property.__set__(bl_instance, value) diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/bl_socket_map.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/bl_socket_map.py index 795f3c8..7fe24d2 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/bl_socket_map.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/bl_socket_map.py @@ -220,7 +220,7 @@ def _writable_bl_socket_value( _bl_socket_value = value # Compute Blender Socket Value - if isinstance(_bl_socket_value, sp.Basic): + if isinstance(_bl_socket_value, sp.Basic | sp.MatrixBase): bl_socket_value = spux.sympy_to_python(_bl_socket_value) else: bl_socket_value = _bl_socket_value diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/contracts/bl.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/contracts/bl.py index 66a5cae..f68ced0 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/contracts/bl.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/contracts/bl.py @@ -1,3 +1,4 @@ +import enum import pydantic as pyd import typing_extensions as pytypes_ext diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/contracts/data_flow_actions.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/contracts/data_flow_actions.py index 5ab0897..d59c1c8 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/contracts/data_flow_actions.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/contracts/data_flow_actions.py @@ -1,4 +1,7 @@ import enum +import typing as typ + +import typing_extensions as typx class DataFlowAction(enum.StrEnum): @@ -7,8 +10,37 @@ class DataFlowAction(enum.StrEnum): DisableLock = 'disable_lock' # Value + OutputRequested = 'output_requested' DataChanged = 'value_changed' # Previewing ShowPreview = 'show_preview' ShowPlot = 'show_plot' + + def trigger_direction(action: typ.Self) -> typx.Literal['input', 'output']: + """When a given action is triggered, all sockets/nodes/... in this direction should be recursively triggered. + + Parameters: + action: The action for which to retrieve the trigger direction. + + Returns: + The trigger direction, which can be used ex. in nodes to select `node.inputs` or `node.outputs`. 
+ """ + return { + DataFlowAction.EnableLock: 'input', + DataFlowAction.DisableLock: 'input', + DataFlowAction.DataChanged: 'output', + DataFlowAction.OutputRequested: 'input', + DataFlowAction.ShowPreview: 'input', + DataFlowAction.ShowPlot: 'input', + }[action] + + def stop_if_no_event_methods(action: typ.Self) -> bool: + return { + DataFlowAction.EnableLock: False, + DataFlowAction.DisableLock: False, + DataFlowAction.DataChanged: True, + DataFlowAction.OutputRequested: True, + DataFlowAction.ShowPreview: False, + DataFlowAction.ShowPlot: False, + }[action] diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/contracts/data_flows.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/contracts/data_flows.py index e915224..d912cc6 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/contracts/data_flows.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/contracts/data_flows.py @@ -5,7 +5,6 @@ import typing as typ from types import MappingProxyType # import colour ## TODO -import jax import numpy as np import sympy as sp import sympy.physics.units as spu @@ -77,6 +76,21 @@ class DataFlowKind(enum.StrEnum): LazyValueRange = enum.auto() LazyValueSpectrum = enum.auto() + @classmethod + def scale_to_unit_system(cls, kind: typ.Self, value, socket_type, unit_system): + if kind == cls.Value: + return spux.sympy_to_python( + spux.scale_to_unit( + value, + unit_system[socket_type], + ) + ) + if kind == cls.LazyValueRange: + return value.rescale_to_unit(unit_system[socket_type]) + + msg = 'Tried to scale unknown kind' + raise ValueError(msg) + #################### # - Data Structures: Capabilities diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/managed_objs/__init__.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/managed_objs/__init__.py index 83285d1..661143c 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/managed_objs/__init__.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/managed_objs/__init__.py @@ -1,4 +1,6 @@ -#from .managed_bl_empty import ManagedBLEmpty +import typing as typ + +# from .managed_bl_empty import ManagedBLEmpty from .managed_bl_image import ManagedBLImage # from .managed_bl_collection import ManagedBLCollection @@ -8,6 +10,8 @@ from .managed_bl_mesh import ManagedBLMesh # from .managed_bl_volume import ManagedBLVolume from .managed_bl_modifier import ManagedBLModifier +ManagedObj: typ.TypeAlias = ManagedBLImage | ManagedBLMesh | ManagedBLModifier + __all__ = [ #'ManagedBLEmpty', 'ManagedBLImage', @@ -17,3 +21,5 @@ __all__ = [ #'ManagedBLVolume', 'ManagedBLModifier', ] + +## REMEMBER: Add the appropriate entry to the bl_cache.DECODER diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/managed_objs/managed_bl_mesh.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/managed_objs/managed_bl_mesh.py index df9c957..5402d49 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/managed_objs/managed_bl_mesh.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/managed_objs/managed_bl_mesh.py @@ -31,6 +31,13 @@ class ManagedBLMesh(ct.schemas.ManagedObj): 'Changing BLMesh w/Name "%s" to Name "%s"', self._bl_object_name, value ) + if self._bl_object_name == value: + ## TODO: This is a workaround. + ## Really, we can't tell if a name is valid by searching objects. + ## Since, after all, other managedobjs may have taken a name.. + ## ...but not yet made an object that has it. 
+ return + if (bl_object := bpy.data.objects.get(value)) is None: log.info( 'Desired BLMesh Name "%s" Not Taken', diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/managed_objs/managed_bl_modifier.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/managed_objs/managed_bl_modifier.py index 9346b54..2e84607 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/managed_objs/managed_bl_modifier.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/managed_objs/managed_bl_modifier.py @@ -130,6 +130,7 @@ def write_modifier_geonodes( bl_modifier[iface_id] = float(bl_socket_value) modifier_altered = True else: + ## TODO: Whitelist what can be here. I'm done with the TypeErrors. bl_modifier[iface_id] = bl_socket_value modifier_altered = True diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/node_tree.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/node_tree.py index 96c5c06..d32849d 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/node_tree.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/node_tree.py @@ -1,3 +1,4 @@ +import contextlib import typing as typ import bpy @@ -15,47 +16,176 @@ MemAddr = int class DeltaNodeLinkCache(typ.TypedDict): + """Describes change in the `NodeLink`s of a node tree. + + Attributes: + added: Set of pointers to added node tree links. + removed: Set of pointers to removed node tree links. + """ + added: set[MemAddr] removed: set[MemAddr] class NodeLinkCache: + """A pointer-based cache of node links in a node tree. + + Attributes: + _node_tree: Reference to the owning node tree. + link_ptrs_as_links: + link_ptrs: Pointers (as in integer memory adresses) to `NodeLink`s. + link_ptrs_as_links: Map from pointers to actual `NodeLink`s. + link_ptrs_from_sockets: Map from pointers to `NodeSocket`s, representing the source of each `NodeLink`. + link_ptrs_from_sockets: Map from pointers to `NodeSocket`s, representing the destination of each `NodeLink`. + """ + def __init__(self, node_tree: bpy.types.NodeTree): - # Initialize Parameters + """Initialize the cache from a node tree. + + Parameters: + node_tree: The Blender node tree whose `NodeLink`s will be cached. + """ self._node_tree = node_tree - self.link_ptrs_to_links = {} - self.link_ptrs = set() - self.link_ptrs_from_sockets = {} - self.link_ptrs_to_sockets = {} + + # Link PTR and PTR->REF + self.link_ptrs: set[MemAddr] = set() + self.link_ptrs_as_links: dict[MemAddr, bpy.types.NodeLink] = {} + + # Socket PTR and PTR->REF + self.socket_ptrs: set[MemAddr] = set() + self.socket_ptrs_as_sockets: dict[MemAddr, bpy.types.NodeSocket] = {} + self.socket_ptr_refcount: dict[MemAddr, int] = {} + + # Link PTR -> Socket PTR + self.link_ptrs_as_from_socket_ptrs: dict[MemAddr, MemAddr] = {} + self.link_ptrs_as_to_socket_ptrs: dict[MemAddr, MemAddr] = {} # Fill Cache self.regenerate() - def remove(self, link_ptrs: set[MemAddr]) -> None: - for link_ptr in link_ptrs: - self.link_ptrs.remove(link_ptr) - self.link_ptrs_to_links.pop(link_ptr, None) + def remove_link(self, link_ptr: MemAddr) -> None: + """Removes a link pointer from the cache, indicating that the link doesn't exist anymore. + + Notes: + - **DOES NOT** remove PTR->REF dictionary entries + - Invoking this method directly causes the removed node links to not be reported as "removed" by `NodeLinkCache.regenerate()`. + - This **must** be done whenever a node link is deleted. + - Failure to do so may result in a segmentation fault at arbitrary future time. + + Parameters: + link_ptrs: Pointers to remove from the cache. 
+ """ + self.link_ptrs.remove(link_ptr) + self.link_ptrs_as_links.pop(link_ptr) + + def remove_sockets_by_link_ptr(self, link_ptr: MemAddr) -> None: + """Removes a single pointer's reference to its from/to sockets.""" + from_socket_ptr = self.link_ptrs_as_from_socket_ptrs.pop(link_ptr, None) + to_socket_ptr = self.link_ptrs_as_to_socket_ptrs.pop(link_ptr, None) + + for socket_ptr in [from_socket_ptr, to_socket_ptr]: + if socket_ptr is None: + continue + + # Delete w/RefCount Respect + if self.socket_ptr_refcount[socket_ptr] == 1: + self.socket_ptrs.remove(socket_ptr) + self.socket_ptrs_as_sockets.pop(socket_ptr) + self.socket_ptr_refcount.pop(socket_ptr) + else: + self.socket_ptr_refcount[socket_ptr] -= 1 def regenerate(self) -> DeltaNodeLinkCache: - current_link_ptrs_to_links = { + """Regenerates the cache from the internally-linked node tree. + + Notes: + - This is designed to run within the `update()` invocation of the node tree. + - This should be a very fast function, since it is called so much. + """ + # Compute All NodeLink Pointers + all_link_ptrs_as_links = { link.as_pointer(): link for link in self._node_tree.links } - current_link_ptrs = set(current_link_ptrs_to_links.keys()) + all_link_ptrs = set(all_link_ptrs_as_links.keys()) - # Compute Delta - added_link_ptrs = current_link_ptrs - self.link_ptrs - removed_link_ptrs = self.link_ptrs - current_link_ptrs + # Compute Added/Removed Links + added_link_ptrs = all_link_ptrs - self.link_ptrs + removed_link_ptrs = self.link_ptrs - all_link_ptrs - # Update Caches Incrementally - self.remove(removed_link_ptrs) + # Edge Case: 'from_socket' Reassignment + ## (Reverse engineered) When all: + ## - Created a new link between the same two nodes. + ## - Matching 'to_socket'. + ## - Non-matching 'from_socket' on the same node. + ## -> THEN the link_ptr will not change, but the from_socket ptr should. + if len(added_link_ptrs) == 0 and len(removed_link_ptrs) == 0: + # Find the Link w/Reassigned 'from_socket' PTR + ## A bit of a performance hit from the search, but it's an edge case. + _link_ptr_as_from_socket_ptrs = { + link_ptr: ( + from_socket_ptr, + all_link_ptrs_as_links[link_ptr].from_socket.as_pointer(), + ) + for link_ptr, from_socket_ptr in self.link_ptrs_as_from_socket_ptrs.items() + if all_link_ptrs_as_links[link_ptr].from_socket.as_pointer() + != from_socket_ptr + } + # Completely Remove the Old Link (w/Reassigned 'from_socket') + ## This effectively reclassifies the edge case as a normal 're-add'. + for link_ptr in _link_ptr_as_from_socket_ptrs: + log.info( + 'Edge-Case - "from_socket" Reassigned in NodeLink w/o New NodeLink Pointer: %s', + link_ptr, + ) + self.remove_link(link_ptr) + self.remove_sockets_by_link_ptr(link_ptr) + + # Recompute Added/Removed Links + ## The algorithm will now detect an "added link". + added_link_ptrs = all_link_ptrs - self.link_ptrs + removed_link_ptrs = self.link_ptrs - all_link_ptrs + + # Shuffle Cache based on Change in Links + ## Remove Entries for Removed Pointers + for removed_link_ptr in removed_link_ptrs: + self.remove_link(removed_link_ptr) + ## User must manually call 'remove_socket_by_link_ptr' later. + ## For now, leave dangling socket information by-link. 
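Before the "add" half of the shuffle below, the bookkeeping is easiest to see in isolation. A standalone, plain-Python sketch of the same idea (the pointer values are made up): the added/removed deltas are set differences between the cached pointers and the pointers currently found in `node_tree.links`, and socket pointers are reference-counted because several links can share one socket.

```python
# Pointers cached at the previous update() vs. pointers found in the tree now.
cached_link_ptrs = {0x1A2B, 0x3C4D, 0x5E6F}
current_link_ptrs = {0x3C4D, 0x5E6F, 0x7A8B}

added_link_ptrs = current_link_ptrs - cached_link_ptrs    # {0x7A8B}
removed_link_ptrs = cached_link_ptrs - current_link_ptrs  # {0x1A2B}

# One output socket feeding two inputs: its pointer is referenced twice.
socket_ptr_refcount = {0x9C0D: 2}

# Dropping one of those links only decrements the count; the socket entry
# survives until the last link referencing it disappears.
socket_ptr_refcount[0x9C0D] -= 1
assert socket_ptr_refcount[0x9C0D] == 1
```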
+ + # Add New Link Pointers self.link_ptrs |= added_link_ptrs for link_ptr in added_link_ptrs: - link = current_link_ptrs_to_links[link_ptr] + # Add Link PTR->REF + new_link = all_link_ptrs_as_links[link_ptr] + self.link_ptrs_as_links[link_ptr] = new_link - self.link_ptrs_to_links[link_ptr] = link - self.link_ptrs_from_sockets[link_ptr] = link.from_socket - self.link_ptrs_to_sockets[link_ptr] = link.to_socket + # Retrieve Link Socket Information + from_socket = new_link.from_socket + from_socket_ptr = from_socket.as_pointer() + to_socket = new_link.to_socket + to_socket_ptr = to_socket.as_pointer() + + # Add Socket PTR, PTR -> REF + for socket_ptr, bl_socket in zip( # noqa: B905 + [from_socket_ptr, to_socket_ptr], + [from_socket, to_socket], + ): + # Increment RefCount of Socket PTR + ## This happens if another link also uses the same socket. + ## 1. An output socket links to several inputs. + ## 2. A multi-input socket links from several inputs. + if socket_ptr in self.socket_ptr_refcount: + self.socket_ptr_refcount[socket_ptr] += 1 + else: + ## RefCount == 0: Add PTR, PTR -> REF + self.socket_ptrs.add(socket_ptr) + self.socket_ptrs_as_sockets[socket_ptr] = bl_socket + self.socket_ptr_refcount[socket_ptr] = 1 + + # Add Link PTR -> Socket PTR + self.link_ptrs_as_from_socket_ptrs[link_ptr] = from_socket_ptr + self.link_ptrs_as_to_socket_ptrs[link_ptr] = to_socket_ptr return {'added': added_link_ptrs, 'removed': removed_link_ptrs} @@ -71,20 +201,42 @@ class MaxwellSimTree(bpy.types.NodeTree): #################### # - Lock Methods #################### - def unlock_all(self): + def unlock_all(self) -> None: + """Unlock all nodes in the node tree, making them editable.""" + log.info('Unlocking All Nodes in NodeTree "%s"', self.bl_label) for node in self.nodes: node.locked = False for bl_socket in [*node.inputs, *node.outputs]: bl_socket.locked = False - def unpreview_all(self): - log.info('Disabling All 3D Previews') - for node in self.nodes: - if node.preview_active: - node.preview_active = False + @contextlib.contextmanager + def repreview_all(self) -> None: + all_nodes_with_preview_active = { + node.instance_id: node for node in self.nodes if node.preview_active + } + self.is_currently_repreviewing = True + self.newly_previewed_nodes = {} - for bl_object in preview_collection().objects.values(): - preview_collection().objects.unlink(bl_object) + try: + yield + finally: + for dangling_previewed_node in [ + node + for node_instance_id, node in all_nodes_with_preview_active.items() + if node_instance_id not in self.newly_previewed_nodes + ]: + # log.debug( + # 'Removing Dangling Preview of Node "{%s}"', + # str(dangling_previewed_node), + # ) + dangling_previewed_node.preview_active = False + + def report_show_preview(self, node: bpy.types.Node) -> None: + if ( + hasattr(self, 'is_currently_repreviewing') + and self.is_currently_repreviewing + ): + self.newly_previewed_nodes[node.instance_id] = node #################### # - Init Methods @@ -94,11 +246,10 @@ class MaxwellSimTree(bpy.types.NodeTree): It's a bit of a "fake" function - in practicality, it's triggered on the first update() function. 
""" - ## TODO: Consider tying this to an "on_load" handler - if hasattr(self, '_node_link_cache'): - self._node_link_cache.regenerate() + if hasattr(self, 'node_link_cache'): + self.node_link_cache.regenerate() else: - self._node_link_cache = NodeLinkCache(self) + self.node_link_cache = NodeLinkCache(self) #################### # - Update Methods @@ -106,23 +257,35 @@ class MaxwellSimTree(bpy.types.NodeTree): def sync_node_removed(self, node: bpy.types.Node): """Run by `Node.free()` when a node is being removed. + ONLY input socket links are removed from the NodeLink cache. + - `self.update()` handles link-removal from existing nodes. + - `self.update()` can't handle link-removal + Removes node input links from the internal cache (so we don't attempt to update non-existant sockets). """ - for bl_socket in node.inputs.values(): - # Retrieve Socket Links (if any) - self._node_link_cache.remove( - {link.as_pointer() for link in bl_socket.links} - ) ## ONLY Input Socket Links are Removed from the NodeLink Cache ## - update() handles link-removal from still-existing node just fine. ## - update() does NOT handle link-removal of non-existant nodes. + for bl_socket in list(node.inputs.values()) + list(node.outputs.values()): + # Compute About-To-Be-Freed Link Ptrs + link_ptrs = {link.as_pointer() for link in bl_socket.links} - def update(self): - """Run by Blender when 'something changes' in the node tree. + if link_ptrs: + for link_ptr in link_ptrs: + self.node_link_cache.remove_link(link_ptr) + self.node_link_cache.remove_sockets_by_link_ptr(link_ptr) - Updates an internal node link cache, then updates sockets that just lost/gained an input link. + def update(self) -> None: + """Monitors all changes to the node tree, potentially responding with appropriate callbacks. + + Notes: + - Run by Blender when "anything" changes in the node tree. + - Responds to node link changes with callbacks, with the help of a performant node link cache. """ - if not hasattr(self, '_node_link_cache'): + if not hasattr(self, 'ignore_update'): + self.ignore_update = False + + if not hasattr(self, 'node_link_cache'): self.on_load() ## We presume update() is run before the first link is altered. ## - Else, the first link of the session will not update caches. @@ -130,51 +293,82 @@ class MaxwellSimTree(bpy.types.NodeTree): ## - Therefore, self.on_load() is also called as a load_post handler. return - # Compute Changes to NodeLink Cache - delta_links = self._node_link_cache.regenerate() + # Ignore Update + ## Manually set to implement link corrections w/o recursion. + if self.ignore_update: + return - link_alterations = { + # Compute Changes to Node Links + delta_links = self.node_link_cache.regenerate() + + link_corrections = { 'to_remove': [], 'to_add': [], } for link_ptr in delta_links['removed']: - from_socket = self._node_link_cache.link_ptrs_from_sockets[link_ptr] - to_socket = self._node_link_cache.link_ptrs_to_sockets[link_ptr] + # Retrieve Link PTR -> From/To Socket PTR + ## We don't know if they exist yet. + from_socket_ptr = self.node_link_cache.link_ptrs_as_from_socket_ptrs[ + link_ptr + ] + to_socket_ptr = self.node_link_cache.link_ptrs_as_to_socket_ptrs[link_ptr] - # Update Socket Caches - self._node_link_cache.link_ptrs_from_sockets.pop(link_ptr, None) - self._node_link_cache.link_ptrs_to_sockets.pop(link_ptr, None) + # Check Existance of From/To Socket + ## `Node.free()` must report removed sockets, so this here works. + ## If Both Exist: 'to_socket' may "non-consent" to the link removal. 
+ if ( + from_socket_ptr in self.node_link_cache.socket_ptrs + and to_socket_ptr in self.node_link_cache.socket_ptrs + ): + # Retrieve 'from_socket'/'to_socket' REF + from_socket = self.node_link_cache.socket_ptrs_as_sockets[ + from_socket_ptr + ] + to_socket = self.node_link_cache.socket_ptrs_as_sockets[to_socket_ptr] - # Trigger Report Chain on Socket that Just Lost a Link - ## Aka. Forward-Refresh Caches Relying on Linkage - if not (consent_removal := to_socket.sync_link_removed(from_socket)): - # Did Not Consent to Removal: Queue Add Link - link_alterations['to_add'].append((from_socket, to_socket)) + # Ask 'to_socket' for Consent to Remove Link + ## The link has already been removed, but we can fix that. + ## If NO: Queue re-adding the link (safe since the sockets exist) + ## TODO: Crash if deleting removing linked loose sockets. + consent_removal = to_socket.sync_link_removed(from_socket) + if not consent_removal: + link_corrections['to_add'].append((from_socket, to_socket)) + + # Ensure Removal of Socket PTRs, PTRs->REFs + self.node_link_cache.remove_sockets_by_link_ptr(link_ptr) for link_ptr in delta_links['added']: - link = self._node_link_cache.link_ptrs_to_links.get(link_ptr) - if link is None: - continue + # Retrieve Link Reference + link = self.node_link_cache.link_ptrs_as_links[link_ptr] - # Trigger Report Chain on Socket that Just Gained a Link - ## Aka. Forward-Refresh Caches Relying on Linkage + # Ask 'to_socket' for Consent to Remove Link + ## The link has already been added, but we can fix that. + ## If NO: Queue re-adding the link (safe since the sockets exist) + consent_added = link.to_socket.sync_link_added(link) + if not consent_added: + link_corrections['to_remove'].append(link) - if not (consent_added := link.to_socket.sync_link_added(link)): - # Did Not Consent to Addition: Queue Remove Link - link_alterations['to_remove'].append(link) - - # Execute Queued Operations - ## - Especially undoing undesirable link changes. - ## - This is important for locked graphs, whose links must not change. - for link in link_alterations['to_remove']: - self.links.remove(link) - for from_socket, to_socket in link_alterations['to_add']: + # Link Corrections + ## ADD: Links that 'to_socket' don't want removed. + ## REMOVE: Links that 'to_socket' don't want added. + ## NOTE: Both remove() and new() recursively triggers update(). + for link in link_corrections['to_remove']: + self.ignore_update = True + self.links.remove(link) ## Recursively triggers update() + self.ignore_update = False + for from_socket, to_socket in link_corrections['to_add']: + ## 'to_socket' and 'from_socket' are guaranteed to exist. + self.ignore_update = True self.links.new(from_socket, to_socket) + self.ignore_update = False - # If Queued Operations: Regenerate Cache - ## - This prevents the next update() from picking up on alterations. - if link_alterations['to_remove'] or link_alterations['to_add']: - self._node_link_cache.regenerate() + # Regenerate on Corrections + ## Prevents next update() from trying to correct the corrections. 
+ ## We must remember to trigger '.remove_sockets_by_link_ptr' + if link_corrections['to_remove'] or link_corrections['to_add']: + delta_links = self.node_link_cache.regenerate() + for link_ptr in delta_links['removed']: + self.node_link_cache.remove_sockets_by_link_ptr(link_ptr) #################### diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/analysis/extract_data.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/analysis/extract_data.py index 0257712..9ff7af5 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/analysis/extract_data.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/analysis/extract_data.py @@ -13,14 +13,15 @@ CACHE_SIM_DATA = {} class ExtractDataNode(base.MaxwellSimNode): - """Node for visualizing simulation data, by querying its monitors.""" + """Node for extracting data from other objects.""" node_type = ct.NodeType.ExtractData - bl_label = 'Extract Data' + bl_label = 'Extract' input_socket_sets: typ.ClassVar = { 'Sim Data': {'Sim Data': sockets.MaxwellFDTDSimDataSocketDef()}, 'Field Data': {'Field Data': sockets.AnySocketDef()}, + 'Flux Data': {'Flux Data': sockets.AnySocketDef()}, } output_sockets: typ.ClassVar = { 'Data': sockets.AnySocketDef(), @@ -192,6 +193,20 @@ class ExtractDataNode(base.MaxwellSimNode): elif not self.inputs['Field Data'].is_linked and self.cache__components: self.cache__components = '' + #################### + # - Flux Data + #################### + + def draw_props__flux_data( + self, _: bpy.types.Context, col: bpy.types.UILayout + ) -> None: + pass + + def draw_info__flux_data( + self, _: bpy.types.Context, col: bpy.types.UILayout + ) -> None: + pass + #################### # - Global #################### @@ -200,12 +215,16 @@ class ExtractDataNode(base.MaxwellSimNode): self.draw_props__sim_data(context, col) if self.active_socket_set == 'Field Data': self.draw_props__field_data(context, col) + if self.active_socket_set == 'Flux Data': + self.draw_props__flux_data(context, col) def draw_info(self, context: bpy.types.Context, col: bpy.types.UILayout) -> None: if self.active_socket_set == 'Sim Data': self.draw_info__sim_data(context, col) if self.active_socket_set == 'Field Data': self.draw_info__field_data(context, col) + if self.active_socket_set == 'Flux Data': + self.draw_info__flux_data(context, col) @events.computes_output_socket( 'Data', @@ -226,6 +245,10 @@ class ExtractDataNode(base.MaxwellSimNode): field_data = self._compute_input('Field Data') return getattr(field_data, props['field_data__component']) + elif self.active_socket_set == 'Flux Data': # noqa: RET505 + flux_data = self._compute_input('Flux Data') + return getattr(flux_data, 'flux') + msg = f'Tried to get data from unknown output socket in "{self.bl_label}"' raise RuntimeError(msg) diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/base.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/base.py index 33c88b8..64a6e5c 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/base.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/base.py @@ -1,180 +1,201 @@ -import json import typing as typ import uuid +from types import MappingProxyType import bpy -import pydantic as pyd +import sympy as sp import typing_extensions as typx +from ....utils import extra_sympy_units as spux from ....utils import logger +from .. import bl_cache from .. import contracts as ct -from .. import sockets +from .. import managed_objs as _managed_objs +from . 
import events log = logger.get(__name__) -CACHE: dict[str, typ.Any] = {} ## By Instance UUID -## NOTE: CACHE does not persist between file loads. - -_DEFAULT_LOOSE_SOCKET_SER = json.dumps( - { - 'socket_names': [], - 'socket_def_names': [], - 'models': [], - } -) ## TODO: What in the jesus christ is this +MANDATORY_PROPS = {'node_type', 'bl_label'} class MaxwellSimNode(bpy.types.Node): - # Fundamentals - node_type: ct.NodeType - bl_idname: str + """A specialized Blender node for Maxwell simulations. + + Attributes: + node_type: The `ct.NodeType` that identifies which node this is. + bl_label: The label shown in the header of the node in Blender. + instance_id: A unique ID attached to a particular node instance. + Guaranteed to be unchanged so long as the node lives. + Used as a node-specific cache index. + sim_node_name: A unique human-readable name identifying the node. + Used when naming managed objects and exporting. + preview_active: Whether the preview (if any) is currently active. + locked: Whether the node is currently 'locked' aka. non-editable. + """ + use_sim_node_name: bool = False - bl_label: str - # draw_label(self) -> str: pass - - # Style - bl_description: str = '' - - # bl_width_default: float = 0.0 - # bl_width_min: float = 0.0 - # bl_width_max: float = 0.0 + ## TODO: bl_description from first line of __doc__? # Sockets - _output_socket_methods: dict - - input_sockets: typ.ClassVar[dict[str, ct.schemas.SocketDef]] = {} - output_sockets: typ.ClassVar[dict[str, ct.schemas.SocketDef]] = {} - input_socket_sets: typ.ClassVar[dict[str, dict[str, ct.schemas.SocketDef]]] = {} - output_socket_sets: typ.ClassVar[dict[str, dict[str, ct.schemas.SocketDef]]] = {} + input_sockets: typ.ClassVar[dict[str, ct.schemas.SocketDef]] = MappingProxyType({}) + output_sockets: typ.ClassVar[dict[str, ct.schemas.SocketDef]] = MappingProxyType({}) + input_socket_sets: typ.ClassVar[dict[str, dict[str, ct.schemas.SocketDef]]] = ( + MappingProxyType({}) + ) + output_socket_sets: typ.ClassVar[dict[str, dict[str, ct.schemas.SocketDef]]] = ( + MappingProxyType({}) + ) # Presets - presets: typ.ClassVar = {} + presets: typ.ClassVar = MappingProxyType({}) # Managed Objects managed_obj_defs: typ.ClassVar[ dict[ct.ManagedObjName, ct.schemas.ManagedObjDef] - ] = {} + ] = MappingProxyType({}) #################### - # - Initialization + # - Class Methods #################### - def __init_subclass__(cls, **kwargs: typ.Any): - super().__init_subclass__(**kwargs) + @classmethod + def _assert_attrs_valid(cls) -> None: + """Asserts that all mandatory attributes are defined on the class. + + The list of mandatory objects is sourced from `base.MANDATORY_PROPS`. + + Raises: + ValueError: If a mandatory attribute defined in `base.MANDATORY_PROPS` is not defined on the class. + """ + for cls_attr in MANDATORY_PROPS: + if not hasattr(cls, cls_attr): + msg = f'Node class {cls} does not define mandatory attribute "{cls_attr}".' + raise ValueError(msg) + + @classmethod + def set_prop( + cls, + prop_name: str, + prop: bpy.types.Property, + no_update: bool = False, + update_with_name: str | None = None, + **kwargs, + ) -> None: + """Adds a Blender property to a class via `__annotations__`, so it initializes with any subclass. + + Notes: + - Blender properties can't be set within `__init_subclass__` simply by adding attributes to the class; they must be added as type annotations. + - Must be called **within** `__init_subclass__`. + + Parameters: + name: The name of the property to set. 
+ prop: The `bpy.types.Property` to instantiate and attach.. + no_update: Don't attach a `self.sync_prop()` callback to the property's `update`. + """ + _update_with_name = prop_name if update_with_name is None else update_with_name + extra_kwargs = ( + { + 'update': lambda self, context: self.sync_prop( + _update_with_name, context + ), + } + if not no_update + else {} + ) + cls.__annotations__[prop_name] = prop( + **kwargs, + **extra_kwargs, + ) + + @classmethod + def _gather_event_methods(cls) -> dict[str, typ.Callable[[], None]]: + """Gathers all methods called in response to actions/events observed by the node. + + Notes: + - 'Event methods' must have an attribute 'action_type' in order to be picked up. + - 'Event methods' must have an attribute 'action_type'. + + Returns: + Event methods, indexed by the action that (maybe) triggers them. + """ + event_methods = [ + method + for attr_name in dir(cls) + if hasattr(method := getattr(cls, attr_name), 'action_type') + and method.action_type in set(ct.DataFlowAction) + ] + event_methods_by_action = { + action_type: [] for action_type in set(ct.DataFlowAction) + } + for method in event_methods: + event_methods_by_action[method.action_type].append(method) + + return event_methods_by_action + + @classmethod + def socket_set_names(cls) -> list[str]: + """Retrieve the names of socket sets, in an order-preserving way. + + Notes: + Semantically similar to `list(set(...) | set(...))`. + + Returns: + List of socket set names, without duplicates, in definition order. + """ + return (_input_socket_set_names := list(cls.input_socket_sets.keys())) + [ + output_socket_set_name + for output_socket_set_name in cls.output_socket_sets + if output_socket_set_name not in _input_socket_set_names + ] + + @classmethod + def __init_subclass__(cls, **kwargs) -> None: + """Initializes node properties and attributes for use. + + Notes: + Run when initializing any subclass of MaxwellSimNode. 
+ """ log.debug('Initializing Node: %s', cls.node_type) + super().__init_subclass__(**kwargs) + cls._assert_attrs_valid() - # Setup Blender ID for Node - if not hasattr(cls, 'node_type'): - msg = f"Node class {cls} does not define 'node_type', or it is does not have the type {ct.NodeType}" - raise ValueError(msg) - cls.bl_idname = str(cls.node_type.value) + # Node Properties + ## Identifiers + cls.bl_idname: str = str(cls.node_type.value) + cls.set_prop('instance_id', bpy.props.StringProperty, no_update=True) + cls.set_prop('sim_node_name', bpy.props.StringProperty, default='') - # Setup Instance ID for Node - cls.__annotations__['instance_id'] = bpy.props.StringProperty( - name='Instance ID', - description='The instance ID of a particular MaxwellSimNode instance, used to index caches', - default='', - ) + ## Special States + cls.set_prop('preview_active', bpy.props.BoolProperty, default=False) + cls.set_prop('locked', bpy.props.BoolProperty, no_update=True, default=False) - # Setup Name Property for Node - cls.__annotations__['sim_node_name'] = bpy.props.StringProperty( - name='Sim Node Name', - description='The name of a particular MaxwellSimNode node, which can be used to help identify data managed by the node', - default='', - update=(lambda self, context: self.sync_sim_node_name(context)), - ) + ## Event Method Callbacks + cls.event_methods_by_action = cls._gather_event_methods() - # Setup "Previewing" Property for Node - cls.__annotations__['preview_active'] = bpy.props.BoolProperty( - name='Preview Active', - description='Whether the preview (if any) is currently active', - default=False, - update=lambda self, context: self.sync_preview_active(context), - ) - - # Setup Locked Property for Node - cls.__annotations__['locked'] = bpy.props.BoolProperty( - name='Locked State', - description="The lock-state of a particular MaxwellSimNode instance, which determines the node's user editability", - default=False, - ) - - # Setup Blender Label for Node - if not hasattr(cls, 'bl_label'): - msg = f"Node class {cls} does not define 'bl_label'" - raise ValueError(msg) - - # Setup Callback Methods - cls._output_socket_methods = { - method - for attr_name in dir(cls) - if hasattr(method := getattr(cls, attr_name), 'action_type') - and method.action_type == 'computes_output_socket' - } - cls._on_value_changed_methods = { - method - for attr_name in dir(cls) - if hasattr(method := getattr(cls, attr_name), 'action_type') - and method.action_type == 'on_value_changed' - } - cls._on_show_plot = { - method - for attr_name in dir(cls) - if hasattr(method := getattr(cls, attr_name), 'action_type') - and method.action_type == 'on_show_plot' - } - cls._on_init = { - method - for attr_name in dir(cls) - if hasattr(method := getattr(cls, attr_name), 'action_type') - and method.action_type == 'on_init' - } - - # Setup Socket Set Dropdown - if not len(cls.input_socket_sets) + len(cls.output_socket_sets) > 0: - cls.active_socket_set = None - else: - ## Add Active Socket Set Enum - socket_set_names = ( - _input_socket_set_names := list(cls.input_socket_sets.keys()) - ) + [ - output_socket_set_name - for output_socket_set_name in cls.output_socket_sets - if output_socket_set_name not in _input_socket_set_names - ] - socket_set_ids = [ - socket_set_name.replace(' ', '_').upper() - for socket_set_name in socket_set_names - ] - ## TODO: Better deriv. of sock.set. ID, ex. ( is currently invalid. 
- - ## Add Active Socket Set Enum - cls.__annotations__['active_socket_set'] = bpy.props.EnumProperty( + ## Active Socket Set + if len(cls.input_socket_sets) + len(cls.output_socket_sets) > 0: + socket_set_names = cls.socket_set_names() + cls.set_prop( + 'active_socket_set', + bpy.props.EnumProperty, name='Active Socket Set', - description='The active socket set', + description='Selector of active sockets', items=[ - ( - socket_set_name, - socket_set_name, - socket_set_name, - ) - for socket_set_id, socket_set_name in zip( - socket_set_ids, - socket_set_names, - strict=False, - ) + (socket_set_name, socket_set_name, socket_set_name) + for socket_set_name in socket_set_names ], default=socket_set_names[0], - update=lambda self, context: self.sync_active_socket_set(context), ) - - # Setup Preset Dropdown - if not cls.presets: - cls.active_preset = None else: - ## TODO: Check that presets are represented in a socket that is guaranteed to be always available, specifically either a static socket or ALL static socket sets. - cls.__annotations__['active_preset'] = bpy.props.EnumProperty( + cls.active_socket_set = None + + ## Active Preset + ## TODO: Validate Presets + if cls.presets: + cls.set_prop( + 'active_preset', + bpy.props.EnumProperty, name='Active Preset', - description='The active preset', + description='The currently active preset', items=[ ( preset_name, @@ -184,286 +205,280 @@ class MaxwellSimNode(bpy.types.Node): for preset_name, preset_def in cls.presets.items() ], default=next(cls.presets.keys()), - update=lambda self, _: (self.sync_active_preset()()), + ) + else: + cls.active_preset = None + + #################### + # - Events: Class Properties + #################### + @events.on_value_changed(prop_name='active_socket_set') + def _on_socket_set_changed(self): + log.info( + 'Changed Sim Node Socket Set to "%s"', + self.active_socket_set, + ) + self._sync_sockets() + + @events.on_value_changed( + prop_name='sim_node_name', + props={'sim_node_name', 'managed_objs', 'managed_obj_defs'}, + ) + def _on_sim_node_name_changed(self, props: dict): + log.info( + 'Changed Sim Node Name of a "%s" to "%s" (self=%s)', + self.bl_idname, + self.sim_node_name, + str(self), + ) + + # Set Name of Managed Objects + for mobj_id, mobj in props['managed_objs'].items(): + mobj_def = props['managed_obj_defs'][mobj_id] + mobj.name = mobj_def.name_prefix + props['sim_node_name'] + + @events.on_value_changed( + prop_name='active_preset', props=['presets', 'active_preset'] + ) + def _on_active_preset_changed(self, props: dict): + if props['active_preset'] is not None: + log.info( + 'Changed Sim Node Preset to "%s"', + props['active_preset'], ) - #################### - # - Generic Properties - #################### - def sync_active_socket_set(self, context): - self.sync_sockets() - self.sync_prop('active_socket_set', context) + # Retrieve Preset + if not (preset_def := props['presets'].get(props['active_preset'])): + msg = f'Tried to apply active preset, but the active preset "{props["active_preset"]}" is not a defined preset: {props["active_preset"]}' + raise RuntimeError(msg) - def sync_sim_node_name(self, _): - if (mobjs := CACHE[self.instance_id].get('managed_objs')) is None: - return + # Apply Preset to Sockets + for socket_name, socket_value in preset_def.values.items(): + if not (bl_socket := self.inputs.get(socket_name)): + msg = f'Tried to set preset socket/value pair ({socket_name}={socket_value}), but socket is not in active input sockets ({self.inputs})' + raise ValueError(msg) - for mobj_id, mobj 
in mobjs.items(): - # Retrieve Managed Obj Definition - mobj_def = self.managed_obj_defs[mobj_id] + ## TODO: Account for DataFlowKind + bl_socket.value = socket_value - # Set Managed Obj Name - mobj.name = mobj_def.name_prefix + self.sim_node_name - ## ManagedObj is allowed to alter the name when setting it. - ## - This will happen whenever the name is taken. - ## - If altered, set the 'sim_node_name' to the altered name. - ## - This will cause recursion, but only once. + @events.on_show_preview() + def _on_show_preview(self): + node_tree = self.id_data + node_tree.report_show_preview(self) + # Set Preview to Active + ## Implicitly triggers any @on_value_changed for preview_active. + if not self.preview_active: + self.preview_active = True - def sync_preview_active(self, _: bpy.types.Context): - log.info( - 'Changed Preview Active in "%s" to "%s"', - self.name, - self.preview_active, - ) - for method in self._on_value_changed_methods: - if 'preview_active' in method.extra_data['changed_props']: - log.info( - 'Running Previewer Callback "%s" in "%s")', - method.__name__, - self.name, - ) - method(self) + @events.on_value_changed(prop_name='preview_active', props={'preview_active'}) + def _on_preview_changed(self, props): + if not props['preview_active']: + for mobj in self.managed_objs.values(): + if isinstance(mobj, _managed_objs.ManagedBLMesh): + ## TODO: This is a Workaround + mobj.hide_preview() + + @events.on_enable_lock() + def _on_enabled_lock(self): + # Set Locked to Active + ## draw() picks up on this immediately. + ## Doesn't trigger @on_value_changed, since self.locked has no update(). + self.locked = True + + @events.on_disable_lock() + def _on_disabled_lock(self): + # Set Locked to Inactive + ## draw() picks up on this immediately. + ## Doesn't trigger @on_value_changed, since self.locked has no update(). + self.locked = False #################### - # - Managed Object Properties + # - Loose Sockets #################### - @property - def managed_objs(self): - if not CACHE.get(self.instance_id): - CACHE[self.instance_id] = {} + loose_input_sockets: dict[str, ct.schemas.SocketDef] = bl_cache.BLField({}) + loose_output_sockets: dict[str, ct.schemas.SocketDef] = bl_cache.BLField({}) - # If No Managed Objects in CACHE: Initialize Managed Objects - ## - This happens on every ex. file load, init(), etc. . - ## - ManagedObjects MUST the same object by name. - ## - We sync our 'sim_node_name' with all managed objects. - ## - (There is also a class-defined 'name_prefix' to differentiate) - ## - See the 'sim_node_name' w/its sync function. - if CACHE[self.instance_id].get('managed_objs') is None: - # Initialize the Managed Object Instance Cache - CACHE[self.instance_id]['managed_objs'] = {} - - # Fill w/Managed Objects by Name Socket - for mobj_id, mobj_def in self.managed_obj_defs.items(): - name = mobj_def.name_prefix + self.sim_node_name - CACHE[self.instance_id]['managed_objs'][mobj_id] = mobj_def.mk(name) - - return CACHE[self.instance_id]['managed_objs'] - - return CACHE[self.instance_id]['managed_objs'] + @events.on_value_changed(prop_name={'loose_input_sockets', 'loose_output_sockets'}) + def _on_loose_sockets_changed(self): + self._sync_sockets() #################### - # - Socket Properties + # - Socket Accessors #################### - def active_bl_sockets(self, direc: typx.Literal['input', 'output']): + def _bl_sockets( + self, direc: typx.Literal['input', 'output'] + ) -> bpy.types.NodeInputs: + """Retrieve currently visible Blender sockets on the node, by-direction. 
+ + Only use internally, when `node.inputs`/`node.outputs` is too much of a mouthful to use directly. + + Note: + You should probably use `node.inputs` or `node.outputs` directly. + + Parameters: + direc: The direction to load Blender sockets from. + + Returns: + The actual `node.inputs` or `node.outputs`, depending on `direc`. + """ return self.inputs if direc == 'input' else self.outputs - def active_socket_set_sockets( + def _active_socket_set_socket_defs( self, direc: typx.Literal['input', 'output'], - ) -> dict: + ) -> dict[ct.SocketName, ct.schemas.SocketDef]: + """Retrieve all socket definitions for sockets that should be defined, according to the `self.active_socket_set`. + + Note: + You should probably use `self.active_socket_defs()` + + Parameters: + direc: The direction to load Blender sockets from. + + Returns: + Mapping from socket names to corresponding `ct.schemas.SocketDef`s. + + If `self.active_socket_set` is None, the empty dict is returned. + """ # No Active Socket Set: Return Nothing - if not self.active_socket_set: + if self.active_socket_set is None: return {} # Retrieve Active Socket Set Sockets socket_sets = ( self.input_socket_sets if direc == 'input' else self.output_socket_sets ) - active_socket_set_sockets = socket_sets.get(self.active_socket_set) + return socket_sets.get(self.active_socket_set, {}) - # Return Active Socket Set Sockets (if any) - if not active_socket_set_sockets: - return {} - return active_socket_set_sockets + def active_socket_defs( + self, direc: typx.Literal['input', 'output'] + ) -> dict[ct.SocketName, ct.schemas.SocketDef]: + """Retrieve all socket definitions for sockets that should be defined. - def active_sockets(self, direc: typx.Literal['input', 'output']): + Parameters: + direc: The direction to load Blender sockets from. + + Returns: + Mapping from socket names to corresponding `ct.schemas.SocketDef`s. + """ static_sockets = self.input_sockets if direc == 'input' else self.output_sockets loose_sockets = ( self.loose_input_sockets if direc == 'input' else self.loose_output_sockets ) return ( - static_sockets | self.active_socket_set_sockets(direc=direc) | loose_sockets + static_sockets + | self._active_socket_set_socket_defs(direc=direc) + | loose_sockets ) - #################### - # - Loose Sockets - #################### - # Loose Sockets - ## Only Blender props persist as instance data - ser_loose_input_sockets: bpy.props.StringProperty( - name='Serialized Loose Input Sockets', - description='JSON-serialized representation of loose input sockets.', - default=_DEFAULT_LOOSE_SOCKET_SER, - ) - ser_loose_output_sockets: bpy.props.StringProperty( - name='Serialized Loose Input Sockets', - description='JSON-serialized representation of loose input sockets.', - default=_DEFAULT_LOOSE_SOCKET_SER, - ) - - ## Internal Serialization/Deserialization Methods (yuck) - def _ser_loose_sockets(self, deser: dict[str, ct.schemas.SocketDef]) -> str: - if not all(isinstance(model, pyd.BaseModel) for model in deser.values()): - msg = 'Trying to deserialize loose sockets with invalid SocketDefs (they must be `pydantic` BaseModels).' - raise ValueError(msg) - - return json.dumps( - { - 'socket_names': list(deser.keys()), - 'socket_def_names': [ - model.__class__.__name__ for model in deser.values() - ], - 'models': [ - model.model_dump() - for model in deser.values() - if isinstance(model, pyd.BaseModel) - ], - } - ) ## Big reliance on order-preservation of dicts here.) 
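The JSON-serialized loose-socket machinery being removed here is superseded by the `bl_cache.BLField` declarations above. A rough usage sketch (not part of the patch), assuming a node event method that wants to expose a dynamic output socket; the socket name and `some_socket_def` are placeholders for any `ct.schemas.SocketDef`:

# Assigning a dict of SocketDefs creates matching loose Blender sockets on the next sync,
# via the '_on_loose_sockets_changed' handler declared above.
self.loose_output_sockets = {'Sim Data': some_socket_def}

# Assigning an empty dict removes the loose sockets again.
self.loose_output_sockets = {}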
- - def _deser_loose_sockets(self, ser: str) -> dict[str, ct.schemas.SocketDef]: - semi_deser = json.loads(ser) - return { - socket_name: getattr(sockets, socket_def_name)(**model_kwargs) - for socket_name, socket_def_name, model_kwargs in zip( - semi_deser['socket_names'], - semi_deser['socket_def_names'], - semi_deser['models'], - strict=False, - ) - if hasattr(sockets, socket_def_name) - } - - @property - def loose_input_sockets(self) -> dict[str, ct.schemas.SocketDef]: - return self._deser_loose_sockets(self.ser_loose_input_sockets) - - @property - def loose_output_sockets(self) -> dict[str, ct.schemas.SocketDef]: - return self._deser_loose_sockets(self.ser_loose_output_sockets) - - ## TODO: Some caching may play a role if this is all too slow. - - @loose_input_sockets.setter - def loose_input_sockets( - self, - value: dict[str, ct.schemas.SocketDef], - ) -> None: - # Prune Loose Sockets - self.ser_loose_input_sockets = _DEFAULT_LOOSE_SOCKET_SER - self.sync_sockets() - - # Install New Sockets - if not value: - self.ser_loose_input_sockets = _DEFAULT_LOOSE_SOCKET_SER - else: - self.ser_loose_input_sockets = self._ser_loose_sockets(value) - - # Synchronize Sockets - self.sync_sockets() - - @loose_output_sockets.setter - def loose_output_sockets( - self, - value: dict[str, ct.schemas.SocketDef], - ) -> None: - # Prune Loose Sockets - self.ser_loose_output_sockets = _DEFAULT_LOOSE_SOCKET_SER - self.sync_sockets() - - # Install New Sockets - if not value: - self.ser_loose_output_sockets = _DEFAULT_LOOSE_SOCKET_SER - else: - self.ser_loose_output_sockets = self._ser_loose_sockets(value) - - # Synchronize Sockets - self.sync_sockets() - #################### # - Socket Management #################### + ## TODO: Check for namespace collisions in sockets to prevent silent errors def _prune_inactive_sockets(self): - """Remove all inactive sockets from the node. + """Remove all "inactive" sockets from the node. - **NOTE**: Socket names must be unique within direction, active socket set, and loose socket set. + A socket is considered "inactive" when it shouldn't be defined (per `self.active_socket_defs), but is present nonetheless. """ for direc in ['input', 'output']: - sockets = self.active_sockets(direc) - bl_sockets = self.active_bl_sockets(direc) + all_bl_sockets = self._bl_sockets(direc) + active_bl_socket_defs = self.active_socket_defs(direc) # Determine Sockets to Remove bl_sockets_to_remove = [ bl_socket - for socket_name, bl_socket in bl_sockets.items() - if socket_name not in sockets + for socket_name, bl_socket in all_bl_sockets.items() + if socket_name not in active_bl_socket_defs + or socket_name + in ( + self.loose_input_sockets + if direc == 'input' + else self.loose_output_sockets + ) ] # Remove Sockets for bl_socket in bl_sockets_to_remove: - bl_sockets.remove(bl_socket) + all_bl_sockets.remove(bl_socket) def _add_new_active_sockets(self): - """Add and initialize all non-existing active sockets to the node. + """Add and initialize all "active" sockets that aren't on the node. Existing sockets within the given direction are not re-created. 
""" for direc in ['input', 'output']: - sockets = self.active_sockets(direc) - bl_sockets = self.active_bl_sockets(direc) + all_bl_sockets = self._bl_sockets(direc) + active_bl_socket_defs = self.active_socket_defs(direc) # Define BL Sockets created_sockets = {} - for socket_name, socket_def in sockets.items(): + for socket_name, socket_def in active_bl_socket_defs.items(): # Skip Existing Sockets - if socket_name in bl_sockets: + if socket_name in all_bl_sockets: continue # Create BL Socket from Socket - bl_socket = bl_sockets.new( + ## Set 'display_shape' from 'socket_shape' + bl_socket = all_bl_sockets.new( str(socket_def.socket_type.value), socket_name, ) bl_socket.display_shape = bl_socket.socket_shape - ## `display_shape` needs to be dynamically set - # Record Created Socket + # Record Socket Creation created_sockets[socket_name] = socket_def # Initialize Just-Created BL Sockets for socket_name, socket_def in created_sockets.items(): - socket_def.init(bl_sockets[socket_name]) + socket_def.init(all_bl_sockets[socket_name]) - def sync_sockets(self) -> None: + def _sync_sockets(self) -> None: """Synchronize the node's sockets with the active sockets. - Any non-existing active socket will be added and initialized. - Any existing active socket will not be changed. - Any existing inactive socket will be removed. - Must be called after any change to socket definitions, including loose + Note: + Must be called after any change to socket definitions, including loose sockets. """ self._prune_inactive_sockets() self._add_new_active_sockets() #################### - # - Preset Management + # - Managed Objects #################### - def sync_active_preset(self) -> None: - """Applies the active preset by overwriting the value of preset-defined input sockets.""" - if not (preset_def := self.presets.get(self.active_preset)): - msg = f'Tried to apply active preset, but the active preset ({self.active_preset}) is not in presets ({self.presets})' - raise RuntimeError(msg) + managed_bl_meshes: dict[str, _managed_objs.ManagedBLMesh] = bl_cache.BLField({}) + managed_bl_images: dict[str, _managed_objs.ManagedBLImage] = bl_cache.BLField({}) + managed_bl_modifiers: dict[str, _managed_objs.ManagedBLModifier] = bl_cache.BLField( + {} + ) - for socket_name, socket_value in preset_def.values.items(): - if not (bl_socket := self.inputs.get(socket_name)): - msg = f'Tried to set preset socket/value pair ({socket_name}={socket_value}), but socket is not in active input sockets ({self.inputs})' - raise ValueError(msg) + @bl_cache.cached_bl_property( + persist=False + ) ## Disable broken ManagedObj union DECODER + def managed_objs(self) -> dict[str, _managed_objs.ManagedObj]: + """Access the managed objects defined on this node. - bl_socket.value = socket_value - ## TODO: Lazy-valued presets? + Persistent cache ensures that the managed objects are only created on first access, even across file reloads. + """ + if self.managed_obj_defs: + if not ( + managed_objs := ( + self.managed_bl_meshes + | self.managed_bl_images + | self.managed_bl_modifiers + ) + ): + return { + mobj_name: mobj_def.mk(mobj_def.name_prefix + self.sim_node_name) + for mobj_name, mobj_def in self.managed_obj_defs.items() + } + return managed_objs + + return {} #################### # - UI Methods @@ -473,28 +488,45 @@ class MaxwellSimNode(bpy.types.Node): context: bpy.types.Context, layout: bpy.types.UILayout, ) -> None: + """Draws the UI of the node. + + - Locked (`self.locked`): The UI will be unusable. 
+		- Active Preset (`self.active_preset`): The preset selector will display.
+		- Active Socket Set (`self.active_socket_set`): The socket set selector will display.
+		- Use Sim Node Name (`self.use_sim_node_name`): The "Sim Node Name" field will display.
+		- Properties (`self.draw_props()`): Node properties will display.
+		- Operators (`self.draw_operators()`): Node operators will display.
+		- Info (`self.draw_info()`): Node information will display.
+
+		Parameters:
+			context: The current Blender context.
+			layout: Target for defining UI elements.
+		"""
 		if self.locked:
 			layout.enabled = False
 
-		if self.active_preset:
-			layout.prop(self, 'active_preset', text='')
-
 		if self.active_socket_set:
 			layout.prop(self, 'active_socket_set', text='')
 
+		if self.active_preset is not None:
+			layout.prop(self, 'active_preset', text='')
+
 		# Draw Name
-		col = layout.column(align=False)
+		# col = layout.column(align=False)
 		if self.use_sim_node_name:
-			row = col.row(align=True)
+			# row = col.row(align=True)
+			row = layout.row(align=True)
 			row.label(text='', icon='FILE_TEXT')
 			row.prop(self, 'sim_node_name', text='')
 
 		# Draw Name
-		self.draw_props(context, col)
-		self.draw_operators(context, col)
-		self.draw_info(context, col)
+		self.draw_props(context, layout)
+		self.draw_operators(context, layout)
+		self.draw_info(context, layout)
+		# self.draw_props(context, col)
+		# self.draw_operators(context, col)
+		# self.draw_info(context, col)
 
-		## TODO: Managed Operators instead of this shit
 	def draw_props(self, context, layout):
 		pass
 
@@ -504,31 +536,150 @@ class MaxwellSimNode(bpy.types.Node):
 	def draw_operators(self, context, layout):
 		pass
 
 	def draw_info(self, context, layout):
 		pass
 
-	def draw_buttons_ext(self, context, layout):
-		pass
+	####################
+	# - Special Compute Input / Output Caches
+	####################
 
-	## TODO: Side panel buttons for fanciness.
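A brief illustration (not part of the patch) of the caching contract implemented by the helpers defined just below: computed output socket values are memoized per `(socket name, kind)` and dropped again when a dependency reports `DataChanged`. The `node` variable, the `'Freq'` socket, and `expensive_compute` are hypothetical stand-ins.

value = node._hit_cached_output_socket_value(
	lambda: expensive_compute(),  # only evaluated on a cache miss
	'Freq',                       # output socket name
	ct.DataFlowKind.Value,        # kind; (name, kind) is the cache key
)

# Invalidation, as performed by trigger_action() when a dependent input socket or property changes:
node._invalidate_cached_output_socket_value('Freq', ct.DataFlowKind.Value)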
+ ## Compute Output Cache + ## -> KEY: output socket name, kind + ## -> INV: When DataChanged triggers with one of the event_method dependencies: + ## - event_method.dependencies.input_sockets has DataChanged socket_name + ## - event_method.dependencies.input_socket_kinds has DataChanged kind + ## - DataChanged socket_name is loose and event_method wants all-loose + ## - event_method.dependencies.props has DataChanged prop_name + def _hit_cached_output_socket_value( + self, + compute_output_socket_cb: typ.Callable[[], typ.Any], + output_socket_name: ct.SocketName, + kind: ct.DataFlowKind, + ) -> typ.Any | None: + """Retrieve a cached output socket value by `output_socket_name, kind`.""" + # Create Non-Persistent Cache Entry + if bl_cache.CACHE_NOPERSIST.get(self.instance_id) is None: + bl_cache.CACHE_NOPERSIST[self.instance_id] = {} + cache_nopersist = bl_cache.CACHE_NOPERSIST[self.instance_id] - def draw_plot_settings(self, _: bpy.types.Context, layout: bpy.types.UILayout): - if self.locked: - layout.enabled = False + # Create Output Socket Cache Entry + if cache_nopersist.get('_cached_output_sockets') is None: + cache_nopersist['_cached_output_sockets'] = {} + cached_output_sockets = cache_nopersist['_cached_output_sockets'] + + # Try Hit on Cached Output Sockets + cached_value = cached_output_sockets.get((output_socket_name, kind)) + if cached_value is None: + value = compute_output_socket_cb() + cached_output_sockets[(output_socket_name, kind)] = value + else: + value = cached_value + + return value + + def _invalidate_cached_output_socket_value( + self, output_socket_name: ct.SocketName, kind: ct.DataFlowKind + ) -> None: + # Create Non-Persistent Cache Entry + if bl_cache.CACHE_NOPERSIST.get(self.instance_id) is None: + return + cache_nopersist = bl_cache.CACHE_NOPERSIST[self.instance_id] + + # Create Output Socket Cache Entry + if cache_nopersist.get('_cached_output_sockets') is None: + return + cached_output_sockets = cache_nopersist['_cached_output_sockets'] + + # Try Hit & Delete + cached_output_sockets.pop((output_socket_name, kind), None) + + ## Input Cache + ## -> KEY: input socket name, kind, unit system + ## -> INV: DataChanged w/socket name + def _hit_cached_input_socket_value( + self, + compute_input_socket_cb: typ.Callable[[typ.Self], typ.Any], + input_socket_name: ct.SocketName, + kind: ct.DataFlowKind, + unit_system: dict[ct.SocketType, sp.Expr], + ) -> typ.Any | None: + # Create Non-Persistent Cache Entry + if bl_cache.CACHE_NOPERSIST.get(self.instance_id) is None: + bl_cache.CACHE_NOPERSIST[self.instance_id] = {} + cache_nopersist = bl_cache.CACHE_NOPERSIST[self.instance_id] + + # Create Output Socket Cache Entry + if cache_nopersist.get('_cached_input_sockets') is None: + cache_nopersist['_cached_input_sockets'] = {} + cached_input_sockets = cache_nopersist['_cached_input_sockets'] + + # Try Hit on Cached Output Sockets + encoded_unit_system = bl_cache.ENCODER.encode(unit_system).decode('utf-8') + cached_value = cached_input_sockets.get( + (input_socket_name, kind, encoded_unit_system), + ) + if cached_value is None: + value = compute_input_socket_cb() + cached_input_sockets[(input_socket_name, kind, encoded_unit_system)] = value + else: + value = cached_value + return value + + def _invalidate_cached_input_socket_value( + self, + input_socket_name: ct.SocketName, + ) -> None: + # Create Non-Persistent Cache Entry + if bl_cache.CACHE_NOPERSIST.get(self.instance_id) is None: + return + cache_nopersist = bl_cache.CACHE_NOPERSIST[self.instance_id] + + # Create Output 
Socket Cache Entry + if cache_nopersist.get('_cached_input_sockets') is None: + return + cached_input_sockets = cache_nopersist['_cached_input_sockets'] + + # Try Hit & Delete + for cached_input_socket in list(cached_input_sockets.keys()): + if cached_input_socket[0] == input_socket_name: + cached_input_sockets.pop(cached_input_socket, None) #################### # - Data Flow #################### + ## TODO: Lazy input socket list in events.py callbacks, to replace random scattered `_compute_input` calls. def _compute_input( self, input_socket_name: ct.SocketName, kind: ct.DataFlowKind = ct.DataFlowKind.Value, - ) -> typ.Any | None: - """Computes the data of an input socket, by socket name and data flow kind, by asking the socket nicely via `bl_socket.compute_data`. + unit_system: dict[ct.SocketType, sp.Expr] | None = None, + optional: bool = False, + ) -> typ.Any: + """Computes the data of an input socket, following links if needed. - Args: - input_socket_name: The name of the input socket, as defined in `self.input_sockets`. - kind: The kind of data flow to compute. + Note: + The semantics derive entirely from `sockets.MaxwellSimSocket.compute_data()`. + + Parameters: + input_socket_name: The name of the input socket to compute the value of. + It must be currently active. + kind: The data flow kind to compute. """ - if bl_socket := self.inputs.get(input_socket_name): - return bl_socket.compute_data(kind=kind) + if (bl_socket := self.inputs.get(input_socket_name)) is not None: + return self._hit_cached_input_socket_value( + lambda: ( + ct.DataFlowKind.scale_to_unit_system( + kind, + bl_socket.compute_data(kind=kind), + bl_socket.socket_type, + unit_system, + ) + if unit_system is not None + else bl_socket.compute_data(kind=kind) + ), + input_socket_name, + kind, + unit_system, + ) + if optional: + return None msg = f'Input socket "{input_socket_name}" on "{self.bl_idname}" is not an active input socket' raise ValueError(msg) @@ -537,13 +688,11 @@ class MaxwellSimNode(bpy.types.Node): self, output_socket_name: ct.SocketName, kind: ct.DataFlowKind = ct.DataFlowKind.Value, + optional: bool = False, ) -> typ.Any: - """Computes the value of an output socket name, from its socket name. + """Computes the value of an output socket. - Searches methods decorated with `@computes_output_socket(output_socket_name, kind=..., ...)`, for a perfect match to the pair `socket_name..kind`. - This method is run to produce the value. - - Args: + Parameters: output_socket_name: The name declaring the output socket, for which this method computes the output. kind: The DataFlowKind to use when computing the output socket value. @@ -551,35 +700,99 @@ class MaxwellSimNode(bpy.types.Node): The value of the output socket, as computed by the dedicated method registered using the `@computes_output_socket` decorator. 
""" + if self.outputs.get(output_socket_name) is None: + if optional: + return None + msg = f"Can't compute nonexistent output socket name {output_socket_name}, as it's not currently active" + raise RuntimeError(msg) + + output_socket_methods = self.event_methods_by_action[ + ct.DataFlowAction.OutputRequested + ] possible_output_socket_methods = [ output_socket_method - for output_socket_method in self._output_socket_methods - if kind == output_socket_method.extra_data['kind'] + for output_socket_method in output_socket_methods + if kind == output_socket_method.callback_info.kind and ( output_socket_name - == output_socket_method.extra_data['output_socket_name'] + == output_socket_method.callback_info.output_socket_name or ( - output_socket_method.extra_data['any_loose_output_socket'] + output_socket_method.callback_info.any_loose_output_socket and output_socket_name in self.loose_output_sockets ) ) ] if len(possible_output_socket_methods) == 1: + return self._hit_cached_output_socket_value( + lambda: possible_output_socket_methods[0](self), + output_socket_name, + kind, + ) return possible_output_socket_methods[0](self) - msg = f'No output method for ({output_socket_name}, {kind.value!s}' - raise ValueError(msg) + if len(possible_output_socket_methods) == 0: + msg = f'No output method for ({output_socket_name}, {kind.value!s}' + raise ValueError(msg) + + if len(possible_output_socket_methods) > 1: + msg = ( + f'More than one method found for ({output_socket_name}, {kind.value!s}.' + ) + raise RuntimeError(msg) + + msg = 'Somehow, a length is negative. Call NASA.' + raise SystemError(msg) #################### # - Action Chain #################### - def sync_prop(self, prop_name: str, _: bpy.types.Context): - """Called when a property has been updated.""" - if not hasattr(self, prop_name): - msg = f'Property {prop_name} not defined on socket {self}' + def sync_prop(self, prop_name: str, _: bpy.types.Context) -> None: + """Report that a particular property has changed, which may cause certain caches to regenerate. + + Note: + Called by **all** valid `bpy.prop.Property` definitions in the addon, via their update methods. + + May be called in a threaded context - careful! + + Parameters: + prop_name: The name of the property that changed. + """ + if hasattr(self, prop_name): + self.trigger_action(ct.DataFlowAction.DataChanged, prop_name=prop_name) + else: + msg = f'Property {prop_name} not defined on node {self}' raise RuntimeError(msg) - self.trigger_action(ct.DataFlowAction.DataChanged, prop_name=prop_name) + @bl_cache.cached_bl_property(persist=False) + def event_method_filter_by_action(self) -> dict[ct.DataFlowAction, typ.Callable]: + """Compute a map of DataFlowActions, to a function that filters its event methods. + + The filter expression may use attributes of `self`, or return `True` if no filtering should occur, or return `False` if methods should never run. 
+ """ + return { + ct.DataFlowAction.EnableLock: lambda *_: True, + ct.DataFlowAction.DisableLock: lambda *_: True, + ct.DataFlowAction.DataChanged: lambda event_method, + socket_name, + prop_name: ( + ( + socket_name + and socket_name in event_method.callback_info.on_changed_sockets + ) + or ( + prop_name + and prop_name in event_method.callback_info.on_changed_props + ) + or ( + socket_name + and event_method.callback_info.on_any_changed_loose_input + and socket_name in self.loose_input_sockets + ) + ), + ct.DataFlowAction.OutputRequested: lambda *_: False, + ct.DataFlowAction.ShowPreview: lambda *_: True, + ct.DataFlowAction.ShowPlot: lambda *_: True, + } def trigger_action( self, @@ -587,89 +800,86 @@ class MaxwellSimNode(bpy.types.Node): socket_name: ct.SocketName | None = None, prop_name: ct.SocketName | None = None, ) -> None: - """Reports that the input socket is changed. + """Recursively triggers actions/events forwards or backwards along the node tree, allowing nodes in the update path to react. - Invalidates (recursively) the cache of any managed object or - output socket method that implicitly depends on this input socket. + Use `events` decorators to define methods that react to particular `ct.DataFlowAction`s. + + Note: + This can be an unpredictably heavy function, depending on the node graph topology. + + Parameters: + action: The action/event to report forwards/backwards along the node tree. + socket_name: The input socket that was altered, if any, in order to trigger this event. + pop_name: The property that was altered, if any, in order to trigger this event. """ - # log.debug( - # 'Action "%s" Triggered in "%s" (socket_name="%s", prop_name="%s")', - # action, - # self.name, - # socket_name, - # prop_name, - # ) - # Forwards Chains if action == ct.DataFlowAction.DataChanged: - # Run User Callbacks - ## Careful with these, they run BEFORE propagation... - ## ...because later-chain methods may rely on the results of this. 
- for method in self._on_value_changed_methods: - if ( - ( - socket_name - and socket_name in method.extra_data['changed_sockets'] - ) - or (prop_name and prop_name in method.extra_data['changed_props']) + # Invalidate Input/Output Socket Caches + all_output_method_infos = [ + event_method.callback_info + for event_method in self.event_methods_by_action[ + ct.DataFlowAction.OutputRequested + ] + ] + input_sockets_to_invalidate_cached_values_of = set() + output_sockets_to_invalidate_cached_values_of = set() + + # Invalidate by Dependent Input Socket + if socket_name is not None: + input_sockets_to_invalidate_cached_values_of.add(socket_name) + + ## Output Socket: Invalidate if an Output Method Depends on Us + output_sockets_to_invalidate_cached_values_of |= { + (method_info.output_socket_name, method_info.kind) + for method_info in all_output_method_infos + if socket_name in method_info.depon_input_sockets or ( - socket_name - and method.extra_data['changed_loose_input'] - and socket_name in self.loose_input_sockets + socket_name in self.loose_input_sockets + and method_info.depon_all_loose_input_sockets ) - ): - # log.debug( - # 'Running Value-Change Callback "%s" in "%s")', - # method.__name__, - # self.name, - # ) - method(self) + } - # Propagate via Output Sockets - for bl_socket in self.active_bl_sockets('output'): - bl_socket.trigger_action(action) + # Invalidate by Dependent Property + if prop_name is not None: + output_sockets_to_invalidate_cached_values_of |= { + (method_info.output_socket_name, method_info.kind) + for method_info in all_output_method_infos + if prop_name in method_info.depon_props + } - # Backwards Chains - elif action == ct.DataFlowAction.EnableLock: - self.locked = True + # Invalidate Output Socket Values + for key in input_sockets_to_invalidate_cached_values_of: + # log.debug('Invalidating Input Socket Cache: %s', key) + self._invalidate_cached_input_socket_value(key) - ## Propagate via Input Sockets - for bl_socket in self.active_bl_sockets('input'): - bl_socket.trigger_action(action) + for key in output_sockets_to_invalidate_cached_values_of: + # log.debug('Invalidating Output Socket Cache: %s', key) + self._invalidate_cached_output_socket_value(*key) - elif action == ct.DataFlowAction.DisableLock: - self.locked = False + # Run Triggered Event Methods + stop_propagation = False ## A method wants us to not continue + event_methods_to_run = [ + event_method + for event_method in self.event_methods_by_action[action] + if self.event_method_filter_by_action[action]( + event_method, socket_name, prop_name + ) + ] + for event_method in event_methods_to_run: + stop_propagation |= event_method.stop_propagation + event_method(self) - ## Propagate via Input Sockets - for bl_socket in self.active_bl_sockets('input'): - bl_socket.trigger_action(action) - - elif action == ct.DataFlowAction.ShowPreview: - # Run User Callbacks - ## "On Show Preview" callbacks are 'on_value_changed' callbacks... - ## ...which simply hook into the 'preview_active' property. - ## By (maybe) altering 'preview_active', callbacks run as needed. - if not self.preview_active: - log.info( - 'Activating Preview in "%s")', - self.name, - ) - self.preview_active = True - - ## Propagate via Input Sockets - for bl_socket in self.active_bl_sockets('input'): - bl_socket.trigger_action(action) - - elif action == ct.DataFlowAction.ShowPlot: - # Run User Callbacks - ## These shouldn't change any data, BUT... - ## ...because they can stop propagation, they should go first. 
- for method in self._on_show_plot: - method(self) - if method.extra_data['stop_propagation']: - return - - ## Propagate via Input Sockets - for bl_socket in self.active_bl_sockets('input'): + # Trigger Action on Input/Output Sockets + ## The trigger chain goes node/socket/node/socket/... + if ( + ct.DataFlowAction.stop_if_no_event_methods(action) + and len(event_methods_to_run) == 0 + ): + return + if not stop_propagation: + triggered_sockets = self._bl_sockets( + direc=ct.DataFlowAction.trigger_direction(action) + ) + for bl_socket in triggered_sockets: bl_socket.trigger_action(action) #################### @@ -687,30 +897,52 @@ class MaxwellSimNode(bpy.types.Node): """Run (by Blender) on node creation.""" # Initialize Cache and Instance ID self.instance_id = str(uuid.uuid4()) - CACHE[self.instance_id] = {} # Initialize Name self.sim_node_name = self.name ## Only shown in draw_buttons if 'self.use_sim_node_name' # Initialize Sockets - self.sync_sockets() + self._sync_sockets() # Apply Default Preset if self.active_preset: - self.sync_active_preset() + self._on_active_preset_changed() - # Callbacks - for method in self._on_init: - method(self) + # Event Methods + ## Run any 'DataChanged' methods with 'run_on_init' set. + ## Semantically: Creating data _arguably_ changes it. + ## -> Compromise: Users explicitly say 'run_on_init' in @on_value_changed + for event_method in [ + event_method + for event_method in self.event_methods_by_action[ + ct.DataFlowAction.DataChanged + ] + if event_method.callback_info.run_on_init + ]: + event_method(self) def update(self) -> None: pass + def copy(self, _: bpy.types.Node) -> None: + """Generate a new instance ID and Sim Node Name. + + Note: + Blender runs this when instantiating this node from an existing node. + + Parameters: + node: The existing node from which this node was copied. + """ + # Generate New Instance ID + self.instance_id = str(uuid.uuid4()) + + # Generate New Name + ## Blender will automatically add .001 so that `self.name` is unique. + self.sim_node_name = self.name + def free(self) -> None: """Run (by Blender) when deleting the node.""" - if not CACHE.get(self.instance_id): - CACHE[self.instance_id] = {} node_tree = self.id_data # Unlock @@ -718,7 +950,6 @@ class MaxwellSimNode(bpy.types.Node): ## Essentially, deleting a locked node will unlock along input chain. ## It also counts if any of the input sockets are linked and locked. ## Thus, we prevent "dangling locks". - ## TODO: Don't even allow deleting a locked node. if self.locked or any( bl_socket.is_linked and bl_socket.locked for bl_socket in self.inputs.values() @@ -735,6 +966,6 @@ class MaxwellSimNode(bpy.types.Node): ## By reporting that we're deleting the node, the cache stays happy. node_tree.sync_node_removed(self) - # Finally: Free Instance Cache - if self.instance_id in CACHE: - del CACHE[self.instance_id] + # Invalidate Non-Persistent Cache + ## Prevents memory leak due to dangling cache entries for deleted nodes. 
+ bl_cache.invalidate_nonpersist_instance_id(self.instance_id) diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/events.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/events.py index a7b26e9..b41cdcb 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/events.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/events.py @@ -1,12 +1,10 @@ -import enum +import dataclasses import inspect import typing as typ from types import MappingProxyType -from ....utils import extra_sympy_units as spux from ....utils import logger from .. import contracts as ct -from .base import MaxwellSimNode log = logger.get(__name__) @@ -14,50 +12,35 @@ UnitSystemID = str UnitSystem = dict[ct.SocketType, typ.Any] -class EventCallbackType(enum.StrEnum): - """Names of actions that support callbacks.""" - - computes_output_socket = enum.auto() - on_value_changed = enum.auto() - on_show_plot = enum.auto() - on_init = enum.auto() - - #################### # - Event Callback Information #################### -class EventCallbackData_ComputesOutputSocket(typ.TypedDict): # noqa: N801 - """Extra data used to select a method to compute output sockets.""" +@dataclasses.dataclass(kw_only=True, frozen=True) +class InfoDataChanged: + run_on_init: bool + on_changed_sockets: set[ct.SocketName] + on_changed_props: set[str] + on_any_changed_loose_input: set[str] + +@dataclasses.dataclass(kw_only=True, frozen=True) +class InfoOutputRequested: output_socket_name: ct.SocketName any_loose_output_socket: bool kind: ct.DataFlowKind + depon_props: set[str] -class EventCallbackData_OnValueChanged(typ.TypedDict): # noqa: N801 - """Extra data used to select a method to compute output sockets.""" + depon_input_sockets: set[ct.SocketName] + depon_input_socket_kinds: dict[ct.SocketName, ct.DataFlowKind] + depon_all_loose_input_sockets: bool - changed_sockets: set[ct.SocketName] - changed_props: set[str] - changed_loose_input: set[str] + depon_output_sockets: set[ct.SocketName] + depon_output_socket_kinds: dict[ct.SocketName, ct.DataFlowKind] + depon_all_loose_output_sockets: bool -class EventCallbackData_OnShowPlot(typ.TypedDict): # noqa: N801 - """Extra data in the callback, used when showing a plot.""" - - stop_propagation: bool - - -class EventCallbackData_OnInit(typ.TypedDict): # noqa: D101, N801 - pass - - -EventCallbackData: typ.TypeAlias = ( - EventCallbackData_ComputesOutputSocket - | EventCallbackData_OnValueChanged - | EventCallbackData_OnShowPlot - | EventCallbackData_OnInit -) +EventCallbackInfo: typ.TypeAlias = InfoDataChanged | InfoOutputRequested #################### @@ -68,16 +51,21 @@ PropName: typ.TypeAlias = str def event_decorator( - action_type: EventCallbackType, - extra_data: EventCallbackData, - props: set[PropName] = frozenset(), + action_type: ct.DataFlowAction, + callback_info: EventCallbackInfo | None, + stop_propagation: bool = False, + # Request Data for Callback managed_objs: set[ManagedObjName] = frozenset(), + props: set[PropName] = frozenset(), input_sockets: set[ct.SocketName] = frozenset(), + input_sockets_optional: dict[ct.SocketName, bool] = MappingProxyType({}), input_socket_kinds: dict[ct.SocketName, ct.DataFlowKind] = MappingProxyType({}), output_sockets: set[ct.SocketName] = frozenset(), + output_sockets_optional: dict[ct.SocketName, bool] = MappingProxyType({}), output_socket_kinds: dict[ct.SocketName, ct.DataFlowKind] = MappingProxyType({}), all_loose_input_sockets: bool = False, all_loose_output_sockets: bool = False, + # Request Unit System 
Scaling unit_systems: dict[UnitSystemID, UnitSystem] = MappingProxyType({}), scale_input_sockets: dict[ct.SocketName, UnitSystemID] = MappingProxyType({}), scale_output_sockets: dict[ct.SocketName, UnitSystemID] = MappingProxyType({}), @@ -87,9 +75,11 @@ def event_decorator( Parameters: action_type: A name describing which event the decorator should respond to. Set to `return_method.action_type` - extra_data: A dictionary that provides the caller with additional per-`action_type` information. + callback_info: A dictionary that provides the caller with additional per-`action_type` information. This might include parameters to help select the most appropriate method(s) to respond to an event with, or actions to take after running the callback. props: Set of `props` to compute, then pass to the decorated method. + stop_propagation: Whether or stop propagating the event through the graph after encountering this method. + Other methods defined on the same node will still run. managed_objs: Set of `managed_objs` to retrieve, then pass to the decorated method. input_sockets: Set of `input_sockets` to compute, then pass to the decorated method. input_socket_kinds: The `ct.DataFlowKind` to compute per-input-socket. @@ -104,7 +94,7 @@ def event_decorator( A decorator, which can be applied to a method of `MaxwellSimNode`. When a `MaxwellSimNode` subclass initializes, such a decorated method will be picked up on. - When the `action_type` action passes through the node, then `extra_data` is used to determine + When the `action_type` action passes through the node, then `callback_info` is used to determine """ req_params = ( {'self'} @@ -119,6 +109,8 @@ def event_decorator( # TODO: Check that all Unit System IDs referenced are also defined in 'unit_systems'. ## TODO: More ex. introspective checks and such, to make it really hard to write invalid methods. 
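To make the request parameters above concrete, a usage sketch (not part of the patch) of a decorated node method, modeled on the monitor and wave-constant changes later in this patch; the socket names and the `make_monitor` helper are hypothetical.

@events.computes_output_socket(
	'Monitor',
	kind=ct.DataFlowKind.Value,
	props={'sim_node_name'},
	input_sockets={'Center', 'Size'},
	unit_systems={'Tidy3DUnits': ct.UNITS_TIDY3D},
	scale_input_sockets={'Center': 'Tidy3DUnits', 'Size': 'Tidy3DUnits'},
)
def compute_monitor(self, props, input_sockets, unit_systems):
	# 'Center' and 'Size' arrive pre-scaled to the 'Tidy3DUnits' unit system.
	return make_monitor(
		center=input_sockets['Center'],
		size=input_sockets['Size'],
		name=props['sim_node_name'],
	)

Each requested category (`props`, `input_sockets`, `unit_systems`, ...) must appear as a parameter of the decorated method, since the wrapper passes them as keyword arguments.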
+ # TODO: Check Function Annotation Validity + ## - socket capabilities def decorator(method: typ.Callable) -> typ.Callable: # Check Function Signature Validity @@ -133,127 +125,126 @@ def event_decorator( msg = f'Decorated method {method.__name__} has superfluous arguments {func_sig - req_params}' raise ValueError(msg) - # TODO: Check Function Annotation Validity - ## - socket capabilities - - def decorated(node: MaxwellSimNode): + def decorated(node): method_kw_args = {} ## Keyword Arguments for Decorated Method - # Compute Requested Props - if props: - _props = {prop_name: getattr(node, prop_name) for prop_name in props} - method_kw_args |= {'props': _props} + # Unit Systems + method_kw_args |= {'unit_systems': unit_systems} if unit_systems else {} - # Retrieve Requested Managed Objects - if managed_objs: - _managed_objs = { - managed_obj_name: node.managed_objs[managed_obj_name] - for managed_obj_name in managed_objs + # Properties + method_kw_args |= ( + {'props': {prop_name: getattr(node, prop_name) for prop_name in props}} + if props + else {} + ) + + # Managed Objects + method_kw_args |= ( + { + 'managed_objs': { + managed_obj_name: node.managed_objs[managed_obj_name] + for managed_obj_name in managed_objs + } } - method_kw_args |= {'managed_objs': _managed_objs} + if managed_objs + else {} + ) - # Requested Sockets - ## Compute Requested Input Sockets - if input_sockets: - _input_sockets = { - input_socket_name: node._compute_input( - input_socket_name, - kind=input_socket_kinds.get( - input_socket_name, ct.DataFlowKind.Value - ), - ) - for input_socket_name in input_sockets + # Sockets + ## Input Sockets + method_kw_args |= ( + { + 'input_sockets': { + input_socket_name: node._compute_input( + input_socket_name, + kind=input_socket_kinds.get( + input_socket_name, ct.DataFlowKind.Value + ), + unit_system=( + unit_system := unit_systems.get( + scale_input_sockets.get(input_socket_name) + ) + ), + optional=input_sockets_optional.get( + input_socket_name, False + ), + ) + for input_socket_name in input_sockets + } } + if input_sockets + else {} + ) - # Scale Specified Input Sockets to Unit System - ## First, scale the input socket value to the given unit system - ## Then, convert the symbol-less sympy scalar to a python type. 
- for input_socket_name, unit_system_id in scale_input_sockets.items(): - unit_system = unit_systems[unit_system_id] - kind = input_socket_kinds.get( - input_socket_name, ct.DataFlowKind.Value - ) - - if kind == ct.DataFlowKind.Value: - _input_sockets[input_socket_name] = spux.sympy_to_python( - spux.scale_to_unit( - _input_sockets[input_socket_name], - unit_system[node.inputs[input_socket_name].socket_type], - ) + ## Output Sockets + method_kw_args |= ( + { + 'output_sockets': { + output_socket_name: ct.DataFlowKind.scale_to_unit_system( + ( + output_socket_kind := output_socket_kinds.get( + output_socket_name, ct.DataFlowKind.Value + ) + ), + node.compute_output( + output_socket_name, + kind=output_socket_kind, + optional=output_sockets_optional.get( + output_socket_name, False + ), + ), + node.outputs[output_socket_name].socket_type, + unit_systems.get( + scale_output_sockets.get(output_socket_name) + ), ) - elif kind == ct.DataFlowKind.LazyValueRange: - _input_sockets[input_socket_name] = _input_sockets[ - input_socket_name - ].rescale_to_unit( - unit_system[node.inputs[input_socket_name].socket_type] + if scale_output_sockets.get(output_socket_name) is not None + else node.compute_output( + output_socket_name, + kind=output_socket_kinds.get( + output_socket_name, ct.DataFlowKind.Value + ), + optional=output_sockets_optional.get( + output_socket_name, False + ), ) - - method_kw_args |= {'input_sockets': _input_sockets} - - ## Compute Requested Output Sockets - if output_sockets: - _output_sockets = { - output_socket_name: node.compute_output( - output_socket_name, - kind=output_socket_kinds.get( - output_socket_name, ct.DataFlowKind.Value - ), - ) - for output_socket_name in output_sockets + for output_socket_name in output_sockets + } } - - # Scale Specified Output Sockets to Unit System - ## First, scale the output socket value to the given unit system - ## Then, convert the symbol-less sympy scalar to a python type. 
- for output_socket_name, unit_system_id in scale_output_sockets.items(): - unit_system = unit_systems[unit_system_id] - kind = input_socket_kinds.get( - input_socket_name, ct.DataFlowKind.Value - ) - - if kind == ct.DataFlowKind.Value: - _output_sockets[output_socket_name] = spux.sympy_to_python( - spux.scale_to_unit( - _output_sockets[output_socket_name], - unit_system[ - node.outputs[output_socket_name].socket_type - ], - ) - ) - elif kind == ct.DataFlowKind.LazyValueRange: - _output_sockets[output_socket_name] = _output_sockets[ - output_socket_name - ].rescale_to_unit( - unit_system[node.outputs[output_socket_name].socket_type] - ) - method_kw_args |= {'output_sockets': _output_sockets} + if output_sockets + else {} + ) # Loose Sockets ## Compute All Loose Input Sockets - if all_loose_input_sockets: - _loose_input_sockets = { - input_socket_name: node._compute_input( - input_socket_name, - kind=node.inputs[input_socket_name].active_kind, - ) - for input_socket_name in node.loose_input_sockets + method_kw_args |= ( + { + 'loose_input_sockets': { + input_socket_name: node._compute_input( + input_socket_name, + kind=node.inputs[input_socket_name].active_kind, + ) + for input_socket_name in node.loose_input_sockets + } } - method_kw_args |= {'loose_input_sockets': _loose_input_sockets} + if all_loose_input_sockets + else {} + ) ## Compute All Loose Output Sockets - if all_loose_output_sockets: - _loose_output_sockets = { - output_socket_name: node.compute_output( - output_socket_name, - kind=node.outputs[output_socket_name].active_kind, - ) - for output_socket_name in node.loose_output_sockets + method_kw_args |= ( + { + 'loose_output_sockets': { + output_socket_name: node.compute_output( + output_socket_name, + kind=node.outputs[output_socket_name].active_kind, + ) + for output_socket_name in node.loose_output_sockets + } } - method_kw_args |= {'loose_output_sockets': _loose_output_sockets} - - # Unit Systems - if unit_systems: - method_kw_args |= {'unit_systems': unit_systems} + if all_loose_output_sockets + else {} + ) # Call Method return method( @@ -270,7 +261,8 @@ def event_decorator( ## Add Spice decorated.action_type = action_type - decorated.extra_data = extra_data + decorated.callback_info = callback_info + decorated.stop_propagation = stop_propagation return decorated @@ -280,19 +272,22 @@ def event_decorator( #################### # - Simplified Event Callbacks #################### -def computes_output_socket( - output_socket_name: ct.SocketName | None, - any_loose_output_socket: bool = False, - kind: ct.DataFlowKind = ct.DataFlowKind.Value, +def on_enable_lock( **kwargs, ): return event_decorator( - action_type='computes_output_socket', - extra_data={ - 'output_socket_name': output_socket_name, - 'any_loose_output_socket': any_loose_output_socket, - 'kind': kind, - }, + action_type=ct.DataFlowAction.EnableLock, + callback_info=None, + **kwargs, + ) + + +def on_disable_lock( + **kwargs, +): + return event_decorator( + action_type=ct.DataFlowAction.DisableLock, + callback_info=None, **kwargs, ) @@ -302,37 +297,67 @@ def on_value_changed( socket_name: set[ct.SocketName] | ct.SocketName | None = None, prop_name: set[str] | str | None = None, any_loose_input_socket: bool = False, + run_on_init: bool = False, **kwargs, ): return event_decorator( - action_type=EventCallbackType.on_value_changed, - extra_data={ - 'changed_sockets': ( + action_type=ct.DataFlowAction.DataChanged, + callback_info=InfoDataChanged( + run_on_init=run_on_init, + on_changed_sockets=( socket_name if 
isinstance(socket_name, set) else {socket_name} ), - 'changed_props': (prop_name if isinstance(prop_name, set) else {prop_name}), - 'changed_loose_input': any_loose_input_socket, - }, + on_changed_props=(prop_name if isinstance(prop_name, set) else {prop_name}), + on_any_changed_loose_input=any_loose_input_socket, + ), + **kwargs, + ) + + +## TODO: Change name to 'on_output_requested' +def computes_output_socket( + output_socket_name: ct.SocketName | None, + any_loose_output_socket: bool = False, + kind: ct.DataFlowKind = ct.DataFlowKind.Value, + **kwargs, +): + return event_decorator( + action_type=ct.DataFlowAction.OutputRequested, + callback_info=InfoOutputRequested( + output_socket_name=output_socket_name, + any_loose_output_socket=any_loose_output_socket, + kind=kind, + depon_props=kwargs.get('props', set()), + depon_input_sockets=kwargs.get('input_sockets', set()), + depon_input_socket_kinds=kwargs.get('input_socket_kinds', set()), + depon_output_sockets=kwargs.get('output_sockets', set()), + depon_output_socket_kinds=kwargs.get('output_socket_kinds', set()), + depon_all_loose_input_sockets=kwargs.get('all_loose_input_sockets', set()), + depon_all_loose_output_sockets=kwargs.get( + 'all_loose_output_sockets', set() + ), + ), + **kwargs, ## stop_propagation has no effect. + ) + + +def on_show_preview( + **kwargs, +): + return event_decorator( + action_type=ct.DataFlowAction.ShowPreview, + callback_info={}, **kwargs, ) def on_show_plot( - stop_propagation: bool = False, + stop_propagation: bool = True, **kwargs, ): return event_decorator( - action_type=EventCallbackType.on_show_plot, - extra_data={ - 'stop_propagation': stop_propagation, - }, - **kwargs, - ) - - -def on_init(**kwargs): - return event_decorator( - action_type=EventCallbackType.on_init, - extra_data={}, + action_type=ct.DataFlowAction.ShowPlot, + callback_info={}, + stop_propagation=stop_propagation, **kwargs, ) diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/inputs/file_importers/tidy_3d_file_importer.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/inputs/file_importers/tidy_3d_file_importer.py index 60774d8..ad91f2d 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/inputs/file_importers/tidy_3d_file_importer.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/inputs/file_importers/tidy_3d_file_importer.py @@ -57,6 +57,7 @@ class Tidy3DFileImporterNode(base.MaxwellSimNode): #################### # - Properties #################### + ## TODO: More automatic determination of which file type is in use :) tidy3d_type: bpy.props.EnumProperty( name='Tidy3D Type', description='Type of Tidy3D object to load', @@ -228,7 +229,6 @@ class Tidy3DFileImporterNode(base.MaxwellSimNode): disp_fitter = CACHE[self.bl_label]['fitter'] # Plot - log.debug(disp_fitter) managed_objs['plot'].mpl_plot_to_image( lambda ax: disp_fitter.plot( medium=model_medium, diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/inputs/wave_constant.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/inputs/wave_constant.py index 77e61ca..91331ed 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/inputs/wave_constant.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/inputs/wave_constant.py @@ -5,11 +5,14 @@ import sympy as sp import sympy.physics.units as spu from .....utils import extra_sympy_units as spux +from .....utils import logger from .....utils import sci_constants as constants from ... import contracts as ct from ... import sockets from .. 
import base, events +log = logger.get(__name__) + class WaveConstantNode(base.MaxwellSimNode): node_type = ct.NodeType.WaveConstant @@ -22,7 +25,7 @@ class WaveConstantNode(base.MaxwellSimNode): use_range: bpy.props.BoolProperty( name='Range', - description='Whether to use the wavelength range', + description='Whether to use a wavelength/frequency range', default=False, update=lambda self, context: self.sync_prop('use_range', context), ) @@ -36,62 +39,79 @@ class WaveConstantNode(base.MaxwellSimNode): @events.computes_output_socket( 'WL', kind=ct.DataFlowKind.Value, - all_loose_input_sockets=True, + # Data + input_sockets={'WL', 'Freq'}, + input_sockets_optional={'WL': True, 'Freq': True}, ) - def compute_wl_value(self, loose_input_sockets: dict) -> sp.Expr: - if (wl := loose_input_sockets.get('WL')) is not None: - return wl + def compute_wl_value(self, input_sockets: dict) -> sp.Expr: + if input_sockets['WL'] is not None: + return input_sockets['WL'] - freq = loose_input_sockets.get('Freq') - return constants.vac_speed_of_light / freq + if input_sockets['WL'] is None and input_sockets['Freq'] is None: + msg = 'Both WL and Freq are None.' + raise RuntimeError(msg) + + return constants.vac_speed_of_light / input_sockets['Freq'] @events.computes_output_socket( 'Freq', kind=ct.DataFlowKind.Value, - all_loose_input_sockets=True, + # Data + input_sockets={'WL', 'Freq'}, + input_sockets_optional={'WL': True, 'Freq': True}, ) - def compute_freq_value(self, loose_input_sockets: dict) -> sp.Expr: - if (freq := loose_input_sockets.get('Freq')) is not None: - return freq + def compute_freq_value(self, input_sockets: dict) -> sp.Expr: + log.critical(input_sockets) + if input_sockets['Freq'] is not None: + return input_sockets['Freq'] - wl = loose_input_sockets.get('WL') - return constants.vac_speed_of_light / wl + if input_sockets['WL'] is None and input_sockets['Freq'] is None: + msg = 'Both WL and Freq are None.' + raise RuntimeError(msg) + + return constants.vac_speed_of_light / input_sockets['WL'] @events.computes_output_socket( 'WL', kind=ct.DataFlowKind.LazyValueRange, - all_loose_input_sockets=True, + # Data + input_sockets={'WL', 'Freq'}, + input_sockets_optional={'WL': True, 'Freq': True}, ) - def compute_wl_lazyvaluerange(self, loose_input_sockets: dict) -> sp.Expr: - if (wl := loose_input_sockets.get('WL')) is not None: - return wl + def compute_wl_range(self, input_sockets: dict) -> sp.Expr: + if input_sockets['WL'] is not None: + return input_sockets['WL'] - freq = loose_input_sockets.get('Freq') + if input_sockets['WL'] is None and input_sockets['Freq'] is None: + msg = 'Both WL and Freq are None.' 
+ raise RuntimeError(msg) - if isinstance(freq, ct.LazyDataValueRange): - return freq.rescale_bounds( - lambda bound: constants.vac_speed_of_light / bound, reverse=True - ) - - return constants.vac_speed_of_light / freq + return input_sockets['Freq'].rescale_bounds( + lambda bound: constants.vac_speed_of_light / bound, reverse=True + ) @events.computes_output_socket( 'Freq', kind=ct.DataFlowKind.LazyValueRange, - all_loose_input_sockets=True, + # Data + input_sockets={'WL', 'Freq'}, + input_socket_kinds={ + 'WL': ct.DataFlowKind.LazyValueRange, + 'Freq': ct.DataFlowKind.LazyValueRange, + }, + input_sockets_optional={'WL': True, 'Freq': True}, ) - def compute_freq_lazyvaluerange(self, loose_input_sockets: dict) -> sp.Expr: - if (freq := loose_input_sockets.get('Freq')) is not None: - return freq + def compute_freq_range(self, input_sockets: dict) -> sp.Expr: + if input_sockets['Freq'] is not None: + return input_sockets['Freq'] - wl = loose_input_sockets.get('WL') + if input_sockets['WL'] is None and input_sockets['Freq'] is None: + msg = 'Both WL and Freq are None.' + raise RuntimeError(msg) - if isinstance(wl, ct.LazyDataValueRange): - return wl.rescale_bounds( - lambda bound: constants.vac_speed_of_light / bound, reverse=True - ) - - return constants.vac_speed_of_light / wl + return input_sockets['WL'].rescale_bounds( + lambda bound: constants.vac_speed_of_light / bound, reverse=True + ) #################### # - Event Methods @@ -99,6 +119,7 @@ class WaveConstantNode(base.MaxwellSimNode): @events.on_value_changed( prop_name={'active_socket_set', 'use_range'}, props={'active_socket_set', 'use_range'}, + run_on_init=True, ) def on_input_spec_change(self, props: dict): if props['active_socket_set'] == 'Wavelength': @@ -123,12 +144,6 @@ class WaveConstantNode(base.MaxwellSimNode): 'Freq': sockets.PhysicalFreqSocketDef(is_array=props['use_range']), } - @events.on_init( - props={'active_socket_set', 'use_range'}, - ) - def on_init(self, props: dict): - self.on_input_spec_change() - #################### # - Blender Registration diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/inputs/web_importers/tidy_3d_web_importer.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/inputs/web_importers/tidy_3d_web_importer.py index 77146c1..9592737 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/inputs/web_importers/tidy_3d_web_importer.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/inputs/web_importers/tidy_3d_web_importer.py @@ -47,15 +47,16 @@ class Tidy3DWebImporterNode(base.MaxwellSimNode): ## TODO: REMOVE TEST log.info('Loading SimulationData File') import sys + for module_name, module in sys.modules.copy().items(): if module_name == '__mp_main__': print('Problematic Module Entry', module_name) print(module) - #print('MODULE REPR', module) + # print('MODULE REPR', module) continue - #return td.SimulationData.from_file( - # fname='/home/sofus/src/blender_maxwell/dev/sim_demo.hdf5' - #) + # return td.SimulationData.from_file( + # fname='/home/sofus/src/blender_maxwell/dev/sim_demo.hdf5' + # ) # Validate Task Availability if (cloud_task := input_sockets['Cloud Task']) is None: @@ -77,7 +78,9 @@ class Tidy3DWebImporterNode(base.MaxwellSimNode): cloud_task, _sim_data_cache_path(cloud_task.task_id) ) - @events.on_value_changed(socket_name='Cloud Task', input_sockets={'Cloud Task'}) + @events.on_value_changed( + socket_name='Cloud Task', run_on_init=True, input_sockets={'Cloud Task'} + ) def on_cloud_task_changed(self, input_sockets: dict): if ( 
(cloud_task := input_sockets['Cloud Task']) is not None @@ -90,10 +93,6 @@ class Tidy3DWebImporterNode(base.MaxwellSimNode): else: self.loose_output_sockets = {} - @events.on_init() - def on_init(self): - self.on_cloud_task_changed() - #################### # - Blender Registration diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/monitors/eh_field_monitor.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/monitors/eh_field_monitor.py index bb61dfd..cdf246b 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/monitors/eh_field_monitor.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/monitors/eh_field_monitor.py @@ -99,9 +99,6 @@ class EHFieldMonitorNode(base.MaxwellSimNode): name=props['sim_node_name'], interval_space=tuple(input_sockets['Samples/Space']), freqs=input_sockets['Freqs'].realize().values, - #freqs=[ - # float(spu.convert_to(freq, spu.hertz) / spu.hertz) for freq in freqs - #], ) #################### diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/monitors/field_power_flux_monitor.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/monitors/field_power_flux_monitor.py index 3452d8f..f21d47f 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/monitors/field_power_flux_monitor.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/monitors/field_power_flux_monitor.py @@ -45,8 +45,9 @@ class PowerFluxMonitorNode(base.MaxwellSimNode): ), }, } - output_sockets: typ.ClassVar = { - 'Monitor': sockets.MaxwellMonitorSocketDef(), + output_socket_sets: typ.ClassVar = { + 'Freq Domain': {'Freq Monitor': sockets.MaxwellMonitorSocketDef()}, + 'Time Domain': {'Time Monitor': sockets.MaxwellMonitorSocketDef()}, } managed_obj_defs: typ.ClassVar = { @@ -62,60 +63,44 @@ class PowerFluxMonitorNode(base.MaxwellSimNode): # - Event Methods: Computation #################### @events.computes_output_socket( - 'Monitor', - props={'active_socket_set', 'sim_node_name'}, + 'Freq Monitor', + props={'sim_node_name'}, input_sockets={ - 'Rec Start', - 'Rec Stop', 'Center', 'Size', 'Samples/Space', - 'Samples/Time', 'Freqs', 'Direction', }, input_socket_kinds={ - 'Freqs': ct.LazyDataValueRange, + 'Freqs': ct.DataFlowKind.LazyValueRange, }, unit_systems={'Tidy3DUnits': ct.UNITS_TIDY3D}, scale_input_sockets={ 'Center': 'Tidy3DUnits', 'Size': 'Tidy3DUnits', 'Freqs': 'Tidy3DUnits', - 'Samples/Space': 'Tidy3DUnits', - 'Rec Start': 'Tidy3DUnits', - 'Rec Stop': 'Tidy3DUnits', - 'Samples/Time': 'Tidy3DUnits', }, ) - def compute_monitor(self, input_sockets: dict, props: dict) -> td.FieldTimeMonitor: - direction = '+' if input_sockets['Direction'] else '-' - - if props['active_socket_set'] == 'Freq Domain': - log.info( - 'Computing FluxMonitor (name="%s") with center="%s", size="%s"', - props['sim_node_name'], - input_sockets['Center'], - input_sockets['Size'], - ) - return td.FluxMonitor( - center=input_sockets['Center'], - size=input_sockets['Size'], - name=props['sim_node_name'], - interval_space=input_sockets['Samples/Space'], - freqs=input_sockets['Freqs'].realize().values, - normal_dir=direction, - ) - - return td.FluxTimeMonitor( + def compute_freq_monitor( + self, + input_sockets: dict, + props: dict, + unit_systems: dict, + ) -> td.FluxMonitor: + log.info( + 'Computing FluxMonitor (name="%s") with center="%s", size="%s"', + props['sim_node_name'], + input_sockets['Center'], + input_sockets['Size'], + ) + return td.FluxMonitor( center=input_sockets['Center'], size=input_sockets['Size'],
name=props['sim_node_name'], - start=input_sockets['Rec Start'], - stop=input_sockets['Rec Stop'], - interval=input_sockets['Samples/Time'], - interval_space=input_sockets['Samples/Space'], - normal_dir=direction, + interval_space=(1, 1, 1), + freqs=input_sockets['Freqs'].realize().values, + normal_dir='+' if input_sockets['Direction'] else '-', ) #################### diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/outputs/viewer.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/outputs/viewer.py index 04ec4af..173df61 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/outputs/viewer.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/outputs/viewer.py @@ -70,12 +70,6 @@ class ViewerNode(base.MaxwellSimNode): update=lambda self, context: self.sync_prop('auto_3d_preview', context), ) - cache__data_socket_linked: bpy.props.BoolProperty( - name='Data Is Linked', - description='Whether the Data input was linked last time it was checked.', - default=True, - ) - #################### # - UI #################### @@ -125,41 +119,26 @@ class ViewerNode(base.MaxwellSimNode): # - Event Methods #################### @events.on_value_changed( + socket_name='Data', prop_name='auto_plot', props={'auto_plot'}, ) def on_changed_plot_preview(self, props): if self.inputs['Data'].is_linked and props['auto_plot']: - # log.debug('Enabling 2D Plot from "%s"', self.name) self.trigger_action(ct.DataFlowAction.ShowPlot) @events.on_value_changed( + socket_name='Data', prop_name='auto_3d_preview', props={'auto_3d_preview'}, ) def on_changed_3d_preview(self, props): - # Unpreview Everything - if props['auto_3d_preview']: - node_tree = self.id_data - node_tree.unpreview_all() + node_tree = self.id_data - # Trigger Preview Action - if self.inputs['Data'].is_linked and props['auto_3d_preview']: - # log.debug('Enabling 3D Previews from "%s"', self.name) - self.trigger_action(ct.DataFlowAction.ShowPreview) - - @events.on_value_changed( - socket_name='Data', - ) - def on_changed_3d_data(self): - # Is Linked: Re-Preview - if self.inputs['Data'].is_linked: - self.on_changed_3d_preview() - self.on_changed_plot_preview() - - # Just Linked / Just Unlinked: Preview/Unpreview All - if self.inputs['Data'].is_linked ^ self.cache__data_socket_linked: - self.cache__data_socket_linked = self.inputs['Data'].is_linked + # Remove Non-Repreviewed Previews on Close + with node_tree.repreview_all(): + if self.inputs['Data'].is_linked and props['auto_3d_preview']: + self.trigger_action(ct.DataFlowAction.ShowPreview) #################### diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/simulations/sim_domain.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/simulations/sim_domain.py index 6796a7d..39cfbc8 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/simulations/sim_domain.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/simulations/sim_domain.py @@ -63,6 +63,7 @@ class SimDomainNode(base.MaxwellSimNode): @events.on_value_changed( socket_name={'Center', 'Size'}, prop_name='preview_active', + run_on_init=True, props={'preview_active'}, input_sockets={'Center', 'Size'}, managed_objs={'mesh', 'modifier'}, @@ -94,10 +95,6 @@ class SimDomainNode(base.MaxwellSimNode): if props['preview_active']: managed_objs['mesh'].show_preview() - @events.on_init() - def on_init(self): - self.on_input_changed() - #################### # - Blender Registration diff --git
a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/structures/geonodes_structure.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/structures/geonodes_structure.py index 5555997..df542f2 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/structures/geonodes_structure.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/structures/geonodes_structure.py @@ -71,14 +71,14 @@ class GeoNodesStructureNode(base.MaxwellSimNode): socket_name='GeoNodes', prop_name='preview_active', any_loose_input_socket=True, + run_on_init=True, + # Pass Data props={'preview_active'}, managed_objs={'mesh', 'modifier'}, input_sockets={'Center', 'GeoNodes'}, all_loose_input_sockets=True, unit_systems={'BlenderUnits': ct.UNITS_BLENDER}, - scale_input_sockets={ - 'Center': 'BlenderUnits' - } + scale_input_sockets={'Center': 'BlenderUnits'}, ) def on_input_changed( self, diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/structures/primitives/box_structure.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/structures/primitives/box_structure.py index afb0b45..9dc8635 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/structures/primitives/box_structure.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/structures/primitives/box_structure.py @@ -62,6 +62,7 @@ class BoxStructureNode(base.MaxwellSimNode): @events.on_value_changed( socket_name={'Center', 'Size'}, prop_name='preview_active', + run_on_init=True, props={'preview_active'}, input_sockets={'Center', 'Size'}, managed_objs={'mesh', 'modifier'}, @@ -93,10 +94,6 @@ class BoxStructureNode(base.MaxwellSimNode): if props['preview_active']: managed_objs['mesh'].show_preview() - @events.on_init() - def on_init(self): - self.on_inputs_changed() - #################### # - Blender Registration diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/structures/primitives/sphere_structure.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/structures/primitives/sphere_structure.py index 88c2e68..fae1deb 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/structures/primitives/sphere_structure.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/structures/primitives/sphere_structure.py @@ -64,6 +64,7 @@ class SphereStructureNode(base.MaxwellSimNode): @events.on_value_changed( socket_name={'Center', 'Radius'}, prop_name='preview_active', + run_on_init=True, props={'preview_active'}, input_sockets={'Center', 'Radius'}, managed_objs={'mesh', 'modifier'}, @@ -96,10 +97,6 @@ class SphereStructureNode(base.MaxwellSimNode): if props['preview_active']: managed_objs['mesh'].show_preview() - @events.on_init() - def on_init(self): - self.on_inputs_changed() - #################### # - Blender Registration diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/utilities/combine.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/utilities/combine.py index 71d2768..8e9d492 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/utilities/combine.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/nodes/utilities/combine.py @@ -1,3 +1,5 @@ +import typing as typ + import bpy import sympy as sp @@ -5,18 +7,15 @@ from ... import contracts as ct from ... import sockets from .. import base, events -MAX_AMOUNT = 20 - class CombineNode(base.MaxwellSimNode): node_type = ct.NodeType.Combine bl_label = 'Combine' - # bl_icon = ... 
#################### # - Sockets #################### - input_socket_sets = { + input_socket_sets: typ.ClassVar = { 'Maxwell Sources': {}, 'Maxwell Structures': {}, 'Maxwell Monitors': {}, @@ -69,7 +68,7 @@ class CombineNode(base.MaxwellSimNode): description='Amount of Objects to Combine', default=1, min=1, - max=MAX_AMOUNT, + # max=MAX_AMOUNT, update=lambda self, context: self.sync_prop('amount', context), ) @@ -118,6 +117,7 @@ class CombineNode(base.MaxwellSimNode): @events.on_value_changed( prop_name='active_socket_set', props={'active_socket_set', 'amount'}, + run_on_init=True, ) def on_value_changed__active_socket_set(self, props): if props['active_socket_set'] == 'Maxwell Sources': @@ -144,10 +144,6 @@ class CombineNode(base.MaxwellSimNode): def on_value_changed__amount(self): self.on_value_changed__active_socket_set() - @events.on_init() - def on_init(self): - self.on_value_changed__active_socket_set() - #################### # - Blender Registration diff --git a/src/blender_maxwell/node_trees/maxwell_sim_nodes/sockets/base.py b/src/blender_maxwell/node_trees/maxwell_sim_nodes/sockets/base.py index 03924f1..5095970 100644 --- a/src/blender_maxwell/node_trees/maxwell_sim_nodes/sockets/base.py +++ b/src/blender_maxwell/node_trees/maxwell_sim_nodes/sockets/base.py @@ -116,7 +116,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket): `trigger_action` method will be called. """ # Forwards Chains - if action in {'value_changed'}: + if action in {ct.DataFlowAction.DataChanged}: ## Input Socket if not self.is_output: self.node.trigger_action(action, socket_name=self.name) @@ -128,15 +128,17 @@ class MaxwellSimSocket(bpy.types.NodeSocket): # Backwards Chains elif action in { - 'enable_lock', - 'disable_lock', - 'show_preview', - 'show_plot', + ct.DataFlowAction.EnableLock, + ct.DataFlowAction.DisableLock, + ct.DataFlowAction.OutputRequested, + ct.DataFlowAction.DataChanged, + ct.DataFlowAction.ShowPreview, + ct.DataFlowAction.ShowPlot, }: - if action == 'enable_lock': + if action == ct.DataFlowAction.EnableLock: self.locked = True - if action == 'disable_lock': + if action == ct.DataFlowAction.DisableLock: self.locked = False ## Output Socket @@ -208,6 +210,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket): Returns a bool, whether or not the socket consents to the link change. """ + ## TODO: Crash when deleting/removing linked loose sockets. if self.locked: return False if self.is_output: diff --git a/src/blender_maxwell/services/tdcloud.py b/src/blender_maxwell/services/tdcloud.py index a67d947..192c38b 100644 --- a/src/blender_maxwell/services/tdcloud.py +++ b/src/blender_maxwell/services/tdcloud.py @@ -105,7 +105,7 @@ class TidyCloudFolders: cloud_folder.folder_id: cloud_folder for cloud_folder in cloud_folders } cls.cache_folders = folders - log.info("Retrieved Folders: %s", str(cls.cache_folders)) + log.info('Retrieved Folders: %s', str(cls.cache_folders)) return folders @classmethod @@ -243,7 +243,11 @@ class TidyCloudTasks: ## Task by-Folder Cache cls.cache_folder_tasks[cloud_folder.folder_id] = set(cloud_tasks) - log.info('Retrieved Tasks (folder="%s"): %s)', cloud_folder.folder_id, str(set(cloud_tasks))) + log.info( + 'Retrieved Tasks (folder="%s"): %s', + cloud_folder.folder_id, + str(set(cloud_tasks)), + ) return cloud_tasks ####################
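
For reference, a minimal sketch of the event pattern the hunks above converge on: standalone `@events.on_init()` methods are dropped in favor of `run_on_init=True` on `@events.on_value_changed(...)`, and string action names give way to `ct.DataFlowAction` members. Only the decorator keywords and the enum member shown here are taken from the hunks; the node name, handler body, and relative imports are illustrative assumptions, not part of the patch.

	from ... import contracts as ct  ## assumed: same relative imports as the node modules above
	from .. import base, events

	class ExampleNode(base.MaxwellSimNode):
		# One handler covers both "a watched input/property changed" and "the node was just
		# initialized", replacing the old pattern of a separate on_init() that re-invoked it by hand.
		@events.on_value_changed(
			socket_name={'Center', 'Size'},  # fire when either socket changes...
			prop_name='preview_active',  # ...or when this property changes...
			run_on_init=True,  # ...and once when the node is first created
			props={'preview_active'},
			input_sockets={'Center', 'Size'},
		)
		def on_inputs_changed(self, props: dict, input_sockets: dict) -> None:
			# Downstream updates are signalled with DataFlowAction members instead of strings.
			if props['preview_active']:
				self.trigger_action(ct.DataFlowAction.ShowPreview)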