refactor: Streamlined graph-update semantics.
parent
dc76ab7688
commit
480679a3c0
12
TODO.md
12
TODO.md
|
@ -1,12 +1,17 @@
|
|||
# Acute Tasks
|
||||
- [x] Implement Material Import for Maxim Data
|
||||
- [x] Implement Robust DataFlowKind for list-like / spectral-like composite types
|
||||
- [ ] Unify random node/socket caches.
|
||||
- [ ] Finish the "Low-Hanging Fruit" Nodes
|
||||
- [ ] Move preview GN trees to the asset library.
|
||||
|
||||
|
||||
|
||||
# Nodes
|
||||
## Analysis
|
||||
- [ ] Extract
|
||||
- [ ] Viz
|
||||
|
||||
## Inputs
|
||||
- [x] Wave Constant
|
||||
- [x] Implement export of frequency / wavelength array/range.
|
||||
|
@ -161,12 +166,7 @@
|
|||
- [ ] Sim Grid Axes / Uniform Sim Grid Axis
|
||||
- [ ] Sim Grid Axes / Array Sim Grid Axis
|
||||
|
||||
## Converters
|
||||
- [ ] Math
|
||||
- [ ] Implement common operations w/secondary choice of socket type based on a custom internal data structure
|
||||
- [ ] Implement angfreq/frequency/vacwl conversion.
|
||||
- [ ] Implement spectral math on SDs
|
||||
- [ ] Implement easy derivation of ex. transmission and reflection.
|
||||
## Utilities
|
||||
- [ ] Separate
|
||||
- [x] Combine
|
||||
- [x] Implement concatenation of sim-critical socket types into their multi-type
|
||||
|
|
|
@ -23,6 +23,7 @@ dependencies = [
|
|||
"charset-normalizer==2.0.10",
|
||||
"certifi==2021.10.8",
|
||||
"jax[cpu]>=0.4.26",
|
||||
"msgspec[toml]>=0.18.6",
|
||||
]
|
||||
readme = "README.md"
|
||||
requires-python = "~= 3.11"
|
||||
|
@ -136,4 +137,4 @@ max-args = 6
|
|||
[tool.ruff.format]
|
||||
quote-style = "single"
|
||||
indent-style = "tab"
|
||||
docstring-code-format = true
|
||||
docstring-code-format = false
|
||||
|
|
|
@ -63,6 +63,7 @@ ml-dtypes==0.4.0
|
|||
# via jaxlib
|
||||
mpmath==1.3.0
|
||||
# via sympy
|
||||
msgspec==0.18.6
|
||||
networkx==3.2
|
||||
numpy==1.24.3
|
||||
# via contourpy
|
||||
|
@ -135,6 +136,8 @@ sympy==1.12
|
|||
tidy3d==2.6.3
|
||||
toml==0.10.2
|
||||
# via tidy3d
|
||||
tomli-w==1.0.0
|
||||
# via msgspec
|
||||
toolz==0.12.1
|
||||
# via dask
|
||||
# via partd
|
||||
|
|
|
@ -62,6 +62,7 @@ ml-dtypes==0.4.0
|
|||
# via jaxlib
|
||||
mpmath==1.3.0
|
||||
# via sympy
|
||||
msgspec==0.18.6
|
||||
networkx==3.2
|
||||
numpy==1.24.3
|
||||
# via contourpy
|
||||
|
@ -133,6 +134,8 @@ sympy==1.12
|
|||
tidy3d==2.6.3
|
||||
toml==0.10.2
|
||||
# via tidy3d
|
||||
tomli-w==1.0.0
|
||||
# via msgspec
|
||||
toolz==0.12.1
|
||||
# via dask
|
||||
# via partd
|
||||
|
|
|
@ -0,0 +1,529 @@
|
|||
"""Implements various key caches on instances of Blender objects, especially nodes and sockets."""
|
||||
|
||||
import functools
|
||||
import inspect
|
||||
import typing as typ
|
||||
|
||||
import bpy
|
||||
import msgspec
|
||||
import sympy as sp
|
||||
import sympy.physics.units as spu
|
||||
|
||||
from ...utils import extra_sympy_units as spux
|
||||
from ...utils import logger
|
||||
from . import contracts as ct
|
||||
from . import managed_objs, sockets
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
||||
InstanceID: typ.TypeAlias = str ## Stringified UUID4
|
||||
|
||||
|
||||
class BLInstance(typ.Protocol):
	"""Structural type for an instance of a Blender object, ex. nodes/sockets.

	Anything exposing a string `instance_id` attribute satisfies this protocol
	and can participate in the caching machinery of this module.

	Attributes:
		instance_id: Stringified UUID4 that uniquely identifies an instance, among all active instances on all active classes.
	"""

	instance_id: InstanceID
|
||||
|
||||
|
||||
EncodableValue: typ.TypeAlias = typ.Any  ## msgspec-compatible
PropGetMethod: typ.TypeAlias = typ.Callable[[BLInstance], EncodableValue]
PropSetMethod: typ.TypeAlias = typ.Callable[[BLInstance, EncodableValue], None]

####################
# - (De)Serialization
####################
## Wire-forms produced by `_enc_hook` and consumed by `_dec_hook`.
## NOTE(review): `list` takes a single type parameter, so `list[float, float]`
## is malformed as a static annotation; these aliases are documentation-only
## and are never enforced at runtime.
EncodedComplex: typ.TypeAlias = tuple[float, float] | list[float, float]
EncodedSympy: typ.TypeAlias = str
EncodedManagedObj: typ.TypeAlias = tuple[str, str] | list[str, str]
EncodedPydanticModel: typ.TypeAlias = tuple[str, str] | list[str, str]
|
||||
|
||||
|
||||
def _enc_hook(obj: typ.Any) -> EncodableValue:
	"""Translate a type not natively supported by `msgspec` into an encodable stand-in.

	Parameters:
		obj: The object of arbitrary type to transform into an encodable value.

	Returns:
		A value encodable by `msgspec`.

	Raises:
		NotImplementedError: When the type transformation hasn't been implemented.
	"""
	# Complex Number -> (Real, Imaginary)
	if isinstance(obj, complex):
		return (obj.real, obj.imag)

	# Sympy Expression/Quantity -> srepr String
	if isinstance(obj, (sp.Basic, sp.MatrixBase, sp.Expr, spu.Quantity)):
		return sp.srepr(obj)

	# Managed Object -> (Name, Class Name)
	if isinstance(obj, managed_objs.ManagedObj):
		return (obj.name, obj.__class__.__name__)

	# SocketDef -> (Model Dump, Class Name)
	if isinstance(obj, ct.schemas.SocketDef):
		return (obj.model_dump(), obj.__class__.__name__)

	msg = f'Can\'t encode "{obj}" of type {type(obj)}'
	raise NotImplementedError(msg)
|
||||
|
||||
|
||||
def _dec_hook(_type: type, obj: EncodableValue) -> typ.Any:
	"""Translate the `msgspec`-encoded form of an object back to its true form.

	Parameters:
		_type: The type to transform the `msgspec`-encoded object back into.
		obj: The encoded object to transform back into its true form.

	Returns:
		The decoded object, of type `_type`.

	Raises:
		NotImplementedError: When the type transformation hasn't been implemented.
	"""
	# Complex Number <- (Real, Imaginary)
	## FIX: `isinstance()` rejects parameterized generics like the
	## `EncodedComplex` alias (raises TypeError); check concrete containers.
	if _type is complex and isinstance(obj, (tuple, list)):
		return complex(obj[0], obj[1])

	# Sympy Expression/Quantity <- srepr String
	if (
		_type in (sp.Basic, sp.Expr, sp.MatrixBase, spu.Quantity)
		and isinstance(obj, str)
	):
		return sp.sympify(obj).subs(spux.ALL_UNIT_SYMBOLS)

	# Managed Object <- (Name, Class Name)
	if (
		_type
		in (
			managed_objs.ManagedBLMesh,
			managed_objs.ManagedBLImage,
			managed_objs.ManagedBLModifier,
		)
		and isinstance(obj, (tuple, list))
	):
		return {
			'ManagedBLMesh': managed_objs.ManagedBLMesh,
			'ManagedBLImage': managed_objs.ManagedBLImage,
			'ManagedBLModifier': managed_objs.ManagedBLModifier,
		}[obj[1]](obj[0])

	# SocketDef <- (Model Dump, Class Name)
	if _type is ct.schemas.SocketDef:
		return getattr(sockets, obj[1])(**obj[0])

	## FIX: report the requested target type, not `type(obj)` (always `str`/`list`).
	msg = f'Can\'t decode "{obj}" to type {_type}'
	raise NotImplementedError(msg)
|
||||
|
||||
|
||||
## Single shared encoder; `order='deterministic'` keeps output stable across runs.
ENCODER = msgspec.json.Encoder(enc_hook=_enc_hook, order='deterministic')

## Pre-constructed decoders, specialized by target type.
## Building them once avoids re-creating a `Decoder` per decode call.
_DECODERS: dict[type, msgspec.json.Decoder] = {
	complex: msgspec.json.Decoder(type=complex, dec_hook=_dec_hook),
	sp.Basic: msgspec.json.Decoder(type=sp.Basic, dec_hook=_dec_hook),
	sp.Expr: msgspec.json.Decoder(type=sp.Expr, dec_hook=_dec_hook),
	sp.MatrixBase: msgspec.json.Decoder(type=sp.MatrixBase, dec_hook=_dec_hook),
	spu.Quantity: msgspec.json.Decoder(type=spu.Quantity, dec_hook=_dec_hook),
	managed_objs.ManagedBLMesh: msgspec.json.Decoder(
		type=managed_objs.ManagedBLMesh,
		dec_hook=_dec_hook,
	),
	managed_objs.ManagedBLImage: msgspec.json.Decoder(
		type=managed_objs.ManagedBLImage,
		dec_hook=_dec_hook,
	),
	managed_objs.ManagedBLModifier: msgspec.json.Decoder(
		type=managed_objs.ManagedBLModifier,
		dec_hook=_dec_hook,
	),
	# managed_objs.ManagedObj: msgspec.json.Decoder(
	# 	type=managed_objs.ManagedObj, dec_hook=_dec_hook
	# ), ## Doesn't work b/c unions are not explicit
	ct.schemas.SocketDef: msgspec.json.Decoder(
		type=ct.schemas.SocketDef,
		dec_hook=_dec_hook,
	),
}
## Generic decoder used when no type-specialized decoder is registered above.
_DECODER_FALLBACK: msgspec.json.Decoder = msgspec.json.Decoder(dec_hook=_dec_hook)
|
||||
|
||||
|
||||
@functools.cache
def DECODER(_type: type) -> msgspec.json.Decoder:  # noqa: N802
	"""Retrieve a suitable `msgspec.json.Decoder` by-type.

	Falls back to a generic decoder when no type-specialized decoder
	is registered in `_DECODERS`.

	Parameters:
		_type: The type to retrieve a decoder for.

	Returns:
		A suitable decoder.
	"""
	return _DECODERS.get(_type, _DECODER_FALLBACK)
|
||||
|
||||
|
||||
def decode_any(_type: type, obj: str) -> typ.Any:
	"""Decode a `msgspec`-encoded string (see `ENCODER`) back into a value of `_type`.

	Parameters:
		_type: The type to decode `obj` back into.
		obj: The JSON-encoded string to decode.

	Returns:
		The decoded value, of type `_type`.
	"""
	naive_decode = DECODER(_type).decode(obj)

	# Special Case: Dictionary of SocketDefs
	## The decoder can't dispatch SocketDef subclasses; reconstruct by class name.
	if _type == dict[str, ct.schemas.SocketDef]:
		return {
			socket_name: getattr(sockets, socket_def_list[1])(**socket_def_list[0])
			for socket_name, socket_def_list in naive_decode.items()
		}

	## FIX: downgraded from `log.critical` - this fires on every naive decode
	## and is purely diagnostic, not an emergency condition.
	log.debug(
		'Naive Decode of "%s" to "%s" (%s)', str(obj), str(naive_decode), str(_type)
	)
	return naive_decode
|
||||
|
||||
|
||||
####################
# - Cache: Non-Persistent
####################
## In-memory cache entries, keyed first by instance ID, then by property name.
CACHE_NOPERSIST: dict[InstanceID, dict[typ.Any, typ.Any]] = {}


def invalidate_nonpersist_instance_id(instance_id: InstanceID) -> None:
	"""Drop all non-persistent cache entries held for `instance_id` in `CACHE_NOPERSIST`.

	Missing IDs are ignored, so this is always safe to call.

	Note:
		This should be run by the `instance_id` owner in its `free()` method.

	Parameters:
		instance_id: The ID of the Blender object instance that's being freed.
	"""
	if instance_id in CACHE_NOPERSIST:
		del CACHE_NOPERSIST[instance_id]
|
||||
|
||||
|
||||
####################
|
||||
# - Property Descriptor
|
||||
####################
|
||||
class CachedBLProperty:
	"""A descriptor that caches a computed attribute of a Blender node/socket/... instance (`bl_instance`), with optional cache persistence.

	Note:
		**Accessing the internal `_*` attributes is likely an anti-pattern**.

		`CachedBLProperty` does not own the data; it only provides a convenient interface of running user-provided getter/setters.
		This also applies to the `bpy.types.Property` entry created by `CachedBLProperty`, which should not be accessed directly.

	Attributes:
		_getter_method: Method of `bl_instance` that computes the value.
		_setter_method: Method of `bl_instance` that sets the value.
		_persist: Whether to persist the value on a `bpy.types.Property` defined on `bl_instance`.
			The name of this `bpy.types.Property` will be `cache__<prop_name>`.
		_type: The type of the value, used by the persistent decoder.
	"""

	def __init__(self, getter_method: PropGetMethod, persist: bool):
		"""Initialize the getter (and persistence) of the cached property.

		Notes:
			- When `persist` is true, the return annotation of the getter method will be used to guide deserialization.

		Parameters:
			getter_method: Method of `bl_instance` that computes the value.
			persist: Whether to persist the value on a `bpy.types.Property` defined on `bl_instance`.
				The name of this `bpy.types.Property` will be `cache__<prop_name>`.

		Raises:
			TypeError: When `persist` is set but the getter method lacks a return type annotation.
		"""
		self._getter_method: PropGetMethod = getter_method
		self._setter_method: PropSetMethod | None = None

		# Persistence
		## The getter's return annotation steers the persistent decoder.
		self._persist: bool = persist
		self._type: type | None = (
			inspect.signature(getter_method).return_annotation if persist else None
		)

		# Check Non-Empty Type Annotation
		## For now, just presume that all types can be encoded/decoded.
		if self._type is not None and self._type is inspect.Signature.empty:
			msg = f'A CachedBLProperty was instantiated with "persist={persist}", but its getter method "{self._getter_method}" has no return type annotation'
			raise TypeError(msg)

	def __set_name__(self, owner: type[BLInstance], name: str) -> None:
		"""Generates the property name from the name of the attribute that this descriptor is assigned to.

		Notes:
			- Run by Python when setting an instance of this class to an attribute.

		Parameters:
			owner: The class that contains an attribute assigned to an instance of this descriptor.
			name: The name of the attribute that an instance of descriptor was assigned to.
		"""
		self.prop_name: str = name
		self._bl_prop_name: str = f'blcache__{name}'

		# Define Blender Property (w/Update Sync)
		## The StringProperty backs the persistent cache with the encoded value.
		owner.set_prop(
			self._bl_prop_name,
			bpy.props.StringProperty,
			name=f'DO NOT USE: Cache for {self.prop_name}',
			default='',
			no_update=True,
		)

	def __get__(
		self, bl_instance: BLInstance | None, owner: type[BLInstance]
	) -> typ.Any:
		"""Retrieves the property from a cache, or computes it and fills the cache(s).

		If `self._persist` is `True`, the persistent cache will be checked and filled after the non-persistent cache.

		Notes:
			- The non-persistent cache is a plain in-memory dict (`CACHE_NOPERSIST`), keyed by `instance_id` then property name.
			- The persistent cache stores the `msgspec`-encoded value on the `bpy.types.StringProperty` defined in `__set_name__`; an empty string means "no cached value".

		Parameters:
			bl_instance: The Blender object instance that the property is accessed on.
				`None` when accessed on the class itself, in which case `None` is returned.
		"""
		if bl_instance is None:
			return None
		# Create Non-Persistent Cache Entry
		## Prefer explicit cache management to 'defaultdict'
		if CACHE_NOPERSIST.get(bl_instance.instance_id) is None:
			CACHE_NOPERSIST[bl_instance.instance_id] = {}
		cache_nopersist = CACHE_NOPERSIST[bl_instance.instance_id]

		# Try Hit on Non-Persistent Cache
		if (value := cache_nopersist.get(self._bl_prop_name)) is not None:
			return value

		# Try Hit on Persistent Cache
		## Hit: Fill Non-Persistent Cache
		if (
			self._persist
			and (encoded_value := getattr(bl_instance, self._bl_prop_name)) != ''
		):
			value = decode_any(self._type, encoded_value)
			cache_nopersist[self._bl_prop_name] = value
			return value

		# Compute Value
		## Fill Non-Persistent Cache
		## Fill Persistent Cache (maybe)
		value = self._getter_method(bl_instance)
		cache_nopersist[self._bl_prop_name] = value
		if self._persist:
			setattr(
				bl_instance, self._bl_prop_name, ENCODER.encode(value).decode('utf-8')
			)
		return value

	def __set__(self, bl_instance: BLInstance, value: typ.Any) -> None:
		"""Runs the user-provided setter, after invalidating the caches.

		Notes:
			- This invalidates all caches without re-filling them.
			- The caches will be re-filled on the first `__get__` invocation, which may be slow due to having to run the getter method.

		Parameters:
			bl_instance: The Blender object instance that the property is set on.
			value: The value to hand to the user-provided setter.

		Raises:
			NotImplementedError: When no setter was attached with `self.setter()`.
		"""
		if self._setter_method is None:
			msg = f'Tried to set "{value}" to "{self.prop_name}" on "{bl_instance.bl_label}", but a setter was not defined'
			raise NotImplementedError(msg)

		# Invalidate Caches
		self._invalidate_cache(bl_instance)

		# Set the Value
		self._setter_method(bl_instance, value)

	def setter(self, setter_method: PropSetMethod) -> typ.Self:
		"""Decorator to add a setter to the cached property.

		Returns:
			The same descriptor, so that use of the same method name for defining a setter won't change the semantics of the attribute.

		Raises:
			TypeError: When the setter doesn't take exactly 2 parameters, or has a non-`None` return annotation.

		Examples:
			A setter is attached analogously to `property`:
			```python
			class Test(bpy.types.Node):
				bl_label = 'Default'
				...
				def method(self) -> str: return self.bl_label
				attr = CachedBLProperty(getter_method=method, persist=False)

				@attr.setter
				def attr(self, value: str) -> None:
					self.bl_label = 'Altered'
			```
		"""
		# Validate Setter Signature
		setter_sig = inspect.signature(setter_method)

		## Parameter Length
		if (sig_len := len(setter_sig.parameters)) != 2:  # noqa: PLR2004
			msg = f'Setter method for "{self.prop_name}" should have 2 parameters, not "{sig_len}"'
			raise TypeError(msg)

		## Parameter Value Type
		## An omitted annotation is `Signature.empty`, which also fails here.
		if (sig_ret_type := setter_sig.return_annotation) is not None:
			msg = f'Setter method for "{self.prop_name}" return value type "{sig_ret_type}", but it should be "None" (omitting an annotation does not imply "None")'
			raise TypeError(msg)

		self._setter_method = setter_method
		return self

	def _invalidate_cache(self, bl_instance: BLInstance) -> None:
		"""Invalidates all caches that might be storing the computed property value.

		This is invoked by `__set__`.

		Note:
			Will not delete the `bpy.props.StringProperty`; instead, it will be set to ''.

		Parameters:
			bl_instance: The instance of the Blender object that contains this property.

		Examples:
			It is discouraged to run this directly, as any use-pattern that requires manually invalidating a property cache is **likely an anti-pattern**.

			With that disclaimer, manual invocation looks like this:
			```python
			bl_instance.attr._invalidate_cache()
			```
		"""
		# Invalidate Non-Persistent Cache
		if CACHE_NOPERSIST.get(bl_instance.instance_id) is not None:
			CACHE_NOPERSIST[bl_instance.instance_id].pop(self._bl_prop_name, None)

		# Invalidate Persistent Cache
		if self._persist and getattr(bl_instance, self._bl_prop_name) != '':
			setattr(bl_instance, self._bl_prop_name, '')
|
||||
|
||||
|
||||
## TODO: How do we invalidate the data that the computed cached property depends on?
####################
# - Property Decorators
####################
def cached_bl_property(persist: bool = ...):
	"""Decorator creating a descriptor that caches a computed attribute of a Blender node/socket.

	Many such `bl_instance`s rely on fast access to computed, cached properties, for example to ensure that `draw()` remains effectively non-blocking.
	It is also sometimes desired that this cache persist on `bl_instance`, ex. in the case of loose sockets or cached web data.

	Notes:
		- Unfortunately, `functools.cached_property` doesn't work, and can't handle persistence.
		- Use `cached_attribute` instead if merely persisting the value is desired.
		- NOTE(review): the default `persist=...` (Ellipsis) is truthy, so omitting the argument behaves like `persist=True`; confirm whether an explicit boolean default was intended.

	Parameters:
		persist: Whether or not to persist the cache value in the Blender object.
			This should be used when the **source(s) of the computed value also persists with the Blender object**.
			For example, this is especially helpful when caching information for use in `draw()` methods, so that reloading the file won't alter the cache.

	Examples:
		```python
		class CustomNode(bpy.types.Node):
			@bl_cache.cached_bl_property(persist=True)
			def computed_prop(self) -> ...: return ...

		print(bl_instance.prop)  ## Computes first time
		print(bl_instance.prop)  ## Cached (maybe persistently in a property, maybe not)
		```
	"""

	def bind_getter(getter_method: typ.Callable[[BLInstance], None]) -> type:
		# One-liner: wrap the decorated getter in a CachedBLProperty descriptor.
		return CachedBLProperty(getter_method=getter_method, persist=persist)

	return bind_getter
|
||||
|
||||
|
||||
####################
|
||||
# - Attribute Descriptor
|
||||
####################
|
||||
class BLField:
	"""A descriptor that allows persisting arbitrary types in Blender objects, with cached reads.

	Internally, values are `msgspec`-encoded onto a `bpy.props.StringProperty`, and all
	read/write logic is delegated to a non-persistent `CachedBLProperty`.
	"""

	def __init__(self, default_value: typ.Any, triggers_prop_update: bool = True):
		"""Initializes and sets the attribute to a given default value.

		Parameters:
			default_value: The default value to use if the value is read before it's set.
			triggers_prop_update: Whether to run `bl_instance.sync_prop(attr_name)` whenever value is set.
		"""
		log.debug(
			'Initializing BLField (default_value=%s, triggers_prop_update=%s)',
			str(default_value),
			str(triggers_prop_update),
		)
		self._default_value: typ.Any = default_value
		self._triggers_prop_update: bool = triggers_prop_update

	def __set_name__(self, owner: type[BLInstance], name: str) -> None:
		"""Sets up getters/setters for attribute access, and sets up a `CachedBLProperty` to internally utilize them.

		Our getter/setter essentially reads/writes to a `bpy.props.StringProperty` holding
		the `msgspec`-encoded value, and uses them as user-provided getter/setter to
		internally define a normal non-persistent `CachedBLProperty`.
		As a result, we can reuse almost all of the logic in `CachedBLProperty`.

		Note:
			Run by Python when setting an instance of this class to an attribute.

		Parameters:
			owner: The class that contains an attribute assigned to an instance of this descriptor.
			name: The name of the attribute that an instance of descriptor was assigned to.

		Raises:
			TypeError: When the attribute has no type annotation on `owner`.
		"""
		# Compute Name and Type of Property
		## The attribute's type annotation on `owner` guides decoding on read.
		attr_name = name
		bl_attr_name = f'blattr__{name}'
		if (AttrType := inspect.get_annotations(owner).get(name)) is None:  # noqa: N806
			## FIX: previously formatted `self.prop_name`, which BLField never sets
			## (it's a CachedBLProperty attribute) - the message itself would raise.
			msg = f'BLField "{name}" must define a type annotation, but doesn\'t.'
			raise TypeError(msg)

		# Define Blender Property (w/Update Sync)
		encoded_default_value = ENCODER.encode(self._default_value).decode('utf-8')
		log.debug(
			'%s set to StringProperty w/default "%s" and no_update="%s"',
			bl_attr_name,
			encoded_default_value,
			str(not self._triggers_prop_update),
		)
		owner.set_prop(
			bl_attr_name,
			bpy.props.StringProperty,
			name=f'Encoded Attribute for {attr_name}',
			default=encoded_default_value,
			no_update=not self._triggers_prop_update,
			update_with_name=attr_name,
		)

		## Getter:
		## 1. Retrieve bpy.props.StringProperty string.
		## 2. Decode using annotated type.
		def getter(_self: BLInstance) -> AttrType:
			return decode_any(AttrType, getattr(_self, bl_attr_name))

		## Setter:
		## 1. Encode value (implicitly using the annotated type).
		## 2. Set bpy.props.StringProperty string.
		def setter(_self: BLInstance, value: AttrType) -> None:
			encoded_value = ENCODER.encode(value).decode('utf-8')
			log.debug(
				'Writing BLField attr "%s" w/encoded value: %s',
				bl_attr_name,
				encoded_value,
			)
			setattr(_self, bl_attr_name, encoded_value)

		# Initialize CachedBLProperty w/Getter and Setter
		## This is the usual descriptor assignment procedure.
		self._cached_bl_property = CachedBLProperty(getter_method=getter, persist=False)
		self._cached_bl_property.__set_name__(owner, name)
		self._cached_bl_property.setter(setter)

	def __get__(
		self, bl_instance: BLInstance | None, owner: type[BLInstance]
	) -> typ.Any:
		"""Reads the value by deferring to the internal `CachedBLProperty`."""
		return self._cached_bl_property.__get__(bl_instance, owner)

	def __set__(self, bl_instance: BLInstance, value: typ.Any) -> None:
		"""Writes the value by deferring to the internal `CachedBLProperty`."""
		self._cached_bl_property.__set__(bl_instance, value)
|
|
@ -220,7 +220,7 @@ def _writable_bl_socket_value(
|
|||
_bl_socket_value = value
|
||||
|
||||
# Compute Blender Socket Value
|
||||
if isinstance(_bl_socket_value, sp.Basic):
|
||||
if isinstance(_bl_socket_value, sp.Basic | sp.MatrixBase):
|
||||
bl_socket_value = spux.sympy_to_python(_bl_socket_value)
|
||||
else:
|
||||
bl_socket_value = _bl_socket_value
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
import enum
|
||||
import pydantic as pyd
|
||||
import typing_extensions as pytypes_ext
|
||||
|
||||
|
|
|
@ -1,4 +1,7 @@
|
|||
import enum
|
||||
import typing as typ
|
||||
|
||||
import typing_extensions as typx
|
||||
|
||||
|
||||
class DataFlowAction(enum.StrEnum):
|
||||
|
@ -7,8 +10,37 @@ class DataFlowAction(enum.StrEnum):
|
|||
DisableLock = 'disable_lock'
|
||||
|
||||
# Value
|
||||
OutputRequested = 'output_requested'
|
||||
DataChanged = 'value_changed'
|
||||
|
||||
# Previewing
|
||||
ShowPreview = 'show_preview'
|
||||
ShowPlot = 'show_plot'
|
||||
|
||||
	def trigger_direction(action: typ.Self) -> typx.Literal['input', 'output']:
		"""When a given action is triggered, all sockets/nodes/... in this direction should be recursively triggered.

		Note:
			The first parameter is the acting enum member itself, named `action` instead of the conventional `self`.

		Parameters:
			action: The action for which to retrieve the trigger direction.

		Returns:
			The trigger direction, which can be used ex. in nodes to select `node.inputs` or `node.outputs`.
		"""
		# Only DataChanged propagates towards outputs; everything else flows upstream.
		return {
			DataFlowAction.EnableLock: 'input',
			DataFlowAction.DisableLock: 'input',
			DataFlowAction.DataChanged: 'output',
			DataFlowAction.OutputRequested: 'input',
			DataFlowAction.ShowPreview: 'input',
			DataFlowAction.ShowPlot: 'input',
		}[action]
|
||||
|
||||
	def stop_if_no_event_methods(action: typ.Self) -> bool:
		"""Whether this action's propagation may be halted early when no event methods handle it.

		Note:
			- The first parameter is the acting enum member itself, named `action` instead of the conventional `self`.
			- NOTE(review): exact early-stop semantics are defined by the caller - confirm against the event-propagation code.

		Parameters:
			action: The action to query.

		Returns:
			True for the data-flow actions (`DataChanged`, `OutputRequested`); False for lock/preview actions.
		"""
		return {
			DataFlowAction.EnableLock: False,
			DataFlowAction.DisableLock: False,
			DataFlowAction.DataChanged: True,
			DataFlowAction.OutputRequested: True,
			DataFlowAction.ShowPreview: False,
			DataFlowAction.ShowPlot: False,
		}[action]
|
||||
|
|
|
@ -5,7 +5,6 @@ import typing as typ
|
|||
from types import MappingProxyType
|
||||
|
||||
# import colour ## TODO
|
||||
import jax
|
||||
import numpy as np
|
||||
import sympy as sp
|
||||
import sympy.physics.units as spu
|
||||
|
@ -77,6 +76,21 @@ class DataFlowKind(enum.StrEnum):
|
|||
LazyValueRange = enum.auto()
|
||||
LazyValueSpectrum = enum.auto()
|
||||
|
||||
	@classmethod
	def scale_to_unit_system(cls, kind: typ.Self, value, socket_type, unit_system):
		"""Scale a value of the given `DataFlowKind` to the unit that `unit_system` assigns to `socket_type`.

		Parameters:
			kind: The `DataFlowKind` describing how `value` flows; only `Value` and `LazyValueRange` are supported.
			value: The value to scale. For `Value`, a sympy-compatible expression; for `LazyValueRange`, an object exposing `rescale_to_unit`.
			socket_type: Key into `unit_system` selecting the target unit.
			unit_system: Mapping from socket type to target unit.

		Returns:
			The scaled value.

		Raises:
			ValueError: When `kind` is not one of the supported kinds.
		"""
		if kind == cls.Value:
			# Scale the expression, then strip sympy wrappers to a plain Python value.
			return spux.sympy_to_python(
				spux.scale_to_unit(
					value,
					unit_system[socket_type],
				)
			)
		if kind == cls.LazyValueRange:
			return value.rescale_to_unit(unit_system[socket_type])

		msg = 'Tried to scale unknown kind'
		raise ValueError(msg)
|
||||
|
||||
|
||||
####################
|
||||
# - Data Structures: Capabilities
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
#from .managed_bl_empty import ManagedBLEmpty
|
||||
import typing as typ
|
||||
|
||||
# from .managed_bl_empty import ManagedBLEmpty
|
||||
from .managed_bl_image import ManagedBLImage
|
||||
|
||||
# from .managed_bl_collection import ManagedBLCollection
|
||||
|
@ -8,6 +10,8 @@ from .managed_bl_mesh import ManagedBLMesh
|
|||
# from .managed_bl_volume import ManagedBLVolume
|
||||
from .managed_bl_modifier import ManagedBLModifier
|
||||
|
||||
ManagedObj: typ.TypeAlias = ManagedBLImage | ManagedBLMesh | ManagedBLModifier
|
||||
|
||||
__all__ = [
|
||||
#'ManagedBLEmpty',
|
||||
'ManagedBLImage',
|
||||
|
@ -17,3 +21,5 @@ __all__ = [
|
|||
#'ManagedBLVolume',
|
||||
'ManagedBLModifier',
|
||||
]
|
||||
|
||||
## REMEMBER: Add the appropriate entry to the bl_cache.DECODER
|
||||
|
|
|
@ -31,6 +31,13 @@ class ManagedBLMesh(ct.schemas.ManagedObj):
|
|||
'Changing BLMesh w/Name "%s" to Name "%s"', self._bl_object_name, value
|
||||
)
|
||||
|
||||
if self._bl_object_name == value:
|
||||
## TODO: This is a workaround.
|
||||
## Really, we can't tell if a name is valid by searching objects.
|
||||
## Since, after all, other managedobjs may have taken a name..
|
||||
## ...but not yet made an object that has it.
|
||||
return
|
||||
|
||||
if (bl_object := bpy.data.objects.get(value)) is None:
|
||||
log.info(
|
||||
'Desired BLMesh Name "%s" Not Taken',
|
||||
|
|
|
@ -130,6 +130,7 @@ def write_modifier_geonodes(
|
|||
bl_modifier[iface_id] = float(bl_socket_value)
|
||||
modifier_altered = True
|
||||
else:
|
||||
## TODO: Whitelist what can be here. I'm done with the TypeErrors.
|
||||
bl_modifier[iface_id] = bl_socket_value
|
||||
modifier_altered = True
|
||||
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
import contextlib
|
||||
import typing as typ
|
||||
|
||||
import bpy
|
||||
|
@ -15,47 +16,176 @@ MemAddr = int
|
|||
|
||||
|
||||
class DeltaNodeLinkCache(typ.TypedDict):
	"""Describes a change in the `NodeLink`s of a node tree, as two disjoint pointer sets.

	Attributes:
		added: Set of pointers to added node tree links.
		removed: Set of pointers to removed node tree links.
	"""

	added: set[MemAddr]
	removed: set[MemAddr]
|
||||
|
||||
|
||||
class NodeLinkCache:
|
||||
"""A pointer-based cache of node links in a node tree.
|
||||
|
||||
Attributes:
|
||||
_node_tree: Reference to the owning node tree.
|
||||
		link_ptrs: Pointers (as in integer memory addresses) to `NodeLink`s.
		link_ptrs_as_links: Map from pointers to actual `NodeLink`s.
		link_ptrs_as_from_socket_ptrs: Map from link pointers to the pointer of each link's source (`from_socket`) `NodeSocket`.
		link_ptrs_as_to_socket_ptrs: Map from link pointers to the pointer of each link's destination (`to_socket`) `NodeSocket`.
|
||||
"""
|
||||
|
||||
def __init__(self, node_tree: bpy.types.NodeTree):
|
||||
# Initialize Parameters
|
||||
"""Initialize the cache from a node tree.
|
||||
|
||||
Parameters:
|
||||
node_tree: The Blender node tree whose `NodeLink`s will be cached.
|
||||
"""
|
||||
self._node_tree = node_tree
|
||||
self.link_ptrs_to_links = {}
|
||||
self.link_ptrs = set()
|
||||
self.link_ptrs_from_sockets = {}
|
||||
self.link_ptrs_to_sockets = {}
|
||||
|
||||
# Link PTR and PTR->REF
|
||||
self.link_ptrs: set[MemAddr] = set()
|
||||
self.link_ptrs_as_links: dict[MemAddr, bpy.types.NodeLink] = {}
|
||||
|
||||
# Socket PTR and PTR->REF
|
||||
self.socket_ptrs: set[MemAddr] = set()
|
||||
self.socket_ptrs_as_sockets: dict[MemAddr, bpy.types.NodeSocket] = {}
|
||||
self.socket_ptr_refcount: dict[MemAddr, int] = {}
|
||||
|
||||
# Link PTR -> Socket PTR
|
||||
self.link_ptrs_as_from_socket_ptrs: dict[MemAddr, MemAddr] = {}
|
||||
self.link_ptrs_as_to_socket_ptrs: dict[MemAddr, MemAddr] = {}
|
||||
|
||||
# Fill Cache
|
||||
self.regenerate()
|
||||
|
||||
def remove(self, link_ptrs: set[MemAddr]) -> None:
|
||||
for link_ptr in link_ptrs:
|
||||
self.link_ptrs.remove(link_ptr)
|
||||
self.link_ptrs_to_links.pop(link_ptr, None)
|
||||
	def remove_link(self, link_ptr: MemAddr) -> None:
		"""Removes a link pointer from the cache, indicating that the link doesn't exist anymore.

		Notes:
			- Does **not** release the link's from/to socket references; use `remove_sockets_by_link_ptr` for that.
			- Invoking this method directly causes the removed node links to not be reported as "removed" by `NodeLinkCache.regenerate()`.
			- This **must** be done whenever a node link is deleted.
			- Failure to do so may result in a segmentation fault at arbitrary future time.

		Parameters:
			link_ptr: Pointer to remove from the cache.
		"""
		self.link_ptrs.remove(link_ptr)
		self.link_ptrs_as_links.pop(link_ptr)
|
||||
|
||||
	def remove_sockets_by_link_ptr(self, link_ptr: MemAddr) -> None:
		"""Removes a single link pointer's reference to its from/to sockets.

		Socket pointers are reference-counted (several links can share one socket);
		a socket's cache entries are only dropped when its last referencing link goes.

		Parameters:
			link_ptr: Pointer to the link whose socket references should be released.
		"""
		from_socket_ptr = self.link_ptrs_as_from_socket_ptrs.pop(link_ptr, None)
		to_socket_ptr = self.link_ptrs_as_to_socket_ptrs.pop(link_ptr, None)

		for socket_ptr in [from_socket_ptr, to_socket_ptr]:
			if socket_ptr is None:
				continue

			# Delete w/RefCount Respect
			## Only drop the socket's entries once the final reference is released.
			if self.socket_ptr_refcount[socket_ptr] == 1:
				self.socket_ptrs.remove(socket_ptr)
				self.socket_ptrs_as_sockets.pop(socket_ptr)
				self.socket_ptr_refcount.pop(socket_ptr)
			else:
				self.socket_ptr_refcount[socket_ptr] -= 1
|
||||
|
||||
def regenerate(self) -> DeltaNodeLinkCache:
|
||||
current_link_ptrs_to_links = {
|
||||
"""Regenerates the cache from the internally-linked node tree.
|
||||
|
||||
Notes:
|
||||
- This is designed to run within the `update()` invocation of the node tree.
|
||||
- This should be a very fast function, since it is called so much.
|
||||
"""
|
||||
# Compute All NodeLink Pointers
|
||||
all_link_ptrs_as_links = {
|
||||
link.as_pointer(): link for link in self._node_tree.links
|
||||
}
|
||||
current_link_ptrs = set(current_link_ptrs_to_links.keys())
|
||||
all_link_ptrs = set(all_link_ptrs_as_links.keys())
|
||||
|
||||
# Compute Delta
|
||||
added_link_ptrs = current_link_ptrs - self.link_ptrs
|
||||
removed_link_ptrs = self.link_ptrs - current_link_ptrs
|
||||
# Compute Added/Removed Links
|
||||
added_link_ptrs = all_link_ptrs - self.link_ptrs
|
||||
removed_link_ptrs = self.link_ptrs - all_link_ptrs
|
||||
|
||||
# Update Caches Incrementally
|
||||
self.remove(removed_link_ptrs)
|
||||
# Edge Case: 'from_socket' Reassignment
|
||||
## (Reverse engineered) When all:
|
||||
## - Created a new link between the same two nodes.
|
||||
## - Matching 'to_socket'.
|
||||
## - Non-matching 'from_socket' on the same node.
|
||||
## -> THEN the link_ptr will not change, but the from_socket ptr should.
|
||||
if len(added_link_ptrs) == 0 and len(removed_link_ptrs) == 0:
|
||||
# Find the Link w/Reassigned 'from_socket' PTR
|
||||
## A bit of a performance hit from the search, but it's an edge case.
|
||||
_link_ptr_as_from_socket_ptrs = {
|
||||
link_ptr: (
|
||||
from_socket_ptr,
|
||||
all_link_ptrs_as_links[link_ptr].from_socket.as_pointer(),
|
||||
)
|
||||
for link_ptr, from_socket_ptr in self.link_ptrs_as_from_socket_ptrs.items()
|
||||
if all_link_ptrs_as_links[link_ptr].from_socket.as_pointer()
|
||||
!= from_socket_ptr
|
||||
}
|
||||
|
||||
# Completely Remove the Old Link (w/Reassigned 'from_socket')
|
||||
## This effectively reclassifies the edge case as a normal 're-add'.
|
||||
for link_ptr in _link_ptr_as_from_socket_ptrs:
|
||||
log.info(
|
||||
'Edge-Case - "from_socket" Reassigned in NodeLink w/o New NodeLink Pointer: %s',
|
||||
link_ptr,
|
||||
)
|
||||
self.remove_link(link_ptr)
|
||||
self.remove_sockets_by_link_ptr(link_ptr)
|
||||
|
||||
# Recompute Added/Removed Links
|
||||
## The algorithm will now detect an "added link".
|
||||
added_link_ptrs = all_link_ptrs - self.link_ptrs
|
||||
removed_link_ptrs = self.link_ptrs - all_link_ptrs
|
||||
|
||||
# Shuffle Cache based on Change in Links
|
||||
## Remove Entries for Removed Pointers
|
||||
for removed_link_ptr in removed_link_ptrs:
|
||||
self.remove_link(removed_link_ptr)
|
||||
## User must manually call 'remove_socket_by_link_ptr' later.
|
||||
## For now, leave dangling socket information by-link.
|
||||
|
||||
# Add New Link Pointers
|
||||
self.link_ptrs |= added_link_ptrs
|
||||
for link_ptr in added_link_ptrs:
|
||||
link = current_link_ptrs_to_links[link_ptr]
|
||||
# Add Link PTR->REF
|
||||
new_link = all_link_ptrs_as_links[link_ptr]
|
||||
self.link_ptrs_as_links[link_ptr] = new_link
|
||||
|
||||
self.link_ptrs_to_links[link_ptr] = link
|
||||
self.link_ptrs_from_sockets[link_ptr] = link.from_socket
|
||||
self.link_ptrs_to_sockets[link_ptr] = link.to_socket
|
||||
# Retrieve Link Socket Information
|
||||
from_socket = new_link.from_socket
|
||||
from_socket_ptr = from_socket.as_pointer()
|
||||
to_socket = new_link.to_socket
|
||||
to_socket_ptr = to_socket.as_pointer()
|
||||
|
||||
# Add Socket PTR, PTR -> REF
|
||||
for socket_ptr, bl_socket in zip( # noqa: B905
|
||||
[from_socket_ptr, to_socket_ptr],
|
||||
[from_socket, to_socket],
|
||||
):
|
||||
# Increment RefCount of Socket PTR
|
||||
## This happens if another link also uses the same socket.
|
||||
## 1. An output socket links to several inputs.
|
||||
## 2. A multi-input socket links from several inputs.
|
||||
if socket_ptr in self.socket_ptr_refcount:
|
||||
self.socket_ptr_refcount[socket_ptr] += 1
|
||||
else:
|
||||
## RefCount == 0: Add PTR, PTR -> REF
|
||||
self.socket_ptrs.add(socket_ptr)
|
||||
self.socket_ptrs_as_sockets[socket_ptr] = bl_socket
|
||||
self.socket_ptr_refcount[socket_ptr] = 1
|
||||
|
||||
# Add Link PTR -> Socket PTR
|
||||
self.link_ptrs_as_from_socket_ptrs[link_ptr] = from_socket_ptr
|
||||
self.link_ptrs_as_to_socket_ptrs[link_ptr] = to_socket_ptr
|
||||
|
||||
return {'added': added_link_ptrs, 'removed': removed_link_ptrs}
|
||||
|
||||
|
@ -71,20 +201,42 @@ class MaxwellSimTree(bpy.types.NodeTree):
|
|||
####################
|
||||
# - Lock Methods
|
||||
####################
|
||||
def unlock_all(self):
|
||||
def unlock_all(self) -> None:
|
||||
"""Unlock all nodes in the node tree, making them editable."""
|
||||
log.info('Unlocking All Nodes in NodeTree "%s"', self.bl_label)
|
||||
for node in self.nodes:
|
||||
node.locked = False
|
||||
for bl_socket in [*node.inputs, *node.outputs]:
|
||||
bl_socket.locked = False
|
||||
|
||||
def unpreview_all(self):
|
||||
log.info('Disabling All 3D Previews')
|
||||
for node in self.nodes:
|
||||
if node.preview_active:
|
||||
node.preview_active = False
|
||||
@contextlib.contextmanager
|
||||
def repreview_all(self) -> None:
|
||||
all_nodes_with_preview_active = {
|
||||
node.instance_id: node for node in self.nodes if node.preview_active
|
||||
}
|
||||
self.is_currently_repreviewing = True
|
||||
self.newly_previewed_nodes = {}
|
||||
|
||||
for bl_object in preview_collection().objects.values():
|
||||
preview_collection().objects.unlink(bl_object)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
for dangling_previewed_node in [
|
||||
node
|
||||
for node_instance_id, node in all_nodes_with_preview_active.items()
|
||||
if node_instance_id not in self.newly_previewed_nodes
|
||||
]:
|
||||
# log.debug(
|
||||
# 'Removing Dangling Preview of Node "{%s}"',
|
||||
# str(dangling_previewed_node),
|
||||
# )
|
||||
dangling_previewed_node.preview_active = False
|
||||
|
||||
def report_show_preview(self, node: bpy.types.Node) -> None:
|
||||
if (
|
||||
hasattr(self, 'is_currently_repreviewing')
|
||||
and self.is_currently_repreviewing
|
||||
):
|
||||
self.newly_previewed_nodes[node.instance_id] = node
|
||||
|
||||
####################
|
||||
# - Init Methods
|
||||
|
@ -94,11 +246,10 @@ class MaxwellSimTree(bpy.types.NodeTree):
|
|||
|
||||
It's a bit of a "fake" function - in practicality, it's triggered on the first update() function.
|
||||
"""
|
||||
## TODO: Consider tying this to an "on_load" handler
|
||||
if hasattr(self, '_node_link_cache'):
|
||||
self._node_link_cache.regenerate()
|
||||
if hasattr(self, 'node_link_cache'):
|
||||
self.node_link_cache.regenerate()
|
||||
else:
|
||||
self._node_link_cache = NodeLinkCache(self)
|
||||
self.node_link_cache = NodeLinkCache(self)
|
||||
|
||||
####################
|
||||
# - Update Methods
|
||||
|
@ -106,23 +257,35 @@ class MaxwellSimTree(bpy.types.NodeTree):
|
|||
def sync_node_removed(self, node: bpy.types.Node):
|
||||
"""Run by `Node.free()` when a node is being removed.
|
||||
|
||||
ONLY input socket links are removed from the NodeLink cache.
|
||||
- `self.update()` handles link-removal from existing nodes.
|
||||
- `self.update()` can't handle link-removal
|
||||
|
||||
Removes node input links from the internal cache (so we don't attempt to update non-existant sockets).
|
||||
"""
|
||||
for bl_socket in node.inputs.values():
|
||||
# Retrieve Socket Links (if any)
|
||||
self._node_link_cache.remove(
|
||||
{link.as_pointer() for link in bl_socket.links}
|
||||
)
|
||||
## ONLY Input Socket Links are Removed from the NodeLink Cache
|
||||
## - update() handles link-removal from still-existing node just fine.
|
||||
## - update() does NOT handle link-removal of non-existant nodes.
|
||||
for bl_socket in list(node.inputs.values()) + list(node.outputs.values()):
|
||||
# Compute About-To-Be-Freed Link Ptrs
|
||||
link_ptrs = {link.as_pointer() for link in bl_socket.links}
|
||||
|
||||
def update(self):
|
||||
"""Run by Blender when 'something changes' in the node tree.
|
||||
if link_ptrs:
|
||||
for link_ptr in link_ptrs:
|
||||
self.node_link_cache.remove_link(link_ptr)
|
||||
self.node_link_cache.remove_sockets_by_link_ptr(link_ptr)
|
||||
|
||||
Updates an internal node link cache, then updates sockets that just lost/gained an input link.
|
||||
def update(self) -> None:
|
||||
"""Monitors all changes to the node tree, potentially responding with appropriate callbacks.
|
||||
|
||||
Notes:
|
||||
- Run by Blender when "anything" changes in the node tree.
|
||||
- Responds to node link changes with callbacks, with the help of a performant node link cache.
|
||||
"""
|
||||
if not hasattr(self, '_node_link_cache'):
|
||||
if not hasattr(self, 'ignore_update'):
|
||||
self.ignore_update = False
|
||||
|
||||
if not hasattr(self, 'node_link_cache'):
|
||||
self.on_load()
|
||||
## We presume update() is run before the first link is altered.
|
||||
## - Else, the first link of the session will not update caches.
|
||||
|
@ -130,51 +293,82 @@ class MaxwellSimTree(bpy.types.NodeTree):
|
|||
## - Therefore, self.on_load() is also called as a load_post handler.
|
||||
return
|
||||
|
||||
# Compute Changes to NodeLink Cache
|
||||
delta_links = self._node_link_cache.regenerate()
|
||||
# Ignore Update
|
||||
## Manually set to implement link corrections w/o recursion.
|
||||
if self.ignore_update:
|
||||
return
|
||||
|
||||
link_alterations = {
|
||||
# Compute Changes to Node Links
|
||||
delta_links = self.node_link_cache.regenerate()
|
||||
|
||||
link_corrections = {
|
||||
'to_remove': [],
|
||||
'to_add': [],
|
||||
}
|
||||
for link_ptr in delta_links['removed']:
|
||||
from_socket = self._node_link_cache.link_ptrs_from_sockets[link_ptr]
|
||||
to_socket = self._node_link_cache.link_ptrs_to_sockets[link_ptr]
|
||||
# Retrieve Link PTR -> From/To Socket PTR
|
||||
## We don't know if they exist yet.
|
||||
from_socket_ptr = self.node_link_cache.link_ptrs_as_from_socket_ptrs[
|
||||
link_ptr
|
||||
]
|
||||
to_socket_ptr = self.node_link_cache.link_ptrs_as_to_socket_ptrs[link_ptr]
|
||||
|
||||
# Update Socket Caches
|
||||
self._node_link_cache.link_ptrs_from_sockets.pop(link_ptr, None)
|
||||
self._node_link_cache.link_ptrs_to_sockets.pop(link_ptr, None)
|
||||
# Check Existance of From/To Socket
|
||||
## `Node.free()` must report removed sockets, so this here works.
|
||||
## If Both Exist: 'to_socket' may "non-consent" to the link removal.
|
||||
if (
|
||||
from_socket_ptr in self.node_link_cache.socket_ptrs
|
||||
and to_socket_ptr in self.node_link_cache.socket_ptrs
|
||||
):
|
||||
# Retrieve 'from_socket'/'to_socket' REF
|
||||
from_socket = self.node_link_cache.socket_ptrs_as_sockets[
|
||||
from_socket_ptr
|
||||
]
|
||||
to_socket = self.node_link_cache.socket_ptrs_as_sockets[to_socket_ptr]
|
||||
|
||||
# Trigger Report Chain on Socket that Just Lost a Link
|
||||
## Aka. Forward-Refresh Caches Relying on Linkage
|
||||
if not (consent_removal := to_socket.sync_link_removed(from_socket)):
|
||||
# Did Not Consent to Removal: Queue Add Link
|
||||
link_alterations['to_add'].append((from_socket, to_socket))
|
||||
# Ask 'to_socket' for Consent to Remove Link
|
||||
## The link has already been removed, but we can fix that.
|
||||
## If NO: Queue re-adding the link (safe since the sockets exist)
|
||||
## TODO: Crash if deleting removing linked loose sockets.
|
||||
consent_removal = to_socket.sync_link_removed(from_socket)
|
||||
if not consent_removal:
|
||||
link_corrections['to_add'].append((from_socket, to_socket))
|
||||
|
||||
# Ensure Removal of Socket PTRs, PTRs->REFs
|
||||
self.node_link_cache.remove_sockets_by_link_ptr(link_ptr)
|
||||
|
||||
for link_ptr in delta_links['added']:
|
||||
link = self._node_link_cache.link_ptrs_to_links.get(link_ptr)
|
||||
if link is None:
|
||||
continue
|
||||
# Retrieve Link Reference
|
||||
link = self.node_link_cache.link_ptrs_as_links[link_ptr]
|
||||
|
||||
# Trigger Report Chain on Socket that Just Gained a Link
|
||||
## Aka. Forward-Refresh Caches Relying on Linkage
|
||||
# Ask 'to_socket' for Consent to Remove Link
|
||||
## The link has already been added, but we can fix that.
|
||||
## If NO: Queue re-adding the link (safe since the sockets exist)
|
||||
consent_added = link.to_socket.sync_link_added(link)
|
||||
if not consent_added:
|
||||
link_corrections['to_remove'].append(link)
|
||||
|
||||
if not (consent_added := link.to_socket.sync_link_added(link)):
|
||||
# Did Not Consent to Addition: Queue Remove Link
|
||||
link_alterations['to_remove'].append(link)
|
||||
|
||||
# Execute Queued Operations
|
||||
## - Especially undoing undesirable link changes.
|
||||
## - This is important for locked graphs, whose links must not change.
|
||||
for link in link_alterations['to_remove']:
|
||||
self.links.remove(link)
|
||||
for from_socket, to_socket in link_alterations['to_add']:
|
||||
# Link Corrections
|
||||
## ADD: Links that 'to_socket' don't want removed.
|
||||
## REMOVE: Links that 'to_socket' don't want added.
|
||||
## NOTE: Both remove() and new() recursively triggers update().
|
||||
for link in link_corrections['to_remove']:
|
||||
self.ignore_update = True
|
||||
self.links.remove(link) ## Recursively triggers update()
|
||||
self.ignore_update = False
|
||||
for from_socket, to_socket in link_corrections['to_add']:
|
||||
## 'to_socket' and 'from_socket' are guaranteed to exist.
|
||||
self.ignore_update = True
|
||||
self.links.new(from_socket, to_socket)
|
||||
self.ignore_update = False
|
||||
|
||||
# If Queued Operations: Regenerate Cache
|
||||
## - This prevents the next update() from picking up on alterations.
|
||||
if link_alterations['to_remove'] or link_alterations['to_add']:
|
||||
self._node_link_cache.regenerate()
|
||||
# Regenerate on Corrections
|
||||
## Prevents next update() from trying to correct the corrections.
|
||||
## We must remember to trigger '.remove_sockets_by_link_ptr'
|
||||
if link_corrections['to_remove'] or link_corrections['to_add']:
|
||||
delta_links = self.node_link_cache.regenerate()
|
||||
for link_ptr in delta_links['removed']:
|
||||
self.node_link_cache.remove_sockets_by_link_ptr(link_ptr)
|
||||
|
||||
|
||||
####################
|
||||
|
|
|
@ -13,14 +13,15 @@ CACHE_SIM_DATA = {}
|
|||
|
||||
|
||||
class ExtractDataNode(base.MaxwellSimNode):
|
||||
"""Node for visualizing simulation data, by querying its monitors."""
|
||||
"""Node for extracting data from other objects."""
|
||||
|
||||
node_type = ct.NodeType.ExtractData
|
||||
bl_label = 'Extract Data'
|
||||
bl_label = 'Extract'
|
||||
|
||||
input_socket_sets: typ.ClassVar = {
|
||||
'Sim Data': {'Sim Data': sockets.MaxwellFDTDSimDataSocketDef()},
|
||||
'Field Data': {'Field Data': sockets.AnySocketDef()},
|
||||
'Flux Data': {'Flux Data': sockets.AnySocketDef()},
|
||||
}
|
||||
output_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
|
@ -192,6 +193,20 @@ class ExtractDataNode(base.MaxwellSimNode):
|
|||
elif not self.inputs['Field Data'].is_linked and self.cache__components:
|
||||
self.cache__components = ''
|
||||
|
||||
####################
|
||||
# - Flux Data
|
||||
####################
|
||||
|
||||
def draw_props__flux_data(
|
||||
self, _: bpy.types.Context, col: bpy.types.UILayout
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
def draw_info__flux_data(
|
||||
self, _: bpy.types.Context, col: bpy.types.UILayout
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
####################
|
||||
# - Global
|
||||
####################
|
||||
|
@ -200,12 +215,16 @@ class ExtractDataNode(base.MaxwellSimNode):
|
|||
self.draw_props__sim_data(context, col)
|
||||
if self.active_socket_set == 'Field Data':
|
||||
self.draw_props__field_data(context, col)
|
||||
if self.active_socket_set == 'Flux Data':
|
||||
self.draw_props__flux_data(context, col)
|
||||
|
||||
def draw_info(self, context: bpy.types.Context, col: bpy.types.UILayout) -> None:
|
||||
if self.active_socket_set == 'Sim Data':
|
||||
self.draw_info__sim_data(context, col)
|
||||
if self.active_socket_set == 'Field Data':
|
||||
self.draw_info__field_data(context, col)
|
||||
if self.active_socket_set == 'Flux Data':
|
||||
self.draw_info__flux_data(context, col)
|
||||
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
|
@ -226,6 +245,10 @@ class ExtractDataNode(base.MaxwellSimNode):
|
|||
field_data = self._compute_input('Field Data')
|
||||
return getattr(field_data, props['field_data__component'])
|
||||
|
||||
elif self.active_socket_set == 'Flux Data': # noqa: RET505
|
||||
flux_data = self._compute_input('Flux Data')
|
||||
return getattr(flux_data, 'flux')
|
||||
|
||||
msg = f'Tried to get data from unknown output socket in "{self.bl_label}"'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -1,12 +1,10 @@
|
|||
import enum
|
||||
import dataclasses
|
||||
import inspect
|
||||
import typing as typ
|
||||
from types import MappingProxyType
|
||||
|
||||
from ....utils import extra_sympy_units as spux
|
||||
from ....utils import logger
|
||||
from .. import contracts as ct
|
||||
from .base import MaxwellSimNode
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
||||
|
@ -14,50 +12,35 @@ UnitSystemID = str
|
|||
UnitSystem = dict[ct.SocketType, typ.Any]
|
||||
|
||||
|
||||
class EventCallbackType(enum.StrEnum):
|
||||
"""Names of actions that support callbacks."""
|
||||
|
||||
computes_output_socket = enum.auto()
|
||||
on_value_changed = enum.auto()
|
||||
on_show_plot = enum.auto()
|
||||
on_init = enum.auto()
|
||||
|
||||
|
||||
####################
|
||||
# - Event Callback Information
|
||||
####################
|
||||
class EventCallbackData_ComputesOutputSocket(typ.TypedDict): # noqa: N801
|
||||
"""Extra data used to select a method to compute output sockets."""
|
||||
@dataclasses.dataclass(kw_only=True, frozen=True)
|
||||
class InfoDataChanged:
|
||||
run_on_init: bool
|
||||
on_changed_sockets: set[ct.SocketName]
|
||||
on_changed_props: set[str]
|
||||
on_any_changed_loose_input: set[str]
|
||||
|
||||
|
||||
@dataclasses.dataclass(kw_only=True, frozen=True)
|
||||
class InfoOutputRequested:
|
||||
output_socket_name: ct.SocketName
|
||||
any_loose_output_socket: bool
|
||||
kind: ct.DataFlowKind
|
||||
|
||||
depon_props: set[str]
|
||||
|
||||
class EventCallbackData_OnValueChanged(typ.TypedDict): # noqa: N801
|
||||
"""Extra data used to select a method to compute output sockets."""
|
||||
depon_input_sockets: set[ct.SocketName]
|
||||
depon_input_socket_kinds: dict[ct.SocketName, ct.DataFlowKind]
|
||||
depon_all_loose_input_sockets: bool
|
||||
|
||||
changed_sockets: set[ct.SocketName]
|
||||
changed_props: set[str]
|
||||
changed_loose_input: set[str]
|
||||
depon_output_sockets: set[ct.SocketName]
|
||||
depon_output_socket_kinds: dict[ct.SocketName, ct.DataFlowKind]
|
||||
depon_all_loose_output_sockets: bool
|
||||
|
||||
|
||||
class EventCallbackData_OnShowPlot(typ.TypedDict): # noqa: N801
|
||||
"""Extra data in the callback, used when showing a plot."""
|
||||
|
||||
stop_propagation: bool
|
||||
|
||||
|
||||
class EventCallbackData_OnInit(typ.TypedDict): # noqa: D101, N801
|
||||
pass
|
||||
|
||||
|
||||
EventCallbackData: typ.TypeAlias = (
|
||||
EventCallbackData_ComputesOutputSocket
|
||||
| EventCallbackData_OnValueChanged
|
||||
| EventCallbackData_OnShowPlot
|
||||
| EventCallbackData_OnInit
|
||||
)
|
||||
EventCallbackInfo: typ.TypeAlias = InfoDataChanged | InfoOutputRequested
|
||||
|
||||
|
||||
####################
|
||||
|
@ -68,16 +51,21 @@ PropName: typ.TypeAlias = str
|
|||
|
||||
|
||||
def event_decorator(
|
||||
action_type: EventCallbackType,
|
||||
extra_data: EventCallbackData,
|
||||
props: set[PropName] = frozenset(),
|
||||
action_type: ct.DataFlowAction,
|
||||
callback_info: EventCallbackInfo | None,
|
||||
stop_propagation: bool = False,
|
||||
# Request Data for Callback
|
||||
managed_objs: set[ManagedObjName] = frozenset(),
|
||||
props: set[PropName] = frozenset(),
|
||||
input_sockets: set[ct.SocketName] = frozenset(),
|
||||
input_sockets_optional: dict[ct.SocketName, bool] = MappingProxyType({}),
|
||||
input_socket_kinds: dict[ct.SocketName, ct.DataFlowKind] = MappingProxyType({}),
|
||||
output_sockets: set[ct.SocketName] = frozenset(),
|
||||
output_sockets_optional: dict[ct.SocketName, bool] = MappingProxyType({}),
|
||||
output_socket_kinds: dict[ct.SocketName, ct.DataFlowKind] = MappingProxyType({}),
|
||||
all_loose_input_sockets: bool = False,
|
||||
all_loose_output_sockets: bool = False,
|
||||
# Request Unit System Scaling
|
||||
unit_systems: dict[UnitSystemID, UnitSystem] = MappingProxyType({}),
|
||||
scale_input_sockets: dict[ct.SocketName, UnitSystemID] = MappingProxyType({}),
|
||||
scale_output_sockets: dict[ct.SocketName, UnitSystemID] = MappingProxyType({}),
|
||||
|
@ -87,9 +75,11 @@ def event_decorator(
|
|||
Parameters:
|
||||
action_type: A name describing which event the decorator should respond to.
|
||||
Set to `return_method.action_type`
|
||||
extra_data: A dictionary that provides the caller with additional per-`action_type` information.
|
||||
callback_info: A dictionary that provides the caller with additional per-`action_type` information.
|
||||
This might include parameters to help select the most appropriate method(s) to respond to an event with, or actions to take after running the callback.
|
||||
props: Set of `props` to compute, then pass to the decorated method.
|
||||
stop_propagation: Whether or stop propagating the event through the graph after encountering this method.
|
||||
Other methods defined on the same node will still run.
|
||||
managed_objs: Set of `managed_objs` to retrieve, then pass to the decorated method.
|
||||
input_sockets: Set of `input_sockets` to compute, then pass to the decorated method.
|
||||
input_socket_kinds: The `ct.DataFlowKind` to compute per-input-socket.
|
||||
|
@ -104,7 +94,7 @@ def event_decorator(
|
|||
A decorator, which can be applied to a method of `MaxwellSimNode`.
|
||||
When a `MaxwellSimNode` subclass initializes, such a decorated method will be picked up on.
|
||||
|
||||
When the `action_type` action passes through the node, then `extra_data` is used to determine
|
||||
When the `action_type` action passes through the node, then `callback_info` is used to determine
|
||||
"""
|
||||
req_params = (
|
||||
{'self'}
|
||||
|
@ -119,6 +109,8 @@ def event_decorator(
|
|||
|
||||
# TODO: Check that all Unit System IDs referenced are also defined in 'unit_systems'.
|
||||
## TODO: More ex. introspective checks and such, to make it really hard to write invalid methods.
|
||||
# TODO: Check Function Annotation Validity
|
||||
## - socket capabilities
|
||||
|
||||
def decorator(method: typ.Callable) -> typ.Callable:
|
||||
# Check Function Signature Validity
|
||||
|
@ -133,127 +125,126 @@ def event_decorator(
|
|||
msg = f'Decorated method {method.__name__} has superfluous arguments {func_sig - req_params}'
|
||||
raise ValueError(msg)
|
||||
|
||||
# TODO: Check Function Annotation Validity
|
||||
## - socket capabilities
|
||||
|
||||
def decorated(node: MaxwellSimNode):
|
||||
def decorated(node):
|
||||
method_kw_args = {} ## Keyword Arguments for Decorated Method
|
||||
|
||||
# Compute Requested Props
|
||||
if props:
|
||||
_props = {prop_name: getattr(node, prop_name) for prop_name in props}
|
||||
method_kw_args |= {'props': _props}
|
||||
# Unit Systems
|
||||
method_kw_args |= {'unit_systems': unit_systems} if unit_systems else {}
|
||||
|
||||
# Retrieve Requested Managed Objects
|
||||
if managed_objs:
|
||||
_managed_objs = {
|
||||
managed_obj_name: node.managed_objs[managed_obj_name]
|
||||
for managed_obj_name in managed_objs
|
||||
# Properties
|
||||
method_kw_args |= (
|
||||
{'props': {prop_name: getattr(node, prop_name) for prop_name in props}}
|
||||
if props
|
||||
else {}
|
||||
)
|
||||
|
||||
# Managed Objects
|
||||
method_kw_args |= (
|
||||
{
|
||||
'managed_objs': {
|
||||
managed_obj_name: node.managed_objs[managed_obj_name]
|
||||
for managed_obj_name in managed_objs
|
||||
}
|
||||
}
|
||||
method_kw_args |= {'managed_objs': _managed_objs}
|
||||
if managed_objs
|
||||
else {}
|
||||
)
|
||||
|
||||
# Requested Sockets
|
||||
## Compute Requested Input Sockets
|
||||
if input_sockets:
|
||||
_input_sockets = {
|
||||
input_socket_name: node._compute_input(
|
||||
input_socket_name,
|
||||
kind=input_socket_kinds.get(
|
||||
input_socket_name, ct.DataFlowKind.Value
|
||||
),
|
||||
)
|
||||
for input_socket_name in input_sockets
|
||||
# Sockets
|
||||
## Input Sockets
|
||||
method_kw_args |= (
|
||||
{
|
||||
'input_sockets': {
|
||||
input_socket_name: node._compute_input(
|
||||
input_socket_name,
|
||||
kind=input_socket_kinds.get(
|
||||
input_socket_name, ct.DataFlowKind.Value
|
||||
),
|
||||
unit_system=(
|
||||
unit_system := unit_systems.get(
|
||||
scale_input_sockets.get(input_socket_name)
|
||||
)
|
||||
),
|
||||
optional=input_sockets_optional.get(
|
||||
input_socket_name, False
|
||||
),
|
||||
)
|
||||
for input_socket_name in input_sockets
|
||||
}
|
||||
}
|
||||
if input_sockets
|
||||
else {}
|
||||
)
|
||||
|
||||
# Scale Specified Input Sockets to Unit System
|
||||
## First, scale the input socket value to the given unit system
|
||||
## Then, convert the symbol-less sympy scalar to a python type.
|
||||
for input_socket_name, unit_system_id in scale_input_sockets.items():
|
||||
unit_system = unit_systems[unit_system_id]
|
||||
kind = input_socket_kinds.get(
|
||||
input_socket_name, ct.DataFlowKind.Value
|
||||
)
|
||||
|
||||
if kind == ct.DataFlowKind.Value:
|
||||
_input_sockets[input_socket_name] = spux.sympy_to_python(
|
||||
spux.scale_to_unit(
|
||||
_input_sockets[input_socket_name],
|
||||
unit_system[node.inputs[input_socket_name].socket_type],
|
||||
)
|
||||
## Output Sockets
|
||||
method_kw_args |= (
|
||||
{
|
||||
'output_sockets': {
|
||||
output_socket_name: ct.DataFlowKind.scale_to_unit_system(
|
||||
(
|
||||
output_socket_kind := output_socket_kinds.get(
|
||||
output_socket_name, ct.DataFlowKind.Value
|
||||
)
|
||||
),
|
||||
node.compute_output(
|
||||
output_socket_name,
|
||||
kind=output_socket_kind,
|
||||
optional=output_sockets_optional.get(
|
||||
output_socket_name, False
|
||||
),
|
||||
),
|
||||
node.outputs[output_socket_name].socket_type,
|
||||
unit_systems.get(
|
||||
scale_output_sockets.get(output_socket_name)
|
||||
),
|
||||
)
|
||||
elif kind == ct.DataFlowKind.LazyValueRange:
|
||||
_input_sockets[input_socket_name] = _input_sockets[
|
||||
input_socket_name
|
||||
].rescale_to_unit(
|
||||
unit_system[node.inputs[input_socket_name].socket_type]
|
||||
if scale_output_sockets.get(output_socket_name) is not None
|
||||
else node.compute_output(
|
||||
output_socket_name,
|
||||
kind=output_socket_kinds.get(
|
||||
output_socket_name, ct.DataFlowKind.Value
|
||||
),
|
||||
optional=output_sockets_optional.get(
|
||||
output_socket_name, False
|
||||
),
|
||||
)
|
||||
|
||||
method_kw_args |= {'input_sockets': _input_sockets}
|
||||
|
||||
## Compute Requested Output Sockets
|
||||
if output_sockets:
|
||||
_output_sockets = {
|
||||
output_socket_name: node.compute_output(
|
||||
output_socket_name,
|
||||
kind=output_socket_kinds.get(
|
||||
output_socket_name, ct.DataFlowKind.Value
|
||||
),
|
||||
)
|
||||
for output_socket_name in output_sockets
|
||||
for output_socket_name in output_sockets
|
||||
}
|
||||
}
|
||||
|
||||
# Scale Specified Output Sockets to Unit System
|
||||
## First, scale the output socket value to the given unit system
|
||||
## Then, convert the symbol-less sympy scalar to a python type.
|
||||
for output_socket_name, unit_system_id in scale_output_sockets.items():
|
||||
unit_system = unit_systems[unit_system_id]
|
||||
kind = input_socket_kinds.get(
|
||||
input_socket_name, ct.DataFlowKind.Value
|
||||
)
|
||||
|
||||
if kind == ct.DataFlowKind.Value:
|
||||
_output_sockets[output_socket_name] = spux.sympy_to_python(
|
||||
spux.scale_to_unit(
|
||||
_output_sockets[output_socket_name],
|
||||
unit_system[
|
||||
node.outputs[output_socket_name].socket_type
|
||||
],
|
||||
)
|
||||
)
|
||||
elif kind == ct.DataFlowKind.LazyValueRange:
|
||||
_output_sockets[output_socket_name] = _output_sockets[
|
||||
output_socket_name
|
||||
].rescale_to_unit(
|
||||
unit_system[node.outputs[output_socket_name].socket_type]
|
||||
)
|
||||
method_kw_args |= {'output_sockets': _output_sockets}
|
||||
if output_sockets
|
||||
else {}
|
||||
)
|
||||
|
||||
# Loose Sockets
|
||||
## Compute All Loose Input Sockets
|
||||
if all_loose_input_sockets:
|
||||
_loose_input_sockets = {
|
||||
input_socket_name: node._compute_input(
|
||||
input_socket_name,
|
||||
kind=node.inputs[input_socket_name].active_kind,
|
||||
)
|
||||
for input_socket_name in node.loose_input_sockets
|
||||
method_kw_args |= (
|
||||
{
|
||||
'loose_input_sockets': {
|
||||
input_socket_name: node._compute_input(
|
||||
input_socket_name,
|
||||
kind=node.inputs[input_socket_name].active_kind,
|
||||
)
|
||||
for input_socket_name in node.loose_input_sockets
|
||||
}
|
||||
}
|
||||
method_kw_args |= {'loose_input_sockets': _loose_input_sockets}
|
||||
if all_loose_input_sockets
|
||||
else {}
|
||||
)
|
||||
|
||||
## Compute All Loose Output Sockets
|
||||
if all_loose_output_sockets:
|
||||
_loose_output_sockets = {
|
||||
output_socket_name: node.compute_output(
|
||||
output_socket_name,
|
||||
kind=node.outputs[output_socket_name].active_kind,
|
||||
)
|
||||
for output_socket_name in node.loose_output_sockets
|
||||
method_kw_args |= (
|
||||
{
|
||||
'loose_output_sockets': {
|
||||
output_socket_name: node.compute_output(
|
||||
output_socket_name,
|
||||
kind=node.outputs[output_socket_name].active_kind,
|
||||
)
|
||||
for output_socket_name in node.loose_output_sockets
|
||||
}
|
||||
}
|
||||
method_kw_args |= {'loose_output_sockets': _loose_output_sockets}
|
||||
|
||||
# Unit Systems
|
||||
if unit_systems:
|
||||
method_kw_args |= {'unit_systems': unit_systems}
|
||||
if all_loose_output_sockets
|
||||
else {}
|
||||
)
|
||||
|
||||
# Call Method
|
||||
return method(
|
||||
|
@ -270,7 +261,8 @@ def event_decorator(
|
|||
|
||||
## Add Spice
|
||||
decorated.action_type = action_type
|
||||
decorated.extra_data = extra_data
|
||||
decorated.callback_info = callback_info
|
||||
decorated.stop_propagation = stop_propagation
|
||||
|
||||
return decorated
|
||||
|
||||
|
@ -280,19 +272,22 @@ def event_decorator(
|
|||
####################
|
||||
# - Simplified Event Callbacks
|
||||
####################
|
||||
def computes_output_socket(
|
||||
output_socket_name: ct.SocketName | None,
|
||||
any_loose_output_socket: bool = False,
|
||||
kind: ct.DataFlowKind = ct.DataFlowKind.Value,
|
||||
def on_enable_lock(
|
||||
**kwargs,
|
||||
):
|
||||
return event_decorator(
|
||||
action_type='computes_output_socket',
|
||||
extra_data={
|
||||
'output_socket_name': output_socket_name,
|
||||
'any_loose_output_socket': any_loose_output_socket,
|
||||
'kind': kind,
|
||||
},
|
||||
action_type=ct.DataFlowAction.EnableLock,
|
||||
callback_info=None,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
||||
def on_disable_lock(
|
||||
**kwargs,
|
||||
):
|
||||
return event_decorator(
|
||||
action_type=ct.DataFlowAction.DisableLock,
|
||||
callback_info=None,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
@ -302,37 +297,67 @@ def on_value_changed(
|
|||
socket_name: set[ct.SocketName] | ct.SocketName | None = None,
|
||||
prop_name: set[str] | str | None = None,
|
||||
any_loose_input_socket: bool = False,
|
||||
run_on_init: bool = False,
|
||||
**kwargs,
|
||||
):
|
||||
return event_decorator(
|
||||
action_type=EventCallbackType.on_value_changed,
|
||||
extra_data={
|
||||
'changed_sockets': (
|
||||
action_type=ct.DataFlowAction.DataChanged,
|
||||
callback_info=InfoDataChanged(
|
||||
run_on_init=run_on_init,
|
||||
on_changed_sockets=(
|
||||
socket_name if isinstance(socket_name, set) else {socket_name}
|
||||
),
|
||||
'changed_props': (prop_name if isinstance(prop_name, set) else {prop_name}),
|
||||
'changed_loose_input': any_loose_input_socket,
|
||||
},
|
||||
on_changed_props=(prop_name if isinstance(prop_name, set) else {prop_name}),
|
||||
on_any_changed_loose_input=any_loose_input_socket,
|
||||
),
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
||||
## TODO: Change name to 'on_output_requested'
|
||||
def computes_output_socket(
|
||||
output_socket_name: ct.SocketName | None,
|
||||
any_loose_output_socket: bool = False,
|
||||
kind: ct.DataFlowKind = ct.DataFlowKind.Value,
|
||||
**kwargs,
|
||||
):
|
||||
return event_decorator(
|
||||
action_type=ct.DataFlowAction.OutputRequested,
|
||||
callback_info=InfoOutputRequested(
|
||||
output_socket_name=output_socket_name,
|
||||
any_loose_output_socket=any_loose_output_socket,
|
||||
kind=kind,
|
||||
depon_props=kwargs.get('props', set()),
|
||||
depon_input_sockets=kwargs.get('input_sockets', set()),
|
||||
depon_input_socket_kinds=kwargs.get('input_socket_kinds', set()),
|
||||
depon_output_sockets=kwargs.get('output_sockets', set()),
|
||||
depon_output_socket_kinds=kwargs.get('output_socket_kinds', set()),
|
||||
depon_all_loose_input_sockets=kwargs.get('all_loose_input_sockets', set()),
|
||||
depon_all_loose_output_sockets=kwargs.get(
|
||||
'all_loose_output_sockets', set()
|
||||
),
|
||||
),
|
||||
**kwargs, ## stop_propagation has no effect.
|
||||
)
|
||||
|
||||
|
||||
def on_show_preview(
|
||||
**kwargs,
|
||||
):
|
||||
return event_decorator(
|
||||
action_type=ct.DataFlowAction.ShowPreview,
|
||||
callback_info={},
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
||||
def on_show_plot(
|
||||
stop_propagation: bool = False,
|
||||
stop_propagation: bool = True,
|
||||
**kwargs,
|
||||
):
|
||||
return event_decorator(
|
||||
action_type=EventCallbackType.on_show_plot,
|
||||
extra_data={
|
||||
'stop_propagation': stop_propagation,
|
||||
},
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
||||
def on_init(**kwargs):
|
||||
return event_decorator(
|
||||
action_type=EventCallbackType.on_init,
|
||||
extra_data={},
|
||||
action_type=ct.DataFlowAction.ShowPlot,
|
||||
callback_info={},
|
||||
stop_propagation=stop_propagation,
|
||||
**kwargs,
|
||||
)
|
||||
|
|
|
@ -57,6 +57,7 @@ class Tidy3DFileImporterNode(base.MaxwellSimNode):
|
|||
####################
|
||||
# - Properties
|
||||
####################
|
||||
## TODO: More automatic determination of which file type is in use :)
|
||||
tidy3d_type: bpy.props.EnumProperty(
|
||||
name='Tidy3D Type',
|
||||
description='Type of Tidy3D object to load',
|
||||
|
@ -228,7 +229,6 @@ class Tidy3DFileImporterNode(base.MaxwellSimNode):
|
|||
disp_fitter = CACHE[self.bl_label]['fitter']
|
||||
|
||||
# Plot
|
||||
log.debug(disp_fitter)
|
||||
managed_objs['plot'].mpl_plot_to_image(
|
||||
lambda ax: disp_fitter.plot(
|
||||
medium=model_medium,
|
||||
|
|
|
@ -5,11 +5,14 @@ import sympy as sp
|
|||
import sympy.physics.units as spu
|
||||
|
||||
from .....utils import extra_sympy_units as spux
|
||||
from .....utils import logger
|
||||
from .....utils import sci_constants as constants
|
||||
from ... import contracts as ct
|
||||
from ... import sockets
|
||||
from .. import base, events
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
||||
|
||||
class WaveConstantNode(base.MaxwellSimNode):
|
||||
node_type = ct.NodeType.WaveConstant
|
||||
|
@ -22,7 +25,7 @@ class WaveConstantNode(base.MaxwellSimNode):
|
|||
|
||||
use_range: bpy.props.BoolProperty(
|
||||
name='Range',
|
||||
description='Whether to use the wavelength range',
|
||||
description='Whether to use a wavelength/frequency range',
|
||||
default=False,
|
||||
update=lambda self, context: self.sync_prop('use_range', context),
|
||||
)
|
||||
|
@ -36,62 +39,79 @@ class WaveConstantNode(base.MaxwellSimNode):
|
|||
@events.computes_output_socket(
|
||||
'WL',
|
||||
kind=ct.DataFlowKind.Value,
|
||||
all_loose_input_sockets=True,
|
||||
# Data
|
||||
input_sockets={'WL', 'Freq'},
|
||||
input_sockets_optional={'WL': True, 'Freq': True},
|
||||
)
|
||||
def compute_wl_value(self, loose_input_sockets: dict) -> sp.Expr:
|
||||
if (wl := loose_input_sockets.get('WL')) is not None:
|
||||
return wl
|
||||
def compute_wl_value(self, input_sockets: dict) -> sp.Expr:
|
||||
if input_sockets['WL'] is not None:
|
||||
return input_sockets['WL']
|
||||
|
||||
freq = loose_input_sockets.get('Freq')
|
||||
return constants.vac_speed_of_light / freq
|
||||
if input_sockets['WL'] is None and input_sockets['Freq'] is None:
|
||||
msg = 'Both WL and Freq are None.'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
return constants.vac_speed_of_light / input_sockets['Freq']
|
||||
|
||||
@events.computes_output_socket(
|
||||
'Freq',
|
||||
kind=ct.DataFlowKind.Value,
|
||||
all_loose_input_sockets=True,
|
||||
# Data
|
||||
input_sockets={'WL', 'Freq'},
|
||||
input_sockets_optional={'WL': True, 'Freq': True},
|
||||
)
|
||||
def compute_freq_value(self, loose_input_sockets: dict) -> sp.Expr:
|
||||
if (freq := loose_input_sockets.get('Freq')) is not None:
|
||||
return freq
|
||||
def compute_freq_value(self, input_sockets: dict) -> sp.Expr:
|
||||
log.critical(input_sockets)
|
||||
if input_sockets['Freq'] is not None:
|
||||
return input_sockets['Freq']
|
||||
|
||||
wl = loose_input_sockets.get('WL')
|
||||
return constants.vac_speed_of_light / wl
|
||||
if input_sockets['WL'] is None and input_sockets['Freq'] is None:
|
||||
msg = 'Both WL and Freq are None.'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
return constants.vac_speed_of_light / input_sockets['WL']
|
||||
|
||||
@events.computes_output_socket(
|
||||
'WL',
|
||||
kind=ct.DataFlowKind.LazyValueRange,
|
||||
all_loose_input_sockets=True,
|
||||
# Data
|
||||
input_sockets={'WL', 'Freq'},
|
||||
input_sockets_optional={'WL': True, 'Freq': True},
|
||||
)
|
||||
def compute_wl_lazyvaluerange(self, loose_input_sockets: dict) -> sp.Expr:
|
||||
if (wl := loose_input_sockets.get('WL')) is not None:
|
||||
return wl
|
||||
def compute_wl_range(self, input_sockets: dict) -> sp.Expr:
|
||||
if input_sockets['WL'] is not None:
|
||||
return input_sockets['WL']
|
||||
|
||||
freq = loose_input_sockets.get('Freq')
|
||||
if input_sockets['WL'] is None and input_sockets['Freq'] is None:
|
||||
msg = 'Both WL and Freq are None.'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
if isinstance(freq, ct.LazyDataValueRange):
|
||||
return freq.rescale_bounds(
|
||||
lambda bound: constants.vac_speed_of_light / bound, reverse=True
|
||||
)
|
||||
|
||||
return constants.vac_speed_of_light / freq
|
||||
return input_sockets['Freq'].rescale_bounds(
|
||||
lambda bound: constants.vac_speed_of_light / bound, reverse=True
|
||||
)
|
||||
|
||||
@events.computes_output_socket(
|
||||
'Freq',
|
||||
kind=ct.DataFlowKind.LazyValueRange,
|
||||
all_loose_input_sockets=True,
|
||||
# Data
|
||||
input_sockets={'WL', 'Freq'},
|
||||
input_socket_kinds={
|
||||
'WL': ct.DataFlowKind.LazyValueRange,
|
||||
'Freq': ct.DataFlowKind.LazyValueRange,
|
||||
},
|
||||
input_sockets_optional={'WL': True, 'Freq': True},
|
||||
)
|
||||
def compute_freq_lazyvaluerange(self, loose_input_sockets: dict) -> sp.Expr:
|
||||
if (freq := loose_input_sockets.get('Freq')) is not None:
|
||||
return freq
|
||||
def compute_freq_range(self, input_sockets: dict) -> sp.Expr:
|
||||
if input_sockets['Freq'] is not None:
|
||||
return input_sockets['Freq']
|
||||
|
||||
wl = loose_input_sockets.get('WL')
|
||||
if input_sockets['WL'] is None and input_sockets['Freq'] is None:
|
||||
msg = 'Both WL and Freq are None.'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
if isinstance(wl, ct.LazyDataValueRange):
|
||||
return wl.rescale_bounds(
|
||||
lambda bound: constants.vac_speed_of_light / bound, reverse=True
|
||||
)
|
||||
|
||||
return constants.vac_speed_of_light / wl
|
||||
return input_sockets['WL'].rescale_bounds(
|
||||
lambda bound: constants.vac_speed_of_light / bound, reverse=True
|
||||
)
|
||||
|
||||
####################
|
||||
# - Event Methods
|
||||
|
@ -99,6 +119,7 @@ class WaveConstantNode(base.MaxwellSimNode):
|
|||
@events.on_value_changed(
|
||||
prop_name={'active_socket_set', 'use_range'},
|
||||
props={'active_socket_set', 'use_range'},
|
||||
run_on_init=True,
|
||||
)
|
||||
def on_input_spec_change(self, props: dict):
|
||||
if props['active_socket_set'] == 'Wavelength':
|
||||
|
@ -123,12 +144,6 @@ class WaveConstantNode(base.MaxwellSimNode):
|
|||
'Freq': sockets.PhysicalFreqSocketDef(is_array=props['use_range']),
|
||||
}
|
||||
|
||||
@events.on_init(
|
||||
props={'active_socket_set', 'use_range'},
|
||||
)
|
||||
def on_init(self, props: dict):
|
||||
self.on_input_spec_change()
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
|
|
|
@ -47,15 +47,16 @@ class Tidy3DWebImporterNode(base.MaxwellSimNode):
|
|||
## TODO: REMOVE TEST
|
||||
log.info('Loading SimulationData File')
|
||||
import sys
|
||||
|
||||
for module_name, module in sys.modules.copy().items():
|
||||
if module_name == '__mp_main__':
|
||||
print('Problematic Module Entry', module_name)
|
||||
print(module)
|
||||
#print('MODULE REPR', module)
|
||||
# print('MODULE REPR', module)
|
||||
continue
|
||||
#return td.SimulationData.from_file(
|
||||
# fname='/home/sofus/src/blender_maxwell/dev/sim_demo.hdf5'
|
||||
#)
|
||||
# return td.SimulationData.from_file(
|
||||
# fname='/home/sofus/src/blender_maxwell/dev/sim_demo.hdf5'
|
||||
# )
|
||||
|
||||
# Validate Task Availability
|
||||
if (cloud_task := input_sockets['Cloud Task']) is None:
|
||||
|
@ -77,7 +78,9 @@ class Tidy3DWebImporterNode(base.MaxwellSimNode):
|
|||
cloud_task, _sim_data_cache_path(cloud_task.task_id)
|
||||
)
|
||||
|
||||
@events.on_value_changed(socket_name='Cloud Task', input_sockets={'Cloud Task'})
|
||||
@events.on_value_changed(
|
||||
socket_name='Cloud Task', run_on_init=True, input_sockets={'Cloud Task'}
|
||||
)
|
||||
def on_cloud_task_changed(self, input_sockets: dict):
|
||||
if (
|
||||
(cloud_task := input_sockets['Cloud Task']) is not None
|
||||
|
@ -90,10 +93,6 @@ class Tidy3DWebImporterNode(base.MaxwellSimNode):
|
|||
else:
|
||||
self.loose_output_sockets = {}
|
||||
|
||||
@events.on_init()
|
||||
def on_init(self):
|
||||
self.on_cloud_task_changed()
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
|
|
|
@ -99,9 +99,6 @@ class EHFieldMonitorNode(base.MaxwellSimNode):
|
|||
name=props['sim_node_name'],
|
||||
interval_space=tuple(input_sockets['Samples/Space']),
|
||||
freqs=input_sockets['Freqs'].realize().values,
|
||||
#freqs=[
|
||||
# float(spu.convert_to(freq, spu.hertz) / spu.hertz) for freq in freqs
|
||||
#],
|
||||
)
|
||||
|
||||
####################
|
||||
|
|
|
@ -45,8 +45,9 @@ class PowerFluxMonitorNode(base.MaxwellSimNode):
|
|||
),
|
||||
},
|
||||
}
|
||||
output_sockets: typ.ClassVar = {
|
||||
'Monitor': sockets.MaxwellMonitorSocketDef(),
|
||||
output_socket_sets: typ.ClassVar = {
|
||||
'Freq Domain': {'Freq Monitor': sockets.MaxwellMonitorSocketDef()},
|
||||
'Time Domain': {'Time Monitor': sockets.MaxwellMonitorSocketDef()},
|
||||
}
|
||||
|
||||
managed_obj_defs: typ.ClassVar = {
|
||||
|
@ -62,60 +63,44 @@ class PowerFluxMonitorNode(base.MaxwellSimNode):
|
|||
# - Event Methods: Computation
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Monitor',
|
||||
props={'active_socket_set', 'sim_node_name'},
|
||||
'Freq Monitor',
|
||||
props={'sim_node_name'},
|
||||
input_sockets={
|
||||
'Rec Start',
|
||||
'Rec Stop',
|
||||
'Center',
|
||||
'Size',
|
||||
'Samples/Space',
|
||||
'Samples/Time',
|
||||
'Freqs',
|
||||
'Direction',
|
||||
},
|
||||
input_socket_kinds={
|
||||
'Freqs': ct.LazyDataValueRange,
|
||||
'Freqs': ct.DataFlowKind.LazyValueRange,
|
||||
},
|
||||
unit_systems={'Tidy3DUnits': ct.UNITS_TIDY3D},
|
||||
scale_input_sockets={
|
||||
'Center': 'Tidy3DUnits',
|
||||
'Size': 'Tidy3DUnits',
|
||||
'Freqs': 'Tidy3DUnits',
|
||||
'Samples/Space': 'Tidy3DUnits',
|
||||
'Rec Start': 'Tidy3DUnits',
|
||||
'Rec Stop': 'Tidy3DUnits',
|
||||
'Samples/Time': 'Tidy3DUnits',
|
||||
},
|
||||
)
|
||||
def compute_monitor(self, input_sockets: dict, props: dict) -> td.FieldTimeMonitor:
|
||||
direction = '+' if input_sockets['Direction'] else '-'
|
||||
|
||||
if props['active_socket_set'] == 'Freq Domain':
|
||||
log.info(
|
||||
'Computing FluxMonitor (name="%s") with center="%s", size="%s"',
|
||||
props['sim_node_name'],
|
||||
input_sockets['Center'],
|
||||
input_sockets['Size'],
|
||||
)
|
||||
return td.FluxMonitor(
|
||||
center=input_sockets['Center'],
|
||||
size=input_sockets['Size'],
|
||||
name=props['sim_node_name'],
|
||||
interval_space=input_sockets['Samples/Space'],
|
||||
freqs=input_sockets['Freqs'].realize().values,
|
||||
normal_dir=direction,
|
||||
)
|
||||
|
||||
return td.FluxTimeMonitor(
|
||||
def compute_freq_monitor(
|
||||
self,
|
||||
input_sockets: dict,
|
||||
props: dict,
|
||||
unit_systems: dict,
|
||||
) -> td.FieldMonitor:
|
||||
log.info(
|
||||
'Computing FluxMonitor (name="%s") with center="%s", size="%s"',
|
||||
props['sim_node_name'],
|
||||
input_sockets['Center'],
|
||||
input_sockets['Size'],
|
||||
)
|
||||
return td.FluxMonitor(
|
||||
center=input_sockets['Center'],
|
||||
size=input_sockets['Size'],
|
||||
name=props['sim_node_name'],
|
||||
start=input_sockets['Rec Start'],
|
||||
stop=input_sockets['Rec Stop'],
|
||||
interval=input_sockets['Samples/Time'],
|
||||
interval_space=input_sockets['Samples/Space'],
|
||||
normal_dir=direction,
|
||||
interval_space=(1,1,1),
|
||||
freqs=input_sockets['Freqs'].realize().values,
|
||||
normal_dir='+' if input_sockets['Direction'] else '-',
|
||||
)
|
||||
|
||||
####################
|
||||
|
|
|
@ -70,12 +70,6 @@ class ViewerNode(base.MaxwellSimNode):
|
|||
update=lambda self, context: self.sync_prop('auto_3d_preview', context),
|
||||
)
|
||||
|
||||
cache__data_socket_linked: bpy.props.BoolProperty(
|
||||
name='Data Is Linked',
|
||||
description='Whether the Data input was linked last time it was checked.',
|
||||
default=True,
|
||||
)
|
||||
|
||||
####################
|
||||
# - UI
|
||||
####################
|
||||
|
@ -125,41 +119,26 @@ class ViewerNode(base.MaxwellSimNode):
|
|||
# - Event Methods
|
||||
####################
|
||||
@events.on_value_changed(
|
||||
socket_name='Data',
|
||||
prop_name='auto_plot',
|
||||
props={'auto_plot'},
|
||||
)
|
||||
def on_changed_plot_preview(self, props):
|
||||
if self.inputs['Data'].is_linked and props['auto_plot']:
|
||||
# log.debug('Enabling 2D Plot from "%s"', self.name)
|
||||
self.trigger_action(ct.DataFlowAction.ShowPlot)
|
||||
|
||||
@events.on_value_changed(
|
||||
socket_name='Data',
|
||||
prop_name='auto_3d_preview',
|
||||
props={'auto_3d_preview'},
|
||||
)
|
||||
def on_changed_3d_preview(self, props):
|
||||
# Unpreview Everything
|
||||
if props['auto_3d_preview']:
|
||||
node_tree = self.id_data
|
||||
node_tree.unpreview_all()
|
||||
node_tree = self.id_data
|
||||
|
||||
# Trigger Preview Action
|
||||
if self.inputs['Data'].is_linked and props['auto_3d_preview']:
|
||||
# log.debug('Enabling 3D Previews from "%s"', self.name)
|
||||
self.trigger_action(ct.DataFlowAction.ShowPreview)
|
||||
|
||||
@events.on_value_changed(
|
||||
socket_name='Data',
|
||||
)
|
||||
def on_changed_3d_data(self):
|
||||
# Is Linked: Re-Preview
|
||||
if self.inputs['Data'].is_linked:
|
||||
self.on_changed_3d_preview()
|
||||
self.on_changed_plot_preview()
|
||||
|
||||
# Just Linked / Just Unlinked: Preview/Unpreview All
|
||||
if self.inputs['Data'].is_linked ^ self.cache__data_socket_linked:
|
||||
self.cache__data_socket_linked = self.inputs['Data'].is_linked
|
||||
# Remove Non-Repreviewed Previews on Close
|
||||
with node_tree.repreview_all():
|
||||
if self.inputs['Data'].is_linked and props['auto_3d_preview']:
|
||||
self.trigger_action(ct.DataFlowAction.ShowPreview)
|
||||
|
||||
|
||||
####################
|
||||
|
|
|
@ -63,6 +63,7 @@ class SimDomainNode(base.MaxwellSimNode):
|
|||
@events.on_value_changed(
|
||||
socket_name={'Center', 'Size'},
|
||||
prop_name='preview_active',
|
||||
run_on_init=True,
|
||||
props={'preview_active'},
|
||||
input_sockets={'Center', 'Size'},
|
||||
managed_objs={'mesh', 'modifier'},
|
||||
|
@ -94,10 +95,6 @@ class SimDomainNode(base.MaxwellSimNode):
|
|||
if props['preview_active']:
|
||||
managed_objs['mesh'].show_preview()
|
||||
|
||||
@events.on_init()
|
||||
def on_init(self):
|
||||
self.on_input_changed()
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
|
|
|
@ -71,14 +71,14 @@ class GeoNodesStructureNode(base.MaxwellSimNode):
|
|||
socket_name='GeoNodes',
|
||||
prop_name='preview_active',
|
||||
any_loose_input_socket=True,
|
||||
run_on_init=True,
|
||||
# Pass Data
|
||||
props={'preview_active'},
|
||||
managed_objs={'mesh', 'modifier'},
|
||||
input_sockets={'Center', 'GeoNodes'},
|
||||
all_loose_input_sockets=True,
|
||||
unit_systems={'BlenderUnits': ct.UNITS_BLENDER},
|
||||
scale_input_sockets={
|
||||
'Center': 'BlenderUnits'
|
||||
}
|
||||
scale_input_sockets={'Center': 'BlenderUnits'},
|
||||
)
|
||||
def on_input_changed(
|
||||
self,
|
||||
|
|
|
@ -62,6 +62,7 @@ class BoxStructureNode(base.MaxwellSimNode):
|
|||
@events.on_value_changed(
|
||||
socket_name={'Center', 'Size'},
|
||||
prop_name='preview_active',
|
||||
run_on_init=True,
|
||||
props={'preview_active'},
|
||||
input_sockets={'Center', 'Size'},
|
||||
managed_objs={'mesh', 'modifier'},
|
||||
|
@ -93,10 +94,6 @@ class BoxStructureNode(base.MaxwellSimNode):
|
|||
if props['preview_active']:
|
||||
managed_objs['mesh'].show_preview()
|
||||
|
||||
@events.on_init()
|
||||
def on_init(self):
|
||||
self.on_inputs_changed()
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
|
|
|
@ -64,6 +64,7 @@ class SphereStructureNode(base.MaxwellSimNode):
|
|||
@events.on_value_changed(
|
||||
socket_name={'Center', 'Radius'},
|
||||
prop_name='preview_active',
|
||||
run_on_init=True,
|
||||
props={'preview_active'},
|
||||
input_sockets={'Center', 'Radius'},
|
||||
managed_objs={'mesh', 'modifier'},
|
||||
|
@ -96,10 +97,6 @@ class SphereStructureNode(base.MaxwellSimNode):
|
|||
if props['preview_active']:
|
||||
managed_objs['mesh'].show_preview()
|
||||
|
||||
@events.on_init()
|
||||
def on_init(self):
|
||||
self.on_inputs_changed()
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
|
|
|
@ -1,3 +1,5 @@
|
|||
import typing as typ
|
||||
|
||||
import bpy
|
||||
import sympy as sp
|
||||
|
||||
|
@ -5,18 +7,15 @@ from ... import contracts as ct
|
|||
from ... import sockets
|
||||
from .. import base, events
|
||||
|
||||
MAX_AMOUNT = 20
|
||||
|
||||
|
||||
class CombineNode(base.MaxwellSimNode):
|
||||
node_type = ct.NodeType.Combine
|
||||
bl_label = 'Combine'
|
||||
# bl_icon = ...
|
||||
|
||||
####################
|
||||
# - Sockets
|
||||
####################
|
||||
input_socket_sets = {
|
||||
input_socket_sets: typ.ClassVar = {
|
||||
'Maxwell Sources': {},
|
||||
'Maxwell Structures': {},
|
||||
'Maxwell Monitors': {},
|
||||
|
@ -69,7 +68,7 @@ class CombineNode(base.MaxwellSimNode):
|
|||
description='Amount of Objects to Combine',
|
||||
default=1,
|
||||
min=1,
|
||||
max=MAX_AMOUNT,
|
||||
# max=MAX_AMOUNT,
|
||||
update=lambda self, context: self.sync_prop('amount', context),
|
||||
)
|
||||
|
||||
|
@ -118,6 +117,7 @@ class CombineNode(base.MaxwellSimNode):
|
|||
@events.on_value_changed(
|
||||
prop_name='active_socket_set',
|
||||
props={'active_socket_set', 'amount'},
|
||||
run_on_init=True,
|
||||
)
|
||||
def on_value_changed__active_socket_set(self, props):
|
||||
if props['active_socket_set'] == 'Maxwell Sources':
|
||||
|
@ -144,10 +144,6 @@ class CombineNode(base.MaxwellSimNode):
|
|||
def on_value_changed__amount(self):
|
||||
self.on_value_changed__active_socket_set()
|
||||
|
||||
@events.on_init()
|
||||
def on_init(self):
|
||||
self.on_value_changed__active_socket_set()
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
|
|
|
@ -116,7 +116,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
`trigger_action` method will be called.
|
||||
"""
|
||||
# Forwards Chains
|
||||
if action in {'value_changed'}:
|
||||
if action in {ct.DataFlowAction.DataChanged}:
|
||||
## Input Socket
|
||||
if not self.is_output:
|
||||
self.node.trigger_action(action, socket_name=self.name)
|
||||
|
@ -128,15 +128,17 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
|
||||
# Backwards Chains
|
||||
elif action in {
|
||||
'enable_lock',
|
||||
'disable_lock',
|
||||
'show_preview',
|
||||
'show_plot',
|
||||
ct.DataFlowAction.EnableLock,
|
||||
ct.DataFlowAction.DisableLock,
|
||||
ct.DataFlowAction.OutputRequested,
|
||||
ct.DataFlowAction.DataChanged,
|
||||
ct.DataFlowAction.ShowPreview,
|
||||
ct.DataFlowAction.ShowPlot,
|
||||
}:
|
||||
if action == 'enable_lock':
|
||||
if action == ct.DataFlowAction.EnableLock:
|
||||
self.locked = True
|
||||
|
||||
if action == 'disable_lock':
|
||||
if action == ct.DataFlowAction.DisableLock:
|
||||
self.locked = False
|
||||
|
||||
## Output Socket
|
||||
|
@ -208,6 +210,7 @@ class MaxwellSimSocket(bpy.types.NodeSocket):
|
|||
|
||||
Returns a bool, whether or not the socket consents to the link change.
|
||||
"""
|
||||
## TODO: Crash if deleting removing linked loose sockets.
|
||||
if self.locked:
|
||||
return False
|
||||
if self.is_output:
|
||||
|
|
|
@ -105,7 +105,7 @@ class TidyCloudFolders:
|
|||
cloud_folder.folder_id: cloud_folder for cloud_folder in cloud_folders
|
||||
}
|
||||
cls.cache_folders = folders
|
||||
log.info("Retrieved Folders: %s", str(cls.cache_folders))
|
||||
log.info('Retrieved Folders: %s', str(cls.cache_folders))
|
||||
return folders
|
||||
|
||||
@classmethod
|
||||
|
@ -243,7 +243,11 @@ class TidyCloudTasks:
|
|||
## Task by-Folder Cache
|
||||
cls.cache_folder_tasks[cloud_folder.folder_id] = set(cloud_tasks)
|
||||
|
||||
log.info('Retrieved Tasks (folder="%s"): %s)', cloud_folder.folder_id, str(set(cloud_tasks)))
|
||||
log.info(
|
||||
'Retrieved Tasks (folder="%s"): %s)',
|
||||
cloud_folder.folder_id,
|
||||
str(set(cloud_tasks)),
|
||||
)
|
||||
return cloud_tasks
|
||||
|
||||
####################
|
||||
|
|
Loading…
Reference in New Issue