Compare commits
No commits in common. "9960cd3480e35fdb8044a09b891e413b009f22be" and "568fc449e818da3e06e799c9789dff7f8c2eaf36" have entirely different histories.
@@ -9,4 +9,3 @@ trim_trailing_whitespace = false

[*.yml]
indent_style = space
indent_size = 2
@@ -36,7 +36,7 @@
- Output: Write the input socket value.
- Condition: Input socket is unlinked. (If it's linked, then lock the object's position. Use sync_link_added() for that.)
- Node to BL:
	- Trigger: "Report" event on an input socket that the managed object declares reliance on.
	- Trigger: "Report" action on an input socket that the managed object declares reliance on.
	- Input: The input socket value (linked or unlinked).
	- Output: The object location (origin), using a unit system.
doc/_quarto.yml (192 changed lines)
@ -1,44 +1,10 @@
|
|||
####################
|
||||
# - Project Config
|
||||
####################
|
||||
project:
|
||||
type: website
|
||||
output-dir: _site
|
||||
|
||||
format:
|
||||
html:
|
||||
toc: true
|
||||
|
||||
filters:
|
||||
- interlinks
|
||||
|
||||
interlinks:
|
||||
sources:
|
||||
numpy:
|
||||
url: https://numpy.org/doc/stable/
|
||||
matplotlib:
|
||||
url: https://matplotlib.org/stable/
|
||||
python:
|
||||
url: https://docs.python.org/3/
|
||||
|
||||
metadata-files:
|
||||
# Sidebar for /pydocs Paths
|
||||
- pydocs/_sidebar.yml
|
||||
|
||||
####################
|
||||
# - Website Config
|
||||
####################
|
||||
# Website Configuration
|
||||
website:
|
||||
title: "Blender Maxwell"
|
||||
description: "A Blender-based design and analysis tool for electromagnetic simulations"
|
||||
page-footer: "Copyright 2024, Sofus Albert Høgsbro Rose"
|
||||
repo-url: https://github.com/so-rose/blender_maxwell/
|
||||
repo-actions: [issue]
|
||||
page-navigation: true
|
||||
navbar:
|
||||
background: primary
|
||||
pinned: true
|
||||
search: true
|
||||
left:
|
||||
- file: index.qmd
|
||||
text: Home
|
||||
|
@ -52,12 +18,19 @@ website:
|
|||
- text: Report a Bug
|
||||
url: https://github.com/so-rose/blender_maxwell/issues/new/choose
|
||||
|
||||
# Auto-Generated Metadata
|
||||
metadata-files:
|
||||
# Sidebar for /pydocs Paths
|
||||
- pydocs/_sidebar.yml
|
||||
|
||||
|
||||
####################
|
||||
# - Quartodoc Config
|
||||
# - quartodoc - Autogenerated Python Docs
|
||||
####################
|
||||
quartodoc:
|
||||
# Output
|
||||
dir: pydocs
|
||||
#out_index: _api_index.qmd
|
||||
sidebar: pydocs/_sidebar.yml
|
||||
|
||||
# Python Package
|
||||
|
@ -70,13 +43,8 @@ quartodoc:
|
|||
title: "Blender Maxwell"
|
||||
|
||||
# Options
|
||||
renderer:
|
||||
style: markdown
|
||||
#show_signature: true
|
||||
show_signature_annotations: false
|
||||
display_name: name
|
||||
options:
|
||||
#include_private: true
|
||||
include_private: true
|
||||
include_empty: true
|
||||
include_attributes: true
|
||||
signature_name: "short"
|
||||
|
@ -89,11 +57,16 @@ quartodoc:
|
|||
desc: Build/packaging scripts for developing and publishing the addon.
|
||||
package: scripts
|
||||
contents:
|
||||
- info
|
||||
- pack
|
||||
- dev
|
||||
- bl_delete_addon
|
||||
- bl_install_addon
|
||||
- name: info
|
||||
children: embedded
|
||||
- name: pack
|
||||
children: embedded
|
||||
- name: dev
|
||||
children: embedded
|
||||
- name: bl_delete_addon
|
||||
children: embedded
|
||||
- name: bl_install_addon
|
||||
children: embedded
|
||||
|
||||
####################
|
||||
# - bl_maxwell
|
||||
|
@ -101,39 +74,54 @@ quartodoc:
|
|||
- title: "`bl_maxwell`"
|
||||
desc: Root package for the addon.
|
||||
contents:
|
||||
- preferences
|
||||
- registration
|
||||
- name: info
|
||||
children: embedded
|
||||
- name: preferences
|
||||
children: embedded
|
||||
- name: registration
|
||||
children: embedded
|
||||
|
||||
- subtitle: "`bl_maxwell.assets`"
|
||||
desc: Blender assets bundled w/Blender Maxwell
|
||||
contents:
|
||||
- assets
|
||||
- assets.import_geonodes
|
||||
- name: assets
|
||||
children: embedded
|
||||
- name: assets.import_geonodes
|
||||
children: embedded
|
||||
|
||||
- subtitle: "`bl_maxwell.nodeps`"
|
||||
desc: No-Dependency
|
||||
contents:
|
||||
- operators
|
||||
- name: operators
|
||||
children: embedded
|
||||
|
||||
- subtitle: "`bl_maxwell.utils`"
|
||||
desc: Utilities wo/shared global state.
|
||||
contents:
|
||||
- utils.analyze_geonodes
|
||||
- utils.blender_type_enum
|
||||
- utils.extra_sympy_units
|
||||
- utils.logger
|
||||
- utils.pydantic_sympy
|
||||
- name: utils.analyze_geonodes
|
||||
children: embedded
|
||||
- name: utils.blender_type_enum
|
||||
children: embedded
|
||||
- name: utils.extra_sympy_units
|
||||
children: embedded
|
||||
- name: utils.logger
|
||||
children: embedded
|
||||
- name: utils.pydantic_sympy
|
||||
children: embedded
|
||||
|
||||
- subtitle: "`bl_maxwell.services`"
|
||||
desc: Utilities w/shared global state.
|
||||
contents:
|
||||
- services.tdcloud
|
||||
- name: services.tdcloud
|
||||
children: embedded
|
||||
|
||||
- subtitle: "`bl_maxwell.operators`"
|
||||
desc: General Blender operators.
|
||||
contents:
|
||||
- operators.bl_append
|
||||
- operators.connect_viewer
|
||||
- name: operators.bl_append
|
||||
children: embedded
|
||||
- name: operators.connect_viewer
|
||||
children: embedded
|
||||
|
||||
####################
|
||||
# - ..maxwell_sim_nodes
|
||||
|
@ -142,49 +130,73 @@ quartodoc:
|
|||
desc: Maxwell Simulation Design/Viz Node Tree.
|
||||
package: blender_maxwell.node_trees.maxwell_sim_nodes
|
||||
contents:
|
||||
- bl_socket_map
|
||||
- categories
|
||||
- bl_cache
|
||||
- node_tree
|
||||
- name: bl_socket_map
|
||||
children: embedded
|
||||
- name: categories
|
||||
children: embedded
|
||||
- name: bl_cache
|
||||
children: embedded
|
||||
- name: node_tree
|
||||
children: embedded
|
||||
|
||||
- subtitle: "`contracts`"
|
||||
desc: Constants and interfaces for identifying resources.
|
||||
package: blender_maxwell.node_trees.maxwell_sim_nodes.contracts
|
||||
contents:
|
||||
# General
|
||||
- flow_kinds
|
||||
- flow_kinds.FlowKind
|
||||
- flow_kinds.LazyValueFuncFlow
|
||||
- icons
|
||||
- name: bl
|
||||
children: embedded
|
||||
- name: data_flows
|
||||
children: embedded
|
||||
- name: icons
|
||||
children: embedded
|
||||
|
||||
- tree_types
|
||||
- name: trees
|
||||
children: embedded
|
||||
|
||||
# Managed Objects
|
||||
- mobj_types
|
||||
- name: managed_obj_type
|
||||
children: embedded
|
||||
|
||||
# Nodes
|
||||
- node_types
|
||||
- category_types
|
||||
- category_labels
|
||||
- name: node_types
|
||||
children: embedded
|
||||
- name: node_cats
|
||||
children: embedded
|
||||
- name: node_cat_labels
|
||||
children: embedded
|
||||
|
||||
# Sockets
|
||||
- socket_types
|
||||
- socket_colors
|
||||
- bl_socket_types
|
||||
- bl_socket_desc_map
|
||||
- socket_units
|
||||
- name: socket_types
|
||||
children: embedded
|
||||
- name: socket_colors
|
||||
children: embedded
|
||||
- name: socket_from_bl_desc
|
||||
children: embedded
|
||||
- name: socket_from_bl_direct
|
||||
children: embedded
|
||||
- name: socket_shapes
|
||||
children: embedded
|
||||
- name: socket_units
|
||||
children: embedded
|
||||
|
||||
- unit_systems
|
||||
- name: unit_systems
|
||||
children: embedded
|
||||
|
||||
- subtitle: "`managed_objs`"
|
||||
desc: Maxwell Simulation Design/Viz Node Tree
|
||||
package: blender_maxwell.node_trees.maxwell_sim_nodes.managed_objs
|
||||
contents:
|
||||
- managed_bl_collection
|
||||
- managed_bl_empty
|
||||
- managed_bl_image
|
||||
- managed_bl_mesh
|
||||
- managed_bl_modifier
|
||||
- name: managed_bl_collection
|
||||
children: embedded
|
||||
- name: managed_bl_empty
|
||||
children: embedded
|
||||
- name: managed_bl_image
|
||||
children: embedded
|
||||
- name: managed_bl_mesh
|
||||
children: embedded
|
||||
- name: managed_bl_modifier
|
||||
children: embedded
|
||||
|
||||
####################
|
||||
# - ..maxwell_sim_nodes.nodes
|
||||
|
@ -193,8 +205,10 @@ quartodoc:
|
|||
desc: Maxwell Simulation Node Sockets
|
||||
package: blender_maxwell.node_trees.maxwell_sim_nodes.sockets
|
||||
contents:
|
||||
- base
|
||||
- scan_socket_defs
|
||||
- name: base
|
||||
children: embedded
|
||||
- name: scan_socket_defs
|
||||
children: embedded
|
||||
|
||||
####################
|
||||
# - ..maxwell_sim_nodes.nodes
|
||||
|
@ -203,5 +217,7 @@ quartodoc:
|
|||
desc: Maxwell Simulation Nodes
|
||||
package: blender_maxwell.node_trees.maxwell_sim_nodes.nodes
|
||||
contents:
|
||||
- base
|
||||
- events
|
||||
- name: base
|
||||
children: embedded
|
||||
- name: events
|
||||
children: embedded
|
||||
|
|
|
@@ -14,10 +14,6 @@ dependencies = [
	"networkx==3.2.*",
	"rich==12.5.*",
	"rtree==1.2.*",
	"jax[cpu]==0.4.26",
	"msgspec[toml]==0.18.6",
	"numba==0.59.1",
	"jaxtyping==0.2.28",
	# Pin Blender 4.1.0-Compatible Versions
	## The dependency resolver will report if anything is wonky.
	"urllib3==1.26.8",
@@ -26,6 +22,8 @@ dependencies = [
	"idna==3.3",
	"charset-normalizer==2.0.10",
	"certifi==2021.10.8",
	"jax[cpu]>=0.4.26",
	"msgspec[toml]>=0.18.6",
]
readme = "README.md"
requires-python = "~= 3.11"
@@ -107,7 +105,6 @@ ignore = [
	"B008",  # FastAPI uses this for Depends(), Security(), etc.
	"E701",  # class foo(Parent): pass or if simple: return are perfectly elegant
	"ERA001",  # 'Commented-out code' seems to be just about anything to ruff
	"F722",  # jaxtyping uses type annotations that ruff sees as "syntax error"

	# Line Length - Controversy Incoming
	## Hot Take: Let the Formatter Worry about Line Length
|
|
@ -49,14 +49,11 @@ importlib-metadata==6.11.0
|
|||
jax==0.4.26
|
||||
jaxlib==0.4.26
|
||||
# via jax
|
||||
jaxtyping==0.2.28
|
||||
jmespath==1.0.1
|
||||
# via boto3
|
||||
# via botocore
|
||||
kiwisolver==1.4.5
|
||||
# via matplotlib
|
||||
llvmlite==0.42.0
|
||||
# via numba
|
||||
locket==1.0.0
|
||||
# via partd
|
||||
matplotlib==3.8.3
|
||||
|
@ -68,16 +65,13 @@ mpmath==1.3.0
|
|||
# via sympy
|
||||
msgspec==0.18.6
|
||||
networkx==3.2
|
||||
numba==0.59.1
|
||||
numpy==1.24.3
|
||||
# via contourpy
|
||||
# via h5py
|
||||
# via jax
|
||||
# via jaxlib
|
||||
# via jaxtyping
|
||||
# via matplotlib
|
||||
# via ml-dtypes
|
||||
# via numba
|
||||
# via opt-einsum
|
||||
# via scipy
|
||||
# via shapely
|
||||
|
@ -148,8 +142,6 @@ toolz==0.12.1
|
|||
# via dask
|
||||
# via partd
|
||||
trimesh==4.2.0
|
||||
typeguard==2.13.3
|
||||
# via jaxtyping
|
||||
types-pyyaml==6.0.12.20240311
|
||||
# via responses
|
||||
typing-extensions==4.10.0
|
||||
|
|
|
@ -48,14 +48,11 @@ importlib-metadata==6.11.0
|
|||
jax==0.4.26
|
||||
jaxlib==0.4.26
|
||||
# via jax
|
||||
jaxtyping==0.2.28
|
||||
jmespath==1.0.1
|
||||
# via boto3
|
||||
# via botocore
|
||||
kiwisolver==1.4.5
|
||||
# via matplotlib
|
||||
llvmlite==0.42.0
|
||||
# via numba
|
||||
locket==1.0.0
|
||||
# via partd
|
||||
matplotlib==3.8.3
|
||||
|
@ -67,16 +64,13 @@ mpmath==1.3.0
|
|||
# via sympy
|
||||
msgspec==0.18.6
|
||||
networkx==3.2
|
||||
numba==0.59.1
|
||||
numpy==1.24.3
|
||||
# via contourpy
|
||||
# via h5py
|
||||
# via jax
|
||||
# via jaxlib
|
||||
# via jaxtyping
|
||||
# via matplotlib
|
||||
# via ml-dtypes
|
||||
# via numba
|
||||
# via opt-einsum
|
||||
# via scipy
|
||||
# via shapely
|
||||
|
@ -146,8 +140,6 @@ toolz==0.12.1
|
|||
# via dask
|
||||
# via partd
|
||||
trimesh==4.2.0
|
||||
typeguard==2.13.3
|
||||
# via jaxtyping
|
||||
types-pyyaml==6.0.12.20240311
|
||||
# via responses
|
||||
typing-extensions==4.10.0
|
||||
|
|
|
@@ -1,44 +1,13 @@
"""A Blender-based system for electromagnetic simulation design and analysis, with deep Tidy3D integration.

# `bl_info`
`bl_info` declares information about the addon to Blender.

However, it is not _dynamically_ read: Blender traverses it using `ast.parse`.
This makes it difficult to synchronize `bl_info` with the project's `pyproject.toml`.
As a workaround, **the addon zip-packer will replace `bl_info` entries**.

The following `bl_info` entries are currently replaced when the ZIP is built:

- `description`: To match the description in `pyproject.toml`.
- `version`: To match the version in `pyproject.toml`.

For more information, see `scripts.pack.BL_INFO_REPLACEMENTS`.

**NOTE**: The find/replace procedure is "dumb" (aka. no regex, no `ast` traversal, etc.).

This is surprisingly robust, so long as use of the deterministic code-formatter `ruff fmt` is enforced.

Still. Be careful around `bl_info`.

Attributes:
	bl_info: Information about the addon declared to Blender.
	BL_REGISTER_BEFORE_DEPS: Blender classes to register before dependencies are verified as installed.
	BL_HOTKEYS: Blender keymap item defs to register before dependencies are verified as installed.
"""

from pathlib import Path

import bpy

from . import contracts as ct
from . import info
from .nodeps.utils import simple_logger

simple_logger.sync_bootstrap_logging(
	console_level=ct.addon.BOOTSTRAP_LOG_LEVEL,
	console_level=info.BOOTSTRAP_LOG_LEVEL,
)

from . import preferences, registration  # noqa: E402
from .nodeps import operators as nodeps_operators  # noqa: E402
from . import nodeps, preferences, registration  # noqa: E402
from .nodeps.utils import pydeps  # noqa: E402

log = simple_logger.get(__name__)
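The module docstring above describes the pack-time `bl_info` substitution as a plain textual find/replace that relies on `ruff fmt` keeping the formatting of `__init__.py` deterministic. A minimal sketch of that idea follows; the placeholder strings and file layout are assumptions, and the real replacement table lives in `scripts.pack.BL_INFO_REPLACEMENTS`, which this diff does not show.

```python
# Illustrative sketch only: pin bl_info to pyproject.toml via exact-string replacement.
import tomllib
from pathlib import Path

with Path('pyproject.toml').open('rb') as f:
    proj = tomllib.load(f)

# Works only because `ruff fmt` keeps __init__.py formatting deterministic,
# so the left-hand strings can be matched exactly (no regex, no AST traversal).
replacements = {
    "'description': 'TODO',": f"'description': '{proj['project']['description']}',",
    "'version': (0, 0, 0),": "'version': ({}, {}, {}),".format(*proj['project']['version'].split('.')),
}

init_py_path = Path('src/blender_maxwell/__init__.py')  # assumed layout
text = init_py_path.read_text()
for old, new in replacements.items():
    text = text.replace(old, new)
init_py_path.write_text(text)
```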
@@ -46,6 +15,9 @@ log = simple_logger.get(__name__)
####################
# - Addon Information
####################
# The following parameters are replaced when packing the addon ZIP
## - description
## - version
bl_info = {
	'name': 'Maxwell PDE Sim and Viz',
	'blender': (4, 1, 0),
@@ -56,36 +28,24 @@ bl_info = {
	'wiki_url': 'https://git.sofus.io/dtu-courses/bsc_thesis',
	'tracker_url': 'https://git.sofus.io/dtu-courses/bsc_thesis/issues',
}
## bl_info MUST be readable via ast.parse
## See scripts/pack.py::BL_INFO_REPLACEMENTS for active replacements
## The mechanism is 'dumb' - the output of 'ruff fmt' MUST be the basis for replacing


####################
# - Load and Register Addon
####################
BL_REGISTER_BEFORE_DEPS: list[ct.BLClass] = [
	*nodeps_operators.BL_REGISTER,
log.info('Loading Before-Deps BL_REGISTER')
BL_REGISTER__BEFORE_DEPS = [
	*nodeps.operators.BL_REGISTER,
	*preferences.BL_REGISTER,
]

## TODO: BL_HANDLERS and BL_SOCKET_DEFS

BL_HOTKEYS_BEFORE_DEPS: list[ct.KeymapItemDef] = [
	*nodeps_operators.BL_HOTKEYS,
]


def load_main_blclasses(path_pydeps: Path) -> list[ct.BLClass]:
	"""Imports all addon classes that rely on Python dependencies.

	Notes:
		`sys.path` is modified while executing this function.

	Parameters:
		path_pydeps: The path to the Python dependencies.

	Returns:
		An ordered list of Blender classes to register.
	"""
	with pydeps.importable_addon_deps(path_pydeps):
def BL_REGISTER__AFTER_DEPS(path_deps: Path):
	log.info('Loading After-Deps BL_REGISTER')
	with pydeps.importable_addon_deps(path_deps):
		from . import assets, node_trees, operators
	return [
		*operators.BL_REGISTER,
|
@@ -94,114 +54,64 @@ def load_main_blclasses(path_pydeps: Path) -> list[ct.BLClass]:
	]


def load_main_blhotkeys(path_deps: Path) -> list[ct.KeymapItemDef]:
	"""Imports all keymap item defs that rely on Python dependencies.
log.info('Loading Before-Deps BL_KEYMAP_ITEM_DEFS')
BL_KEYMAP_ITEM_DEFS__BEFORE_DEPS = [
	*nodeps.operators.BL_KEYMAP_ITEM_DEFS,
]

	Notes:
		`sys.path` is modified while executing this function.

	Parameters:
		path_pydeps: The path to the Python dependencies.

	Returns:
		An ordered list of Blender keymap item defs to register.
	"""
def BL_KEYMAP_ITEM_DEFS__AFTER_DEPS(path_deps: Path):
	log.info('Loading After-Deps BL_KEYMAP_ITEM_DEFS')
	with pydeps.importable_addon_deps(path_deps):
		from . import assets, operators
	return [
		*operators.BL_HOTKEYS,
		*assets.BL_HOTKEYS,
		*operators.BL_KEYMAP_ITEM_DEFS,
		*assets.BL_KEYMAP_ITEM_DEFS,
	]


####################
# - Registration
####################
@bpy.app.handlers.persistent
def manage_pydeps(*_):
	# ct.addon.operator(
	# 	ct.OperatorType.ManagePyDeps,
	# 	'INVOKE_DEFAULT',
	# 	path_addon_pydeps='',
	# 	path_addon_reqs='',
	# )
	ct.addon.prefs().on_addon_pydeps_changed(show_popup_if_deps_invalid=True)
def register():
	"""Register the Blender addon."""
	log.info('Starting %s Registration', info.ADDON_NAME)

	# Register Barebones Addon (enough for PyDeps Installability)
	registration.register_classes(BL_REGISTER__BEFORE_DEPS)
	registration.register_keymap_items(BL_KEYMAP_ITEM_DEFS__BEFORE_DEPS)

def register() -> None:
	"""Implements a multi-stage addon registration, which accounts for Python dependency management.
	# Retrieve PyDeps Path from Addon Preferences
	if (addon_prefs := info.addon_prefs()) is None:
		unregister()
		msg = f'Addon preferences not found; aborting registration of {info.ADDON_NAME}'
		raise RuntimeError(msg)

	# Multi-Stage Registration
	The trouble is that many classes in our addon might require Python dependencies.
	# Retrieve PyDeps Path
	path_pydeps = addon_prefs.pydeps_path
	log.info('Loaded PyDeps Path from Addon Prefs: %s', path_pydeps)

	## Stage 1: Barebones Addon
	Many classes in our addon might require Python dependencies.
	However, they may not yet be installed.

	To solve this bootstrapping problem in a streamlined manner, we only **guarantee** the registration of a few key classes, including:

	- `AddonPreferences`: The addon preferences provide an interface for the user to fix Python dependency problems, thereby triggering subsequent stages.
	- `InstallPyDeps`: An operator that installs missing Python dependencies, using Blender's embedded `pip`.
	- `UninstallPyDeps`: An operator that uninstalls Python dependencies.

	**These classes provide just enough interface to help the user install the missing Python dependencies**.

	## Stage 2: Declare Delayed Registration
	We may not be able to register any classes that rely on Python dependencies.
	However, we can use `registration.delay_registration()` to **delay the registration until it is determined that the Python dependencies are satisfied**.

	For now, we just pass a callback that will import + return a list of classes to register (`load_main_blclasses()`) when the time comes.

	## Stage 3: Trigger "PyDeps Changed"
	The addon preferences is responsible for storing (and exposing to the user) the path to the Python dependencies.

	Thus, the addon preferences method `on_addon_pydeps_changed()` has the responsibility for checking when the dependencies are valid, and running the delayed registrations (and any other delayed setup) in response.
	In general, `on_addon_pydeps_changed()` runs whenever the PyDeps path is changed, but it can also be run manually.

	As the last part of this process, that's exactly what `register()` does: Runs `on_addon_pydeps_changed()` manually.
	Depending on the addon preferences (which persist), one of two things can happen:

	1. **Deps Satisfied**: The addon will load without issue: The just-declared "delayed registrations" will run immediately, and all is well.
	2. **Deps Not Satisfied**: The user must take action to fix the conflicts due to Python dependencies, before the addon can load. **A popup will show to help the user do so.**


	Notes:
		Called by Blender when enabling the addon.
	"""
	log.info('Commencing Registration of Addon: %s', ct.addon.NAME)
	bpy.app.handlers.load_post.append(manage_pydeps)

	# Register Barebones Addon
	## Contains all no-dependency BLClasses:
	## - Contains AddonPreferences.
	## Contains all BLClasses from 'nodeps'.
	registration.register_classes(BL_REGISTER_BEFORE_DEPS)
	registration.register_hotkeys(BL_HOTKEYS_BEFORE_DEPS)

	# Delay Complete Registration until DEPS_SATISFIED
	registration.delay_registration_until(
		registration.BLRegisterEvent.DepsSatisfied,
		then_register_classes=load_main_blclasses,
		then_register_hotkeys=load_main_blhotkeys,
	if pydeps.check_pydeps(path_pydeps):
		log.info('PyDeps Satisfied: Loading Addon %s', info.ADDON_NAME)
		addon_prefs.sync_addon_logging()
		registration.register_classes(BL_REGISTER__AFTER_DEPS(path_pydeps))
		registration.register_keymap_items(BL_KEYMAP_ITEM_DEFS__AFTER_DEPS(path_pydeps))
	else:
		log.info(
			'PyDeps Invalid: Delaying Addon Registration of %s',
			info.ADDON_NAME,
		)

	# Trigger PyDeps Check
	## Deps ARE OK: Delayed registration will trigger.
	## Deps NOT OK: User must fix the pydeps, then trigger this method.
	ct.addon.prefs().on_addon_pydeps_changed()
	registration.delay_registration(
		registration.EVENT__DEPS_SATISFIED,
		classes_cb=BL_REGISTER__AFTER_DEPS,
		keymap_item_defs_cb=BL_KEYMAP_ITEM_DEFS__AFTER_DEPS,
	)
	## TODO: bpy Popup to Deal w/Dependency Errors


def unregister() -> None:
	"""Unregisters anything that was registered by the addon.

	Notes:
		Run by Blender when disabling the addon.

		This doesn't clean `sys.modules`.
		To fully revert to Blender's state before the addon was in use (especially various import-related caches in the Python process), Blender must be restarted.
	"""
	log.info('Starting %s Unregister', ct.addon.NAME)
def unregister():
	"""Unregister the Blender addon."""
	log.info('Starting %s Unregister', info.ADDON_NAME)
	registration.unregister_classes()
	registration.unregister_hotkeys()
	registration.clear_delayed_registrations()
	log.info('Finished %s Unregister', ct.addon.NAME)
	registration.unregister_keymap_items()
	log.info('Finished %s Unregister', info.ADDON_NAME)
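The `register()` docstring above sketches a three-stage flow: register a barebones addon, declare delayed registrations, then let the preferences' `on_addon_pydeps_changed()` fire them once the dependencies check out. Below is a minimal sketch of what such an event-keyed delayed-registration helper could look like; the real `registration` module's API differs even between these two commits (`delay_registration` vs `delay_registration_until`), so the names here are purely illustrative.

```python
# Hypothetical sketch, not the addon's actual registration module.
import typing as typ

import bpy

_DELAYED: dict[str, list[typ.Callable[[typ.Any], list[type]]]] = {}


def delay_registration(event: str, classes_cb: typ.Callable[[typ.Any], list[type]]) -> None:
    """Remember a callback that produces classes to register once `event` fires."""
    _DELAYED.setdefault(event, []).append(classes_cb)


def run_delayed(event: str, path_pydeps) -> None:
    """Fire `event`: import on demand and register everything that was waiting on it."""
    for classes_cb in _DELAYED.pop(event, []):
        for bl_class in classes_cb(path_pydeps):
            bpy.utils.register_class(bl_class)
```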
|
|
@@ -4,11 +4,11 @@ BL_REGISTER = [
	*import_geonodes.BL_REGISTER,
]

BL_HOTKEYS = [
	*import_geonodes.BL_HOTKEYS,
BL_KEYMAP_ITEM_DEFS = [
	*import_geonodes.BL_KEYMAP_ITEM_DEFS,
]

__all__ = [
	'BL_REGISTER',
	'BL_HOTKEYS',
	'BL_KEYMAP_ITEM_DEFS',
]
|
|
@ -1,15 +1,22 @@
|
|||
"""Provides for the linking and/or appending of geometry nodes trees from vendored libraries included in Blender maxwell."""
|
||||
|
||||
import enum
|
||||
import typing as typ
|
||||
from pathlib import Path
|
||||
|
||||
import bpy
|
||||
import typing_extensions as typx
|
||||
|
||||
from blender_maxwell import contracts as ct
|
||||
from blender_maxwell.utils import logger
|
||||
from .. import info
|
||||
from ..utils import logger
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
||||
ImportMethod: typ.TypeAlias = typx.Literal['append', 'link']
|
||||
BLOperatorStatus: typ.TypeAlias = set[
|
||||
typx.Literal['RUNNING_MODAL', 'CANCELLED', 'FINISHED', 'PASS_THROUGH', 'INTERFACE']
|
||||
]
|
||||
|
||||
|
||||
####################
|
||||
# - GeoNodes Specification
|
||||
|
@ -19,7 +26,6 @@ class GeoNodes(enum.StrEnum):
|
|||
|
||||
The value of this StrEnum is both the name of the .blend file containing the GeoNodes group, and of the GeoNodes group itself.
|
||||
"""
|
||||
|
||||
# Node Previews
|
||||
## Input
|
||||
InputConstantPhysicalPol = '_input_constant_physical_pol'
|
||||
|
@ -73,7 +79,7 @@ class GeoNodes(enum.StrEnum):
|
|||
|
||||
# GeoNodes Paths
|
||||
## Internal
|
||||
GN_INTERNAL_PATH = ct.addon.PATH_ASSETS / 'internal' / 'primitives'
|
||||
GN_INTERNAL_PATH = info.PATH_ASSETS / 'internal' / 'primitives'
|
||||
GN_INTERNAL_INPUTS_PATH = GN_INTERNAL_PATH / 'input'
|
||||
GN_INTERNAL_SOURCES_PATH = GN_INTERNAL_PATH / 'source'
|
||||
GN_INTERNAL_STRUCTURES_PATH = GN_INTERNAL_PATH / 'structure'
|
||||
|
@ -81,7 +87,7 @@ GN_INTERNAL_MONITORS_PATH = GN_INTERNAL_PATH / 'monitor'
|
|||
GN_INTERNAL_SIMULATIONS_PATH = GN_INTERNAL_PATH / 'simulation'
|
||||
|
||||
## Structures
|
||||
GN_STRUCTURES_PATH = ct.addon.PATH_ASSETS / 'structures'
|
||||
GN_STRUCTURES_PATH = info.PATH_ASSETS / 'structures'
|
||||
GN_STRUCTURES_PRIMITIVES_PATH = GN_STRUCTURES_PATH / 'primitives'
|
||||
|
||||
GN_PARENT_PATHS: dict[GeoNodes, Path] = {
|
||||
|
@ -128,6 +134,7 @@ GN_PARENT_PATHS: dict[GeoNodes, Path] = {
|
|||
GeoNodes.SimulationSimGridAxisManual: GN_INTERNAL_SIMULATIONS_PATH,
|
||||
GeoNodes.SimulationSimGridAxisUniform: GN_INTERNAL_SIMULATIONS_PATH,
|
||||
GeoNodes.SimulationSimGridAxisArray: GN_INTERNAL_SIMULATIONS_PATH,
|
||||
|
||||
# Structures
|
||||
GeoNodes.PrimitiveBox: GN_STRUCTURES_PRIMITIVES_PATH,
|
||||
GeoNodes.PrimitiveRing: GN_STRUCTURES_PRIMITIVES_PATH,
|
||||
|
@ -140,7 +147,7 @@ GN_PARENT_PATHS: dict[GeoNodes, Path] = {
|
|||
####################
|
||||
def import_geonodes(
|
||||
geonodes: GeoNodes,
|
||||
import_method: ct.BLImportMethod,
|
||||
import_method: ImportMethod,
|
||||
) -> bpy.types.GeometryNodeGroup:
|
||||
"""Given a (name of a) GeoNodes group packaged with Blender Maxwell, link/append it to the current file, and return the node group.
|
||||
|
||||
|
@ -152,7 +159,10 @@ def import_geonodes(
|
|||
Returns:
|
||||
A GeoNodes group available in the current .blend file, which can ex. be attached to a 'GeoNodes Structure' node.
|
||||
"""
|
||||
if import_method == 'link' and geonodes in bpy.data.node_groups:
|
||||
if (
|
||||
import_method == 'link'
|
||||
and geonodes in bpy.data.node_groups
|
||||
):
|
||||
return bpy.data.node_groups[geonodes]
|
||||
|
||||
filename = geonodes
|
||||
|
@ -263,7 +273,7 @@ class AppendGeoNodes(bpy.types.Operator):
|
|||
def invoke(self, context: bpy.types.Context, _):
|
||||
return self.execute(context)
|
||||
|
||||
def execute(self, context: bpy.types.Context) -> ct.BLOperatorStatus:
|
||||
def execute(self, context: bpy.types.Context) -> BLOperatorStatus:
|
||||
"""Initializes the while-dragging modal handler, which executes custom logic when the mouse button is released.
|
||||
|
||||
Runs in response to drag_handler of a `UILayout.template_asset_view`.
|
||||
|
@ -286,7 +296,7 @@ class AppendGeoNodes(bpy.types.Operator):
|
|||
|
||||
def modal(
|
||||
self, context: bpy.types.Context, event: bpy.types.Event
|
||||
) -> ct.BLOperatorStatus:
|
||||
) -> BLOperatorStatus:
|
||||
"""When LMB is released, creates a GeoNodes Structure node.
|
||||
|
||||
Runs in response to events in the node editor while dragging an asset from the side panel.
|
||||
|
@ -375,14 +385,14 @@ class AppendGeoNodes(bpy.types.Operator):
|
|||
asset_libraries = bpy.context.preferences.filepaths.asset_libraries
|
||||
if (
|
||||
asset_library_idx := asset_libraries.find('Blender Maxwell')
|
||||
) != -1 and asset_libraries['Blender Maxwell'].path != str(ct.addon.PATH_ASSETS):
|
||||
) != -1 and asset_libraries['Blender Maxwell'].path != str(info.PATH_ASSETS):
|
||||
bpy.ops.preferences.asset_library_remove(asset_library_idx)
|
||||
|
||||
if 'Blender Maxwell' not in asset_libraries:
|
||||
bpy.ops.preferences.asset_library_add()
|
||||
asset_library = asset_libraries[-1] ## Since the operator adds to the end
|
||||
asset_library.name = 'Blender Maxwell'
|
||||
asset_library.path = str(ct.addon.PATH_ASSETS)
|
||||
asset_library.path = str(info.PATH_ASSETS)
|
||||
|
||||
bpy.types.WindowManager.active_asset_list = bpy.props.CollectionProperty(
|
||||
type=bpy.types.AssetHandle
|
||||
|
@ -396,7 +406,7 @@ BL_REGISTER = [
|
|||
AppendGeoNodes,
|
||||
]
|
||||
|
||||
BL_HOTKEYS = [
|
||||
BL_KEYMAP_ITEM_DEFS = [
|
||||
# {
|
||||
# '_': [
|
||||
# AppendGeoNodes.bl_idname,
|
||||
|
|
|
@ -1,45 +0,0 @@
|
|||
from . import addon
|
||||
from .bl import (
|
||||
BLClass,
|
||||
BLColorRGBA,
|
||||
BLEnumID,
|
||||
BLIconSet,
|
||||
BLImportMethod,
|
||||
BLKeymapItem,
|
||||
BLModifierType,
|
||||
BLNodeTreeInterfaceID,
|
||||
BLOperatorStatus,
|
||||
BLRegionType,
|
||||
BLSpaceType,
|
||||
KeymapItemDef,
|
||||
ManagedObjName,
|
||||
PresetName,
|
||||
SocketName,
|
||||
)
|
||||
from .operator_types import (
|
||||
OperatorType,
|
||||
)
|
||||
from .panel_types import (
|
||||
PanelType,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
'addon',
|
||||
'BLClass',
|
||||
'BLColorRGBA',
|
||||
'BLEnumID',
|
||||
'BLIconSet',
|
||||
'BLImportMethod',
|
||||
'BLKeymapItem',
|
||||
'BLModifierType',
|
||||
'BLNodeTreeInterfaceID',
|
||||
'BLOperatorStatus',
|
||||
'BLRegionType',
|
||||
'BLSpaceType',
|
||||
'KeymapItemDef',
|
||||
'ManagedObjName',
|
||||
'PresetName',
|
||||
'SocketName',
|
||||
'OperatorType',
|
||||
'PanelType',
|
||||
]
|
|
@ -1,93 +0,0 @@
|
|||
import random
|
||||
import tomllib
|
||||
from pathlib import Path
|
||||
|
||||
import bpy
|
||||
import bpy_restrict_state
|
||||
|
||||
PATH_ADDON_ROOT = Path(__file__).resolve().parent.parent
|
||||
with (PATH_ADDON_ROOT / 'pyproject.toml').open('rb') as f:
|
||||
PROJ_SPEC = tomllib.load(f)
|
||||
## bl_info is filled with PROJ_SPEC when packing the .zip.
|
||||
|
||||
NAME = PROJ_SPEC['project']['name']
|
||||
VERSION = PROJ_SPEC['project']['version']
|
||||
|
||||
####################
|
||||
# - Assets
|
||||
####################
|
||||
PATH_ASSETS = PATH_ADDON_ROOT / 'assets'
|
||||
|
||||
####################
|
||||
# - PyDeps Info
|
||||
####################
|
||||
PATH_REQS = PATH_ADDON_ROOT / 'requirements.lock'
|
||||
DEFAULT_PATH_DEPS = PATH_ADDON_ROOT / '.addon_dependencies'
|
||||
## requirements.lock is written when packing the .zip.
|
||||
## By default, the addon pydeps are kept in the addon dir.
|
||||
|
||||
####################
|
||||
# - Local Addon Cache
|
||||
####################
|
||||
ADDON_CACHE = PATH_ADDON_ROOT / '.addon_cache'
|
||||
ADDON_CACHE.mkdir(exist_ok=True)
|
||||
|
||||
|
||||
####################
|
||||
# - Dynamic Addon Information
|
||||
####################
|
||||
def is_loading() -> bool:
|
||||
"""Checks whether the addon is currently loading.
|
||||
|
||||
While an addon is loading, `bpy.context` is temporarily very limited.
|
||||
For example, operators can't run while the addon is loading.
|
||||
|
||||
By checking whether `bpy.context` is limited like this, we can determine whether the addon is currently loading.
|
||||
|
||||
Notes:
|
||||
Since `bpy_restrict_state._RestrictContext` is a very internal thing, this function may be prone to breakage on Blender updates.
|
||||
|
||||
**Keep an eye out**!
|
||||
|
||||
Returns:
|
||||
Whether the addon has been fully loaded, such that `bpy.context` is fully accessible.
|
||||
"""
|
||||
return isinstance(bpy.context, bpy_restrict_state._RestrictContext)
|
||||
|
||||
|
||||
def operator(name: str, *operator_args, **operator_kwargs) -> None:
|
||||
# Parse Operator Name
|
||||
operator_namespace, operator_name = name.split('.')
|
||||
if operator_namespace != NAME:
|
||||
msg = f'Tried to call operator {operator_name}, but addon operators may only use the addon operator namespace "{operator_namespace}.<name>"'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
# Addon Not Loading: Run Operator
|
||||
if not is_loading():
|
||||
operator = getattr(getattr(bpy.ops, NAME), operator_name)
|
||||
operator(*operator_args, **operator_kwargs)
|
||||
else:
|
||||
msg = f'Tried to call operator "{operator_name}" while addon is loading'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
|
||||
def prefs() -> bpy.types.AddonPreferences | None:
|
||||
if (addon := bpy.context.preferences.addons.get(NAME)) is None:
|
||||
msg = 'Addon is not installed'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
return addon.preferences
|
||||
|
||||
|
||||
####################
|
||||
# - Logging Info
|
||||
####################
|
||||
DEFAULT_LOG_PATH = PATH_ADDON_ROOT / 'addon.log'
|
||||
DEFAULT_LOG_PATH.touch(exist_ok=True)
|
||||
## By default, the addon file log writes to the addon dir.
|
||||
## The initial .log_level contents are written when packing the .zip.
|
||||
## Subsequent changes are managed by nodeps.utils.simple_logger.py.
|
||||
|
||||
PATH_BOOTSTRAP_LOG_LEVEL = PATH_ADDON_ROOT / '.bootstrap_log_level'
|
||||
with PATH_BOOTSTRAP_LOG_LEVEL.open('r') as f:
|
||||
BOOTSTRAP_LOG_LEVEL = int(f.read().strip())
|
|
@ -1,96 +0,0 @@
|
|||
import typing as typ
|
||||
|
||||
import bpy
|
||||
|
||||
####################
|
||||
# - Blender Strings
|
||||
####################
|
||||
BLEnumID = str
|
||||
SocketName = str
|
||||
|
||||
####################
|
||||
# - Blender Enums
|
||||
####################
|
||||
BLImportMethod: typ.TypeAlias = typ.Literal['append', 'link']
|
||||
BLModifierType: typ.TypeAlias = typ.Literal['NODES', 'ARRAY']
|
||||
BLNodeTreeInterfaceID: typ.TypeAlias = str
|
||||
|
||||
BLIconSet: frozenset[str] = frozenset(
|
||||
bpy.types.UILayout.bl_rna.functions['prop'].parameters['icon'].enum_items.keys()
|
||||
)
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Structs
|
||||
####################
|
||||
BLClass: typ.TypeAlias = (
|
||||
bpy.types.Panel
|
||||
| bpy.types.UIList
|
||||
| bpy.types.Menu
|
||||
| bpy.types.Header
|
||||
| bpy.types.Operator
|
||||
| bpy.types.KeyingSetInfo
|
||||
| bpy.types.RenderEngine
|
||||
| bpy.types.AssetShelf
|
||||
| bpy.types.FileHandler
|
||||
)
|
||||
BLKeymapItem: typ.TypeAlias = typ.Any ## TODO: Better Type
|
||||
BLColorRGBA = tuple[float, float, float, float]
|
||||
|
||||
|
||||
####################
|
||||
# - Operators
|
||||
####################
|
||||
BLSpaceType: typ.TypeAlias = typ.Literal[
|
||||
'EMPTY',
|
||||
'VIEW_3D',
|
||||
'IMAGE_EDITOR',
|
||||
'NODE_EDITOR',
|
||||
'SEQUENCE_EDITOR',
|
||||
'CLIP_EDITOR',
|
||||
'DOPESHEET_EDITOR',
|
||||
'GRAPH_EDITOR',
|
||||
'NLA_EDITOR',
|
||||
'TEXT_EDITOR',
|
||||
'CONSOLE',
|
||||
'INFO',
|
||||
'TOPBAR',
|
||||
'STATUSBAR',
|
||||
'OUTLINER',
|
||||
'PROPERTIES',
|
||||
'FILE_BROWSER',
|
||||
'SPREADSHEET',
|
||||
'PREFERENCES',
|
||||
]
|
||||
BLRegionType: typ.TypeAlias = typ.Literal[
|
||||
'WINDOW',
|
||||
'HEADER',
|
||||
'CHANNELS',
|
||||
'TEMPORARY',
|
||||
'UI',
|
||||
'TOOLS',
|
||||
'TOOL_PROPS',
|
||||
'ASSET_SHELF',
|
||||
'ASSET_SHELF_HEADER',
|
||||
'PREVIEW',
|
||||
'HUD',
|
||||
'NAVIGATION_BAR',
|
||||
'EXECUTE',
|
||||
'FOOTER',
|
||||
'TOOL_HEADER',
|
||||
'XR',
|
||||
]
|
||||
BLOperatorStatus: typ.TypeAlias = set[
|
||||
typ.Literal['RUNNING_MODAL', 'CANCELLED', 'FINISHED', 'PASS_THROUGH', 'INTERFACE']
|
||||
]
|
||||
|
||||
####################
|
||||
# - Addon Types
|
||||
####################
|
||||
KeymapItemDef: typ.TypeAlias = typ.Any ## TODO: Better Type
|
||||
ManagedObjName = str
|
||||
|
||||
####################
|
||||
# - Blender Strings
|
||||
####################
|
||||
PresetName = str
|
|
@ -1,15 +0,0 @@
|
|||
"""Defines Operator Types as an enum, making it easy for any part of the addon to refer to any operator."""
|
||||
|
||||
import enum
|
||||
|
||||
from ..nodeps.utils import blender_type_enum
|
||||
from .addon import NAME as ADDON_NAME
|
||||
|
||||
|
||||
@blender_type_enum.prefix_values_with(f'{ADDON_NAME}.')
|
||||
class OperatorType(enum.StrEnum):
|
||||
"""Identifiers for addon-defined `bpy.types.Operator`."""
|
||||
|
||||
InstallPyDeps = enum.auto()
|
||||
UninstallPyDeps = enum.auto()
|
||||
ManagePyDeps = enum.auto()
|
|
@ -1,12 +0,0 @@
|
|||
"""Defines Panel Types as an enum, making it easy for any part of the addon to refer to any panel."""
|
||||
|
||||
import enum
|
||||
|
||||
from blender_maxwell.nodeps.utils import blender_type_enum
|
||||
|
||||
from .addon import NAME as ADDON_NAME
|
||||
|
||||
|
||||
@blender_type_enum.prefix_values_with(f'{ADDON_NAME.upper()}_PT_')
|
||||
class PanelType(enum.StrEnum):
|
||||
"""Identifiers for addon-defined `bpy.types.Panel`."""
|
|
@@ -0,0 +1,59 @@
import tomllib
from pathlib import Path

import bpy

PATH_ADDON_ROOT = Path(__file__).resolve().parent

####################
# - Addon Info
####################
with (PATH_ADDON_ROOT / 'pyproject.toml').open('rb') as f:
	PROJ_SPEC = tomllib.load(f)
	## bl_info is filled with PROJ_SPEC when packing the .zip.

ADDON_NAME = PROJ_SPEC['project']['name']
ADDON_VERSION = PROJ_SPEC['project']['version']

####################
# - Asset Info
####################
PATH_ASSETS = PATH_ADDON_ROOT / 'assets'

####################
# - PyDeps Info
####################
PATH_REQS = PATH_ADDON_ROOT / 'requirements.lock'
DEFAULT_PATH_DEPS = PATH_ADDON_ROOT / '.addon_dependencies'
## requirements.lock is written when packing the .zip.
## By default, the addon pydeps are kept in the addon dir.

####################
# - Local Addon Cache
####################
ADDON_CACHE = PATH_ADDON_ROOT / '.addon_cache'
ADDON_CACHE.mkdir(exist_ok=True)
## TODO: Addon preferences?

####################
# - Logging Info
####################
DEFAULT_LOG_PATH = PATH_ADDON_ROOT / 'addon.log'
DEFAULT_LOG_PATH.touch(exist_ok=True)
## By default, the addon file log writes to the addon dir.
## The initial .log_level contents are written when packing the .zip.
## Subsequent changes are managed by nodeps.utils.simple_logger.py.

PATH_BOOTSTRAP_LOG_LEVEL = PATH_ADDON_ROOT / '.bootstrap_log_level'
with PATH_BOOTSTRAP_LOG_LEVEL.open('r') as f:
	BOOTSTRAP_LOG_LEVEL = int(f.read().strip())

####################
# - Addon Prefs Info
####################
def addon_prefs() -> bpy.types.AddonPreferences | None:
	if (addon := bpy.context.preferences.addons.get(ADDON_NAME)) is None:
		return None

	return addon.preferences
|
@@ -1,3 +1,9 @@
import sympy as sp

sp.printing.str.StrPrinter._default_settings['abbrev'] = True
## In this tree, all Sympy unit printing must be abbreviated.
## By configuring this in __init__.py, we guarantee it for all subimports.
## (Unless, elsewhere, this setting is changed. Be careful!)

from . import categories, node_tree, nodes, sockets
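For orientation, the `abbrev` printer flag set above makes sympy print units as their symbols rather than their full names. A quick check of the effect:

```python
import sympy as sp
import sympy.physics.units as spu

print(3 * spu.nanometer)  # '3*nanometer' with default printer settings

sp.printing.str.StrPrinter._default_settings['abbrev'] = True
print(3 * spu.nanometer)  # '3*nm' once abbreviated unit printing is enabled
```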
|
||||
|
||||
|
|
|
@@ -6,7 +6,7 @@ import typing as typ

import bpy

from blender_maxwell.utils import logger, serialize
from ...utils import logger, serialize

log = logger.get(__name__)

@@ -430,7 +430,7 @@ class BLField:

		Parameters:
			default_value: The default value to use if the value is read before it's set.
			triggers_prop_update: Whether to run `bl_instance.on_prop_changed(attr_name)` whenever value is set.
			triggers_prop_update: Whether to run `bl_instance.sync_prop(attr_name)` whenever value is set.

		"""
		log.debug(
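The `triggers_prop_update` parameter documented above means roughly: when the field's value is set, notify the owning Blender instance so the change can propagate. A toy descriptor illustrating that contract; this is not the actual `BLField` implementation, and the hook is named `on_prop_changed` on one side of this diff and `sync_prop` on the other.

```python
# Illustrative sketch of a field whose setter notifies its owner.
class SimpleField:
    def __init__(self, default_value=None, triggers_prop_update: bool = True):
        self._default = default_value
        self._triggers_prop_update = triggers_prop_update

    def __set_name__(self, owner, name: str) -> None:
        self._name = name

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        return obj.__dict__.get(self._name, self._default)

    def __set__(self, obj, value) -> None:
        obj.__dict__[self._name] = value
        if self._triggers_prop_update:
            # Whatever the hook is called, the idea is "react to this change".
            obj.on_prop_changed(self._name)
```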
|
|
@@ -11,9 +11,8 @@ import typing as typ
import bpy
import sympy as sp

from blender_maxwell.utils import extra_sympy_units as spux
from blender_maxwell.utils import logger as _logger

from ...utils import extra_sympy_units as spux
from ...utils import logger as _logger
from . import contracts as ct
from . import sockets
|
||||
|
||||
|
|
|
@ -1,66 +1,79 @@
|
|||
from blender_maxwell.contracts import (
|
||||
BLClass,
|
||||
BLColorRGBA,
|
||||
BLEnumID,
|
||||
BLIconSet,
|
||||
BLKeymapItem,
|
||||
BLModifierType,
|
||||
BLNodeTreeInterfaceID,
|
||||
BLOperatorStatus,
|
||||
BLRegionType,
|
||||
BLSpaceType,
|
||||
KeymapItemDef,
|
||||
ManagedObjName,
|
||||
OperatorType,
|
||||
PanelType,
|
||||
PresetName,
|
||||
SocketName,
|
||||
addon,
|
||||
)
|
||||
# ruff: noqa: I001
|
||||
|
||||
from .bl_socket_desc_map import BL_SOCKET_DESCR_TYPE_MAP
|
||||
from .bl_socket_types import BL_SOCKET_DESCR_ANNOT_STRING, BL_SOCKET_DIRECT_TYPE_MAP
|
||||
from .category_labels import NODE_CAT_LABELS
|
||||
from .category_types import NodeCategory
|
||||
from .flow_events import FlowEvent
|
||||
from .flow_kinds import (
|
||||
ArrayFlow,
|
||||
CapabilitiesFlow,
|
||||
FlowKind,
|
||||
InfoFlow,
|
||||
LazyArrayRangeFlow,
|
||||
LazyValueFuncFlow,
|
||||
ParamsFlow,
|
||||
ValueFlow,
|
||||
)
|
||||
####################
|
||||
# - String Types
|
||||
####################
|
||||
from .bl import SocketName
|
||||
from .bl import PresetName
|
||||
from .bl import ManagedObjName
|
||||
|
||||
|
||||
from .bl import BLEnumID
|
||||
from .bl import BLColorRGBA
|
||||
|
||||
####################
|
||||
# - Icon Types
|
||||
####################
|
||||
from .icons import Icon
|
||||
from .mobj_types import ManagedObjType
|
||||
from .node_types import NodeType
|
||||
|
||||
####################
|
||||
# - Tree Types
|
||||
####################
|
||||
from .trees import TreeType
|
||||
|
||||
####################
|
||||
# - Socket Types
|
||||
####################
|
||||
from .socket_types import SocketType
|
||||
|
||||
from .socket_units import SOCKET_UNITS
|
||||
from .socket_colors import SOCKET_COLORS
|
||||
from .socket_shapes import SOCKET_SHAPES
|
||||
from .socket_types import SocketType
|
||||
from .socket_units import SOCKET_UNITS
|
||||
from .tree_types import TreeType
|
||||
|
||||
from .unit_systems import UNITS_BLENDER, UNITS_TIDY3D
|
||||
|
||||
from .socket_from_bl_desc import BL_SOCKET_DESCR_TYPE_MAP
|
||||
from .socket_from_bl_direct import BL_SOCKET_DIRECT_TYPE_MAP
|
||||
|
||||
from .socket_from_bl_desc import BL_SOCKET_DESCR_ANNOT_STRING
|
||||
|
||||
####################
|
||||
# - Node Types
|
||||
####################
|
||||
from .node_types import NodeType
|
||||
|
||||
from .node_cats import NodeCategory
|
||||
from .node_cat_labels import NODE_CAT_LABELS
|
||||
|
||||
####################
|
||||
# - Managed Obj Type
|
||||
####################
|
||||
from .managed_obj_type import ManagedObjType
|
||||
|
||||
####################
|
||||
# - Data Flows
|
||||
####################
|
||||
from .data_flows import (
|
||||
DataFlowKind,
|
||||
DataCapabilities,
|
||||
DataValue,
|
||||
DataValueArray,
|
||||
DataValueSpectrum,
|
||||
LazyDataValue,
|
||||
LazyDataValueRange,
|
||||
LazyDataValueSpectrum,
|
||||
)
|
||||
from .data_flow_actions import DataFlowAction
|
||||
|
||||
####################
|
||||
# - Export
|
||||
####################
|
||||
__all__ = [
|
||||
'BLClass',
|
||||
'BLColorRGBA',
|
||||
'BLEnumID',
|
||||
'BLIconSet',
|
||||
'BLKeymapItem',
|
||||
'BLModifierType',
|
||||
'BLNodeTreeInterfaceID',
|
||||
'BLOperatorStatus',
|
||||
'BLRegionType',
|
||||
'BLSpaceType',
|
||||
'KeymapItemDef',
|
||||
'ManagedObjName',
|
||||
'OperatorType',
|
||||
'PanelType',
|
||||
'PresetName',
|
||||
'SocketName',
|
||||
'addon',
|
||||
'PresetName',
|
||||
'ManagedObjName',
|
||||
'BLEnumID',
|
||||
'BLColorRGBA',
|
||||
'Icon',
|
||||
'TreeType',
|
||||
'SocketType',
|
||||
|
@ -76,13 +89,13 @@ __all__ = [
|
|||
'NodeCategory',
|
||||
'NODE_CAT_LABELS',
|
||||
'ManagedObjType',
|
||||
'FlowEvent',
|
||||
'ArrayFlow',
|
||||
'CapabilitiesFlow',
|
||||
'FlowKind',
|
||||
'InfoFlow',
|
||||
'LazyArrayRangeFlow',
|
||||
'LazyValueFuncFlow',
|
||||
'ParamsFlow',
|
||||
'ValueFlow',
|
||||
'DataFlowKind',
|
||||
'DataCapabilities',
|
||||
'DataValue',
|
||||
'DataValueArray',
|
||||
'DataValueSpectrum',
|
||||
'LazyDataValue',
|
||||
'LazyDataValueRange',
|
||||
'LazyDataValueSpectrum',
|
||||
'DataFlowAction',
|
||||
]
|
||||
|
|
|
@@ -0,0 +1,35 @@
import pydantic as pyd
import typing_extensions as pytypes_ext

####################
# - Pure BL Types
####################
BLEnumID = pytypes_ext.Annotated[
	str,
	pyd.StringConstraints(
		pattern=r'^[A-Z_]+$',
	),
]
SocketName = pytypes_ext.Annotated[
	str,
	pyd.StringConstraints(
		pattern=r'^[a-zA-Z0-9_]+$',
	),
]
PresetName = pytypes_ext.Annotated[
	str,
	pyd.StringConstraints(
		pattern=r'^[a-zA-Z0-9_]+$',
	),
]
BLColorRGBA = tuple[float, float, float, float]

####################
# - Shared-With-BL Types
####################
ManagedObjName = pytypes_ext.Annotated[
	str,
	pyd.StringConstraints(
		pattern=r'^[a-z_]+$',
	),
]
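At runtime these `Annotated` aliases are ordinary strings; the constraints only bite where something validates them. A small, self-contained usage sketch with pydantic v2's `TypeAdapter`:

```python
import pydantic as pyd
import typing_extensions as pytypes_ext

# Same shape as the ManagedObjName alias defined above.
ManagedObjName = pytypes_ext.Annotated[str, pyd.StringConstraints(pattern=r'^[a-z_]+$')]

adapter = pyd.TypeAdapter(ManagedObjName)
adapter.validate_python('managed_mesh')      # OK: matches ^[a-z_]+$
try:
    adapter.validate_python('ManagedMesh')   # uppercase violates the pattern
except pyd.ValidationError as ex:
    print(ex)
```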
|
|
@@ -0,0 +1,46 @@
import enum
import typing as typ

import typing_extensions as typx


class DataFlowAction(enum.StrEnum):
	# Locking
	EnableLock = 'enable_lock'
	DisableLock = 'disable_lock'

	# Value
	OutputRequested = 'output_requested'
	DataChanged = 'value_changed'

	# Previewing
	ShowPreview = 'show_preview'
	ShowPlot = 'show_plot'

	def trigger_direction(action: typ.Self) -> typx.Literal['input', 'output']:
		"""When a given action is triggered, all sockets/nodes/... in this direction should be recursively triggered.

		Parameters:
			action: The action for which to retrieve the trigger direction.

		Returns:
			The trigger direction, which can be used ex. in nodes to select `node.inputs` or `node.outputs`.
		"""
		return {
			DataFlowAction.EnableLock: 'input',
			DataFlowAction.DisableLock: 'input',
			DataFlowAction.DataChanged: 'output',
			DataFlowAction.OutputRequested: 'input',
			DataFlowAction.ShowPreview: 'input',
			DataFlowAction.ShowPlot: 'input',
		}[action]

	def stop_if_no_event_methods(action: typ.Self) -> bool:
		return {
			DataFlowAction.EnableLock: False,
			DataFlowAction.DisableLock: False,
			DataFlowAction.DataChanged: True,
			DataFlowAction.OutputRequested: True,
			DataFlowAction.ShowPreview: False,
			DataFlowAction.ShowPlot: False,
		}[action]
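A hedged sketch of how a node might consume `trigger_direction` to decide which sockets an action propagates through; the `Node` and socket API below is illustrative, not the addon's actual node base class.

```python
# Assumed, simplified node: real propagation in the addon is more involved.
class Node:
    def __init__(self, inputs: list, outputs: list):
        self.inputs = inputs
        self.outputs = outputs

    def trigger_action(self, action: DataFlowAction) -> None:
        direction = DataFlowAction.trigger_direction(action)
        sockets = self.inputs if direction == 'input' else self.outputs
        for socket in sockets:
            socket.trigger_action(action)  # recursive propagation across links
```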
|
|
@ -0,0 +1,334 @@
|
|||
import dataclasses
|
||||
import enum
|
||||
import functools
|
||||
import typing as typ
|
||||
from types import MappingProxyType
|
||||
|
||||
# import colour ## TODO
|
||||
import numpy as np
|
||||
import sympy as sp
|
||||
import sympy.physics.units as spu
|
||||
import typing_extensions as typx
|
||||
|
||||
from ....utils import extra_sympy_units as spux
|
||||
from ....utils import sci_constants as constants
|
||||
from .socket_types import SocketType
|
||||
|
||||
|
||||
class DataFlowKind(enum.StrEnum):
|
||||
"""Defines a shape/kind of data that may flow through a node tree.
|
||||
|
||||
Since a node socket may define one of each, we can support several related kinds of data flow through the same node-graph infrastructure.
|
||||
|
||||
Attributes:
|
||||
Value: A value without any unknown symbols.
|
||||
- Basic types aka. float, int, list, string, etc. .
|
||||
- Exotic (immutable-ish) types aka. numpy array, KDTree, etc. .
|
||||
- A usable constructed object, ex. a `tidy3d.Box`.
|
||||
- Expressions (`sp.Expr`) that don't have unknown variables.
|
||||
- Lazy sequences aka. generators, with all data bound.
|
||||
SpectralValue: A value defined along a spectral range.
|
||||
- {`np.array`
|
||||
|
||||
LazyValue: An object which, when given new data, can make many values.
|
||||
- An `sp.Expr`, which might need `simplify`ing, `jax` JIT'ing, unit cancellations, variable substitutions, etc. before use.
|
||||
- Lazy objects, for which all parameters aren't yet known.
|
||||
- A computational graph aka. `aesara`, which may even need to be handled before
|
||||
|
||||
Capabilities: A `ValueCapability` object providing compatibility.
|
||||
|
||||
# Value Data Flow
|
||||
Simply passing values is the simplest and easiest use case.
|
||||
|
||||
This doesn't mean it's "dumb" - ex. a `sp.Expr` might, before use, have `simplify`, rewriting, unit cancellation, etc. run.
|
||||
All of this is okay, as long as there is no *introduction of new data* ex. variable substitutions.
|
||||
|
||||
|
||||
# Lazy Value Data Flow
|
||||
By passing (essentially) functions, one supports:
|
||||
- **Lightness**: While lazy values can be made expensive to construct, they will generally not be nearly as heavy to handle when trying to work with ex. operations on voxel arrays.
|
||||
- **Performance**: Parameterizing ex. `sp.Expr` with variables allows one to build very optimized functions, which can make ex. node graph updates very fast if the only operation run is the `jax` JIT'ed function (aka. GPU accelerated) generated from the final full expression.
|
||||
- **Numerical Stability**: Libraries like `aesara` build a computational graph, which can be automatically rewritten to avoid many obvious conditioning / cancellation errors.
|
||||
- **Lazy Output**: The goal of a node-graph may not be the definition of a single value, but rather, a parameterized expression for generating *many values* with known properties. This is especially interesting for use cases where one wishes to build an optimization step using nodes.
|
||||
|
||||
|
||||
# Capability Passing
|
||||
By being able to pass "capabilities" next to other kinds of values, nodes can quickly determine whether a given link is valid without having to actually compute it.
|
||||
|
||||
|
||||
# Lazy Parameter Value
|
||||
When using parameterized LazyValues, one may wish to independently pass parameter values through the graph, so they can be inserted into the final (cached) high-performance expression without.
|
||||
|
||||
The advantage of using a different data flow would be changing this kind of value would ONLY invalidate lazy parameter value caches, which would allow an incredibly fast path of getting the value into the lazy expression for high-performance computation.
|
||||
|
||||
Implementation TBD - though, ostensibly, one would have a "parameter" node which both would only provide a LazyValue (aka. a symbolic variable), but would also be able to provide a LazyParamValue, which would be a particular value of some kind (probably via the `value` of some other node socket).
|
||||
"""
|
||||
|
||||
Capabilities = enum.auto()
|
||||
|
||||
# Values
|
||||
Value = enum.auto()
|
||||
ValueArray = enum.auto()
|
||||
ValueSpectrum = enum.auto()
|
||||
|
||||
# Lazy
|
||||
LazyValue = enum.auto()
|
||||
LazyValueRange = enum.auto()
|
||||
LazyValueSpectrum = enum.auto()
|
||||
|
||||
@classmethod
|
||||
def scale_to_unit_system(cls, kind: typ.Self, value, socket_type, unit_system):
|
||||
if kind == cls.Value:
|
||||
return spux.sympy_to_python(
|
||||
spux.scale_to_unit(
|
||||
value,
|
||||
unit_system[socket_type],
|
||||
)
|
||||
)
|
||||
if kind == cls.LazyValueRange:
|
||||
return value.rescale_to_unit(unit_system[socket_type])
|
||||
|
||||
msg = 'Tried to scale unknown kind'
|
||||
raise ValueError(msg)
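To make the "lazy value" idea from the `DataFlowKind` docstring concrete: a parameterized `sympy` expression can flow through the graph untouched, and only be turned into a fast numerical function once concrete values finally arrive. A minimal sketch, with made-up symbols and values:

```python
import sympy as sp

x, a = sp.symbols('x a')
lazy_expr = a * sp.sin(x) ** 2  # flows between nodes as-is; no data bound yet

realize = sp.lambdify([x, a], lazy_expr, 'numpy')  # vectorized callable, built once
print(realize(0.5, 2.0))  # concrete value, computed only at the very end
```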
|
||||
|
||||
|
||||
####################
|
||||
# - Data Structures: Capabilities
|
||||
####################
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
class DataCapabilities:
|
||||
socket_type: SocketType
|
||||
active_kind: DataFlowKind
|
||||
|
||||
is_universal: bool = False
|
||||
|
||||
def is_compatible_with(self, other: typ.Self) -> bool:
|
||||
return (
|
||||
self.socket_type == other.socket_type
|
||||
and self.active_kind == other.active_kind
|
||||
) or other.is_universal
|
||||
|
||||
|
||||
####################
|
||||
# - Data Structures: Non-Lazy
|
||||
####################
|
||||
DataValue: typ.TypeAlias = typ.Any
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
class DataValueArray:
|
||||
"""A simple, flat array of values with an optionally-attached unit.
|
||||
|
||||
Attributes:
|
||||
values: An ND array-like object of arbitrary numerical type.
|
||||
unit: A `sympy` unit.
|
||||
None if unitless.
|
||||
"""
|
||||
|
||||
values: typ.Sequence[DataValue]
|
||||
unit: spu.Quantity | None
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
class DataValueSpectrum:
|
||||
"""A numerical representation of a spectral distribution.
|
||||
|
||||
Attributes:
|
||||
wls: A 1D `numpy` float array of wavelength values.
|
||||
wls_unit: The unit of wavelengths, as length dimension.
|
||||
values: A 1D `numpy` float array of values corresponding to wavelength values.
|
||||
values_unit: The unit of the value, as arbitrary dimension.
|
||||
freqs_unit: The unit of the value, as arbitrary dimension.
|
||||
"""
|
||||
|
||||
# Wavelength
|
||||
wls: np.array
|
||||
wls_unit: spu.Quantity
|
||||
|
||||
# Value
|
||||
values: np.array
|
||||
values_unit: spu.Quantity
|
||||
|
||||
# Frequency
|
||||
freqs_unit: spu.Quantity = spu.hertz
|
||||
|
||||
@functools.cached_property
|
||||
def freqs(self) -> np.array:
|
||||
"""The spectral frequencies, computed from the wavelengths.
|
||||
|
||||
Frequencies are NOT reversed, so as to preserve the by-index mapping to `DataValueSpectrum.values`.
|
||||
|
||||
Returns:
|
||||
Frequencies, as a unitless `numpy` array.
|
||||
Use `DataValueSpectrum.wls_unit` to interpret this return value.
|
||||
"""
|
||||
unitless_speed_of_light = spux.sympy_to_python(
|
||||
spux.scale_to_unit(
|
||||
constants.vac_speed_of_light, (self.wl_unit / self.freq_unit)
|
||||
)
|
||||
)
|
||||
return unitless_speed_of_light / self.wls
|
||||
|
||||
# TODO: Colour Library
|
||||
# def as_colour_sd(self) -> colour.SpectralDistribution:
|
||||
# """Returns the `colour` representation of this spectral distribution, ideal for plotting and colorimetric analysis."""
|
||||
# return colour.SpectralDistribution(data=self.values, domain=self.wls)
|
||||
|
||||
|
||||
####################
|
||||
# - Data Structures: Lazy
|
||||
####################
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
class LazyDataValue:
|
||||
callback: typ.Callable[[...], [DataValue]]
|
||||
|
||||
def realize(self, *args: list[DataValue]) -> DataValue:
|
||||
return self.callback(*args)
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
class LazyDataValueRange:
|
||||
symbols: set[sp.Symbol]
|
||||
|
||||
start: sp.Basic
|
||||
stop: sp.Basic
|
||||
steps: int
|
||||
scaling: typx.Literal['lin', 'geom', 'log'] = 'lin'
|
||||
|
||||
has_unit: bool = False
|
||||
unit: spu.Quantity = False
|
||||
|
||||
def rescale_to_unit(self, unit: spu.Quantity) -> typ.Self:
|
||||
if self.has_unit:
|
||||
return LazyDataValueRange(
|
||||
symbols=self.symbols,
|
||||
has_unit=self.has_unit,
|
||||
unit=unit,
|
||||
start=spu.convert_to(self.start, unit),
|
||||
stop=spu.convert_to(self.stop, unit),
|
||||
steps=self.steps,
|
||||
scaling=self.scaling,
|
||||
)
|
||||
|
||||
msg = f'Tried to rescale unitless LazyDataValueRange to unit {unit}'
|
||||
raise ValueError(msg)
|
||||
|
||||
def rescale_bounds(
|
||||
self,
|
||||
bound_cb: typ.Callable[[sp.Expr], sp.Expr],
|
||||
reverse: bool = False,
|
||||
) -> typ.Self:
|
||||
"""Call a function on both bounds (start and stop), creating a new `LazyDataValueRange`."""
|
||||
return LazyDataValueRange(
|
||||
symbols=self.symbols,
|
||||
has_unit=self.has_unit,
|
||||
unit=self.unit,
|
||||
start=spu.convert_to(
|
||||
bound_cb(self.start if not reverse else self.stop), self.unit
|
||||
),
|
||||
stop=spu.convert_to(
|
||||
bound_cb(self.stop if not reverse else self.start), self.unit
|
||||
),
|
||||
steps=self.steps,
|
||||
scaling=self.scaling,
|
||||
)
|
||||
|
||||
def realize(
|
||||
self, symbol_values: dict[sp.Symbol, DataValue] = MappingProxyType({})
|
||||
) -> DataValueArray:
|
||||
# Realize Symbols
|
||||
if not self.has_unit:
|
||||
start = spux.sympy_to_python(self.start.subs(symbol_values))
|
||||
stop = spux.sympy_to_python(self.stop.subs(symbol_values))
|
||||
else:
|
||||
start = spux.sympy_to_python(
|
||||
spux.scale_to_unit(self.start.subs(symbol_values), self.unit)
|
||||
)
|
||||
stop = spux.sympy_to_python(
|
||||
spux.scale_to_unit(self.stop.subs(symbol_values), self.unit)
|
||||
)
|
||||
|
||||
# Return Linspace / Logspace
|
||||
if self.scaling == 'lin':
|
||||
return DataValueArray(
|
||||
values=np.linspace(start, stop, self.steps), unit=self.unit
|
||||
)
|
||||
if self.scaling == 'geom':
|
||||
return DataValueArray(values=np.geomspace(start, stop, self.steps), unit=self.unit)
|
||||
if self.scaling == 'log':
|
||||
return DataValueArray(values=np.logspace(start, stop, self.steps), unit=self.unit)
|
||||
|
||||
raise NotImplementedError
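# -- Editorial sketch (not part of the original file): the substitute-then-space pattern
# -- used by realize() above, in plain `sympy` + `numpy`; the symbol and bound
# -- expressions are made up for illustration.
import numpy as np
import sympy as sp

x = sp.Symbol('x', real=True)
start_expr, stop_expr, steps = x / 2, 3 * x, 11

symbol_values = {x: 2.0}
start = float(start_expr.subs(symbol_values))  # 1.0
stop = float(stop_expr.subs(symbol_values))  # 6.0
values = np.linspace(start, stop, steps)  # 11 evenly spaced points from 1.0 to 6.0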
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
class LazyDataValueSpectrum:
|
||||
wl_unit: spu.Quantity
|
||||
value_unit: spu.Quantity
|
||||
value_expr: sp.Expr
|
||||
|
||||
symbols: tuple[sp.Symbol, ...] = ()
|
||||
freq_symbol: sp.Symbol = sp.Symbol('lamda') # noqa: RUF009
|
||||
|
||||
def rescale_to_unit(self, unit: spu.Quantity) -> typ.Self:
|
||||
raise NotImplementedError
|
||||
|
||||
@functools.cached_property
|
||||
def as_func(self) -> typ.Callable[..., DataValue]:
|
||||
"""Generates an optimized function for numerical evaluation of the spectral expression."""
|
||||
return sp.lambdify([self.freq_symbol, *self.symbols], self.value_expr)
|
||||
|
||||
def realize(
|
||||
self, wl_range: DataValueArray, symbol_values: tuple[DataValue, ...]
|
||||
) -> DataValueSpectrum:
|
||||
r"""Realizes the parameterized spectral function as a numerical spectral distribution.
|
||||
|
||||
Parameters:
|
||||
wl_range: The lazy wavelength range to build the concrete spectral distribution with.
|
||||
symbol_values: Numerical values for each symbol, in the same order as defined in `LazyDataValueSpectrum.symbols`.
|
||||
The wavelength symbol ($\lambda$ by default) always goes first.
|
||||
_This is used to call the spectral function using the output of `.as_func()`._
|
||||
|
||||
Returns:
|
||||
The concrete, numerical spectral distribution.
|
||||
"""
|
||||
return DataValueSpectrum(
|
||||
wls=wl_range.values,
|
||||
wls_unit=self.wl_unit,
|
||||
values=self.as_func(wl_range.values, *symbol_values),
|
||||
values_unit=self.value_unit,
|
||||
)
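# -- Editorial sketch (not part of the original file): the lambdify-and-evaluate pattern
# -- behind as_func()/realize(), with a made-up Gaussian spectrum in 'lamda' and one
# -- extra amplitude symbol.
import numpy as np
import sympy as sp

lamda, a = sp.symbols('lamda a', real=True)
value_expr = a * sp.exp(-((lamda - 0.5) ** 2) / 0.01)

spectrum_fn = sp.lambdify([lamda, a], value_expr, 'numpy')
wls = np.linspace(0.4, 0.7, 301)
values = spectrum_fn(wls, 2.0)  # 1D array of spectral values, one per wavelength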
|
||||
|
||||
|
||||
#
|
||||
#
|
||||
#####################
|
||||
## - Data Pipeline
|
||||
#####################
|
||||
# @dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
# class DataPipelineDim:
|
||||
# unit: spu.Quantity | None
|
||||
#
|
||||
# class DataPipelineDimType(enum.StrEnum):
|
||||
# # Map Inputs
|
||||
# Time = enum.auto()
|
||||
# Freq = enum.auto()
|
||||
# Space3D = enum.auto()
|
||||
# DiffOrder = enum.auto()
|
||||
#
|
||||
# # Map Inputs
|
||||
# Power = enum.auto()
|
||||
# EVec = enum.auto()
|
||||
# HVec = enum.auto()
|
||||
# RelPerm = enum.auto()
|
||||
#
|
||||
#
|
||||
# @dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
# class LazyDataPipeline:
|
||||
# dims: list[DataPipelineDim]
|
||||
#
|
||||
# def _callable(self):
|
||||
# """JITs the current pipeline of functions with `jax`."""
|
||||
#
|
||||
# def __call__(self):
|
||||
# pass
|
|
@ -1,71 +0,0 @@
|
|||
import enum
import typing as typ
|
||||
|
||||
from blender_maxwell.utils.staticproperty import staticproperty
|
||||
|
||||
|
||||
class FlowEvent(enum.StrEnum):
|
||||
"""Defines an event that can propagate through the graph (node-socket-node-...).
|
||||
|
||||
Contrary to `FlowKind`, a `FlowEvent` doesn't propagate any data.
|
||||
Instead, it allows for dead-simple communication across direct graph connections.
|
||||
|
||||
The entire system is built around user-defined event handlers, which are also used internally.
|
||||
See `events`.
|
||||
|
||||
Attributes:
|
||||
EnableLock: Indicates that the node/socket should enable locking.
|
||||
Locking prevents the use of the UI, including adding/removing links.
|
||||
This event can lock a subset of the node tree graph.
|
||||
DisableLock: Indicates that the node/socket should disable locking.
|
||||
This event can unlock part of a locked subgraph.
|
||||
ShowPreview: Indicates that the node/socket should enable its primary preview.
|
||||
This should be used if a more specific preview-esque event doesn't apply.
|
||||
ShowPlot: Indicates that the node/socket should enable its plotted preview.
|
||||
This should generally be used if the node is rendering to an image, for viewing through the Blender image editor.
|
||||
LinkChanged: Indicates that a link to a node/socket was added/removed.
|
||||
In nodes, this is accompanied by a `socket_name` to indicate which socket it is that had its links altered.
|
||||
DataChanged: Indicates that data flowing through a node/socket was altered.
|
||||
In nodes, this event is accompanied by a `socket_name` or `prop_name`, to indicate which socket/property it is that was changed.
|
||||
**This event is essential**, as it invalidates all input/output socket caches along its path.
|
||||
"""
|
||||
|
||||
# Lock Events
|
||||
EnableLock = enum.auto()
|
||||
DisableLock = enum.auto()
|
||||
|
||||
# Preview Events
|
||||
ShowPreview = enum.auto()
|
||||
ShowPlot = enum.auto()
|
||||
|
||||
# Data Events
|
||||
LinkChanged = enum.auto()
|
||||
DataChanged = enum.auto()
|
||||
|
||||
# Non-Triggered Events
|
||||
OutputRequested = enum.auto()
|
||||
|
||||
# Properties
|
||||
@staticproperty
|
||||
def flow_direction() -> typ.Literal['input', 'output']:
|
||||
"""Describes the direction in which the event should flow.
|
||||
|
||||
Doesn't include `FlowEvent`s that aren't meant to be triggered:
|
||||
- `OutputRequested`.
|
||||
|
||||
Parameters:
|
||||
event: The event for which to retrieve the trigger direction.
|
||||
|
||||
Returns:
|
||||
The trigger direction, which can be used ex. in nodes to select `node.inputs` or `node.outputs`.
|
||||
"""
|
||||
return {
|
||||
# Lock Events
|
||||
FlowEvent.EnableLock: 'input',
|
||||
FlowEvent.DisableLock: 'input',
|
||||
# Preview Events
|
||||
FlowEvent.ShowPreview: 'input',
|
||||
FlowEvent.ShowPlot: 'input',
|
||||
# Data Events
|
||||
FlowEvent.LinkChanged: 'output',
|
||||
FlowEvent.DataChanged: 'output',
|
||||
}
|
|
@ -1,641 +0,0 @@
|
|||
import dataclasses
|
||||
import enum
|
||||
import functools
|
||||
import typing as typ
|
||||
from types import MappingProxyType
|
||||
|
||||
import jax
|
||||
import jax.numpy as jnp
|
||||
import jaxtyping as jtyp
|
||||
import numba
|
||||
import sympy as sp
|
||||
import sympy.physics.units as spu
|
||||
|
||||
from blender_maxwell.utils import extra_sympy_units as spux
|
||||
|
||||
from .socket_types import SocketType
|
||||
|
||||
|
||||
class FlowKind(enum.StrEnum):
|
||||
"""Defines a kind of data that can flow between nodes.
|
||||
|
||||
Each node link can be thought to contain **multiple pipelines for data to flow along**.
|
||||
Each pipeline is cached incrementally, and independently, of the others.
|
||||
Thus, the same socket can easily support several kinds of related data flow at the same time.
|
||||
|
||||
Attributes:
|
||||
Capabilities: Describes a socket's linkeability with other sockets.
|
||||
Links between sockets with incompatible capabilities will be rejected.
|
||||
This doesn't need to be defined normally, as there is a default.
|
||||
However, in some cases, defining it manually to control linkeability more granularly may be desirable.
|
||||
Value: A generic object, which is "directly usable".
|
||||
This should be chosen when a more specific flow kind doesn't apply.
|
||||
Array: An object with dimensions, and possibly a unit.
|
||||
Whenever a `Value` is defined, a single-element `list` will also be generated by default as `Array`.
|
||||
However, for any other array-like variants (or sockets that only represent array-like objects), `Array` should be defined manually.
|
||||
LazyValueFunc: A composable function.
|
||||
Can be used to represent computations for which all data is not yet known, or for which just-in-time compilation can drastically increase performance.
|
||||
LazyArrayRange: An object that generates an `Array` from range information (start/stop/step/spacing).
|
||||
This should be used instead of `Array` whenever possible.
|
||||
Params: A dictionary providing particular parameters for a lazy value.
|
||||
Info: A dictionary providing extra context about any aspect of flow.
|
||||
"""
|
||||
|
||||
Capabilities = enum.auto()
|
||||
|
||||
# Values
|
||||
Value = enum.auto()
|
||||
Array = enum.auto()
|
||||
|
||||
# Lazy
|
||||
LazyValueFunc = enum.auto()
|
||||
LazyArrayRange = enum.auto()
|
||||
|
||||
# Auxiliary
|
||||
Params = enum.auto()
|
||||
Info = enum.auto()
|
||||
|
||||
@classmethod
|
||||
def scale_to_unit_system(cls, kind: typ.Self, value, socket_type, unit_system):
|
||||
if kind == cls.Value:
|
||||
return spux.sympy_to_python(
|
||||
spux.scale_to_unit(
|
||||
value,
|
||||
unit_system[socket_type],
|
||||
)
|
||||
)
|
||||
if kind == cls.LazyArrayRange:
|
||||
return value.rescale_to_unit(unit_system[socket_type])
|
||||
|
||||
msg = f'Tried to scale unknown FlowKind: {kind}'
|
||||
raise ValueError(msg)
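# -- Editorial sketch (not part of the original file): the "express in the target unit,
# -- then strip the unit to a plain float" step, using plain `sympy.physics.units`; the
# -- nanometer/micrometer choice is only for illustration.
import sympy.physics.units as spu
from sympy.physics.units import convert_to

value = 1550 * spu.nanometer
target_unit = spu.micrometer
unitless = float(convert_to(value, target_unit) / target_unit)  # 1.55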
|
||||
|
||||
|
||||
####################
|
||||
# - Capabilities
|
||||
####################
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
class CapabilitiesFlow:
|
||||
socket_type: SocketType
|
||||
active_kind: FlowKind
|
||||
|
||||
is_universal: bool = False
|
||||
|
||||
def is_compatible_with(self, other: typ.Self) -> bool:
|
||||
return (
|
||||
self.socket_type == other.socket_type
|
||||
and self.active_kind == other.active_kind
|
||||
) or other.is_universal
|
||||
|
||||
|
||||
####################
|
||||
# - Value
|
||||
####################
|
||||
ValueFlow: typ.TypeAlias = typ.Any
|
||||
|
||||
|
||||
####################
|
||||
# - Value Array
|
||||
####################
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
class ArrayFlow:
|
||||
"""A simple, flat array of values with an optionally-attached unit.
|
||||
|
||||
Attributes:
|
||||
values: An ND array-like object of arbitrary numerical type.
|
||||
unit: A `sympy` unit.
|
||||
None if unitless.
|
||||
"""
|
||||
|
||||
values: jtyp.Shaped[jtyp.Array, '...']
|
||||
unit: spu.Quantity | None = None
|
||||
|
||||
def correct_unit(self, corrected_unit: spu.Quantity) -> typ.Self:
|
||||
if self.unit is not None:
|
||||
return ArrayFlow(values=self.values, unit=corrected_unit)
|
||||
|
||||
msg = f'Tried to correct unit of unitless ArrayFlow "{corrected_unit}"'
|
||||
raise ValueError(msg)
|
||||
|
||||
def rescale_to_unit(self, unit: spu.Quantity) -> typ.Self:
|
||||
if self.unit is not None:
|
||||
return ArrayFlow(
|
||||
values=float(spux.scaling_factor(self.unit, unit)) * self.values,
|
||||
unit=unit,
|
||||
)
|
||||
## TODO: Is this scaling numerically stable?
|
||||
|
||||
msg = f'Tried to rescale unitless ArrayFlow to unit {unit}'
|
||||
raise ValueError(msg)
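# -- Editorial sketch (not part of the original file): rescaling a whole array between
# -- units via a single scalar factor, as rescale_to_unit() does with spux.scaling_factor;
# -- plain `sympy.physics.units` stands in for the addon helper here.
import numpy as np
import sympy.physics.units as spu
from sympy.physics.units import convert_to

factor = float(convert_to(spu.nanometer, spu.micrometer) / spu.micrometer)  # 1e-3
values_nm = np.array([400.0, 500.0, 600.0])
values_um = factor * values_nm  # [0.4, 0.5, 0.6]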
|
||||
|
||||
|
||||
####################
|
||||
# - Lazy Value Func
|
||||
####################
|
||||
LazyFunction: typ.TypeAlias = typ.Callable[..., ValueFlow]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
class LazyValueFuncFlow:
|
||||
r"""Wraps a composable function, providing useful information and operations.
|
||||
|
||||
# Data Flow as Function Composition
|
||||
When using nodes to do math, it can be a good idea to express a **flow of data as the composition of functions**.
|
||||
|
||||
Each node creates a new function, which uses the still-unknown (aka. **lazy**) output of the previous function to plan some calculations.
|
||||
Some new arguments may also be added, of course.
|
||||
|
||||
## Root Function
|
||||
Of course, one needs to select a "bottom" function, which has no previous function as input.
|
||||
Thus, the first step is to define this **root function**:
|
||||
|
||||
$$
|
||||
f_0:\ \ \ \ \biggl(
|
||||
\underbrace{a_1, a_2, ..., a_p}_{\texttt{args}},\
|
||||
\underbrace{
|
||||
\begin{bmatrix} k_1 \\ v_1\end{bmatrix},
|
||||
\begin{bmatrix} k_2 \\ v_2\end{bmatrix},
|
||||
...,
|
||||
\begin{bmatrix} k_q \\ v_q\end{bmatrix}
|
||||
}_{\texttt{kwargs}}
|
||||
\biggr) \to \text{output}_0
|
||||
$$
|
||||
|
||||
We'll express this simple snippet like so:
|
||||
|
||||
```python
|
||||
# Presume 'A0', 'KV0' contain only the args/kwargs for f_0
|
||||
## 'A0', 'KV0' are of length 'p' and 'q'
|
||||
def f_0(*args, **kwargs): ...
|
||||
|
||||
lazy_value_func_0 = LazyValueFuncFlow(
|
||||
func=f_0,
|
||||
func_args=[(a_i, type(a_i)) for a_i in A0],
|
||||
func_kwargs={k: type(v) for k, v in KV0},
|
||||
)
|
||||
output_0 = lazy_value_func_0.func(*A0_computed, **KV0_computed)
|
||||
```
|
||||
|
||||
So far so good.
|
||||
But of course, nothing interesting has really happened yet.
|
||||
|
||||
## Composing Functions
|
||||
The key thing is the next step: The function that uses the result of $f_0$!
|
||||
|
||||
$$
|
||||
f_1:\ \ \ \ \biggl(
|
||||
f_0(...),\ \
|
||||
\underbrace{\{a_i\}_p^{p+r}}_{\texttt{args[p:]}},\
|
||||
\underbrace{\biggl\{
|
||||
\begin{bmatrix} k_i \\ v_i\end{bmatrix}
|
||||
\biggr\}_q^{q+s}}_{\texttt{kwargs[q:]}}
|
||||
\biggr) \to \text{output}_1
|
||||
$$
|
||||
|
||||
Notice that _$f_1$ needs the arguments of both $f_0$ and $f_1$_.
|
||||
Tracking arguments is already getting out of hand; we have to use `...` just to keep it readable!
|
||||
|
||||
But doing so with `LazyValueFunc` is not so complex:
|
||||
|
||||
```python
|
||||
# Presume 'A1', 'K1' contain only the args/kwarg names for f_1
|
||||
## 'A1', 'K1' are therefore of length 'r' and 's'
|
||||
def f_1(output_0, *args, **kwargs): ...
|
||||
|
||||
lazy_value_func_1 = lazy_value_func_0.compose_within(
|
||||
enclosing_func=f_1,
|
||||
enclosing_func_args=[(a_i, type(a_i)) for a_i in A1],
|
||||
enclosing_func_kwargs={k: type(v) for k,v in K1},
|
||||
)
|
||||
|
||||
A_computed = A0_computed + A1_computed
|
||||
KW_computed = KV0_computed | KV1_computed
|
||||
output_1 = lazy_value_func_1.func(*A_computed, **KW_computed)
|
||||
```
|
||||
|
||||
We only need the arguments to $f_1$, and `LazyValueFunc` figures out how to make one function with enough arguments to call both.
|
||||
|
||||
## Isn't Layering Functions Slow/Hard?
|
||||
Imagine that each function represents the action of a node, each of which performs expensive calculations on huge `numpy` arrays (**as one does when processing electromagnetic field data**).
|
||||
At the end, a node might run the entire procedure with all arguments:
|
||||
|
||||
```python
|
||||
output_n = lazy_value_func_n.func(*A_all, **KW_all)
|
||||
```
|
||||
|
||||
It's rough: Most non-trivial pipelines drown in the time/memory overhead of incremental `numpy` operations - individually fast, but collectively iffy.
|
||||
|
||||
The killer feature of `LazyValueFuncFlow` is a sprinkle of black magic:
|
||||
|
||||
```python
|
||||
func_n_jax = lazy_value_func_n.func_jax
|
||||
output_n = func_n_jax(*A_all, **KW_all) ## Runs on your GPU
|
||||
```
|
||||
|
||||
What happened was, **the entire pipeline** was compiled and optimized for high performance on not just your CPU, _but also (possibly) your GPU_.
|
||||
All the layered function calls and inefficient incremental processing is **transformed into a high-performance program**.
|
||||
|
||||
Thank `jax` - specifically, `jax.jit` (https://jax.readthedocs.io/en/latest/_autosummary/jax.jit.html#jax.jit), which internally enables this magic with a single function call.
|
||||
|
||||
## Other Considerations
|
||||
**Auto-Differentiation**: Incredibly, `jax.jit` isn't the killer feature of `jax`. The function that comes out of `LazyValueFuncFlow` can also be differentiated with `jax.grad` (read: high-performance Jacobians for optimizing input parameters).
|
||||
|
||||
Though designed for machine learning, there's no reason other fields can't enjoy their inventions!
|
||||
|
||||
**Impact of Independent Caching**: JIT'ing can be slow.
|
||||
That's why `LazyValueFuncFlow` has its own `FlowKind` "lane", which means that **only changes to the processing procedures will cause recompilation**.
|
||||
|
||||
Generally, adjustable values that affect the output will flow via the `Params` "lane", which has its own incremental caching, and only meets the compiled function when it's "plugged in" for final evaluation.
|
||||
The effect is a feeling of snappiness and interactivity, even as the volume of data grows.
|
||||
|
||||
Attributes:
|
||||
func: The function that the object encapsulates.
|
||||
func_args: Arguments that will be packaged into the function, and which can't be modified later.
|
||||
func_kwargs: Arguments to be specified by the user at the time of use.
|
||||
supports_jax: Whether the contained `self.function` can be compiled with JAX's JIT compiler.
|
||||
supports_numba: Whether the contained `self.function` can be compiled with Numba's JIT compiler.
|
||||
"""
|
||||
|
||||
func: LazyFunction
|
||||
func_args: list[tuple[str, type]] = ()
|
||||
func_kwargs: dict[str, type] = MappingProxyType({})
|
||||
supports_jax: bool = False
|
||||
supports_numba: bool = False
|
||||
|
||||
# Composition
|
||||
def compose_within(
|
||||
self,
|
||||
enclosing_func: LazyFunction,
|
||||
enclosing_func_args: list[tuple[str, type]] = (),
|
||||
enclosing_func_kwargs: dict[str, type] = MappingProxyType({}),
|
||||
supports_jax: bool = False,
|
||||
supports_numba: bool = False,
|
||||
) -> typ.Self:
|
||||
return LazyValueFuncFlow(
|
||||
func=lambda *args, **kwargs: enclosing_func(
|
||||
self.func(
|
||||
*args[: len(self.func_args)],
|
||||
**{k: v for k, v in kwargs.items() if k in self.func_kwargs},
|
||||
),
|
||||
*args[len(self.func_args) :],
**kwargs,
|
||||
),
|
||||
func_args=self.func_args + enclosing_func_args,
|
||||
func_kwargs=self.func_kwargs | enclosing_func_kwargs,
|
||||
supports_jax=self.supports_jax and supports_jax,
|
||||
supports_numba=self.supports_numba and supports_numba,
|
||||
)
|
||||
|
||||
@functools.cached_property
|
||||
def func_jax(self) -> LazyFunction:
|
||||
if self.supports_jax:
|
||||
return jax.jit(self.func)
|
||||
|
||||
msg = 'Can\'t express LazyValueFuncFlow as JAX function (using jax.jit), since "self.supports_jax" is False'
|
||||
raise ValueError(msg)
|
||||
|
||||
@functools.cached_property
|
||||
def func_numba(self) -> LazyFunction:
|
||||
if self.supports_numba:
|
||||
return numba.jit(self.func)
|
||||
|
||||
msg = 'Can\'t express LazyValueFuncFlow as Numba function (using numba.jit), since "self.supports_numba" is False'
|
||||
raise ValueError(msg)
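# -- Editorial sketch (not part of the original file): the composition idea in plain
# -- `jax`, with made-up functions. Each "node" wraps the previous function; jax.jit
# -- then compiles the entire chain at once, as func_jax does above.
import jax
import jax.numpy as jnp


def f_0(x):
    return jnp.sin(x)  # root function


def f_1(prev, scale):
    return scale * prev  # consumes f_0's output, adds one new argument


def composed(x, scale):
    return f_1(f_0(x), scale)  # what compose_within() builds, conceptually


composed_jit = jax.jit(composed)  # what func_jax exposes, conceptually
result = composed_jit(jnp.linspace(0.0, 1.0, 5), 2.0)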
|
||||
|
||||
|
||||
####################
|
||||
# - Lazy Array Range
|
||||
####################
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
class LazyArrayRangeFlow:
|
||||
r"""Represents a linearly/logarithmically spaced array using symbolic boundary expressions, with support for units and lazy evaluation.
|
||||
|
||||
# Advantages
|
||||
Whenever an array can be represented like this, the advantages over an `ArrayFlow` are numerous.
|
||||
|
||||
## Memory
|
||||
`ArrayFlow` generally has a memory scaling of $O(n)$.
|
||||
Naturally, `LazyArrayRangeFlow` scales as $O(1)$, since only the boundaries and step count are stored.
|
||||
|
||||
## Symbolic
|
||||
Both boundary points are symbolic expressions, within which pre-defined `sp.Symbol`s can participate in a constrained manner (ex. an integer symbol).
|
||||
|
||||
One need not know the value of the symbols immediately - such decisions can be deferred until later in the computational flow.
|
||||
|
||||
## Performant Unit-Aware Operations
|
||||
While `ArrayFlow`s are also unit-aware, the time-cost of _any_ unit-scaling operation scales with $O(n)$.
|
||||
`LazyArrayRangeFlow`, by contrast, scales as $O(1)$.
|
||||
|
||||
As a result, more complicated operations (like symbolic or unit-based manipulations) that would be difficult to perform interactively in real time on an `ArrayFlow` work well with this object, even as complexity grows.
|
||||
|
||||
## High-Performance Composition and Gradient
|
||||
With `self.as_func`, a `jax` function is produced that generates the array according to the symbolic `start`, `stop` and `steps`.
|
||||
There are two nice things about this:
|
||||
|
||||
- **Gradient**: The gradient of the output array, with respect to any symbols used to define the input bounds, can easily be found using `jax.grad` over `self.as_func`.
|
||||
- **JIT**: When `self.as_func` is composed with other `jax` functions, and `jax.jit` is run to optimize the entire thing, the "cost of array generation" _will often be optimized away significantly or entirely_.
|
||||
|
||||
Thus, as part of larger computations, the performance properties of `LazyArrayRangeFlow` are extremely favorable.
|
||||
|
||||
## Numerical Properties
|
||||
Since the bounds support exact (ex. rational) calculations and symbolic manipulations (_by virtue of being symbolic expressions_), the opportunities for certain kinds of numerical instability are mitigated.
|
||||
|
||||
Attributes:
|
||||
start: An expression generating a scalar, unitless, complex value for the array's lower bound.
|
||||
_Integer, rational, and real values are also supported._
|
||||
stop: An expression generating a scalar, unitless, complex value for the array's upper bound.
|
||||
_Integer, rational, and real values are also supported._
|
||||
steps: The amount of steps (**inclusive**) to generate from `start` to `stop`.
|
||||
scaling: The method of distributing `step` values between the two endpoints.
|
||||
Generally, the linear default is sufficient.
|
||||
|
||||
unit: The unit of the generated array values
|
||||
|
||||
int_symbols: Set of integer-valued variables from which `start` and/or `stop` are determined.
|
||||
real_symbols: Set of real-valued variables from which `start` and/or `stop` are determined.
|
||||
complex_symbols: Set of complex-valued variables from which `start` and/or `stop` are determined.
|
||||
"""
|
||||
|
||||
start: spux.ScalarUnitlessComplexExpr
|
||||
stop: spux.ScalarUnitlessComplexExpr
|
||||
steps: int
|
||||
scaling: typ.Literal['lin', 'geom', 'log'] = 'lin'
|
||||
|
||||
unit: spux.Unit | None = None
|
||||
|
||||
int_symbols: set[spux.IntSymbol] = frozenset()
|
||||
real_symbols: set[spux.RealSymbol] = frozenset()
|
||||
complex_symbols: set[spux.ComplexSymbol] = frozenset()
|
||||
|
||||
@functools.cached_property
|
||||
def symbols(self) -> list[sp.Symbol]:
|
||||
"""Retrieves all symbols by concatenating int, real, and complex symbols, and sorting them by name.
|
||||
|
||||
The order is guaranteed to be **deterministic**.
|
||||
|
||||
Returns:
|
||||
All symbols valid for use in the expression.
|
||||
"""
|
||||
return sorted(
|
||||
self.int_symbols | self.real_symbols | self.complex_symbols,
|
||||
key=lambda sym: sym.name,
|
||||
)
|
||||
|
||||
####################
|
||||
# - Units
|
||||
####################
|
||||
def correct_unit(self, corrected_unit: spux.Unit) -> typ.Self:
|
||||
"""Replaces the unit without rescaling the unitless bounds.
|
||||
|
||||
Parameters:
|
||||
corrected_unit: The unit to replace the current unit with.
|
||||
|
||||
Returns:
|
||||
A new `LazyArrayRangeFlow` with replaced unit.
|
||||
|
||||
Raises:
|
||||
ValueError: If the existing unit is `None`, indicating that there is no unit to correct.
|
||||
"""
|
||||
if self.unit is not None:
|
||||
return LazyArrayRangeFlow(
|
||||
start=self.start,
|
||||
stop=self.stop,
|
||||
steps=self.steps,
|
||||
scaling=self.scaling,
|
||||
unit=corrected_unit,
|
||||
int_symbols=self.int_symbols,
|
||||
real_symbols=self.real_symbols,
|
||||
complex_symbols=self.complex_symbols,
|
||||
)
|
||||
|
||||
msg = f'Tried to correct unit of unitless LazyArrayRangeFlow "{corrected_unit}"'
|
||||
raise ValueError(msg)
|
||||
|
||||
def rescale_to_unit(self, unit: spux.Unit) -> typ.Self:
|
||||
"""Replaces the unit, **with** rescaling of the bounds.
|
||||
|
||||
Parameters:
|
||||
unit: The unit to convert the bounds to.
|
||||
|
||||
Returns:
|
||||
A new `LazyArrayRangeFlow` with replaced unit.
|
||||
|
||||
Raises:
|
||||
ValueError: If the existing unit is `None`, indicating that there is no unit to correct.
|
||||
"""
|
||||
if self.unit is not None:
|
||||
return LazyArrayRangeFlow(
|
||||
start=spu.convert_to(self.start, unit),
|
||||
stop=spu.convert_to(self.stop, unit),
|
||||
steps=self.steps,
|
||||
scaling=self.scaling,
|
||||
unit=unit,
|
||||
|
||||
int_symbols=self.int_symbols,
|
||||
real_symbols=self.real_symbols,
|
||||
complex_symbols=self.complex_symbols,
|
||||
)
|
||||
|
||||
msg = f'Tried to rescale unitless LazyArrayRangeFlow to unit {unit}'
|
||||
raise ValueError(msg)
|
||||
|
||||
####################
|
||||
# - Bound Operations
|
||||
####################
|
||||
def rescale_bounds(
|
||||
self,
|
||||
scaler: typ.Callable[
|
||||
[spux.ScalarUnitlessComplexExpr], spux.ScalarUnitlessComplexExpr
|
||||
],
|
||||
reverse: bool = False,
|
||||
) -> typ.Self:
|
||||
"""Apply a function to the bounds, effectively rescaling the represented array.
|
||||
|
||||
Notes:
|
||||
**It is presumed that the bounds are scaled with the same factor**.
|
||||
Breaking this presumption may have unexpected results.
|
||||
|
||||
The scalar, unitless, complex-valuedness of the bounds must also be respected; additionally, new symbols must not be introduced.
|
||||
|
||||
Parameters:
|
||||
scaler: The function that scales each bound.
|
||||
reverse: Whether to reverse the bounds after running the `scaler`.
|
||||
|
||||
Returns:
|
||||
A rescaled `LazyArrayRangeFlow`.
|
||||
"""
|
||||
return LazyArrayRangeFlow(
|
||||
start=spu.convert_to(
|
||||
scaler(self.start if not reverse else self.stop), self.unit
|
||||
),
|
||||
stop=spu.convert_to(
|
||||
scaler(self.stop if not reverse else self.start), self.unit
|
||||
),
|
||||
steps=self.steps,
|
||||
scaling=self.scaling,
|
||||
unit=self.unit,
|
||||
int_symbols=self.int_symbols,
|
||||
real_symbols=self.real_symbols,
|
||||
complex_symbols=self.complex_symbols,
|
||||
)
|
||||
|
||||
####################
|
||||
# - Lazy Representation
|
||||
####################
|
||||
@functools.cached_property
|
||||
def array_generator(
|
||||
self,
|
||||
) -> typ.Callable[
|
||||
[int | float | complex, int | float | complex, int],
|
||||
jtyp.Inexact[jtyp.Array, ' steps'],
|
||||
]:
|
||||
"""Compute the correct `jnp.*space` array generator, where `*` is one of the supported scaling methods.
|
||||
|
||||
Returns:
|
||||
A `jax` function that takes a valid `start`, `stop`, and `steps`, and returns a 1D `jax` array.
|
||||
"""
|
||||
jnp_nspace = {
|
||||
'lin': jnp.linspace,
|
||||
'geom': jnp.geomspace,
|
||||
'log': jnp.logspace,
|
||||
}.get(self.scaling)
|
||||
if jnp_nspace is None:
|
||||
msg = f'LazyArrayRangeFlow scaling method {self.scaling} is unsupported'
|
||||
raise RuntimeError(msg)
|
||||
|
||||
return jnp_nspace
|
||||
|
||||
@functools.cached_property
|
||||
def as_func(
|
||||
self,
|
||||
) -> typ.Callable[..., jtyp.Inexact[jtyp.Array, ' steps']]:
|
||||
"""Create a function that can compute the non-lazy output array as a function of the symbols in the expressions for `start` and `stop`.
|
||||
|
||||
Notes:
|
||||
The ordering of the symbols is identical to `self.symbols`, which is guaranteed to be a deterministically sorted list of symbols.
|
||||
|
||||
Returns:
|
||||
A function that, given a value for each symbol in `self.symbols` (in order), returns a 1D array of `steps` points spanning the realized `start` and `stop`.
|
||||
"""
|
||||
# Compile JAX Functions for Start/End Expressions
|
||||
## FYI, JAX-in-JAX works perfectly fine.
|
||||
start_jax = sp.lambdify(self.symbols, self.start, 'jax')
|
||||
stop_jax = sp.lambdify(self.symbols, self.stop, 'jax')
|
||||
|
||||
# Compile ArrayGen Function
|
||||
def gen_array(
|
||||
*args: list[int | float | complex],
|
||||
) -> jtyp.Inexact[jtyp.Array, ' steps']:
|
||||
return self.array_generator(start_jax(*args), stop_jax(*args), self.steps)
|
||||
|
||||
# Return ArrayGen Function
|
||||
return gen_array
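# -- Editorial sketch (not part of the original file): symbolic bounds lambdified to
# -- `jax`, then fed to a `jnp.*space` generator, mirroring as_func above; the symbol
# -- 'n' and the bound expressions are made up for illustration.
import jax.numpy as jnp
import sympy as sp

n = sp.Symbol('n', integer=True)
start_fn = sp.lambdify([n], n / 10, 'jax')
stop_fn = sp.lambdify([n], 2 * n, 'jax')
steps = 50


def gen_array(n_value):
    return jnp.linspace(start_fn(n_value), stop_fn(n_value), steps)


arr = gen_array(5)  # 50 points from 0.5 to 10.0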
|
||||
|
||||
@functools.cached_property
|
||||
def as_lazy_value_func(self) -> LazyValueFuncFlow:
|
||||
"""Creates a `LazyValueFuncFlow` using the output of `self.as_func`.
|
||||
|
||||
This is useful for ex. parameterizing the first array in the node graph, without binding an entire computed array.
|
||||
|
||||
Notes:
|
||||
The function enclosed in the `LazyValueFuncFlow` is identical to the one returned by `self.as_func`.
|
||||
|
||||
Returns:
|
||||
A `LazyValueFuncFlow` containing `self.as_func`, as well as appropriate supporting settings.
|
||||
"""
|
||||
return LazyValueFuncFlow(
|
||||
func=self.as_func,
|
||||
func_args=[
|
||||
(sym.name, spux.sympy_to_python_type(sym)) for sym in self.symbols
|
||||
],
|
||||
supports_jax=True,
|
||||
)
|
||||
|
||||
####################
|
||||
# - Realization
|
||||
####################
|
||||
def realize(
|
||||
self,
|
||||
symbol_values: dict[spux.Symbol, ValueFlow] = MappingProxyType({}),
|
||||
kind: typ.Literal[FlowKind.Array, FlowKind.LazyValueFunc] = FlowKind.Array,
|
||||
) -> ArrayFlow | LazyValueFuncFlow:
|
||||
"""Apply a function to the bounds, effectively rescaling the represented array.
|
||||
|
||||
Notes:
|
||||
**It is presumed that the bounds are scaled with the same factor**.
|
||||
Breaking this presumption may have unexpected results.
|
||||
|
||||
The scalar, unitless, complex-valuedness of the bounds must also be respected; additionally, new symbols must not be introduced.
|
||||
|
||||
Parameters:
|
||||
scaler: The function that scales each bound.
|
||||
reverse: Whether to reverse the bounds after running the `scaler`.
|
||||
|
||||
Returns:
|
||||
A rescaled `LazyArrayRangeFlow`.
|
||||
"""
|
||||
if not set(self.symbols).issubset(set(symbol_values.keys())):
|
||||
msg = f'Provided symbol values ({set(symbol_values.keys())}) do not cover all symbols ({self.symbols}) used in the boundary expressions (start={self.start}, stop={self.stop})'
|
||||
raise ValueError(msg)
|
||||
|
||||
# Realize Symbols
|
||||
realized_start = spux.sympy_to_python(
|
||||
self.start.subs({sym: symbol_values[sym] for sym in self.symbols})
|
||||
)
|
||||
realized_stop = spux.sympy_to_python(
|
||||
self.stop.subs({sym: symbol_values[sym] for sym in self.symbols})
|
||||
)
|
||||
|
||||
# Return Linspace / Logspace
|
||||
def gen_array() -> jtyp.Inexact[jtyp.Array, ' steps']:
|
||||
return self.array_generator(realized_start, realized_stop, self.steps)
|
||||
|
||||
if kind == FlowKind.Array:
|
||||
return ArrayFlow(values=gen_array(), unit=self.unit)
|
||||
if kind == FlowKind.LazyValueFunc:
|
||||
return LazyValueFuncFlow(func=gen_array, supports_jax=True)
|
||||
|
||||
msg = f'Invalid kind: {kind}'
|
||||
raise TypeError(msg)
|
||||
|
||||
|
||||
####################
|
||||
# - Params
|
||||
####################
|
||||
ParamsFlow: typ.TypeAlias = dict[str, typ.Any]
|
||||
|
||||
|
||||
####################
|
||||
# - Info
|
||||
####################
|
||||
@dataclasses.dataclass(frozen=True, kw_only=True)
|
||||
class InfoFlow:
|
||||
func_args: list[tuple[str, type]] = ()
|
||||
func_kwargs: dict[str, type] = MappingProxyType({})
|
||||
|
||||
# Dimension Information
|
||||
has_ndims: bool = False
|
||||
dim_names: list[str] = ()
|
||||
dim_idx: dict[str, ArrayFlow | LazyArrayRangeFlow] = MappingProxyType({})
|
||||
|
||||
## TODO: Validation, esp. length of dims. Pydantic?
|
||||
|
||||
def compose_within(
|
||||
self,
|
||||
enclosing_func_args: list[tuple[str, type]] = (),
|
||||
enclosing_func_kwargs: dict[str, type] = MappingProxyType({}),
|
||||
) -> typ.Self:
|
||||
return InfoFlow(
|
||||
func_args=self.func_args + enclosing_func_args,
|
||||
func_kwargs=self.func_kwargs | enclosing_func_kwargs,
|
||||
)
|
||||
|
||||
def call_lazy_value_func(
|
||||
self,
|
||||
lazy_value_func: LazyValueFuncFlow,
|
||||
*args: list[typ.Any],
|
||||
**kwargs: dict[str, typ.Any],
|
||||
) -> typ.Any:
|
||||
if lazy_value_func.supports_jax:
|
||||
return lazy_value_func.func_jax(*args, **kwargs)
|
||||
|
||||
return lazy_value_func.func(*args, **kwargs)
|
|
@ -1,4 +1,4 @@
|
|||
from blender_maxwell.utils.blender_type_enum import BlenderTypeEnum
|
||||
from ....utils.blender_type_enum import BlenderTypeEnum
|
||||
|
||||
|
||||
class Icon(BlenderTypeEnum):
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import enum
|
||||
|
||||
from blender_maxwell.utils.blender_type_enum import BlenderTypeEnum
|
||||
from ....utils.blender_type_enum import BlenderTypeEnum
|
||||
|
||||
|
||||
class ManagedObjType(BlenderTypeEnum):
|
|
@ -1,6 +1,6 @@
|
|||
import enum
|
||||
|
||||
from blender_maxwell.utils.blender_type_enum import BlenderTypeEnum, wrap_values_in_MT
|
||||
from ....utils.blender_type_enum import BlenderTypeEnum, wrap_values_in_MT
|
||||
|
||||
|
||||
@wrap_values_in_MT
|
|
@ -1,6 +1,6 @@
|
|||
import enum
|
||||
|
||||
from blender_maxwell.utils.blender_type_enum import (
|
||||
from ....utils.blender_type_enum import (
|
||||
BlenderTypeEnum,
|
||||
append_cls_name_to_values,
|
||||
)
|
||||
|
@ -8,7 +8,7 @@ from blender_maxwell.utils.blender_type_enum import (
|
|||
|
||||
@append_cls_name_to_values
|
||||
class NodeType(BlenderTypeEnum):
|
||||
# KitchenSink = enum.auto()
|
||||
#KitchenSink = enum.auto()
|
||||
|
||||
# Analysis
|
||||
Viz = enum.auto()
|
||||
|
@ -24,18 +24,18 @@ class NodeType(BlenderTypeEnum):
|
|||
UnitSystem = enum.auto()
|
||||
|
||||
## Inputs / Scene
|
||||
# Time = enum.auto()
|
||||
#Time = enum.auto()
|
||||
## Inputs / Web Importers
|
||||
Tidy3DWebImporter = enum.auto()
|
||||
## Inputs / File Importers
|
||||
Tidy3DFileImporter = enum.auto()
|
||||
## Inputs / Constants
|
||||
ExprConstant = enum.auto()
|
||||
ScientificConstant = enum.auto()
|
||||
NumberConstant = enum.auto()
|
||||
PhysicalConstant = enum.auto()
|
||||
BlenderConstant = enum.auto()
|
||||
|
||||
|
||||
# Outputs
|
||||
Viewer = enum.auto()
|
||||
## Outputs / File Exporters
|
||||
|
@ -48,43 +48,43 @@ class NodeType(BlenderTypeEnum):
|
|||
PointDipoleSource = enum.auto()
|
||||
PlaneWaveSource = enum.auto()
|
||||
UniformCurrentSource = enum.auto()
|
||||
# ModeSource = enum.auto()
|
||||
# GaussianBeamSource = enum.auto()
|
||||
# AstigmaticGaussianBeamSource = enum.auto()
|
||||
# TFSFSource = enum.auto()
|
||||
# EHEquivalenceSource = enum.auto()
|
||||
# EHSource = enum.auto()
|
||||
#ModeSource = enum.auto()
|
||||
#GaussianBeamSource = enum.auto()
|
||||
#AstigmaticGaussianBeamSource = enum.auto()
|
||||
#TFSFSource = enum.auto()
|
||||
#EHEquivalenceSource = enum.auto()
|
||||
#EHSource = enum.auto()
|
||||
## Sources / Temporal Shapes
|
||||
GaussianPulseTemporalShape = enum.auto()
|
||||
# ContinuousWaveTemporalShape = enum.auto()
|
||||
# ArrayTemporalShape = enum.auto()
|
||||
#ContinuousWaveTemporalShape = enum.auto()
|
||||
#ArrayTemporalShape = enum.auto()
|
||||
|
||||
# Mediums
|
||||
LibraryMedium = enum.auto()
|
||||
# PECMedium = enum.auto()
|
||||
# IsotropicMedium = enum.auto()
|
||||
# AnisotropicMedium = enum.auto()
|
||||
# TripleSellmeierMedium = enum.auto()
|
||||
# SellmeierMedium = enum.auto()
|
||||
# PoleResidueMedium = enum.auto()
|
||||
# DrudeMedium = enum.auto()
|
||||
# DrudeLorentzMedium = enum.auto()
|
||||
# DebyeMedium = enum.auto()
|
||||
#PECMedium = enum.auto()
|
||||
#IsotropicMedium = enum.auto()
|
||||
#AnisotropicMedium = enum.auto()
|
||||
#TripleSellmeierMedium = enum.auto()
|
||||
#SellmeierMedium = enum.auto()
|
||||
#PoleResidueMedium = enum.auto()
|
||||
#DrudeMedium = enum.auto()
|
||||
#DrudeLorentzMedium = enum.auto()
|
||||
#DebyeMedium = enum.auto()
|
||||
|
||||
## Mediums / Non-Linearities
|
||||
# AddNonLinearity = enum.auto()
|
||||
# ChiThreeSusceptibilityNonLinearity = enum.auto()
|
||||
# TwoPhotonAbsorptionNonLinearity = enum.auto()
|
||||
# KerrNonLinearity = enum.auto()
|
||||
#AddNonLinearity = enum.auto()
|
||||
#ChiThreeSusceptibilityNonLinearity = enum.auto()
|
||||
#TwoPhotonAbsorptionNonLinearity = enum.auto()
|
||||
#KerrNonLinearity = enum.auto()
|
||||
|
||||
# Structures
|
||||
# ObjectStructure = enum.auto()
|
||||
#ObjectStructure = enum.auto()
|
||||
GeoNodesStructure = enum.auto()
|
||||
# ScriptedStructure = enum.auto()
|
||||
#ScriptedStructure = enum.auto()
|
||||
## Structures / Primitives
|
||||
BoxStructure = enum.auto()
|
||||
SphereStructure = enum.auto()
|
||||
# CylinderStructure = enum.auto()
|
||||
#CylinderStructure = enum.auto()
|
||||
|
||||
# Bounds
|
||||
BoundConds = enum.auto()
|
||||
|
@ -99,22 +99,22 @@ class NodeType(BlenderTypeEnum):
|
|||
# Monitors
|
||||
EHFieldMonitor = enum.auto()
|
||||
PowerFluxMonitor = enum.auto()
|
||||
# EpsilonTensorMonitor = enum.auto()
|
||||
# DiffractionMonitor = enum.auto()
|
||||
#EpsilonTensorMonitor = enum.auto()
|
||||
#DiffractionMonitor = enum.auto()
|
||||
## Monitors / Projected
|
||||
# CartesianNearFieldProjectionMonitor = enum.auto()
|
||||
# ObservationAngleNearFieldProjectionMonitor = enum.auto()
|
||||
# KSpaceNearFieldProjectionMonitor = enum.auto()
|
||||
#CartesianNearFieldProjectionMonitor = enum.auto()
|
||||
#ObservationAngleNearFieldProjectionMonitor = enum.auto()
|
||||
#KSpaceNearFieldProjectionMonitor = enum.auto()
|
||||
|
||||
# Sims
|
||||
FDTDSim = enum.auto()
|
||||
SimDomain = enum.auto()
|
||||
SimGrid = enum.auto()
|
||||
## Sims / Sim Grid Axis
|
||||
# AutomaticSimGridAxis = enum.auto()
|
||||
# ManualSimGridAxis = enum.auto()
|
||||
# UniformSimGridAxis = enum.auto()
|
||||
# ArraySimGridAxis = enum.auto()
|
||||
#AutomaticSimGridAxis = enum.auto()
|
||||
#ManualSimGridAxis = enum.auto()
|
||||
#UniformSimGridAxis = enum.auto()
|
||||
#ArraySimGridAxis = enum.auto()
|
||||
|
||||
# Utilities
|
||||
Combine = enum.auto()
|
||||
|
|
|
@ -7,7 +7,6 @@ SOCKET_COLORS = {
|
|||
ST.Bool: (0.7, 0.7, 0.7, 1.0), # Medium Light Grey
|
||||
ST.String: (0.7, 0.7, 0.7, 1.0), # Medium Light Grey
|
||||
ST.FilePath: (0.6, 0.6, 0.6, 1.0), # Medium Grey
|
||||
ST.Expr: (0.5, 0.5, 0.5, 1.0), # Medium Grey
|
||||
# Number
|
||||
ST.IntegerNumber: (0.5, 0.5, 1.0, 1.0), # Light Blue
|
||||
ST.RationalNumber: (0.4, 0.4, 0.9, 1.0), # Medium Light Blue
|
||||
|
|
|
@ -0,0 +1,63 @@
|
|||
from .socket_types import SocketType as ST
|
||||
|
||||
SOCKET_SHAPES = {
|
||||
# Basic
|
||||
ST.Any: 'CIRCLE',
|
||||
ST.Bool: 'CIRCLE',
|
||||
ST.String: 'CIRCLE',
|
||||
ST.FilePath: 'CIRCLE',
|
||||
# Number
|
||||
ST.IntegerNumber: 'CIRCLE',
|
||||
ST.RationalNumber: 'CIRCLE',
|
||||
ST.RealNumber: 'CIRCLE',
|
||||
ST.ComplexNumber: 'CIRCLE',
|
||||
# Vector
|
||||
ST.Integer2DVector: 'CIRCLE',
|
||||
ST.Real2DVector: 'CIRCLE',
|
||||
ST.Complex2DVector: 'CIRCLE',
|
||||
ST.Integer3DVector: 'CIRCLE',
|
||||
ST.Real3DVector: 'CIRCLE',
|
||||
ST.Complex3DVector: 'CIRCLE',
|
||||
# Physical
|
||||
ST.PhysicalUnitSystem: 'CIRCLE',
|
||||
ST.PhysicalTime: 'CIRCLE',
|
||||
ST.PhysicalAngle: 'CIRCLE',
|
||||
ST.PhysicalLength: 'CIRCLE',
|
||||
ST.PhysicalArea: 'CIRCLE',
|
||||
ST.PhysicalVolume: 'CIRCLE',
|
||||
ST.PhysicalPoint2D: 'CIRCLE',
|
||||
ST.PhysicalPoint3D: 'CIRCLE',
|
||||
ST.PhysicalSize2D: 'CIRCLE',
|
||||
ST.PhysicalSize3D: 'CIRCLE',
|
||||
ST.PhysicalMass: 'CIRCLE',
|
||||
ST.PhysicalSpeed: 'CIRCLE',
|
||||
ST.PhysicalAccelScalar: 'CIRCLE',
|
||||
ST.PhysicalForceScalar: 'CIRCLE',
|
||||
ST.PhysicalAccel3D: 'CIRCLE',
|
||||
ST.PhysicalForce3D: 'CIRCLE',
|
||||
ST.PhysicalPol: 'CIRCLE',
|
||||
ST.PhysicalFreq: 'CIRCLE',
|
||||
# Blender
|
||||
ST.BlenderMaterial: 'DIAMOND',
|
||||
ST.BlenderObject: 'DIAMOND',
|
||||
ST.BlenderCollection: 'DIAMOND',
|
||||
ST.BlenderImage: 'DIAMOND',
|
||||
ST.BlenderGeoNodes: 'DIAMOND',
|
||||
ST.BlenderText: 'DIAMOND',
|
||||
# Maxwell
|
||||
ST.MaxwellSource: 'CIRCLE',
|
||||
ST.MaxwellTemporalShape: 'CIRCLE',
|
||||
ST.MaxwellMedium: 'CIRCLE',
|
||||
ST.MaxwellMediumNonLinearity: 'CIRCLE',
|
||||
ST.MaxwellStructure: 'CIRCLE',
|
||||
ST.MaxwellBoundConds: 'CIRCLE',
|
||||
ST.MaxwellBoundCond: 'CIRCLE',
|
||||
ST.MaxwellMonitor: 'CIRCLE',
|
||||
ST.MaxwellFDTDSim: 'CIRCLE',
|
||||
ST.MaxwellFDTDSimData: 'CIRCLE',
|
||||
ST.MaxwellSimGrid: 'CIRCLE',
|
||||
ST.MaxwellSimGridAxis: 'CIRCLE',
|
||||
ST.MaxwellSimDomain: 'CIRCLE',
|
||||
# Tidy3D
|
||||
ST.Tidy3DCloudTask: 'DIAMOND',
|
||||
}
|
|
@ -1,6 +1,6 @@
|
|||
import enum
|
||||
|
||||
from blender_maxwell.utils.blender_type_enum import (
|
||||
from ....utils.blender_type_enum import (
|
||||
BlenderTypeEnum,
|
||||
append_cls_name_to_values,
|
||||
)
|
||||
|
@ -14,7 +14,6 @@ class SocketType(BlenderTypeEnum):
|
|||
String = enum.auto()
|
||||
FilePath = enum.auto()
|
||||
Color = enum.auto()
|
||||
Expr = enum.auto()
|
||||
|
||||
# Number
|
||||
IntegerNumber = enum.auto()
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
import sympy.physics.units as spu
|
||||
|
||||
from blender_maxwell.utils import extra_sympy_units as spux
|
||||
|
||||
from ....utils import extra_sympy_units as spux
|
||||
from .socket_types import SocketType as ST # noqa: N817
|
||||
|
||||
SOCKET_UNITS = {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import enum
|
||||
|
||||
from blender_maxwell.utils.blender_type_enum import (
|
||||
from ....utils.blender_type_enum import (
|
||||
BlenderTypeEnum,
|
||||
append_cls_name_to_values,
|
||||
)
|
|
@ -2,9 +2,8 @@ import typing as typ
|
|||
|
||||
import sympy.physics.units as spu
|
||||
|
||||
from blender_maxwell.utils import extra_sympy_units as spux
|
||||
from blender_maxwell.utils.pydantic_sympy import SympyExpr
|
||||
|
||||
from ....utils import extra_sympy_units as spux
|
||||
from ....utils.pydantic_sympy import SympyExpr
|
||||
from .socket_types import SocketType as ST # noqa: N817
|
||||
from .socket_units import SOCKET_UNITS
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
|
||||
import bpy
|
||||
|
||||
from blender_maxwell.utils import logger
|
||||
from ....utils import logger
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
||||
|
|
|
@ -7,9 +7,9 @@ import jax.numpy as jnp
|
|||
import matplotlib
|
||||
import matplotlib.axis as mpl_ax
|
||||
import numpy as np
|
||||
import typing_extensions as typx
|
||||
|
||||
from blender_maxwell.utils import logger
|
||||
|
||||
from ....utils import logger
|
||||
from .. import contracts as ct
|
||||
from . import base
|
||||
|
||||
|
@ -23,9 +23,6 @@ _MPL_CM = matplotlib.cm.get_cmap('viridis', 512)
|
|||
VIRIDIS_COLORMAP = jnp.array([_MPL_CM(i)[:3] for i in range(512)])
|
||||
|
||||
|
||||
####################
|
||||
# - Image Functions
|
||||
####################
|
||||
def apply_colormap(normalized_data, colormap):
|
||||
# Linear interpolation between colormap points
|
||||
n_colors = colormap.shape[0]
|
||||
|
@ -41,8 +38,8 @@ def apply_colormap(normalized_data, colormap):
|
|||
|
||||
|
||||
@jax.jit
|
||||
def rgba_image_from_2d_map__viridis(map_2d):
|
||||
amplitude = jnp.abs(map_2d)
|
||||
def rgba_image_from_xyzf__viridis(xyz_freq):
|
||||
amplitude = jnp.abs(jnp.squeeze(xyz_freq))
|
||||
amplitude_normalized = (amplitude - amplitude.min()) / (
|
||||
amplitude.max() - amplitude.min()
|
||||
)
|
||||
|
@ -52,8 +49,8 @@ def rgba_image_from_2d_map__viridis(map_2d):
|
|||
|
||||
|
||||
@jax.jit
|
||||
def rgba_image_from_2d_map__grayscale(map_2d):
|
||||
amplitude = jnp.abs(map_2d)
|
||||
def rgba_image_from_xyzf__grayscale(xyz_freq):
|
||||
amplitude = jnp.abs(jnp.squeeze(xyz_freq))
|
||||
amplitude_normalized = (amplitude - amplitude.min()) / (
|
||||
amplitude.max() - amplitude.min()
|
||||
)
|
||||
|
@ -62,24 +59,23 @@ def rgba_image_from_2d_map__grayscale(map_2d):
|
|||
return jnp.dstack((rgb_array, alpha_channel))
|
||||
|
||||
|
||||
def rgba_image_from_2d_map(map_2d, colormap: str | None = None):
|
||||
"""RGBA Image from a map of 2D coordinates to values.
|
||||
def rgba_image_from_xyzf(xyz_freq, colormap: str | None = None):
|
||||
"""RGBA Image from Squeezable XYZ-Freq w/fixed freq.
|
||||
|
||||
Parameters:
|
||||
map_2d: Shape (width, height, value).
|
||||
xyz_freq: Shape (xlen, ylen, zlen), one dimension has length 1.
|
||||
width_px: Pixel width to resize the image to.
|
||||
height: Pixel height to resize the image to.
|
||||
|
||||
Returns:
|
||||
Image as a JAX array of shape (height, width, 4)
|
||||
Image as a JAX array of shape (height, width, 3)
|
||||
"""
|
||||
if colormap == 'VIRIDIS':
|
||||
return rgba_image_from_2d_map__viridis(map_2d)
|
||||
return rgba_image_from_xyzf__viridis(xyz_freq)
|
||||
if colormap == 'GRAYSCALE':
|
||||
return rgba_image_from_2d_map__grayscale(map_2d)
|
||||
return rgba_image_from_xyzf__grayscale(xyz_freq)
|
||||
|
||||
|
||||
####################
|
||||
# - Managed BL Image
|
||||
####################
|
||||
class ManagedBLImage(base.ManagedObj):
|
||||
managed_obj_type = ct.ManagedObjType.ManagedBLImage
|
||||
_bl_image_name: str
|
||||
|
@ -122,8 +118,8 @@ class ManagedBLImage(base.ManagedObj):
|
|||
self,
|
||||
width_px: int,
|
||||
height_px: int,
|
||||
color_model: typ.Literal['RGB', 'RGBA'],
|
||||
dtype: typ.Literal['uint8', 'float32'],
|
||||
color_model: typx.Literal['RGB', 'RGBA'],
|
||||
dtype: typx.Literal['uint8', 'float32'],
|
||||
):
|
||||
"""Returns the managed blender image.
|
||||
|
||||
|
@ -176,7 +172,7 @@ class ManagedBLImage(base.ManagedObj):
|
|||
)
|
||||
|
||||
####################
|
||||
# - Methods
|
||||
# - Actions
|
||||
####################
|
||||
def bl_select(self) -> None:
|
||||
"""Synchronizes the managed object to the preview, by manipulating
|
||||
|
@ -231,11 +227,11 @@ class ManagedBLImage(base.ManagedObj):
|
|||
####################
|
||||
# - Special Methods
|
||||
####################
|
||||
def map_2d_to_image(
|
||||
self, map_2d, colormap: str | None = 'VIRIDIS', bl_select: bool = False
|
||||
def xyzf_to_image(
|
||||
self, xyz_freq, colormap: str | None = 'VIRIDIS', bl_select: bool = False
|
||||
):
|
||||
self.data_to_image(
|
||||
lambda _: rgba_image_from_2d_map(map_2d, colormap=colormap),
|
||||
lambda _: rgba_image_from_xyzf(xyz_freq, colormap=colormap),
|
||||
bl_select=bl_select,
|
||||
)
|
||||
|
||||
|
|
|
@ -4,8 +4,7 @@ import bmesh
|
|||
import bpy
|
||||
import numpy as np
|
||||
|
||||
from blender_maxwell.utils import logger
|
||||
|
||||
from ....utils import logger
|
||||
from .. import contracts as ct
|
||||
from . import base
|
||||
from .managed_bl_collection import managed_collection, preview_collection
|
||||
|
@ -99,7 +98,7 @@ class ManagedBLMesh(base.ManagedObj):
|
|||
bpy.data.meshes.remove(bl_object.data)
|
||||
|
||||
####################
|
||||
# - Methods
|
||||
# - Actions
|
||||
####################
|
||||
def show_preview(self) -> None:
|
||||
"""Moves the managed Blender object to the preview collection.
|
||||
|
|
|
@ -3,15 +3,17 @@
|
|||
import typing as typ
|
||||
|
||||
import bpy
|
||||
import typing_extensions as typx
|
||||
|
||||
from blender_maxwell.utils import analyze_geonodes, logger
|
||||
|
||||
from ....utils import analyze_geonodes, logger
|
||||
from .. import bl_socket_map
|
||||
from .. import contracts as ct
|
||||
from . import base
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
||||
ModifierType: typ.TypeAlias = typx.Literal['NODES', 'ARRAY']
|
||||
NodeTreeInterfaceID: typ.TypeAlias = str
|
||||
UnitSystem: typ.TypeAlias = typ.Any
|
||||
|
||||
|
||||
|
@ -31,7 +33,7 @@ class ModifierAttrsNODES(typ.TypedDict):
|
|||
|
||||
node_group: bpy.types.GeometryNodeTree
|
||||
unit_system: UnitSystem
|
||||
inputs: dict[ct.BLNodeTreeInterfaceID, typ.Any]
|
||||
inputs: dict[NodeTreeInterfaceID, typ.Any]
|
||||
|
||||
|
||||
class ModifierAttrsARRAY(typ.TypedDict):
|
||||
|
@ -220,7 +222,7 @@ class ManagedBLModifier(base.ManagedObj):
|
|||
def bl_modifier(
|
||||
self,
|
||||
bl_object: bpy.types.Object,
|
||||
modifier_type: ct.BLModifierType,
|
||||
modifier_type: ModifierType,
|
||||
modifier_attrs: ModifierAttrs,
|
||||
):
|
||||
"""Creates a new modifier for the current `bl_object`.
|
||||
|
|
|
@ -3,8 +3,7 @@ import typing as typ
|
|||
|
||||
import bpy
|
||||
|
||||
from blender_maxwell.utils import logger
|
||||
|
||||
from ...utils import logger
|
||||
from . import contracts as ct
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
@ -344,11 +343,9 @@ class MaxwellSimTree(bpy.types.NodeTree):
|
|||
## The link has already been removed, but we can fix that.
|
||||
## If NO: Queue re-adding the link (safe since the sockets exist)
|
||||
## TODO: Crash if deleting removing linked loose sockets.
|
||||
consent_removal = to_socket.allow_remove_link(from_socket)
|
||||
consent_removal = to_socket.sync_link_removed(from_socket)
|
||||
if not consent_removal:
|
||||
link_corrections['to_add'].append((from_socket, to_socket))
|
||||
else:
|
||||
to_socket.on_link_removed(from_socket)
|
||||
|
||||
# Ensure Removal of Socket PTRs, PTRs->REFs
|
||||
self.node_link_cache.remove_sockets_by_link_ptr(link_ptr)
|
||||
|
@ -357,14 +354,12 @@ class MaxwellSimTree(bpy.types.NodeTree):
|
|||
# Retrieve Link Reference
|
||||
link = self.node_link_cache.link_ptrs_as_links[link_ptr]
|
||||
|
||||
# Ask 'to_socket' for Consent to Add Link
|
||||
# Ask 'to_socket' for Consent to Remove Link
|
||||
## The link has already been added, but we can fix that.
|
||||
## If NO: Queue re-adding the link (safe since the sockets exist)
|
||||
consent_added = link.to_socket.allow_add_link(link)
|
||||
consent_added = link.to_socket.sync_link_added(link)
|
||||
if not consent_added:
|
||||
link_corrections['to_remove'].append(link)
|
||||
else:
|
||||
link.to_socket.on_link_added(link)
|
||||
|
||||
# Link Corrections
|
||||
## ADD: Links that 'to_socket' don't want removed.
|
||||
|
|
|
@ -1,12 +1,10 @@
|
|||
from . import extract_data, math, viz
|
||||
from . import extract_data, viz
|
||||
|
||||
BL_REGISTER = [
|
||||
*extract_data.BL_REGISTER,
|
||||
*viz.BL_REGISTER,
|
||||
*math.BL_REGISTER,
|
||||
]
|
||||
BL_NODES = {
|
||||
**extract_data.BL_NODES,
|
||||
**viz.BL_NODES,
|
||||
**math.BL_NODES,
|
||||
}
|
||||
|
|
|
@ -1,20 +1,19 @@
|
|||
import typing as typ
|
||||
|
||||
import bpy
|
||||
import jax.numpy as jnp
|
||||
import sympy.physics.units as spu
|
||||
|
||||
from blender_maxwell.utils import bl_cache, logger
|
||||
|
||||
from .....utils import logger
|
||||
from ... import contracts as ct
|
||||
from ... import sockets
|
||||
from .. import base, events
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
||||
CACHE_SIM_DATA = {}
|
||||
|
||||
|
||||
class ExtractDataNode(base.MaxwellSimNode):
|
||||
"""Node for extracting data from particular objects."""
|
||||
"""Node for extracting data from other objects."""
|
||||
|
||||
node_type = ct.NodeType.ExtractData
|
||||
bl_label = 'Extract'
|
||||
|
@ -29,196 +28,228 @@ class ExtractDataNode(base.MaxwellSimNode):
|
|||
}
|
||||
|
||||
####################
|
||||
# - Properties
|
||||
# - Properties: Sim Data
|
||||
####################
|
||||
extract_filter: bpy.props.EnumProperty(
|
||||
name='Extract Filter',
|
||||
description='Data to extract from the input',
|
||||
search=lambda self, _, edit_text: self.search_extract_filters(edit_text),
|
||||
update=lambda self, context: self.on_prop_changed('extract_filter', context),
|
||||
sim_data__monitor_name: bpy.props.EnumProperty(
|
||||
name='Sim Data Monitor Name',
|
||||
description='Monitor to extract from the attached SimData',
|
||||
items=lambda self, context: self.search_monitors(context),
|
||||
update=lambda self, context: self.sync_prop('sim_data__monitor_name', context),
|
||||
)
|
||||
|
||||
# Sim Data
|
||||
sim_data_monitor_nametype: dict[str, str] = bl_cache.BLField({})
|
||||
cache__num_monitors: bpy.props.StringProperty(default='')
|
||||
cache__monitor_names: bpy.props.StringProperty(default='')
|
||||
cache__monitor_types: bpy.props.StringProperty(default='')
|
||||
|
||||
# Field Data
|
||||
field_data_components: set[str] = bl_cache.BLField(set())
|
||||
def search_monitors(self, _: bpy.types.Context) -> list[tuple[str, str, str]]:
|
||||
"""Search the linked simulation data for monitors."""
|
||||
# No Linked Sim Data: Return 'None'
|
||||
if not self.inputs.get('Sim Data') or not self.inputs['Sim Data'].is_linked:
|
||||
return [('NONE', 'None', 'No monitors')]
|
||||
|
||||
def search_extract_filters(
|
||||
self, _: bpy.types.Context
|
||||
) -> list[tuple[str, str, str]]:
|
||||
# Sim Data
|
||||
if self.active_socket_set == 'Sim Data' and self.inputs['Sim Data'].is_linked:
|
||||
# Return Monitor Names
|
||||
## Special Case for No Monitors
|
||||
monitor_names = (
|
||||
self.cache__monitor_names.split(',') if self.cache__monitor_names else []
|
||||
)
|
||||
monitor_types = (
|
||||
self.cache__monitor_types.split(',') if self.cache__monitor_types else []
|
||||
)
|
||||
if len(monitor_names) == 0:
|
||||
return [('NONE', 'None', 'No monitors')]
|
||||
return [
|
||||
(
|
||||
monitor_name,
|
||||
f'{monitor_name}',
|
||||
f'Monitor "{monitor_name}" ({monitor_type}) recorded by the Sim',
|
||||
)
|
||||
for monitor_name, monitor_type in self.sim_data_monitor_nametype.items()
|
||||
for monitor_name, monitor_type in zip(
|
||||
monitor_names, monitor_types, strict=False
|
||||
)
|
||||
]
|
||||
|
||||
# Field Data
|
||||
if self.active_socket_set == 'Field Data' and self.inputs['Sim Data'].is_linked:
|
||||
return [
|
||||
([('Ex', 'Ex', 'Ex')] if 'Ex' in self.field_data_components else [])
|
||||
+ ([('Ey', 'Ey', 'Ey')] if 'Ey' in self.field_data_components else [])
|
||||
+ ([('Ez', 'Ez', 'Ez')] if 'Ez' in self.field_data_components else [])
|
||||
+ ([('Hx', 'Hx', 'Hx')] if 'Hx' in self.field_data_components else [])
|
||||
+ ([('Hy', 'Hy', 'Hy')] if 'Hy' in self.field_data_components else [])
|
||||
+ ([('Hz', 'Hz', 'Hz')] if 'Hz' in self.field_data_components else [])
|
||||
]
|
||||
def draw_props__sim_data(
|
||||
self, _: bpy.types.Context, col: bpy.types.UILayout
|
||||
) -> None:
|
||||
col.prop(self, 'sim_data__monitor_name', text='')
|
||||
|
||||
# Flux Data
|
||||
## Nothing to extract.
|
||||
|
||||
# Fallback
|
||||
return []
|
||||
|
||||
####################
|
||||
# - UI
|
||||
####################
|
||||
def draw_props(self, _: bpy.types.Context, col: bpy.types.UILayout) -> None:
|
||||
col.prop(self, 'extract_filter', text='')
|
||||
|
||||
def draw_info(self, _: bpy.types.Context, col: bpy.types.UILayout) -> None:
|
||||
if self.active_socket_set == 'Sim Data' and self.inputs['Sim Data'].is_linked:
|
||||
def draw_info__sim_data(
|
||||
self, _: bpy.types.Context, col: bpy.types.UILayout
|
||||
) -> None:
|
||||
if self.sim_data__monitor_name != 'NONE':
|
||||
# Header
|
||||
row = col.row()
|
||||
row.alignment = 'CENTER'
|
||||
row.label(text=f'{self.cache__num_monitors} Monitors')
|
||||
|
||||
# Monitor Info
|
||||
if len(self.sim_data_monitor_nametype) > 0:
|
||||
for (
|
||||
monitor_name,
|
||||
monitor_type,
|
||||
) in self.sim_data_monitor_nametype.items():
|
||||
if int(self.cache__num_monitors) > 0:
|
||||
for monitor_name, monitor_type in zip(
|
||||
self.cache__monitor_names.split(','),
|
||||
self.cache__monitor_types.split(','),
|
||||
strict=False,
|
||||
):
|
||||
col.label(text=f'{monitor_name}: {monitor_type}')
|
||||
|
||||
####################
|
||||
# - Events
|
||||
# - Events: Sim Data
|
||||
####################
|
||||
@events.on_value_changed(
|
||||
socket_name='Sim Data',
|
||||
input_sockets={'Sim Data'},
|
||||
input_sockets_optional={'Sim Data': True},
|
||||
)
|
||||
def on_sim_data_changed(self, input_sockets: dict):
|
||||
if input_sockets['Sim Data'] is not None:
|
||||
self.sim_data_monitor_nametype = {
|
||||
monitor_name: monitor_data.type
|
||||
for monitor_name, monitor_data in input_sockets[
|
||||
'Sim Data'
|
||||
].monitor_data.items()
|
||||
}
|
||||
def on_sim_data_changed(self):
|
||||
# SimData Cache Hit and SimData Input Unlinked
|
||||
## Delete Cache Entry
|
||||
if (
|
||||
CACHE_SIM_DATA.get(self.instance_id) is not None
|
||||
and not self.inputs['Sim Data'].is_linked
|
||||
):
|
||||
CACHE_SIM_DATA.pop(self.instance_id, None) ## Both member-check
|
||||
self.cache__num_monitors = ''
|
||||
self.cache__monitor_names = ''
|
||||
self.cache__monitor_types = ''
|
||||
|
||||
# SimData Cache Miss and Linked SimData
|
||||
if (
|
||||
CACHE_SIM_DATA.get(self.instance_id) is None
|
||||
and self.inputs['Sim Data'].is_linked
|
||||
):
|
||||
sim_data = self._compute_input('Sim Data')
|
||||
|
||||
## Create Cache Entry
|
||||
CACHE_SIM_DATA[self.instance_id] = {
|
||||
'sim_data': sim_data,
|
||||
'monitor_names': list(sim_data.monitor_data.keys()),
|
||||
'monitor_types': [
|
||||
monitor_data.type for monitor_data in sim_data.monitor_data.values()
|
||||
],
|
||||
}
|
||||
cache = CACHE_SIM_DATA[self.instance_id]
|
||||
self.cache__num_monitors = str(len(cache['monitor_names']))
|
||||
self.cache__monitor_names = ','.join(cache['monitor_names'])
|
||||
self.cache__monitor_types = ','.join(cache['monitor_types'])
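The block above keeps the heavyweight `SimulationData` in the module-level `CACHE_SIM_DATA` dict, keyed by `instance_id`, and mirrors only small strings (counts, names) into node properties for the UI. A minimal sketch of that pattern in plain Python, with illustrative names and no Blender dependency:

```python
# Toy sketch of the per-node cache pattern: heavyweight data lives in a module-level
# dict keyed by a per-instance id, while only small strings are mirrored for the UI.
CACHE_SIM_DATA: dict[str, dict] = {}

def refresh_cache(instance_id: str, monitor_data: dict) -> tuple[str, str]:
    CACHE_SIM_DATA[instance_id] = {
        'monitor_names': list(monitor_data.keys()),
        'monitor_types': [type(v).__name__ for v in monitor_data.values()],
    }
    cache = CACHE_SIM_DATA[instance_id]
    return str(len(cache['monitor_names'])), ','.join(cache['monitor_names'])

print(refresh_cache('node-0', {'field': object(), 'flux': object()}))
# ('2', 'field,flux')
```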
|
||||
|
||||
####################
|
||||
# - Properties: Field Data
|
||||
####################
|
||||
field_data__component: bpy.props.EnumProperty(
|
||||
name='Field Data Component',
|
||||
description='Field monitor component to extract from the attached Field Data',
|
||||
items=lambda self, context: self.search_field_data_components(context),
|
||||
update=lambda self, context: self.sync_prop('field_data__component', context),
|
||||
)
|
||||
|
||||
cache__components: bpy.props.StringProperty(default='')
|
||||
|
||||
def search_field_data_components(
|
||||
self, _: bpy.types.Context
|
||||
) -> list[tuple[str, str, str]]:
|
||||
if not self.inputs.get('Field Data') or not self.inputs['Field Data'].is_linked:
|
||||
return [('NONE', 'None', 'No data')]
|
||||
|
||||
if not self.cache__components:
|
||||
return [('NONE', 'Loading...', 'Loading data...')]
|
||||
|
||||
components = [
|
||||
tuple(component_str.split(','))
|
||||
for component_str in self.cache__components.split('|')
|
||||
]
|
||||
|
||||
if len(components) == 0:
|
||||
return [('NONE', 'None', 'No components')]
|
||||
return components
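The `search_field_data_components` callback rebuilds its enum items from `cache__components`, a single string property. A small sketch of the pack/unpack round-trip it assumes, in plain Python with hypothetical helper names (no `bpy` required):

```python
# Tuples of (identifier, name, description) are packed into one string so a dynamic
# EnumProperty items-callback can rebuild them later without holding Python objects.
def pack_components(components: list[tuple[str, str, str]]) -> str:
    # One component per '|'-separated field; fields themselves are ','-separated.
    return '|'.join(','.join(component) for component in components)

def unpack_components(cache_str: str) -> list[tuple[str, str, str]]:
    if not cache_str:
        return [('NONE', 'None', 'No data')]
    return [tuple(component_str.split(',')) for component_str in cache_str.split('|')]

# Example round-trip
packed = pack_components([('Ex', 'Ex', 'Ex'), ('Hy', 'Hy', 'Hy')])
assert unpack_components(packed) == [('Ex', 'Ex', 'Ex'), ('Hy', 'Hy', 'Hy')]
```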
|
||||
|
||||
def draw_props__field_data(
|
||||
self, _: bpy.types.Context, col: bpy.types.UILayout
|
||||
) -> None:
|
||||
col.prop(self, 'field_data__component', text='')
|
||||
|
||||
def draw_info__field_data(
|
||||
self, _: bpy.types.Context, col: bpy.types.UILayout
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
####################
|
||||
# - Events: Field Data
|
||||
####################
|
||||
@events.on_value_changed(
|
||||
socket_name='Field Data',
|
||||
input_sockets={'Field Data'},
|
||||
input_sockets_optional={'Field Data': True},
|
||||
)
|
||||
def on_field_data_changed(self, input_sockets: dict):
    if input_sockets['Field Data'] is not None:
        self.field_data_components = {
            component
            for component in ('Ex', 'Ey', 'Ez', 'Hx', 'Hy', 'Hz')
            if getattr(input_sockets['Field Data'], component) is not None
        }
|
||||
def on_field_data_changed(self):
|
||||
if self.inputs['Field Data'].is_linked and not self.cache__components:
|
||||
field_data = self._compute_input('Field Data')
|
||||
components = [
|
||||
*([('Ex', 'Ex', 'Ex')] if field_data.Ex is not None else []),
|
||||
*([('Ey', 'Ey', 'Ey')] if field_data.Ey is not None else []),
|
||||
*([('Ez', 'Ez', 'Ez')] if field_data.Ez is not None else []),
|
||||
*([('Hx', 'Hx', 'Hx')] if field_data.Hx is not None else []),
|
||||
*([('Hy', 'Hy', 'Hy')] if field_data.Hy is not None else []),
|
||||
*([('Hz', 'Hz', 'Hz')] if field_data.Hz is not None else []),
|
||||
]
|
||||
self.cache__components = '|'.join(
|
||||
[','.join(component) for component in components]
|
||||
)
|
||||
|
||||
elif not self.inputs['Field Data'].is_linked and self.cache__components:
|
||||
self.cache__components = ''
|
||||
|
||||
####################
|
||||
# - Output: Value
|
||||
# - Flux Data
|
||||
####################
|
||||
|
||||
def draw_props__flux_data(
|
||||
self, _: bpy.types.Context, col: bpy.types.UILayout
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
def draw_info__flux_data(
|
||||
self, _: bpy.types.Context, col: bpy.types.UILayout
|
||||
) -> None:
|
||||
pass
|
||||
|
||||
####################
|
||||
# - Global
|
||||
####################
|
||||
def draw_props(self, context: bpy.types.Context, col: bpy.types.UILayout) -> None:
|
||||
if self.active_socket_set == 'Sim Data':
|
||||
self.draw_props__sim_data(context, col)
|
||||
if self.active_socket_set == 'Field Data':
|
||||
self.draw_props__field_data(context, col)
|
||||
if self.active_socket_set == 'Flux Data':
|
||||
self.draw_props__flux_data(context, col)
|
||||
|
||||
def draw_info(self, context: bpy.types.Context, col: bpy.types.UILayout) -> None:
|
||||
if self.active_socket_set == 'Sim Data':
|
||||
self.draw_info__sim_data(context, col)
|
||||
if self.active_socket_set == 'Field Data':
|
||||
self.draw_info__field_data(context, col)
|
||||
if self.active_socket_set == 'Flux Data':
|
||||
self.draw_info__flux_data(context, col)
|
||||
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.Value,
|
||||
props={'active_socket_set', 'extract_filter'},
|
||||
input_sockets={'Sim Data', 'Field Data', 'Flux Data'},
|
||||
input_sockets_optional={
|
||||
'Sim Data': True,
|
||||
'Field Data': True,
|
||||
'Flux Data': True,
|
||||
},
|
||||
props={'sim_data__monitor_name', 'field_data__component'},
|
||||
)
|
||||
def compute_extracted_data(self, props: dict, input_sockets: dict):
|
||||
if props['active_socket_set'] == 'Sim Data':
|
||||
return input_sockets['Sim Data'].monitor_data[props['extract_filter']]
|
||||
def compute_extracted_data(self, props: dict):
|
||||
if self.active_socket_set == 'Sim Data':
|
||||
if (
|
||||
CACHE_SIM_DATA.get(self.instance_id) is None
|
||||
and self.inputs['Sim Data'].is_linked
|
||||
):
|
||||
self.on_sim_data_changed()
|
||||
|
||||
if props['active_socket_set'] == 'Field Data':
|
||||
return getattr(input_sockets['Field Data'], props['extract_filter'])
|
||||
sim_data = CACHE_SIM_DATA[self.instance_id]['sim_data']
|
||||
return sim_data.monitor_data[props['sim_data__monitor_name']]
|
||||
|
||||
if props['active_socket_set'] == 'Flux Data':
|
||||
return input_sockets['Flux Data']
|
||||
elif self.active_socket_set == 'Field Data': # noqa: RET505
|
||||
field_data = self._compute_input('Field Data')
|
||||
return getattr(field_data, props['field_data__component'])
|
||||
|
||||
msg = f'Tried to get a "FlowKind.Value" from socket set {props["active_socket_set"]} in "{self.bl_label}"'
|
||||
raise RuntimeError(msg)
|
||||
elif self.active_socket_set == 'Flux Data':
|
||||
flux_data = self._compute_input('Flux Data')
|
||||
return flux_data.flux
|
||||
|
||||
####################
|
||||
# - Output: LazyValueFunc
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.LazyValueFunc,
|
||||
props={'active_socket_set'},
|
||||
output_sockets={'Data'},
|
||||
output_socket_kinds={'Data': ct.FlowKind.Value},
|
||||
)
|
||||
def compute_extracted_data_lazy(self, props: dict, output_sockets: dict):
|
||||
if self.active_socket_set in {'Field Data', 'Flux Data'}:
|
||||
data = jnp.array(output_sockets['Data'].data)
|
||||
return ct.LazyValueFuncFlow(func=lambda: data, supports_jax=True)
|
||||
|
||||
msg = f'Tried to get a "FlowKind.LazyValueFunc" from socket set {props["active_socket_set"]} in "{self.bl_label}"'
|
||||
raise RuntimeError(msg)
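A rough sketch of the idea behind `LazyValueFuncFlow` as used here: wrap the already-extracted array in a zero-argument closure, so downstream math nodes can compose and `jit` it without recomputing the extraction. The helper name is illustrative, not the addon's API:

```python
import jax
import jax.numpy as jnp

# Hypothetical stand-in for ct.LazyValueFuncFlow: a zero-argument closure over the
# extracted array, which downstream nodes can wrap further and jit once at the end.
def make_lazy(data):
    arr = jnp.asarray(data)
    return lambda: arr

lazy = make_lazy([[1.0, 2.0], [3.0, 4.0]])
doubled = jax.jit(lambda: 2.0 * lazy())()  # composition stays jit-compatible
```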
|
||||
|
||||
####################
|
||||
# - Output: Info
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.Info,
|
||||
props={'active_socket_set'},
|
||||
output_sockets={'Data'},
|
||||
output_socket_kinds={'Data': ct.FlowKind.Value},
|
||||
)
|
||||
def compute_extracted_data_info(self, props: dict, output_sockets: dict):
|
||||
if props['active_socket_set'] == 'Field Data':
|
||||
xarr = output_sockets['Data']
|
||||
return ct.InfoFlow(
|
||||
dim_names=['x', 'y', 'z', 'f'],
|
||||
dim_idx={
|
||||
axis: ct.ArrayFlow(values=xarr.get_index(axis).values, unit=spu.um)
|
||||
for axis in ['x', 'y', 'z']
|
||||
}
|
||||
| {
|
||||
'f': ct.ArrayFlow(
|
||||
values=xarr.get_index('f').values, unit=spu.hertz
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
if props['active_socket_set'] == 'Flux Data':
|
||||
xarr = output_sockets['Data']
|
||||
return ct.InfoFlow(
|
||||
dim_names=['f'],
|
||||
dim_idx={
|
||||
'f': ct.ArrayFlow(
|
||||
values=xarr.get_index('f').values, unit=spu.hertz
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
msg = f'Tried to get a "FlowKind.Info" from socket set {props["active_socket_set"]} in "{self.bl_label}"'
|
||||
msg = f'Tried to get data from unknown output socket in "{self.bl_label}"'
|
||||
raise RuntimeError(msg)
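The `InfoFlow` construction above pulls per-dimension index arrays from the monitor's xarray via `get_index()`. A standalone sketch of that lookup, using a plain dict in place of `ct.InfoFlow`/`ct.ArrayFlow` and made-up coordinate values:

```python
import numpy as np
import xarray as xr

# Build a small stand-in monitor array, then collect one index array per dimension.
xarr = xr.DataArray(
    np.zeros((2, 2, 2, 3)),
    coords={'x': [0.0, 1.0], 'y': [0.0, 1.0], 'z': [0.0, 1.0], 'f': [1e14, 2e14, 3e14]},
    dims=['x', 'y', 'z', 'f'],
)
dim_idx = {axis: xarr.get_index(axis).values for axis in ['x', 'y', 'z', 'f']}
print(dim_idx['f'])  # -> array([1.e+14, 2.e+14, 3.e+14])
```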
|
||||
|
||||
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
from . import filter_math, map_math, operate_math, reduce_math
|
||||
|
||||
BL_REGISTER = [
|
||||
*map_math.BL_REGISTER,
|
||||
*filter_math.BL_REGISTER,
|
||||
*reduce_math.BL_REGISTER,
|
||||
*operate_math.BL_REGISTER,
|
||||
]
|
||||
BL_NODES = {
|
||||
**map_math.BL_NODES,
|
||||
**filter_math.BL_NODES,
|
||||
**reduce_math.BL_NODES,
|
||||
**operate_math.BL_NODES,
|
||||
}
|
|
@ -1,119 +0,0 @@
|
|||
import typing as typ
|
||||
|
||||
import bpy
|
||||
import jax.numpy as jnp
|
||||
|
||||
from blender_maxwell.utils import logger
|
||||
|
||||
from .... import contracts as ct
|
||||
from .... import sockets
|
||||
from ... import base, events
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
||||
|
||||
# @functools.partial(jax.jit, static_argnames=('fixed_axis', 'fixed_axis_value'))
|
||||
# jax.jit
|
||||
def fix_axis(data, fixed_axis: int, fixed_axis_value: float):
|
||||
log.critical(data.shape)
|
||||
# Select Values of Fixed Axis
|
||||
fixed_axis_values = data[
|
||||
tuple(slice(None) if i == fixed_axis else 0 for i in range(data.ndim))
|
||||
]
|
||||
log.critical(fixed_axis_values)
|
||||
|
||||
# Compute Nearest Index on Fixed Axis
|
||||
idx_of_nearest = jnp.argmin(jnp.abs(fixed_axis_values - fixed_axis_value))
|
||||
log.critical(idx_of_nearest)
|
||||
|
||||
# Select Values along Fixed Axis Value
|
||||
return jnp.take(data, idx_of_nearest, axis=fixed_axis)
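A quick usage sketch of `fix_axis` as defined above, assuming data whose values along the fixed axis encode the coordinate being matched (the data here is made up for illustration):

```python
import jax.numpy as jnp

# Data laid out as (x, y), where values along axis 0 happen to encode the x-coordinate;
# fixing axis 0 at x ~= 2.3 picks the nearest slice (x = 2).
data = jnp.stack([jnp.full((4,), float(x)) for x in range(5)])  # shape (5, 4)
sliced = fix_axis(data, fixed_axis=0, fixed_axis_value=2.3)
print(sliced.shape)  # (4,)
print(sliced)        # all 2.0
```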
|
||||
|
||||
|
||||
class FilterMathNode(base.MaxwellSimNode):
|
||||
node_type = ct.NodeType.FilterMath
|
||||
bl_label = 'Filter Math'
|
||||
|
||||
input_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
}
|
||||
input_socket_sets: typ.ClassVar = {
|
||||
'By Axis Value': {
|
||||
'Axis': sockets.IntegerNumberSocketDef(),
|
||||
'Value': sockets.RealNumberSocketDef(),
|
||||
},
|
||||
'By Axis': {
|
||||
'Axis': sockets.IntegerNumberSocketDef(),
|
||||
},
|
||||
## TODO: bool arrays for comparison/switching/sparse 0-setting/etc. .
|
||||
}
|
||||
output_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
}
|
||||
|
||||
####################
|
||||
# - Properties
|
||||
####################
|
||||
operation: bpy.props.EnumProperty(
|
||||
name='Op',
|
||||
description='Operation to reduce the input axis with',
|
||||
items=lambda self, _: self.search_operations(),
|
||||
update=lambda self, context: self.on_prop_changed('operation', context),
|
||||
)
|
||||
|
||||
def search_operations(self) -> list[tuple[str, str, str]]:
|
||||
items = []
|
||||
if self.active_socket_set == 'By Axis Value':
|
||||
items += [
|
||||
('FIX', 'Fix Coordinate', '(*, N, *) -> (*, *)'),
|
||||
]
|
||||
if self.active_socket_set == 'By Axis':
|
||||
items += [
|
||||
('SQUEEZE', 'Squeeze', '(*, 1, *) -> (*, *)'),
|
||||
]
|
||||
else:
|
||||
items += [('NONE', 'None', 'No operations...')]
|
||||
|
||||
return items
|
||||
|
||||
def draw_props(self, _: bpy.types.Context, layout: bpy.types.UILayout) -> None:
|
||||
if self.active_socket_set != 'Axis Expr':
|
||||
layout.prop(self, 'operation')
|
||||
|
||||
####################
|
||||
# - Compute
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
props={'operation', 'active_socket_set'},
|
||||
input_sockets={'Data', 'Axis', 'Value'},
|
||||
input_sockets_optional={'Axis': True, 'Value': True},
|
||||
)
|
||||
def compute_data(self, props: dict, input_sockets: dict):
|
||||
if not hasattr(input_sockets['Data'], 'shape'):
|
||||
msg = 'Input socket "Data" must be an N-D Array (with a "shape" attribute)'
|
||||
raise ValueError(msg)
|
||||
|
||||
# By Axis Value
|
||||
if props['active_socket_set'] == 'By Axis Value':
|
||||
if props['operation'] == 'FIX':
|
||||
return fix_axis(
|
||||
input_sockets['Data'], input_sockets['Axis'], input_sockets['Value']
|
||||
)
|
||||
|
||||
# By Axis
|
||||
if props['active_socket_set'] == 'By Axis':
|
||||
if props['operation'] == 'SQUEEZE':
|
||||
return jnp.squeeze(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
|
||||
msg = 'Operation invalid'
|
||||
raise ValueError(msg)
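For reference, the two 'By Axis' filters reduce to the following array operations on plain `jax` arrays; this is a sketch with made-up data, not the node's flow machinery:

```python
import jax.numpy as jnp

data = jnp.arange(12.0).reshape(3, 1, 4)

# SQUEEZE: drop a length-1 axis, (3, 1, 4) -> (3, 4)
squeezed = jnp.squeeze(data, axis=1)

# FIX (as in fix_axis above): pick the slice nearest a coordinate along axis 0
nearest = jnp.take(data, jnp.argmin(jnp.abs(data[:, 0, 0] - 7.9)), axis=0)
print(squeezed.shape, nearest.shape)  # (3, 4) (1, 4)
```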
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
####################
|
||||
BL_REGISTER = [
|
||||
FilterMathNode,
|
||||
]
|
||||
BL_NODES = {ct.NodeType.FilterMath: (ct.NodeCategory.MAXWELLSIM_ANALYSIS_MATH)}
|
|
@ -1,169 +0,0 @@
|
|||
import typing as typ
|
||||
|
||||
import bpy
|
||||
import jax
|
||||
import jax.numpy as jnp
|
||||
import sympy as sp
|
||||
|
||||
from blender_maxwell.utils import logger
|
||||
|
||||
from .... import contracts as ct
|
||||
from .... import sockets
|
||||
from ... import base, events
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
||||
|
||||
class MapMathNode(base.MaxwellSimNode):
|
||||
node_type = ct.NodeType.MapMath
|
||||
bl_label = 'Map Math'
|
||||
|
||||
input_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
}
|
||||
input_socket_sets: typ.ClassVar = {
|
||||
'By Element': {},
|
||||
'By Vector': {},
|
||||
'By Matrix': {},
|
||||
'Expr': {
|
||||
'Mapper': sockets.ExprSocketDef(
|
||||
symbols=[sp.Symbol('x')],
|
||||
default_expr=sp.Symbol('x'),
|
||||
),
|
||||
},
|
||||
}
|
||||
output_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
}
|
||||
|
||||
####################
|
||||
# - Properties
|
||||
####################
|
||||
operation: bpy.props.EnumProperty(
|
||||
name='Op',
|
||||
description='Operation to apply to the input',
|
||||
items=lambda self, _: self.search_operations(),
|
||||
update=lambda self, context: self.on_prop_changed('operation', context),
|
||||
)
|
||||
|
||||
def search_operations(self) -> list[tuple[str, str, str]]:
|
||||
items = []
|
||||
if self.active_socket_set == 'By Element':
|
||||
items += [
|
||||
# General
|
||||
('REAL', 'real', 'ℝ(L) (by el)'),
|
||||
('IMAG', 'imag', 'Im(L) (by el)'),
|
||||
('ABS', 'abs', '|L| (by el)'),
|
||||
('SQ', 'square', 'L^2 (by el)'),
|
||||
('SQRT', 'sqrt', 'sqrt(L) (by el)'),
|
||||
('INV_SQRT', '1/sqrt', '1/sqrt(L) (by el)'),
|
||||
# Trigonometry
|
||||
('COS', 'cos', 'cos(L) (by el)'),
|
||||
('SIN', 'sin', 'sin(L) (by el)'),
|
||||
('TAN', 'tan', 'tan(L) (by el)'),
|
||||
('ACOS', 'acos', 'acos(L) (by el)'),
|
||||
('ASIN', 'asin', 'asin(L) (by el)'),
|
||||
('ATAN', 'atan', 'atan(L) (by el)'),
|
||||
]
|
||||
elif self.active_socket_set == 'By Vector':
|
||||
items += [
|
||||
('NORM_2', '2-Norm', '||L||_2 (by Vec)'),
|
||||
]
|
||||
elif self.active_socket_set == 'By Matrix':
|
||||
items += [
|
||||
# Matrix -> Number
|
||||
('DET', 'Determinant', 'det(L) (by Mat)'),
|
||||
('COND', 'Condition', 'κ(L) (by Mat)'),
|
||||
('NORM_FRO', 'Frobenius Norm', '||L||_F (by Mat)'),
|
||||
('RANK', 'Rank', 'rank(L) (by Mat)'),
|
||||
# Matrix -> Array
|
||||
('DIAG', 'Diagonal', 'diag(L) (by Mat)'),
|
||||
('EIG_VALS', 'Eigenvalues', 'eigvals(L) (by Mat)'),
|
||||
('SVD_VALS', 'SVD', 'svd(L) -> diag(Σ) (by Mat)'),
|
||||
# Matrix -> Matrix
|
||||
('INV', 'Invert', 'L^(-1) (by Mat)'),
|
||||
('TRA', 'Transpose', 'L^T (by Mat)'),
|
||||
# Matrix -> Matrices
|
||||
('QR', 'QR', 'L -> Q·R (by Mat)'),
|
||||
('CHOL', 'Cholesky', 'L -> L·Lh (by Mat)'),
|
||||
('SVD', 'SVD', 'L -> U·Σ·Vh (by Mat)'),
|
||||
]
|
||||
else:
|
||||
items += [('EXPR_EL', 'Expr (by el)', 'Expression-defined (by el)')]
|
||||
return items
|
||||
|
||||
def draw_props(self, _: bpy.types.Context, layout: bpy.types.UILayout) -> None:
|
||||
if self.active_socket_set != 'Expr':
|
||||
layout.prop(self, 'operation')
|
||||
|
||||
####################
|
||||
# - Compute
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
kind=ct.FlowKind.LazyValueFunc,
|
||||
props={'active_socket_set', 'operation'},
|
||||
input_sockets={'Data', 'Mapper'},
|
||||
input_socket_kinds={
|
||||
'Data': ct.FlowKind.LazyValueFunc,
|
||||
'Mapper': ct.FlowKind.LazyValueFunc,
|
||||
},
|
||||
input_sockets_optional={'Mapper': True},
|
||||
)
|
||||
def compute_data(self, props: dict, input_sockets: dict):
|
||||
mapping_func: typ.Callable[[jax.Array], jax.Array] = {
|
||||
'By Element': {
|
||||
'REAL': lambda data: jnp.real(data),
|
||||
'IMAG': lambda data: jnp.imag(data),
|
||||
'ABS': lambda data: jnp.abs(data),
|
||||
'SQ': lambda data: jnp.square(data),
|
||||
'SQRT': lambda data: jnp.sqrt(data),
|
||||
'INV_SQRT': lambda data: 1 / jnp.sqrt(data),
|
||||
'COS': lambda data: jnp.cos(data),
|
||||
'SIN': lambda data: jnp.sin(data),
|
||||
'TAN': lambda data: jnp.tan(data),
|
||||
'ACOS': lambda data: jnp.arccos(data),
'ASIN': lambda data: jnp.arcsin(data),
'ATAN': lambda data: jnp.arctan(data),
|
||||
'SINC': lambda data: jnp.sinc(data),
|
||||
},
|
||||
'By Vector': {
|
||||
'NORM_2': lambda data: jnp.linalg.norm(data, ord=2, axis=-1),
|
||||
},
|
||||
'By Matrix': {
|
||||
# Matrix -> Number
|
||||
'DET': lambda data: jnp.linalg.det(data),
|
||||
'COND': lambda data: jnp.linalg.cond(data),
|
||||
'NORM_FRO': lambda data: jnp.linalg.matrix_norm(data, ord='fro'),
|
||||
'RANK': lambda data: jnp.linalg.matrix_rank(data),
|
||||
# Matrix -> Vec
|
||||
'DIAG': lambda data: jnp.diag(data),
|
||||
'EIG_VALS': lambda data: jnp.linalg.eigvals(data),
'SVD_VALS': lambda data: jnp.linalg.svd(data, compute_uv=False),
# Matrix -> Matrix
'INV': lambda data: jnp.linalg.inv(data),
'TRA': lambda data: jnp.matrix_transpose(data),
# Matrix -> Matrices
'QR': lambda data: jnp.linalg.qr(data),
|
||||
'CHOL': lambda data: jnp.linalg.cholesky(data),
|
||||
'SVD': lambda data: jnp.linalg.svd(data),
|
||||
},
|
||||
'Expr': {
|
||||
'EXPR_EL': lambda data: input_sockets['Mapper'](data),
|
||||
},
|
||||
}[props['active_socket_set']][props['operation']]
|
||||
|
||||
# Compose w/Lazy Root Function Data
|
||||
return input_sockets['Data'].compose_within(
|
||||
mapping_func,
|
||||
supports_jax=True,
|
||||
)
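A minimal sketch of the `compose_within` idea, assuming it simply wraps one lazy function in another so that `jit` is applied once at the end of the chain; the helper here is illustrative, not the addon's implementation:

```python
import jax
import jax.numpy as jnp

# Each math node wraps the previous lazy function with a new mapping; jit only at the end.
def compose(lazy_func, mapping_func):
    return lambda *args: mapping_func(lazy_func(*args))

root = lambda: jnp.linspace(0.0, 1.0, 5)           # e.g. extracted monitor data
pipeline = compose(compose(root, jnp.square), jnp.sqrt)
print(jax.jit(pipeline)())                          # back to the original values
```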
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
####################
|
||||
BL_REGISTER = [
|
||||
MapMathNode,
|
||||
]
|
||||
BL_NODES = {ct.NodeType.MapMath: (ct.NodeCategory.MAXWELLSIM_ANALYSIS_MATH)}
|
|
@ -1,138 +0,0 @@
|
|||
import typing as typ
|
||||
|
||||
import bpy
|
||||
import jax.numpy as jnp
|
||||
|
||||
from blender_maxwell.utils import logger
|
||||
|
||||
from .... import contracts as ct
|
||||
from .... import sockets
|
||||
from ... import base, events
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
||||
|
||||
class OperateMathNode(base.MaxwellSimNode):
|
||||
node_type = ct.NodeType.OperateMath
|
||||
bl_label = 'Operate Math'
|
||||
|
||||
input_socket_sets: typ.ClassVar = {
|
||||
'Elementwise': {
|
||||
'Data L': sockets.AnySocketDef(),
|
||||
'Data R': sockets.AnySocketDef(),
|
||||
},
|
||||
## TODO: Filter-array building operations
|
||||
'Vec-Vec': {
|
||||
'Data L': sockets.AnySocketDef(),
|
||||
'Data R': sockets.AnySocketDef(),
|
||||
},
|
||||
'Mat-Vec': {
|
||||
'Data L': sockets.AnySocketDef(),
|
||||
'Data R': sockets.AnySocketDef(),
|
||||
},
|
||||
}
|
||||
output_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
}
|
||||
|
||||
####################
|
||||
# - Properties
|
||||
####################
|
||||
operation: bpy.props.EnumProperty(
|
||||
name='Op',
|
||||
description='Operation to apply to the two inputs',
|
||||
items=lambda self, _: self.search_operations(),
|
||||
update=lambda self, context: self.on_prop_changed('operation', context),
|
||||
)
|
||||
|
||||
def search_operations(self) -> list[tuple[str, str, str]]:
|
||||
items = []
|
||||
if self.active_socket_set == 'Elementwise':
|
||||
items = [
|
||||
('ADD', 'Add', 'L + R (by el)'),
|
||||
('SUB', 'Subtract', 'L - R (by el)'),
|
||||
('MUL', 'Multiply', 'L · R (by el)'),
|
||||
('DIV', 'Divide', 'L ÷ R (by el)'),
|
||||
('POW', 'Power', 'L^R (by el)'),
|
||||
('FMOD', 'Trunc Modulo', 'fmod(L,R) (by el)'),
|
||||
('ATAN2', 'atan2', 'atan2(L,R) (by el)'),
|
||||
('HEAVISIDE', 'Heaviside', '{0|L<0 1|L>0 R|L=0} (by el)'),
|
||||
]
|
||||
elif self.active_socket_set == 'Vec-Vec':
|
||||
items = [
|
||||
('DOT', 'Dot', 'L · R'),
|
||||
('CROSS', 'Cross', 'L x R (by last-axis)'),
|
||||
]
|
||||
elif self.active_socket_set == 'Mat-Vec':
|
||||
items = [
|
||||
('DOT', 'Dot', 'L · R'),
|
||||
('LIN_SOLVE', 'Lin Solve', 'Lx = R -> x (by last-axis of R)'),
|
||||
('LSQ_SOLVE', 'LSq Solve', 'Lx = R ~> x (by last-axis of R)'),
|
||||
]
|
||||
return items
|
||||
|
||||
def draw_props(self, _: bpy.types.Context, layout: bpy.types.UILayout) -> None:
|
||||
layout.prop(self, 'operation')
|
||||
|
||||
####################
|
||||
# - Properties
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
props={'operation'},
|
||||
input_sockets={'Data L', 'Data R'},
|
||||
)
|
||||
def compute_data(self, props: dict, input_sockets: dict):
|
||||
if self.active_socket_set == 'Elementwise':
|
||||
# Element-Wise Arithmetic
|
||||
if props['operation'] == 'ADD':
|
||||
return input_sockets['Data L'] + input_sockets['Data R']
|
||||
if props['operation'] == 'SUB':
|
||||
return input_sockets['Data L'] - input_sockets['Data R']
|
||||
if props['operation'] == 'MUL':
|
||||
return input_sockets['Data L'] * input_sockets['Data R']
|
||||
if props['operation'] == 'DIV':
|
||||
return input_sockets['Data L'] / input_sockets['Data R']
|
||||
|
||||
# Element-Wise Arithmetic
|
||||
if props['operation'] == 'POW':
|
||||
return input_sockets['Data L'] ** input_sockets['Data R']
|
||||
|
||||
# Binary Trigonometry
|
||||
if props['operation'] == 'ATAN2':
|
||||
return jnp.arctan2(input_sockets['Data L'], input_sockets['Data R'])
|
||||
|
||||
# Special Functions
|
||||
if props['operation'] == 'HEAVISIDE':
|
||||
return jnp.heaviside(input_sockets['Data L'], input_sockets['Data R'])
|
||||
|
||||
# Linear Algebra
|
||||
if self.active_socket_set in {'Vec-Vec', 'Mat-Vec'}:
|
||||
if props['operation'] == 'DOT':
|
||||
return jnp.dot(input_sockets['Data L'], input_sockets['Data R'])
|
||||
|
||||
elif self.active_socket_set == 'Vec-Vec':
|
||||
if props['operation'] == 'CROSS':
|
||||
return jnp.cross(input_sockets['Data L'], input_sockets['Data R'])
|
||||
|
||||
elif self.active_socket_set == 'Mat-Vec':
|
||||
if props['operation'] == 'LIN_SOLVE':
    return jnp.linalg.solve(
        input_sockets['Data L'], input_sockets['Data R']
    )
if props['operation'] == 'LSQ_SOLVE':
    return jnp.linalg.lstsq(
        input_sockets['Data L'], input_sockets['Data R']
    )[0]
|
||||
|
||||
msg = 'Invalid operation'
|
||||
raise ValueError(msg)
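The two 'Mat-Vec' solver paths differ only in whether the system is solved exactly or in the least-squares sense. A small standalone sketch, assuming a square system for `LIN_SOLVE` and an overdetermined one for `LSQ_SOLVE` (least-squares returns a tuple; the solution is item 0):

```python
import jax.numpy as jnp

A = jnp.array([[2.0, 0.0], [0.0, 4.0]])
b = jnp.array([1.0, 2.0])
x_exact = jnp.linalg.solve(A, b)             # exact solve: [0.5, 0.5]

A_tall = jnp.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])
b_tall = jnp.array([1.0, 1.0, 2.1])
x_lsq = jnp.linalg.lstsq(A_tall, b_tall)[0]  # least-squares solution
print(x_exact, x_lsq)
```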
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
####################
|
||||
BL_REGISTER = [
|
||||
OperateMathNode,
|
||||
]
|
||||
BL_NODES = {ct.NodeType.OperateMath: (ct.NodeCategory.MAXWELLSIM_ANALYSIS_MATH)}
|
|
@ -1,129 +0,0 @@
|
|||
import typing as typ
|
||||
|
||||
import bpy
|
||||
import jax.numpy as jnp
|
||||
import sympy as sp
|
||||
|
||||
from blender_maxwell.utils import logger
|
||||
|
||||
from .... import contracts as ct
|
||||
from .... import sockets
|
||||
from ... import base, events
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
||||
|
||||
class ReduceMathNode(base.MaxwellSimNode):
|
||||
node_type = ct.NodeType.ReduceMath
|
||||
bl_label = 'Reduce Math'
|
||||
|
||||
input_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
'Axis': sockets.IntegerNumberSocketDef(),
|
||||
}
|
||||
input_socket_sets: typ.ClassVar = {
|
||||
'By Axis': {
|
||||
'Axis': sockets.IntegerNumberSocketDef(),
|
||||
},
|
||||
'Expr': {
|
||||
'Reducer': sockets.ExprSocketDef(
|
||||
symbols=[sp.Symbol('a'), sp.Symbol('b')],
|
||||
default_expr=sp.Symbol('a') + sp.Symbol('b'),
|
||||
),
|
||||
'Axis': sockets.IntegerNumberSocketDef(),
|
||||
},
|
||||
}
|
||||
output_sockets: typ.ClassVar = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
}
|
||||
|
||||
####################
|
||||
# - Properties
|
||||
####################
|
||||
operation: bpy.props.EnumProperty(
|
||||
name='Op',
|
||||
description='Operation to reduce the input axis with',
|
||||
items=lambda self, _: self.search_operations(),
|
||||
update=lambda self, context: self.on_prop_changed('operation', context),
|
||||
)
|
||||
|
||||
def search_operations(self) -> list[tuple[str, str, str]]:
|
||||
items = []
|
||||
if self.active_socket_set == 'By Axis':
|
||||
items += [
|
||||
# Accumulation
|
||||
('SUM', 'Sum', 'sum(*, N, *) -> (*, 1, *)'),
|
||||
('PROD', 'Prod', 'prod(*, N, *) -> (*, 1, *)'),
|
||||
('MIN', 'Axis-Min', '(*, N, *) -> (*, 1, *)'),
|
||||
('MAX', 'Axis-Max', '(*, N, *) -> (*, 1, *)'),
|
||||
('P2P', 'Peak-to-Peak', '(*, N, *) -> (*, 1, *)'),
|
||||
# Stats
|
||||
('MEAN', 'Mean', 'mean(*, N, *) -> (*, 1, *)'),
|
||||
('MEDIAN', 'Median', 'median(*, N, *) -> (*, 1, *)'),
|
||||
('STDDEV', 'Std Dev', 'stddev(*, N, *) -> (*, 1, *)'),
|
||||
('VARIANCE', 'Variance', 'var(*, N, *) -> (*, 1, *)'),
|
||||
# Dimension Reduction
|
||||
('SQUEEZE', 'Squeeze', '(*, 1, *) -> (*, *)'),
|
||||
]
|
||||
else:
|
||||
items += [('NONE', 'None', 'No operations...')]
|
||||
|
||||
return items
|
||||
|
||||
def draw_props(self, _: bpy.types.Context, layout: bpy.types.UILayout) -> None:
|
||||
if self.active_socket_set != 'Expr':
|
||||
layout.prop(self, 'operation')
|
||||
|
||||
####################
|
||||
# - Compute
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Data',
|
||||
props={'active_socket_set', 'operation'},
|
||||
input_sockets={'Data', 'Axis', 'Reducer'},
|
||||
input_socket_kinds={'Reducer': ct.FlowKind.LazyValueFunc},
|
||||
input_sockets_optional={'Reducer': True},
|
||||
)
|
||||
def compute_data(self, props: dict, input_sockets: dict):
|
||||
if props['active_socket_set'] == 'By Axis':
|
||||
# Simple Accumulation
|
||||
if props['operation'] == 'SUM':
|
||||
return jnp.sum(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
if props['operation'] == 'PROD':
|
||||
return jnp.prod(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
if props['operation'] == 'MIN':
|
||||
return jnp.min(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
if props['operation'] == 'MAX':
|
||||
return jnp.max(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
if props['operation'] == 'P2P':
|
||||
return jnp.ptp(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
|
||||
# Stats
|
||||
if props['operation'] == 'MEAN':
|
||||
return jnp.mean(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
if props['operation'] == 'MEDIAN':
|
||||
return jnp.median(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
if props['operation'] == 'STDDEV':
|
||||
return jnp.std(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
if props['operation'] == 'VARIANCE':
|
||||
return jnp.var(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
|
||||
# Dimension Reduction
|
||||
if props['operation'] == 'SQUEEZE':
|
||||
return jnp.squeeze(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
|
||||
if props['active_socket_set'] == 'Expr':
|
||||
ufunc = jnp.ufunc(input_sockets['Reducer'], nin=2, nout=1)
|
||||
return ufunc.reduce(input_sockets['Data'], axis=input_sockets['Axis'])
|
||||
|
||||
msg = 'Operation invalid'
|
||||
raise ValueError(msg)
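The 'Expr' branch reduces an axis with a user-defined binary function. A sketch of the same technique via `jnp.frompyfunc`, here with a plain lambda as a stand-in for the lambdified `Reducer` expression (requires a recent JAX with `jnp.ufunc` support):

```python
import jax.numpy as jnp

# Build a binary ufunc from a two-argument function, then fold it along an axis.
reducer = jnp.frompyfunc(lambda a, b: a + b, nin=2, nout=1, identity=0)
data = jnp.arange(6.0).reshape(2, 3)
print(reducer.reduce(data, axis=1))  # [3.0, 12.0]
```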
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
####################
|
||||
BL_REGISTER = [
|
||||
ReduceMathNode,
|
||||
]
|
||||
BL_NODES = {ct.NodeType.ReduceMath: (ct.NodeCategory.MAXWELLSIM_ANALYSIS_MATH)}
|
|
@ -1,9 +1,9 @@
|
|||
import typing as typ
|
||||
|
||||
import bpy
|
||||
import jax.numpy as jnp
|
||||
|
||||
from blender_maxwell.utils import logger
|
||||
|
||||
from .....utils import logger
|
||||
from ... import contracts as ct
|
||||
from ... import managed_objs, sockets
|
||||
from .. import base, events
|
||||
|
@ -20,7 +20,7 @@ class VizNode(base.MaxwellSimNode):
|
|||
####################
|
||||
# - Sockets
|
||||
####################
|
||||
input_sockets: typ.ClassVar = {
|
||||
input_sockets = {
|
||||
'Data': sockets.AnySocketDef(),
|
||||
'Freq': sockets.PhysicalFreqSocketDef(),
|
||||
}
|
||||
|
@ -43,7 +43,7 @@ class VizNode(base.MaxwellSimNode):
|
|||
('GRAYSCALE', 'Grayscale', 'Barebones'),
|
||||
],
|
||||
default='VIRIDIS',
|
||||
update=lambda self, context: self.on_prop_changed('colormap', context),
|
||||
update=lambda self, context: self.sync_prop('colormap', context),
|
||||
)
|
||||
|
||||
#####################
|
||||
|
@ -72,12 +72,20 @@ class VizNode(base.MaxwellSimNode):
|
|||
props: dict,
|
||||
unit_systems: dict,
|
||||
):
|
||||
managed_objs['plot'].map_2d_to_image(
|
||||
input_sockets['Data'].as_bound_jax_func(),
|
||||
selected_data = jnp.array(
|
||||
input_sockets['Data'].sel(f=input_sockets['Freq'], method='nearest')
|
||||
)
|
||||
|
||||
managed_objs['plot'].xyzf_to_image(
|
||||
selected_data,
|
||||
colormap=props['colormap'],
|
||||
bl_select=True,
|
||||
)
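The frequency selection above relies on xarray's nearest-neighbor indexing. A standalone sketch with made-up monitor data:

```python
import numpy as np
import xarray as xr

# '.sel(..., method="nearest")' picks the monitor slice closest to the requested frequency.
xarr = xr.DataArray(
    np.random.rand(4, 4, 3),
    coords={'x': np.arange(4), 'y': np.arange(4), 'f': [1e14, 2e14, 3e14]},
    dims=['x', 'y', 'f'],
)
slice_2d = np.array(xarr.sel(f=2.2e14, method='nearest'))
print(slice_2d.shape)  # (4, 4)
```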
|
||||
|
||||
# @events.on_init()
|
||||
# def on_init(self):
|
||||
# self.on_changed_inputs()
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
|
|
|
@ -10,12 +10,13 @@ from types import MappingProxyType
|
|||
|
||||
import bpy
|
||||
import sympy as sp
|
||||
import typing_extensions as typx
|
||||
|
||||
from blender_maxwell.utils import bl_cache, logger
|
||||
from blender_maxwell.utils import logger
|
||||
|
||||
from .. import bl_cache, sockets
|
||||
from .. import contracts as ct
|
||||
from .. import managed_objs as _managed_objs
|
||||
from .. import sockets
|
||||
from . import events
|
||||
from . import presets as _presets
|
||||
|
||||
|
@ -101,12 +102,12 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
Parameters:
|
||||
name: The name of the property to set.
|
||||
prop: The `bpy.types.Property` to instantiate and attach.
|
||||
no_update: Don't attach a `self.on_prop_changed()` callback to the property's `update`.
|
||||
no_update: Don't attach a `self.sync_prop()` callback to the property's `update`.
|
||||
"""
|
||||
_update_with_name = prop_name if update_with_name is None else update_with_name
|
||||
extra_kwargs = (
|
||||
{
|
||||
'update': lambda self, context: self.on_prop_changed(
|
||||
'update': lambda self, context: self.sync_prop(
|
||||
_update_with_name, context
|
||||
),
|
||||
}
|
||||
|
@ -120,26 +121,28 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
|
||||
@classmethod
|
||||
def _gather_event_methods(cls) -> dict[str, typ.Callable[[], None]]:
|
||||
"""Gathers all methods called in response to events observed by the node.
|
||||
"""Gathers all methods called in response to actions/events observed by the node.
|
||||
|
||||
Notes:
|
||||
- 'Event methods' must have an attribute 'event' in order to be picked up.
|
||||
- 'Event methods' must have an attribute 'event'.
|
||||
- 'Event methods' must have an attribute 'action_type' in order to be picked up.
|
||||
- 'Event methods' must have an attribute 'action_type'.
|
||||
|
||||
Returns:
|
||||
Event methods, indexed by the event that (maybe) triggers them.
|
||||
Event methods, indexed by the action that (maybe) triggers them.
|
||||
"""
|
||||
event_methods = [
|
||||
method
|
||||
for attr_name in dir(cls)
|
||||
if hasattr(method := getattr(cls, attr_name), 'event')
|
||||
and method.event in set(ct.FlowEvent)
|
||||
if hasattr(method := getattr(cls, attr_name), 'action_type')
|
||||
and method.action_type in set(ct.DataFlowAction)
|
||||
]
|
||||
event_methods_by_event = {event: [] for event in set(ct.FlowEvent)}
|
||||
event_methods_by_action = {
|
||||
action_type: [] for action_type in set(ct.DataFlowAction)
|
||||
}
|
||||
for method in event_methods:
|
||||
event_methods_by_event[method.event].append(method)
|
||||
event_methods_by_action[method.action_type].append(method)
|
||||
|
||||
return event_methods_by_event
|
||||
return event_methods_by_action
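`_gather_event_methods` relies on decorators tagging methods with an `action_type` attribute, then grouping them at class-registration time. A pure-Python sketch of that pattern, with illustrative names:

```python
# Methods are tagged by a decorator, then grouped by scanning dir() for the marker attribute.
def tag(action_type):
    def decorator(method):
        method.action_type = action_type
        return method
    return decorator

class Node:
    @tag('DataChanged')
    def refresh(self):
        return 'refreshed'

    @classmethod
    def gather(cls):
        methods = [
            method
            for attr_name in dir(cls)
            if hasattr(method := getattr(cls, attr_name), 'action_type')
        ]
        grouped = {}
        for method in methods:
            grouped.setdefault(method.action_type, []).append(method)
        return grouped

print(Node.gather())  # {'DataChanged': [<function Node.refresh ...>]}
```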
|
||||
|
||||
@classmethod
|
||||
def socket_set_names(cls) -> list[str]:
|
||||
|
@ -182,7 +185,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
cls.set_prop('locked', bpy.props.BoolProperty, no_update=True, default=False)
|
||||
|
||||
## Event Method Callbacks
|
||||
cls.event_methods_by_event = cls._gather_event_methods()
|
||||
cls.event_methods_by_action = cls._gather_event_methods()
|
||||
|
||||
## Active Socket Set
|
||||
if len(cls.input_socket_sets) + len(cls.output_socket_sets) > 0:
|
||||
|
@ -191,6 +194,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
'active_socket_set',
|
||||
bpy.props.EnumProperty,
|
||||
name='Active Socket Set',
|
||||
description='Selector of active sockets',
|
||||
items=[
|
||||
(socket_set_name, socket_set_name, socket_set_name)
|
||||
for socket_set_name in socket_set_names
|
||||
|
@ -269,7 +273,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
msg = f'Tried to set preset socket/value pair ({socket_name}={socket_value}), but socket is not in active input sockets ({self.inputs})'
|
||||
raise ValueError(msg)
|
||||
|
||||
## TODO: Account for FlowKind
|
||||
## TODO: Account for DataFlowKind
|
||||
bl_socket.value = socket_value
|
||||
|
||||
@events.on_show_preview()
|
||||
|
@ -315,7 +319,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
# - Socket Accessors
|
||||
####################
|
||||
def _bl_sockets(
|
||||
self, direc: typ.Literal['input', 'output']
|
||||
self, direc: typx.Literal['input', 'output']
|
||||
) -> bpy.types.NodeInputs:
|
||||
"""Retrieve currently visible Blender sockets on the node, by-direction.
|
||||
|
||||
|
@ -334,7 +338,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
|
||||
def _active_socket_set_socket_defs(
|
||||
self,
|
||||
direc: typ.Literal['input', 'output'],
|
||||
direc: typx.Literal['input', 'output'],
|
||||
) -> dict[ct.SocketName, sockets.base.SocketDef]:
|
||||
"""Retrieve all socket definitions for sockets that should be defined, according to the `self.active_socket_set`.
|
||||
|
||||
|
@ -360,7 +364,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
return socket_sets.get(self.active_socket_set, {})
|
||||
|
||||
def active_socket_defs(
|
||||
self, direc: typ.Literal['input', 'output']
|
||||
self, direc: typx.Literal['input', 'output']
|
||||
) -> dict[ct.SocketName, sockets.base.SocketDef]:
|
||||
"""Retrieve all socket definitions for sockets that should be defined.
|
||||
|
||||
|
@ -479,22 +483,25 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
# - Event Methods
|
||||
####################
|
||||
@property
|
||||
def _event_method_filter_by_event(self) -> dict[ct.FlowEvent, typ.Callable]:
|
||||
"""Compute a map of FlowEvents, to a function that filters its event methods.
|
||||
def _event_method_filter_by_action(self) -> dict[ct.DataFlowAction, typ.Callable]:
|
||||
"""Compute a map of DataFlowActions, to a function that filters its event methods.
|
||||
|
||||
The returned filter functions are hard-coded, and must always return a `bool`.
|
||||
They may use attributes of `self`, always return `True` or `False`, or something different.
|
||||
|
||||
Notes:
|
||||
This is an internal method; you probably want `self.filtered_event_methods_by_event`.
|
||||
This is an internal method; you probably want `self.filtered_event_methods_by_action`.
|
||||
|
||||
Returns:
|
||||
The map of `ct.FlowEvent` to a function that can determine whether any `event_method` should be run.
|
||||
The map of `ct.DataFlowAction` to a function that can determine whether any `event_method` should be run.
|
||||
"""
|
||||
return {
|
||||
ct.FlowEvent.EnableLock: lambda *_: True,
|
||||
ct.FlowEvent.DisableLock: lambda *_: True,
|
||||
ct.FlowEvent.DataChanged: lambda event_method, socket_name, prop_name, _: (
|
||||
ct.DataFlowAction.EnableLock: lambda *_: True,
|
||||
ct.DataFlowAction.DisableLock: lambda *_: True,
|
||||
ct.DataFlowAction.DataChanged: lambda event_method,
|
||||
socket_name,
|
||||
prop_name,
|
||||
_: (
|
||||
(
|
||||
socket_name
|
||||
and socket_name in event_method.callback_info.on_changed_sockets
|
||||
|
@ -509,7 +516,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
and socket_name in self.loose_input_sockets
|
||||
)
|
||||
),
|
||||
ct.FlowEvent.OutputRequested: lambda output_socket_method,
|
||||
ct.DataFlowAction.OutputRequested: lambda output_socket_method,
|
||||
output_socket_name,
|
||||
_,
|
||||
kind: (
|
||||
|
@ -519,26 +526,26 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
== output_socket_method.callback_info.output_socket_name
|
||||
)
|
||||
),
|
||||
ct.FlowEvent.ShowPreview: lambda *_: True,
|
||||
ct.FlowEvent.ShowPlot: lambda *_: True,
|
||||
ct.DataFlowAction.ShowPreview: lambda *_: True,
|
||||
ct.DataFlowAction.ShowPlot: lambda *_: True,
|
||||
}
|
||||
|
||||
def filtered_event_methods_by_event(
|
||||
def filtered_event_methods_by_action(
|
||||
self,
|
||||
event: ct.FlowEvent,
|
||||
action: ct.DataFlowAction,
|
||||
_filter: tuple[ct.SocketName, str],
|
||||
) -> list[typ.Callable]:
|
||||
"""Return all event methods that should run, given the context provided by `_filter`.
|
||||
|
||||
The inclusion decision is made by the internal property `self._event_method_filter_by_event`.
|
||||
The inclusion decision is made by the internal property `self._event_method_filter_by_action`.
|
||||
|
||||
Returns:
|
||||
All `event_method`s that should run, as callable objects (they can be run using `event_method(self)`).
|
||||
"""
|
||||
return [
|
||||
event_method
|
||||
for event_method in self.event_methods_by_event[event]
|
||||
if self._event_method_filter_by_event[event](event_method, *_filter)
|
||||
for event_method in self.event_methods_by_action[action]
|
||||
if self._event_method_filter_by_action[action](event_method, *_filter)
|
||||
]
|
||||
|
||||
####################
|
||||
|
@ -551,7 +558,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
def _compute_input(
|
||||
self,
|
||||
input_socket_name: ct.SocketName,
|
||||
kind: ct.FlowKind = ct.FlowKind.Value,
|
||||
kind: ct.DataFlowKind = ct.DataFlowKind.Value,
|
||||
unit_system: dict[ct.SocketType, sp.Expr] | None = None,
|
||||
optional: bool = False,
|
||||
) -> typ.Any:
|
||||
|
@ -567,7 +574,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
"""
|
||||
if (bl_socket := self.inputs.get(input_socket_name)) is not None:
|
||||
return (
|
||||
ct.FlowKind.scale_to_unit_system(
|
||||
ct.DataFlowKind.scale_to_unit_system(
|
||||
kind,
|
||||
bl_socket.compute_data(kind=kind),
|
||||
bl_socket.socket_type,
|
||||
|
@ -584,7 +591,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
raise ValueError(msg)
|
||||
|
||||
####################
|
||||
# - Compute Event: Output Socket
|
||||
# - Compute Action: Output Socket
|
||||
####################
|
||||
@bl_cache.keyed_cache(
|
||||
exclude={'self', 'optional'},
|
||||
|
@ -592,14 +599,14 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
def compute_output(
|
||||
self,
|
||||
output_socket_name: ct.SocketName,
|
||||
kind: ct.FlowKind = ct.FlowKind.Value,
|
||||
kind: ct.DataFlowKind = ct.DataFlowKind.Value,
|
||||
optional: bool = False,
|
||||
) -> typ.Any:
|
||||
"""Computes the value of an output socket.
|
||||
|
||||
Parameters:
|
||||
output_socket_name: The name declaring the output socket, for which this method computes the output.
|
||||
kind: The FlowKind to use when computing the output socket value.
|
||||
kind: The DataFlowKind to use when computing the output socket value.
|
||||
|
||||
Returns:
|
||||
The value of the output socket, as computed by the dedicated method
|
||||
|
@ -612,8 +619,8 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
msg = f"Can't compute nonexistent output socket name {output_socket_name}, as it's not currently active"
|
||||
raise RuntimeError(msg)
|
||||
|
||||
output_socket_methods = self.filtered_event_methods_by_event(
|
||||
ct.FlowEvent.OutputRequested,
|
||||
output_socket_methods = self.filtered_event_methods_by_action(
|
||||
ct.DataFlowAction.OutputRequested,
|
||||
(output_socket_name, None, kind),
|
||||
)
|
||||
|
||||
|
@ -629,7 +636,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
raise ValueError(msg)
|
||||
|
||||
####################
|
||||
# - Event Trigger
|
||||
# - Action Trigger
|
||||
####################
|
||||
def _should_recompute_output_socket(
|
||||
self,
|
||||
|
@ -650,28 +657,25 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
)
|
||||
)
|
||||
|
||||
def trigger_event(
|
||||
def trigger_action(
|
||||
self,
|
||||
event: ct.FlowEvent,
|
||||
action: ct.DataFlowAction,
|
||||
socket_name: ct.SocketName | None = None,
|
||||
prop_name: ct.SocketName | None = None,
|
||||
) -> None:
|
||||
"""Recursively triggers events forwards or backwards along the node tree, allowing nodes in the update path to react.
|
||||
"""Recursively triggers actions/events forwards or backwards along the node tree, allowing nodes in the update path to react.
|
||||
|
||||
Use `events` decorators to define methods that react to particular `ct.FlowEvent`s.
|
||||
Use `events` decorators to define methods that react to particular `ct.DataFlowAction`s.
|
||||
|
||||
Notes:
|
||||
This can be an unpredictably heavy function, depending on the node graph topology.
|
||||
|
||||
Doesn't currently accept `LinkChanged` (->Output) events; rather, these propagate as `DataChanged` events.
|
||||
**This may change** if it becomes important for the node to differentiate between "change in data" and "change in link".
|
||||
|
||||
Parameters:
|
||||
event: The event to report forwards/backwards along the node tree.
|
||||
action: The action/event to report forwards/backwards along the node tree.
|
||||
socket_name: The input socket that was altered, if any, in order to trigger this event.
|
||||
prop_name: The property that was altered, if any, in order to trigger this event.
|
||||
"""
|
||||
if event == ct.FlowEvent.DataChanged:
|
||||
if action == ct.DataFlowAction.DataChanged:
|
||||
input_socket_name = socket_name ## Trigger direction is forwards
|
||||
|
||||
# Invalidate Input Socket Cache
|
||||
|
@ -683,8 +687,8 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
)
|
||||
|
||||
# Invalidate Output Socket Cache
|
||||
for output_socket_method in self.event_methods_by_event[
|
||||
ct.FlowEvent.OutputRequested
|
||||
for output_socket_method in self.event_methods_by_action[
|
||||
ct.DataFlowAction.OutputRequested
|
||||
]:
|
||||
method_info = output_socket_method.callback_info
|
||||
if self._should_recompute_output_socket(
|
||||
|
@ -697,26 +701,26 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
|
||||
# Run Triggered Event Methods
|
||||
stop_propagation = False
|
||||
triggered_event_methods = self.filtered_event_methods_by_event(
|
||||
event, (socket_name, prop_name, None)
|
||||
triggered_event_methods = self.filtered_event_methods_by_action(
|
||||
action, (socket_name, prop_name, None)
|
||||
)
|
||||
for event_method in triggered_event_methods:
|
||||
stop_propagation |= event_method.stop_propagation
|
||||
event_method(self)
|
||||
|
||||
# Propagate Event to All Sockets in "Trigger Direction"
|
||||
# Propagate Action to All Sockets in "Trigger Direction"
|
||||
## The trigger chain goes node/socket/node/socket/...
|
||||
if not stop_propagation:
|
||||
triggered_sockets = self._bl_sockets(
|
||||
direc=ct.FlowEvent.flow_direction[event]
|
||||
direc=ct.DataFlowAction.trigger_direction(action)
|
||||
)
|
||||
for bl_socket in triggered_sockets:
|
||||
bl_socket.trigger_event(event)
|
||||
bl_socket.trigger_action(action)
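A toy sketch of the trigger chain described above: an action propagates node → socket → node in one direction, and any handler may halt further propagation. Names are illustrative, not the addon's classes:

```python
# Minimal propagation model: each node reacts, then forwards the action downstream
# unless one of its handlers set stop_propagation.
class ToyNode:
    def __init__(self, name, downstream=None):
        self.name = name
        self.downstream = downstream or []
        self.stop_propagation = False

    def trigger(self, action):
        print(f'{self.name}: {action}')
        if not self.stop_propagation:
            for node in self.downstream:
                node.trigger(action)

sink = ToyNode('Viz')
source = ToyNode('Extract', downstream=[ToyNode('MapMath', downstream=[sink])])
source.trigger('DataChanged')  # Extract -> MapMath -> Viz
```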
|
||||
|
||||
####################
|
||||
# - Property Event: On Update
|
||||
# - Property Action: On Update
|
||||
####################
|
||||
def on_prop_changed(self, prop_name: str, _: bpy.types.Context) -> None:
|
||||
def sync_prop(self, prop_name: str, _: bpy.types.Context) -> None:
|
||||
"""Report that a particular property has changed, which may cause certain caches to regenerate.
|
||||
|
||||
Notes:
|
||||
|
@ -728,7 +732,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
prop_name: The name of the property that changed.
|
||||
"""
|
||||
if hasattr(self, prop_name):
|
||||
self.trigger_event(ct.FlowEvent.DataChanged, prop_name=prop_name)
|
||||
self.trigger_action(ct.DataFlowAction.DataChanged, prop_name=prop_name)
|
||||
else:
|
||||
msg = f'Property {prop_name} not defined on node {self}'
|
||||
raise RuntimeError(msg)
|
||||
|
@ -743,13 +747,13 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
) -> None:
|
||||
"""Draws the UI of the node.
|
||||
|
||||
- **Locked** (`self.locked`): The UI will be unusable.
|
||||
- **Active Preset** (`self.active_preset`): The preset selector will display.
|
||||
- **Active Socket Set** (`self.active_socket_set`): The socket set selector will display.
|
||||
- **Use Sim Node Name** (`self.use_sim_node_name`): The `self.sim_node_name` will display.
|
||||
- **Properties**: Node properties will display, if `self.draw_props()` is overridden.
|
||||
- **Operators**: Node operators will display, if `self.draw_operators()` is overridden.
|
||||
- **Info**: Node information will display, if `self.draw_info()` is overridden.
|
||||
- Locked (`self.locked`): The UI will be unusable.
|
||||
- Active Preset (`self.active_preset`): The preset selector will display.
|
||||
- Active Socket Set (`self.active_socket_set`): The socket set selector will display.
|
||||
- Use Sim Node Name (`self.use_sim_node_name`): The "Sim Node Name" will display.
|
||||
- Properties (`self.draw_props()`): Node properties will display.
|
||||
- Operators (`self.draw_operators()`): Node operators will display.
|
||||
- Info (`self.draw_info()`): Node information will display.
|
||||
|
||||
Parameters:
|
||||
context: The current Blender context.
|
||||
|
@ -860,7 +864,9 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
## -> Compromise: Users explicitly say 'run_on_init' in @on_value_changed
|
||||
for event_method in [
|
||||
event_method
|
||||
for event_method in self.event_methods_by_event[ct.FlowEvent.DataChanged]
|
||||
for event_method in self.event_methods_by_action[
|
||||
ct.DataFlowAction.DataChanged
|
||||
]
|
||||
if event_method.callback_info.run_on_init
|
||||
]:
|
||||
event_method(self)
|
||||
|
@ -909,7 +915,7 @@ class MaxwellSimNode(bpy.types.Node):
|
|||
bl_socket.is_linked and bl_socket.locked
|
||||
for bl_socket in self.inputs.values()
|
||||
):
|
||||
self.trigger_event(ct.FlowEvent.DisableLock)
|
||||
self.trigger_action(ct.DataFlowAction.DisableLock)
|
||||
|
||||
# Free Managed Objects
|
||||
for managed_obj in self.managed_objs.values():
|
||||
|
|
|
@ -3,8 +3,7 @@ import inspect
|
|||
import typing as typ
|
||||
from types import MappingProxyType
|
||||
|
||||
from blender_maxwell.utils import logger
|
||||
|
||||
from ....utils import logger
|
||||
from .. import contracts as ct
|
||||
|
||||
log = logger.get(__name__)
|
||||
|
@ -27,16 +26,16 @@ class InfoDataChanged:
|
|||
@dataclasses.dataclass(kw_only=True, frozen=True)
|
||||
class InfoOutputRequested:
|
||||
output_socket_name: ct.SocketName
|
||||
kind: ct.FlowKind
|
||||
kind: ct.DataFlowKind
|
||||
|
||||
depon_props: set[str]
|
||||
|
||||
depon_input_sockets: set[ct.SocketName]
|
||||
depon_input_socket_kinds: dict[ct.SocketName, ct.FlowKind]
|
||||
depon_input_socket_kinds: dict[ct.SocketName, ct.DataFlowKind]
|
||||
depon_all_loose_input_sockets: bool
|
||||
|
||||
depon_output_sockets: set[ct.SocketName]
|
||||
depon_output_socket_kinds: dict[ct.SocketName, ct.FlowKind]
|
||||
depon_output_socket_kinds: dict[ct.SocketName, ct.DataFlowKind]
|
||||
depon_all_loose_output_sockets: bool
|
||||
|
||||
|
||||
|
@ -51,7 +50,7 @@ PropName: typ.TypeAlias = str
|
|||
|
||||
|
||||
def event_decorator(
|
||||
event: ct.FlowEvent,
|
||||
action_type: ct.DataFlowAction,
|
||||
callback_info: EventCallbackInfo | None,
|
||||
stop_propagation: bool = False,
|
||||
# Request Data for Callback
|
||||
|
@ -59,10 +58,10 @@ def event_decorator(
|
|||
props: set[PropName] = frozenset(),
|
||||
input_sockets: set[ct.SocketName] = frozenset(),
|
||||
input_sockets_optional: dict[ct.SocketName, bool] = MappingProxyType({}),
|
||||
input_socket_kinds: dict[ct.SocketName, ct.FlowKind] = MappingProxyType({}),
|
||||
input_socket_kinds: dict[ct.SocketName, ct.DataFlowKind] = MappingProxyType({}),
|
||||
output_sockets: set[ct.SocketName] = frozenset(),
|
||||
output_sockets_optional: dict[ct.SocketName, bool] = MappingProxyType({}),
|
||||
output_socket_kinds: dict[ct.SocketName, ct.FlowKind] = MappingProxyType({}),
|
||||
output_socket_kinds: dict[ct.SocketName, ct.DataFlowKind] = MappingProxyType({}),
|
||||
all_loose_input_sockets: bool = False,
|
||||
all_loose_output_sockets: bool = False,
|
||||
# Request Unit System Scaling
|
||||
|
@ -73,17 +72,17 @@ def event_decorator(
|
|||
"""Returns a decorator for a method of `MaxwellSimNode`, declaring it as able respond to events passing through a node.
|
||||
|
||||
Parameters:
|
||||
event: A name describing which event the decorator should respond to.
|
||||
Set to `return_method.event`
|
||||
callback_info: A dictionary that provides the caller with additional per-`event` information.
|
||||
This might include parameters to help select the most appropriate method(s) to respond to an event with, or events to take after running the callback.
|
||||
action_type: A name describing which event the decorator should respond to.
|
||||
Set to `return_method.action_type`
|
||||
callback_info: A dictionary that provides the caller with additional per-`action_type` information.
|
||||
This might include parameters to help select the most appropriate method(s) to respond to an event with, or actions to take after running the callback.
|
||||
props: Set of `props` to compute, then pass to the decorated method.
|
||||
stop_propagation: Whether or not to stop propagating the event through the graph after encountering this method.
|
||||
Other methods defined on the same node will still run.
|
||||
managed_objs: Set of `managed_objs` to retrieve, then pass to the decorated method.
|
||||
input_sockets: Set of `input_sockets` to compute, then pass to the decorated method.
|
||||
input_socket_kinds: The `ct.FlowKind` to compute per-input-socket.
|
||||
If an input socket isn't specified, it defaults to `ct.FlowKind.Value`.
|
||||
input_socket_kinds: The `ct.DataFlowKind` to compute per-input-socket.
|
||||
If an input socket isn't specified, it defaults to `ct.DataFlowKind.Value`.
|
||||
output_sockets: Set of `output_sockets` to compute, then pass to the decorated method.
|
||||
all_loose_input_sockets: Whether to compute all loose input sockets and pass them to the decorated method.
|
||||
Used when the names of the loose input sockets are unknown, but all of their values are needed.
|
||||
|
@ -94,7 +93,7 @@ def event_decorator(
|
|||
A decorator, which can be applied to a method of `MaxwellSimNode`.
|
||||
When a `MaxwellSimNode` subclass initializes, such a decorated method will be picked up on.
|
||||
|
||||
When `event` passes through the node, then `callback_info` is used to determine whether (and how) the decorated method should run.
When the `action_type` action passes through the node, then `callback_info` is used to determine whether (and how) the decorated method should run.
|
||||
"""
|
||||
req_params = (
|
||||
{'self'}
|
||||
|
@ -158,7 +157,7 @@ def event_decorator(
|
|||
input_socket_name: node._compute_input(
|
||||
input_socket_name,
|
||||
kind=input_socket_kinds.get(
|
||||
input_socket_name, ct.FlowKind.Value
|
||||
input_socket_name, ct.DataFlowKind.Value
|
||||
),
|
||||
unit_system=(
|
||||
unit_system := unit_systems.get(
|
||||
|
@ -180,10 +179,10 @@ def event_decorator(
|
|||
method_kw_args |= (
|
||||
{
|
||||
'output_sockets': {
|
||||
output_socket_name: ct.FlowKind.scale_to_unit_system(
|
||||
output_socket_name: ct.DataFlowKind.scale_to_unit_system(
|
||||
(
|
||||
output_socket_kind := output_socket_kinds.get(
|
||||
output_socket_name, ct.FlowKind.Value
|
||||
output_socket_name, ct.DataFlowKind.Value
|
||||
)
|
||||
),
|
||||
node.compute_output(
|
||||
|
@ -202,7 +201,7 @@ def event_decorator(
|
|||
else node.compute_output(
|
||||
output_socket_name,
|
||||
kind=output_socket_kinds.get(
|
||||
output_socket_name, ct.FlowKind.Value
|
||||
output_socket_name, ct.DataFlowKind.Value
|
||||
),
|
||||
optional=output_sockets_optional.get(
|
||||
output_socket_name, False
|
||||
|
@ -253,14 +252,14 @@ def event_decorator(
|
|||
)
|
||||
|
||||
# Set Decorated Attributes and Return
|
||||
## TODO: Fix Introspection + Documentation
|
||||
## Fix Introspection + Documentation
|
||||
# decorated.__name__ = method.__name__
|
||||
# decorated.__module__ = method.__module__
|
||||
# decorated.__qualname__ = method.__qualname__
|
||||
decorated.__doc__ = method.__doc__
|
||||
# decorated.__doc__ = method.__doc__
|
||||
|
||||
## Add Spice
|
||||
decorated.event = event
|
||||
decorated.action_type = action_type
|
||||
decorated.callback_info = callback_info
|
||||
decorated.stop_propagation = stop_propagation
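Regarding the commented-out introspection fix above: `functools.wraps` would copy `__name__`, `__doc__`, and `__qualname__` onto the returned wrapper while still allowing extra metadata attributes to be attached afterwards. A hypothetical sketch of that approach, not the addon's current behavior:

```python
import functools

def event_decorator_sketch(action_type):
    def decorator(method):
        @functools.wraps(method)  # preserves name/docstring for introspection and docs
        def decorated(node):
            return method(node)

        decorated.action_type = action_type  # metadata still attachable after wraps
        decorated.callback_info = None
        return decorated
    return decorator

@event_decorator_sketch('OutputRequested')
def compute_value(node):
    """Computes something."""
    return 42

print(compute_value.__name__, compute_value.action_type)  # compute_value OutputRequested
print(compute_value(None))  # 42
```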
|
||||
|
||||
|
@ -276,7 +275,7 @@ def on_enable_lock(
|
|||
**kwargs,
|
||||
):
|
||||
return event_decorator(
|
||||
event=ct.FlowEvent.EnableLock,
|
||||
action_type=ct.DataFlowAction.EnableLock,
|
||||
callback_info=None,
|
||||
**kwargs,
|
||||
)
|
||||
|
@ -286,7 +285,7 @@ def on_disable_lock(
|
|||
**kwargs,
|
||||
):
|
||||
return event_decorator(
|
||||
event=ct.FlowEvent.DisableLock,
|
||||
action_type=ct.DataFlowAction.DisableLock,
|
||||
callback_info=None,
|
||||
**kwargs,
|
||||
)
|
||||
|
@ -301,7 +300,7 @@ def on_value_changed(
|
|||
**kwargs,
|
||||
):
|
||||
return event_decorator(
|
||||
event=ct.FlowEvent.DataChanged,
|
||||
action_type=ct.DataFlowAction.DataChanged,
|
||||
callback_info=InfoDataChanged(
|
||||
run_on_init=run_on_init,
|
||||
on_changed_sockets=(
|
||||
|
@ -317,11 +316,11 @@ def on_value_changed(
|
|||
## TODO: Change name to 'on_output_requested'
|
||||
def computes_output_socket(
|
||||
output_socket_name: ct.SocketName | None,
|
||||
kind: ct.FlowKind = ct.FlowKind.Value,
|
||||
kind: ct.DataFlowKind = ct.DataFlowKind.Value,
|
||||
**kwargs,
|
||||
):
|
||||
return event_decorator(
|
||||
event=ct.FlowEvent.OutputRequested,
|
||||
action_type=ct.DataFlowAction.OutputRequested,
|
||||
callback_info=InfoOutputRequested(
|
||||
output_socket_name=output_socket_name,
|
||||
kind=kind,
|
||||
|
@ -343,7 +342,7 @@ def on_show_preview(
|
|||
**kwargs,
|
||||
):
|
||||
return event_decorator(
|
||||
event=ct.FlowEvent.ShowPreview,
|
||||
action_type=ct.DataFlowAction.ShowPreview,
|
||||
callback_info={},
|
||||
**kwargs,
|
||||
)
|
||||
|
@ -354,7 +353,7 @@ def on_show_plot(
|
|||
**kwargs,
|
||||
):
|
||||
return event_decorator(
|
||||
event=ct.FlowEvent.ShowPlot,
|
||||
action_type=ct.DataFlowAction.ShowPlot,
|
||||
callback_info={},
|
||||
stop_propagation=stop_propagation,
|
||||
**kwargs,
|
||||
|
|
|
@ -1,16 +1,14 @@
|
|||
# from . import scientific_constant
|
||||
# from . import physical_constant
|
||||
from . import blender_constant, expr_constant, number_constant, scientific_constant
|
||||
from . import blender_constant, number_constant, scientific_constant
|
||||
|
||||
BL_REGISTER = [
|
||||
*expr_constant.BL_REGISTER,
|
||||
*scientific_constant.BL_REGISTER,
|
||||
*number_constant.BL_REGISTER,
|
||||
# *physical_constant.BL_REGISTER,
|
||||
*blender_constant.BL_REGISTER,
|
||||
]
|
||||
BL_NODES = {
|
||||
**expr_constant.BL_NODES,
|
||||
**scientific_constant.BL_NODES,
|
||||
**number_constant.BL_NODES,
|
||||
# **physical_constant.BL_NODES,
|
||||
|
|
|
@ -1,41 +0,0 @@
|
|||
import typing as typ
|
||||
|
||||
from .... import contracts as ct
|
||||
from .... import sockets
|
||||
from ... import base, events
|
||||
|
||||
|
||||
class ExprConstantNode(base.MaxwellSimNode):
|
||||
node_type = ct.NodeType.ExprConstant
|
||||
bl_label = 'Expr Constant'
|
||||
|
||||
input_sockets: typ.ClassVar = {
|
||||
'Expr': sockets.ExprSocketDef(),
|
||||
}
|
||||
output_sockets: typ.ClassVar = {
|
||||
'Expr': sockets.ExprSocketDef(),
|
||||
}
|
||||
|
||||
## TODO: Symbols (defined w/props?)
|
||||
## - Currently expr constant isn't excessively useful, since there are no variables.
|
||||
## - We'll define the #, type, name with props.
|
||||
## - We'll add loose-socket inputs as int/real/complex/physical socket (based on type) for Param.
|
||||
## - The output expr would support `Value` (just the expression), `LazyValueFunc` (evaluate w/symbol support), `Param` (example values for symbols).
|
||||
|
||||
####################
|
||||
# - Callbacks
|
||||
####################
|
||||
@events.computes_output_socket(
|
||||
'Expr', kind=ct.FlowKind.Value, input_sockets={'Expr'}
|
||||
)
|
||||
def compute_value(self, input_sockets: dict) -> typ.Any:
|
||||
return input_sockets['Expr']
|
||||
|
||||
|
||||
####################
|
||||
# - Blender Registration
|
||||
####################
|
||||
BL_REGISTER = [
|
||||
ExprConstantNode,
|
||||
]
|
||||
BL_NODES = {ct.NodeType.ExprConstant: (ct.NodeCategory.MAXWELLSIM_INPUTS_CONSTANTS)}
|
|
@@ -2,8 +2,7 @@ import typing as typ
import bpy

from blender_maxwell.utils import sci_constants as constants

from ......utils import sci_constants as constants
from .... import contracts as ct
from .... import sockets
from ... import base, events

@@ -56,7 +55,7 @@ class ScientificConstantNode(base.MaxwellSimNode):
self.cache__units = ''
self.cache__uncertainty = ''

self.on_prop_changed('sci_constant', context)
self.sync_prop('sci_constant', context)

####################
# - UI

@@ -5,8 +5,7 @@ import bpy
import tidy3d as td
import tidy3d.plugins.dispersion as td_dispersion

from blender_maxwell.utils import logger

from ......utils import logger
from .... import contracts as ct
from .... import managed_objs, sockets
from ... import base, events

@@ -74,7 +73,7 @@ class Tidy3DFileImporterNode(base.MaxwellSimNode):
),
],
default='SIMULATION_DATA',
update=lambda self, context: self.on_prop_changed('tidy3d_type', context),
update=lambda self, context: self.sync_prop('tidy3d_type', context),
)

disp_fit__min_poles: bpy.props.IntProperty(

@@ -4,10 +4,9 @@ import bpy
import sympy as sp
import sympy.physics.units as spu

from blender_maxwell.utils import extra_sympy_units as spux
from blender_maxwell.utils import logger
from blender_maxwell.utils import sci_constants as constants

from .....utils import extra_sympy_units as spux
from .....utils import logger
from .....utils import sci_constants as constants
from ... import contracts as ct
from ... import sockets
from .. import base, events

@@ -28,7 +27,7 @@ class WaveConstantNode(base.MaxwellSimNode):
name='Range',
description='Whether to use a wavelength/frequency range',
default=False,
update=lambda self, context: self.on_prop_changed('use_range', context),
update=lambda self, context: self.sync_prop('use_range', context),
)

def draw_props(self, _: bpy.types.Context, col: bpy.types.UILayout):

@@ -39,7 +38,7 @@ class WaveConstantNode(base.MaxwellSimNode):
####################
@events.computes_output_socket(
'WL',
kind=ct.FlowKind.Value,
kind=ct.DataFlowKind.Value,
# Data
input_sockets={'WL', 'Freq'},
input_sockets_optional={'WL': True, 'Freq': True},

@@ -56,7 +55,7 @@ class WaveConstantNode(base.MaxwellSimNode):

@events.computes_output_socket(
'Freq',
kind=ct.FlowKind.Value,
kind=ct.DataFlowKind.Value,
# Data
input_sockets={'WL', 'Freq'},
input_sockets_optional={'WL': True, 'Freq': True},

@@ -74,7 +73,7 @@ class WaveConstantNode(base.MaxwellSimNode):

@events.computes_output_socket(
'WL',
kind=ct.FlowKind.LazyArrayRange,
kind=ct.DataFlowKind.LazyValueRange,
# Data
input_sockets={'WL', 'Freq'},
input_sockets_optional={'WL': True, 'Freq': True},

@@ -93,12 +92,12 @@ class WaveConstantNode(base.MaxwellSimNode):

@events.computes_output_socket(
'Freq',
kind=ct.FlowKind.LazyArrayRange,
kind=ct.DataFlowKind.LazyValueRange,
# Data
input_sockets={'WL', 'Freq'},
input_socket_kinds={
'WL': ct.FlowKind.LazyArrayRange,
'Freq': ct.FlowKind.LazyArrayRange,
'WL': ct.DataFlowKind.LazyValueRange,
'Freq': ct.DataFlowKind.LazyValueRange,
},
input_sockets_optional={'WL': True, 'Freq': True},
)

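Side note (not part of the diff): the `WL`/`Freq` output pairs above compute wavelength and frequency from whichever input socket is linked. A minimal standalone sketch of that conversion with plain `sympy.physics.units`, making no assumptions about the addon's internal unit helpers:

import sympy.physics.units as spu

# Illustrative only: wavelength <-> frequency through the speed of light.
# The addon's own helpers (spux, ct.UNITS_TIDY3D) are not used here.
wl = 500 * spu.nanometer
freq = spu.convert_to(spu.speed_of_light / wl, spu.terahertz)       # ~600 THz
wl_back = spu.convert_to(spu.speed_of_light / freq, spu.nanometer)  # back to 500 nm
print(freq, wl_back)
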
@@ -1,9 +1,9 @@
import typing as typ
from pathlib import Path

from blender_maxwell.utils import logger

from ...... import info
from ......services import tdcloud
from ......utils import logger
from .... import contracts as ct
from .... import sockets
from ... import base, events

@@ -17,8 +17,8 @@ def _sim_data_cache_path(task_id: str) -> Path:
Arguments:
task_id: The ID of the Tidy3D cloud task.
"""
(ct.addon.ADDON_CACHE / task_id).mkdir(exist_ok=True)
return ct.addon.ADDON_CACHE / task_id / 'sim_data.hdf5'
(info.ADDON_CACHE / task_id).mkdir(exist_ok=True)
return info.ADDON_CACHE / task_id / 'sim_data.hdf5'


####################

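Side note (not part of the diff): both versions of `_sim_data_cache_path()` build a per-task cache directory and return an HDF5 path inside it. A self-contained sketch with `pathlib`, using a made-up cache root rather than the addon's real `ADDON_CACHE`:

from pathlib import Path

# Hypothetical stand-in for the addon's cache directory.
ADDON_CACHE = Path('/tmp/blender_maxwell_cache')

def sim_data_cache_path(task_id: str) -> Path:
    """Return the cached simulation-data path for a given cloud task."""
    (ADDON_CACHE / task_id).mkdir(parents=True, exist_ok=True)
    return ADDON_CACHE / task_id / 'sim_data.hdf5'

print(sim_data_cache_path('task-1234'))
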
@@ -1,3 +1,4 @@
import typing as typ

import bpy
import scipy as sc

@@ -5,8 +6,7 @@ import sympy as sp
import sympy.physics.units as spu
import tidy3d as td

from blender_maxwell.utils import extra_sympy_units as spuex

from .....utils import extra_sympy_units as spuex
from ... import contracts as ct
from ... import managed_objs, sockets
from .. import base, events

@@ -54,7 +54,7 @@ class LibraryMediumNode(base.MaxwellSimNode):
if mat_key != 'graphene' ## For some reason, it's unique...
],
default='Au',
update=(lambda self, context: self.on_prop_changed('material', context)),
update=(lambda self, context: self.sync_prop('material', context)),
)

@property

@@ -3,10 +3,9 @@ import typing as typ
import sympy as sp
import tidy3d as td

from blender_maxwell.utils import extra_sympy_units as spux
from blender_maxwell.utils import logger

from .....assets.import_geonodes import GeoNodes, import_geonodes
from .....utils import extra_sympy_units as spux
from .....utils import logger
from ... import contracts as ct
from ... import managed_objs, sockets
from .. import base, events

@@ -68,7 +67,7 @@ class EHFieldMonitorNode(base.MaxwellSimNode):
'Freqs',
},
input_socket_kinds={
'Freqs': ct.FlowKind.LazyArrayRange,
'Freqs': ct.DataFlowKind.LazyValueRange,
},
unit_systems={'Tidy3DUnits': ct.UNITS_TIDY3D},
scale_input_sockets={

@@ -3,10 +3,9 @@ import typing as typ
import sympy as sp
import tidy3d as td

from blender_maxwell.utils import extra_sympy_units as spux
from blender_maxwell.utils import logger

from .....assets.import_geonodes import GeoNodes, import_geonodes
from .....utils import extra_sympy_units as spux
from .....utils import logger
from ... import contracts as ct
from ... import managed_objs, sockets
from .. import base, events

@@ -68,7 +67,7 @@ class PowerFluxMonitorNode(base.MaxwellSimNode):
'Direction',
},
input_socket_kinds={
'Freqs': ct.FlowKind.LazyArrayRange,
'Freqs': ct.DataFlowKind.LazyValueRange,
},
unit_systems={'Tidy3DUnits': ct.UNITS_TIDY3D},
scale_input_sockets={

@@ -3,8 +3,7 @@ import typing as typ
import bpy
import sympy as sp

from blender_maxwell.utils import logger

from .....utils import logger
from ... import contracts as ct
from ... import sockets
from .. import base, events

@@ -60,14 +59,14 @@ class ViewerNode(base.MaxwellSimNode):
name='Auto-Plot',
description='Whether to auto-plot anything plugged into the viewer node',
default=False,
update=lambda self, context: self.on_prop_changed('auto_plot', context),
update=lambda self, context: self.sync_prop('auto_plot', context),
)

auto_3d_preview: bpy.props.BoolProperty(
name='Auto 3D Preview',
description="Whether to auto-preview anything 3D, that's plugged into the viewer node",
default=True,
update=lambda self, context: self.on_prop_changed('auto_3d_preview', context),
update=lambda self, context: self.sync_prop('auto_3d_preview', context),
)

####################

@@ -125,7 +124,7 @@ class ViewerNode(base.MaxwellSimNode):
)
def on_changed_plot_preview(self, props):
if self.inputs['Data'].is_linked and props['auto_plot']:
self.trigger_event(ct.FlowEvent.ShowPlot)
self.trigger_action(ct.DataFlowAction.ShowPlot)

@events.on_value_changed(
socket_name='Data',

@@ -138,7 +137,7 @@ class ViewerNode(base.MaxwellSimNode):
# Remove Non-Repreviewed Previews on Close
with node_tree.repreview_all():
if self.inputs['Data'].is_linked and props['auto_3d_preview']:
self.trigger_event(ct.FlowEvent.ShowPreview)
self.trigger_action(ct.DataFlowAction.ShowPreview)


####################

@@ -180,7 +180,7 @@ class Tidy3DWebExporterNode(base.MaxwellSimNode):
####################
def sync_lock_tree(self, context):
if self.lock_tree:
self.trigger_event(ct.FlowEvent.EnableLock)
self.trigger_action(ct.DataFlowAction.EnableLock)
self.locked = False
for bl_socket in self.inputs:
if bl_socket.name == 'FDTD Sim':

@@ -188,9 +188,9 @@ class Tidy3DWebExporterNode(base.MaxwellSimNode):
bl_socket.locked = False

else:
self.trigger_event(ct.FlowEvent.DisableLock)
self.trigger_action(ct.DataFlowAction.DisableLock)

self.on_prop_changed('lock_tree', context)
self.sync_prop('lock_tree', context)

def sync_tracked_task_id(self, context):
# Select Tracked Task

@@ -212,7 +212,7 @@ class Tidy3DWebExporterNode(base.MaxwellSimNode):
self.inputs['Cloud Task'].sync_prepare_new_task()
self.inputs['Cloud Task'].locked = False

self.on_prop_changed('tracked_task_id', context)
self.sync_prop('tracked_task_id', context)

####################
# - Output Socket Callbacks

@@ -35,7 +35,7 @@ class FDTDSimNode(base.MaxwellSimNode):
####################
@events.computes_output_socket(
'FDTD Sim',
kind=ct.FlowKind.Value,
kind=ct.DataFlowKind.Value,
input_sockets={'Sources', 'Structures', 'Domain', 'BCs', 'Monitors'},
)
def compute_fdtd_sim(self, input_sockets: dict) -> sp.Expr:

@@ -42,7 +42,7 @@ class PointDipoleSourceNode(base.MaxwellSimNode):
('EZ', 'Ez', 'Electric field in z-dir'),
],
default='EX',
update=(lambda self, context: self.on_prop_changed('pol_axis', context)),
update=(lambda self, context: self.sync_prop('pol_axis', context)),
)

####################

@@ -1,11 +1,11 @@
import typing as typ

import bpy
import numpy as np
import sympy.physics.units as spu
import tidy3d as td

from blender_maxwell.utils import extra_sympy_units as spuex

from ......utils import extra_sympy_units as spuex
from .... import contracts as ct
from .... import managed_objs, sockets
from ... import base, events

@@ -52,13 +52,13 @@ class GaussianPulseTemporalShapeNode(base.MaxwellSimNode):
name='Plot Time Start (ps)',
description='The instance ID of a particular MaxwellSimNode instance, used to index caches',
default=0.0,
update=(lambda self, context: self.on_prop_changed('plot_time_start', context)),
update=(lambda self, context: self.sync_prop('plot_time_start', context)),
)
plot_time_end: bpy.props.FloatProperty(
name='Plot Time End (ps)',
description='The instance ID of a particular MaxwellSimNode instance, used to index caches',
default=5,
update=(lambda self, context: self.on_prop_changed('plot_time_start', context)),
update=(lambda self, context: self.sync_prop('plot_time_start', context)),
)

####################

@@ -2,8 +2,7 @@ import typing as typ

import tidy3d as td

from blender_maxwell.utils import analyze_geonodes, logger

from .....utils import analyze_geonodes, logger
from ... import bl_socket_map, managed_objs, sockets
from ... import contracts as ct
from .. import base, events

@@ -69,7 +69,7 @@ class CombineNode(base.MaxwellSimNode):
default=1,
min=1,
# max=MAX_AMOUNT,
update=lambda self, context: self.on_prop_changed('amount', context),
update=lambda self, context: self.sync_prop('amount', context),
)

####################

@@ -8,7 +8,7 @@
#class MaxwellSimProp(bpy.types.PropertyGroup):
# """A Blender property usable in nodes and sockets."""
# name: str = ""
# data_flow_kind: ct.FlowKind
# data_flow_kind: ct.DataFlowKind
#
# value: dict[str, tuple[bpy.types.Property, dict]] | None = None
#

@@ -1,5 +1,4 @@
from blender_maxwell.utils import logger

from ....utils import logger
from .. import contracts as ct
from . import basic, blender, maxwell, number, physical, tidy3d, vector
from .scan_socket_defs import scan_for_socket_defs

File diff suppressed because it is too large

@@ -1,12 +1,11 @@
from . import any as any_socket
from . import bool as bool_socket
from . import expr, file_path, string
from . import file_path, string

AnySocketDef = any_socket.AnySocketDef
BoolSocketDef = bool_socket.BoolSocketDef
StringSocketDef = string.StringSocketDef
FilePathSocketDef = file_path.FilePathSocketDef
ExprSocketDef = expr.ExprSocketDef
StringSocketDef = string.StringSocketDef


BL_REGISTER = [

@@ -14,5 +13,4 @@ BL_REGISTER = [
*bool_socket.BL_REGISTER,
*string.BL_REGISTER,
*file_path.BL_REGISTER,
*expr.BL_REGISTER,
]

@@ -18,7 +18,7 @@ class BoolBLSocket(base.MaxwellSimSocket):
name='Boolean',
description='Represents a boolean value',
default=False,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -1,132 +0,0 @@
import typing as typ

import bpy
import pydantic as pyd
import sympy as sp

from blender_maxwell.utils import bl_cache
from blender_maxwell.utils import extra_sympy_units as spux

from ... import contracts as ct
from .. import base


class ExprBLSocket(base.MaxwellSimSocket):
socket_type = ct.SocketType.Expr
bl_label = 'Expr'

####################
# - Properties
####################
raw_value: bpy.props.StringProperty(
name='Expr',
description='Represents a symbolic expression',
default='',
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
)

int_symbols: set[spux.IntSymbol] = bl_cache.BLField([])
real_symbols: set[spux.RealSymbol] = bl_cache.BLField([])
complex_symbols: set[spux.ComplexSymbol] = bl_cache.BLField([])

@property
def symbols(self) -> list[spux.Symbol]:
"""Retrieves all symbols by concatenating int, real, and complex symbols, and sorting them by name.

The order is guaranteed to be **deterministic**.

Returns:
All symbols valid for use in the expression.
"""
return sorted(
self.int_symbols | self.real_symbols | self.complex_symbols,
key=lambda sym: sym.name,
)

####################
# - Socket UI
####################
def draw_value(self, col: bpy.types.UILayout) -> None:
col.prop(self, 'raw_value', text='')

####################
# - Computation of Default Value
####################
@property
def value(self) -> sp.Expr:
expr = sp.sympify(
self.raw_value,
locals={sym.name: sym for sym in self.symbols},
strict=False,
convert_xor=True,
).subs(spux.ALL_UNIT_SYMBOLS)

if not expr.free_symbols.issubset(self.symbols):
msg = f'Expression "{expr}" (symbols={self.expr.free_symbols}) has invalid symbols (valid symbols: {self.symbols})'
raise ValueError(msg)

return expr

@value.setter
def value(self, value: str) -> None:
self.raw_value = sp.sstr(value)

@property
def lazy_value_func(self) -> ct.LazyValueFuncFlow:
return ct.LazyValueFuncFlow(
func=sp.lambdify(self.symbols, self.value, 'jax'),
func_args=[
(sym.name, spux.sympy_to_python_type(sym)) for sym in self.symbols
],
supports_jax=True,
)


####################
# - Socket Configuration
####################
class ExprSocketDef(base.SocketDef):
socket_type: ct.SocketType = ct.SocketType.Expr

_x = sp.Symbol('x', real=True)
int_symbols: list[spux.IntSymbol] = []
real_symbols: list[spux.RealSymbol] = [_x]
complex_symbols: list[spux.ComplexSymbol] = []

# Expression
default_expr: spux.SympyExpr = _x
allow_units: bool = True

@pyd.model_validator(mode='after')
def check_default_expr_follows_unit_allowance(self) -> typ.Self:
"""Checks that `self.default_expr` only uses units if `self.allow_units` is defined.

Raises:
ValueError: If the expression uses symbols not defined in `self.symbols`.
"""
if not spux.uses_units(self.default_expr):
msg = f'Expression symbols ({self.default_expr.free_symbol}) are not a strict subset of defined symbols ({self.symbols})'
raise ValueError(msg)

@pyd.model_validator(mode='after')
def check_default_expr_uses_allowed_symbols(self) -> typ.Self:
"""Checks that `self.default_expr` only uses symbols defined in `self.symbols`.

Raises:
ValueError: If the expression uses symbols not defined in `self.symbols`.
"""
if not self.default_expr.free_symbols.issubset(self.symbols):
msg = f'Expression symbols ({self.default_expr.free_symbol}) are not a strict subset of defined symbols ({self.symbols})'
raise ValueError(msg)

def init(self, bl_socket: ExprBLSocket) -> None:
bl_socket.value = self.default_expr
bl_socket.symbols = self.symbols


####################
# - Blender Registration
####################
BL_REGISTER = [
ExprBLSocket,
]

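Side note (not part of the diff): the removed `ExprBLSocket` parses its string property with `sympify` against a fixed set of allowed symbols, then exposes a lambdified callable. A minimal sketch of that flow using plain sympy (the `spux`/`bl_cache` helpers and the `'jax'` backend are left out; `'numpy'` stands in so the sketch runs on its own):

import sympy as sp

# Parse a user-entered string against a known symbol set, as the deleted
# value property does, then reject any stray symbols.
x = sp.Symbol('x', real=True)
allowed = {x}

raw_value = 'x^2 + 3*x'
expr = sp.sympify(raw_value, locals={'x': x}, convert_xor=True)
if not expr.free_symbols.issubset(allowed):
    raise ValueError(f'Expression "{expr}" uses invalid symbols')

# Lambdify the expression into a numeric callable (the socket used 'jax').
func = sp.lambdify([x], expr, 'numpy')
print(expr, func(2.0))  # x**2 + 3*x  10.0
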
@@ -20,7 +20,7 @@ class FilePathBLSocket(base.MaxwellSimSocket):
name='File Path',
description='Represents the path to a file',
subtype='FILE_PATH',
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -18,7 +18,7 @@ class StringBLSocket(base.MaxwellSimSocket):
name='String',
description='Represents a string',
default='',
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -57,6 +57,3 @@ class StringSocketDef(base.SocketDef):
BL_REGISTER = [
StringBLSocket,
]

@@ -18,7 +18,7 @@ class BlenderCollectionBLSocket(base.MaxwellSimSocket):
name='Blender Collection',
description='A Blender collection',
type=bpy.types.Collection,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -21,7 +21,7 @@ class BlenderMaxwellResetGeoNodesSocket(bpy.types.Operator):
socket = node.inputs[self.socket_name]

# Report as though the GeoNodes Tree Changed
socket.on_prop_changed('raw_value', context)
socket.sync_prop('raw_value', context)

return {'FINISHED'}

@@ -41,7 +41,7 @@ class BlenderGeoNodesBLSocket(base.MaxwellSimSocket):
description='Represents a Blender GeoNodes Tree',
type=bpy.types.NodeTree,
poll=(lambda self, obj: obj.bl_idname == 'GeometryNodeTree'),
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -18,7 +18,7 @@ class BlenderImageBLSocket(base.MaxwellSimSocket):
name='Blender Image',
description='Represents a Blender Image',
type=bpy.types.Image,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -15,7 +15,7 @@ class BlenderMaterialBLSocket(base.MaxwellSimSocket):
name='Blender Material',
description='Represents a Blender material',
type=bpy.types.Material,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -39,7 +39,7 @@ class BlenderObjectBLSocket(base.MaxwellSimSocket):
name='Blender Object',
description='Represents a Blender object',
type=bpy.types.Object,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -18,7 +18,7 @@ class BlenderTextBLSocket(base.MaxwellSimSocket):
name='Blender Text',
description='Represents a Blender text datablock',
type=bpy.types.Text,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -1,5 +1,6 @@
import bpy
import tidy3d as td
import typing_extensions as typx

from ... import contracts as ct
from .. import base

@@ -22,7 +23,7 @@ class MaxwellBoundCondBLSocket(base.MaxwellSimSocket):
('PERIODIC', 'Periodic', 'Infinitely periodic layer'),
],
default='PML',
update=(lambda self, context: self.on_prop_changed('default_choice', context)),
update=(lambda self, context: self.sync_prop('default_choice', context)),
)

####################

@@ -44,7 +45,7 @@ class MaxwellBoundCondBLSocket(base.MaxwellSimSocket):
}[self.default_choice]

@value.setter
def value(self, value: typ.Literal['PML', 'PEC', 'PMC', 'PERIODIC']) -> None:
def value(self, value: typx.Literal['PML', 'PEC', 'PMC', 'PERIODIC']) -> None:
self.default_choice = value


@@ -54,7 +55,7 @@ class MaxwellBoundCondBLSocket(base.MaxwellSimSocket):
class MaxwellBoundCondSocketDef(base.SocketDef):
socket_type: ct.SocketType = ct.SocketType.MaxwellBoundCond

default_choice: typ.Literal['PML', 'PEC', 'PMC', 'PERIODIC'] = 'PML'
default_choice: typx.Literal['PML', 'PEC', 'PMC', 'PERIODIC'] = 'PML'

def init(self, bl_socket: MaxwellBoundCondBLSocket) -> None:
bl_socket.value = self.default_choice

@@ -29,7 +29,7 @@ class MaxwellBoundCondsBLSocket(base.MaxwellSimSocket):
name='Show Bounds Definition',
description='Toggle to show bound faces',
default=False,
update=(lambda self, context: self.on_prop_changed('show_definition', context)),
update=(lambda self, context: self.sync_prop('show_definition', context)),
)

x_pos: bpy.props.EnumProperty(

@@ -37,42 +37,42 @@ class MaxwellBoundCondsBLSocket(base.MaxwellSimSocket):
description='+x choice of default boundary face',
items=BOUND_FACE_ITEMS,
default='PML',
update=(lambda self, context: self.on_prop_changed('x_pos', context)),
update=(lambda self, context: self.sync_prop('x_pos', context)),
)
x_neg: bpy.props.EnumProperty(
name='-x Bound Face',
description='-x choice of default boundary face',
items=BOUND_FACE_ITEMS,
default='PML',
update=(lambda self, context: self.on_prop_changed('x_neg', context)),
update=(lambda self, context: self.sync_prop('x_neg', context)),
)
y_pos: bpy.props.EnumProperty(
name='+y Bound Face',
description='+y choice of default boundary face',
items=BOUND_FACE_ITEMS,
default='PML',
update=(lambda self, context: self.on_prop_changed('y_pos', context)),
update=(lambda self, context: self.sync_prop('y_pos', context)),
)
y_neg: bpy.props.EnumProperty(
name='-y Bound Face',
description='-y choice of default boundary face',
items=BOUND_FACE_ITEMS,
default='PML',
update=(lambda self, context: self.on_prop_changed('y_neg', context)),
update=(lambda self, context: self.sync_prop('y_neg', context)),
)
z_pos: bpy.props.EnumProperty(
name='+z Bound Face',
description='+z choice of default boundary face',
items=BOUND_FACE_ITEMS,
default='PML',
update=(lambda self, context: self.on_prop_changed('z_pos', context)),
update=(lambda self, context: self.sync_prop('z_pos', context)),
)
z_neg: bpy.props.EnumProperty(
name='-z Bound Face',
description='-z choice of default boundary face',
items=BOUND_FACE_ITEMS,
default='PML',
update=(lambda self, context: self.on_prop_changed('z_neg', context)),
update=(lambda self, context: self.sync_prop('z_neg', context)),
)

####################

@@ -3,8 +3,7 @@ import scipy as sc
import sympy.physics.units as spu
import tidy3d as td

from blender_maxwell.utils import extra_sympy_units as spux

from .....utils.pydantic_sympy import ConstrSympyExpr
from ... import contracts as ct
from .. import base


@@ -25,7 +24,7 @@ class MaxwellMediumBLSocket(base.MaxwellSimSocket):
default=500.0,
precision=4,
step=50,
update=(lambda self, context: self.on_prop_changed('wl', context)),
update=(lambda self, context: self.sync_prop('wl', context)),
)

rel_permittivity: bpy.props.FloatVectorProperty(

@@ -34,9 +33,7 @@ class MaxwellMediumBLSocket(base.MaxwellSimSocket):
size=2,
default=(1.0, 0.0),
precision=2,
update=(
lambda self, context: self.on_prop_changed('rel_permittivity', context)
),
update=(lambda self, context: self.sync_prop('rel_permittivity', context)),
)

####################

@@ -74,7 +71,7 @@ class MaxwellMediumBLSocket(base.MaxwellSimSocket):

@value.setter
def value(
self, value: tuple[spux.ConstrSympyExpr(allow_variables=False), complex]
self, value: tuple[ConstrSympyExpr(allow_variables=False), complex]
) -> None:
_wl, rel_permittivity = value

@@ -18,7 +18,7 @@ class MaxwellMonitorSocketDef(base.SocketDef):

def init(self, bl_socket: MaxwellMonitorBLSocket) -> None:
if self.is_list:
bl_socket.active_kind = ct.FlowKind.Array
bl_socket.active_kind = ct.DataFlowKind.ValueArray


####################

@@ -19,7 +19,7 @@ class MaxwellSimGridBLSocket(base.MaxwellSimSocket):
min=0.01,
# step=10,
precision=2,
update=(lambda self, context: self.on_prop_changed('min_steps_per_wl', context)),
update=(lambda self, context: self.sync_prop('min_steps_per_wl', context)),
)

####################

@@ -18,7 +18,7 @@ class MaxwellSourceSocketDef(base.SocketDef):

def init(self, bl_socket: MaxwellSourceBLSocket) -> None:
if self.is_list:
bl_socket.active_kind = ct.FlowKind.Array
bl_socket.active_kind = ct.DataFlowKind.ValueArray


####################

@@ -18,7 +18,7 @@ class MaxwellStructureSocketDef(base.SocketDef):

def init(self, bl_socket: MaxwellStructureBLSocket) -> None:
if self.is_list:
bl_socket.active_kind = ct.FlowKind.ValueArray
bl_socket.active_kind = ct.DataFlowKind.ValueArray


####################

@@ -3,8 +3,7 @@ import typing as typ
import bpy
import sympy as sp

from blender_maxwell.utils import extra_sympy_units as spux

from .....utils.pydantic_sympy import SympyExpr
from ... import contracts as ct
from .. import base


@@ -24,7 +23,8 @@ class ComplexNumberBLSocket(base.MaxwellSimSocket):
description='Represents a complex number (real, imaginary)',
size=2,
default=(0.0, 0.0),
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
subtype='NONE',
update=(lambda self, context: self.sync_prop('raw_value', context)),
)
coord_sys: bpy.props.EnumProperty(
name='Coordinate System',

@@ -46,20 +46,58 @@ class ComplexNumberBLSocket(base.MaxwellSimSocket):
),
],
default='CARTESIAN',
update=lambda self, context: self.on_coord_sys_changed(context),
update=lambda self, context: self._sync_coord_sys(context),
)

####################
# - Event Methods
# - Socket UI
####################
def on_coord_sys_changed(self, context: bpy.types.Context):
r"""Transforms values when the coordinate system changes.

Notes:
Cartesian coordinates with $y=0$ has no corresponding $\theta$
Therefore, we manually set $\theta=0$.

def draw_value(self, col: bpy.types.UILayout) -> None:
"""Draw the value of the complex number, including a toggle for
specifying the active coordinate system.
"""
col_row = col.row()
col_row.prop(self, 'raw_value', text='')
col.prop(self, 'coord_sys', text='')

####################
# - Computation of Default Value
####################
@property
def value(self) -> SympyExpr:
"""Return the complex number as a sympy expression, of a form
determined by the coordinate system.

- Cartesian: a,b -> a + ib
- Polar: r,t -> re^(it)

Returns:
The sympy expression representing the complex number.
"""
v1, v2 = self.raw_value

return {
'CARTESIAN': v1 + sp.I * v2,
'POLAR': v1 * sp.exp(sp.I * v2),
}[self.coord_sys]

@value.setter
def value(self, value: SympyExpr) -> None:
"""Set the complex number from a sympy expression, using an internal
representation determined by the coordinate system.

- Cartesian: a,b -> a + ib
- Polar: r,t -> re^(it)
"""
self.raw_value = {
'CARTESIAN': (sp.re(value), sp.im(value)),
'POLAR': (sp.Abs(value), sp.arg(value)),
}[self.coord_sys]

####################
# - Internal Update Methods
####################
def _sync_coord_sys(self, context: bpy.types.Context):
if self.coord_sys == 'CARTESIAN':
r, theta_rad = self.raw_value
self.raw_value = (

@@ -70,58 +108,11 @@ class ComplexNumberBLSocket(base.MaxwellSimSocket):
x, y = self.raw_value
cart_value = x + sp.I * y
self.raw_value = (
float(sp.Abs(cart_value)),
float(sp.arg(cart_value)) if y != 0 else float(0),
sp.Abs(cart_value),
sp.arg(cart_value) if y != 0 else 0,
)

self.on_prop_changed('coord_sys', context)

####################
# - Socket UI
####################
def draw_value(self, col: bpy.types.UILayout) -> None:
"""Draw the value of the complex number, including a toggle for specifying the active coordinate system."""
# Value Row
row = col.row()
row.prop(self, 'raw_value', text='')

# Coordinate System Dropdown
col.prop(self, 'coord_sys', text='')

####################
# - Computation of Default Value
####################
@property
def value(self) -> spux.Complex:
"""Return the complex number as a sympy expression, of a form determined by the coordinate system.

- **Cartesian**: $(a,b) -> a + ib$
- **Polar**: $(r,t) -> re^(it)$

Returns:
The complex number as a `sympy` type.
"""
v1, v2 = self.raw_value

return {
'CARTESIAN': v1 + sp.I * v2,
'POLAR': v1 * sp.exp(sp.I * v2),
}[self.coord_sys]

@value.setter
def value(self, value: spux.Complex) -> None:
"""Set the complex number from a sympy expression, by numerically simplifying it into coordinate-system determined components.

- **Cartesian**: $(a,b) -> a + ib$
- **Polar**: $(r,t) -> re^(it)$

Parameters:
value: The complex number as a `sympy` type.
"""
self.raw_value = {
'CARTESIAN': (float(sp.re(value)), float(sp.im(value))),
'POLAR': (float(sp.Abs(value)), float(sp.arg(value))),
}[self.coord_sys]
self.sync_prop('coord_sys', context)


####################

@@ -130,7 +121,7 @@ class ComplexNumberBLSocket(base.MaxwellSimSocket):
class ComplexNumberSocketDef(base.SocketDef):
socket_type: ct.SocketType = ct.SocketType.ComplexNumber

default_value: spux.Complex = sp.S(0)
default_value: SympyExpr = sp.S(0 + 0j)
coord_sys: typ.Literal['CARTESIAN', 'POLAR'] = 'CARTESIAN'

def init(self, bl_socket: ComplexNumberBLSocket) -> None:

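Side note (not part of the diff): both the old and new `value` implementations above store either `(re, im)` or `(r, θ)` in `raw_value` and rebuild the sympy expression from whichever pair is active. A small self-contained sketch of that round trip with plain sympy:

import sympy as sp

# Cartesian <-> polar round trip, mirroring ComplexNumberBLSocket.value.
value = 3 + 4 * sp.I

cart = (float(sp.re(value)), float(sp.im(value)))     # (3.0, 4.0)
polar = (float(sp.Abs(value)), float(sp.arg(value)))  # (5.0, ~0.927)

# Reconstruct the same complex number from either representation.
from_cart = cart[0] + sp.I * cart[1]
from_polar = polar[0] * sp.exp(sp.I * polar[1])
print(sp.simplify(from_cart - value), sp.N(from_polar - value))
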
@@ -18,7 +18,7 @@ class IntegerNumberBLSocket(base.MaxwellSimSocket):
name='Integer',
description='Represents an integer',
default=0,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -1,8 +1,7 @@
import bpy
import sympy as sp

from blender_maxwell.utils.pydantic_sympy import SympyExpr

from .....utils.pydantic_sympy import SympyExpr
from ... import contracts as ct
from .. import base


@@ -23,7 +22,7 @@ class RationalNumberBLSocket(base.MaxwellSimSocket):
size=2,
default=(1, 1),
subtype='NONE',
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -1,7 +1,6 @@
import bpy

from blender_maxwell.utils.pydantic_sympy import SympyExpr

from .....utils.pydantic_sympy import SympyExpr
from ... import contracts as ct
from .. import base


@@ -21,7 +20,7 @@ class RealNumberBLSocket(base.MaxwellSimSocket):
description='Represents a real number',
default=0.0,
precision=6,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -1,8 +1,7 @@
import bpy
import sympy.physics.units as spu

from blender_maxwell.utils.pydantic_sympy import SympyExpr

from .....utils.pydantic_sympy import SympyExpr
from ... import contracts as ct
from .. import base


@@ -23,7 +22,7 @@ class PhysicalAccelScalarBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the acceleration',
default=0.0,
precision=6,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -1,8 +1,7 @@
import bpy
import sympy.physics.units as spu

from blender_maxwell.utils.pydantic_sympy import SympyExpr

from .....utils.pydantic_sympy import SympyExpr
from ... import contracts as ct
from .. import base


@@ -23,7 +22,7 @@ class PhysicalAngleBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the acceleration',
default=0.0,
precision=4,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -20,7 +20,7 @@ class PhysicalAreaBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the area',
default=0.0,
precision=6,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -1,8 +1,7 @@
import bpy
import sympy.physics.units as spu

from blender_maxwell.utils.pydantic_sympy import SympyExpr

from .....utils.pydantic_sympy import SympyExpr
from ... import contracts as ct
from .. import base


@@ -23,7 +22,7 @@ class PhysicalForceScalarBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the force',
default=0.0,
precision=6,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

####################

@@ -1,10 +1,9 @@
import bpy
import sympy as sp

from blender_maxwell.utils import extra_sympy_units as spux
from blender_maxwell.utils import logger
from blender_maxwell.utils.pydantic_sympy import SympyExpr

from .....utils import extra_sympy_units as spux
from .....utils import logger
from .....utils.pydantic_sympy import SympyExpr
from ... import contracts as ct
from .. import base


@@ -27,7 +26,7 @@ class PhysicalFreqBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the frequency',
default=0.0,
precision=6,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

min_freq: bpy.props.FloatProperty(

@@ -35,20 +34,20 @@ class PhysicalFreqBLSocket(base.MaxwellSimSocket):
description='Lowest frequency',
default=0.0,
precision=4,
update=(lambda self, context: self.on_prop_changed('min_freq', context)),
update=(lambda self, context: self.sync_prop('min_freq', context)),
)
max_freq: bpy.props.FloatProperty(
name='Max Frequency',
description='Highest frequency',
default=0.0,
precision=4,
update=(lambda self, context: self.on_prop_changed('max_freq', context)),
update=(lambda self, context: self.sync_prop('max_freq', context)),
)
steps: bpy.props.IntProperty(
name='Frequency Steps',
description='# of steps between min and max',
default=2,
update=(lambda self, context: self.on_prop_changed('steps', context)),
update=(lambda self, context: self.sync_prop('steps', context)),
)

####################

@@ -74,9 +73,10 @@ class PhysicalFreqBLSocket(base.MaxwellSimSocket):
self.raw_value = spux.sympy_to_python(spux.scale_to_unit(value, self.unit))

@property
def lazy_array_range(self) -> ct.LazyArrayRange:
return ct.LazyArrayRange(
def lazy_value_range(self) -> ct.LazyDataValueRange:
return ct.LazyDataValueRange(
symbols=set(),
has_unit=True,
unit=self.unit,
start=sp.S(self.min_freq) * self.unit,
stop=sp.S(self.max_freq) * self.unit,

@@ -84,8 +84,9 @@ class PhysicalFreqBLSocket(base.MaxwellSimSocket):
scaling='lin',
)

@lazy_array_range.setter
def lazy_array_range(self, value: ct.LazyArrayRangeFlow) -> None:
@lazy_value_range.setter
def lazy_value_range(self, value: tuple[sp.Expr, sp.Expr, int]) -> None:
log.debug('Lazy Value Range: %s', str(value))
self.min_freq = spux.sympy_to_python(spux.scale_to_unit(value[0], self.unit))
self.max_freq = spux.sympy_to_python(spux.scale_to_unit(value[1], self.unit))
self.steps = value[2]

@@ -110,7 +111,7 @@ class PhysicalFreqSocketDef(base.SocketDef):

bl_socket.value = self.default_value
if self.is_array:
bl_socket.active_kind = ct.FlowKind.LazyArrayRange
bl_socket.active_kind = ct.DataFlowKind.LazyValueRange
bl_socket.lazy_value_range = (self.min_freq, self.max_freq, self.steps)

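Side note (not part of the diff): the `lazy_value_range` / `lazy_array_range` properties above describe a lazily-evaluated linear sweep from `min_freq` to `max_freq` in `steps` points, tagged with the socket's unit. A rough standalone equivalent, assuming only numpy and sympy units (the addon-internal `LazyDataValueRange` type is not used here):

import numpy as np
import sympy.physics.units as spu

# Linear frequency sweep carrying a unit, as scaling='lin' implies.
min_freq, max_freq, steps = 100.0, 200.0, 5
unit = spu.terahertz

values = [float(v) * unit for v in np.linspace(min_freq, max_freq, steps)]
print(values)  # [100.0*terahertz, 125.0*terahertz, ..., 200.0*terahertz]
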
@@ -2,10 +2,9 @@ import bpy
import sympy as sp
import sympy.physics.units as spu

from blender_maxwell.utils import extra_sympy_units as spux
from blender_maxwell.utils import logger
from blender_maxwell.utils.pydantic_sympy import SympyExpr

from .....utils import extra_sympy_units as spux
from .....utils import logger
from .....utils.pydantic_sympy import SympyExpr
from ... import contracts as ct
from .. import base


@@ -28,7 +27,7 @@ class PhysicalLengthBLSocket(base.MaxwellSimSocket):
description='Represents the unitless part of the length',
default=0.0,
precision=6,
update=(lambda self, context: self.on_prop_changed('raw_value', context)),
update=(lambda self, context: self.sync_prop('raw_value', context)),
)

min_len: bpy.props.FloatProperty(

@@ -36,20 +35,20 @@ class PhysicalLengthBLSocket(base.MaxwellSimSocket):
description='Lowest length',
default=0.0,
precision=4,
update=(lambda self, context: self.on_prop_changed('min_len', context)),
update=(lambda self, context: self.sync_prop('min_len', context)),
)
max_len: bpy.props.FloatProperty(
name='Max Length',
description='Highest length',
default=0.0,
precision=4,
update=(lambda self, context: self.on_prop_changed('max_len', context)),
update=(lambda self, context: self.sync_prop('max_len', context)),
)
steps: bpy.props.IntProperty(
name='Length Steps',
description='# of steps between min and max',
default=2,
update=(lambda self, context: self.on_prop_changed('steps', context)),
update=(lambda self, context: self.sync_prop('steps', context)),
)

####################

@@ -75,9 +74,10 @@ class PhysicalLengthBLSocket(base.MaxwellSimSocket):
self.raw_value = spux.sympy_to_python(spux.scale_to_unit(value, self.unit))

@property
def lazy_array_range(self) -> ct.LazyArrayRange:
return ct.LazyArrayRange(
def lazy_value_range(self) -> ct.LazyDataValueRange:
return ct.LazyDataValueRange(
symbols=set(),
has_unit=True,
unit=self.unit,
start=sp.S(self.min_len) * self.unit,
stop=sp.S(self.max_len) * self.unit,

@@ -85,7 +85,7 @@ class PhysicalLengthBLSocket(base.MaxwellSimSocket):
scaling='lin',
)

@lazy_array_range.setter
@lazy_value_range.setter
def lazy_value_range(self, value: tuple[sp.Expr, sp.Expr, int]) -> None:
self.min_len = spux.sympy_to_python(spux.scale_to_unit(value[0], self.unit))
self.max_len = spux.sympy_to_python(spux.scale_to_unit(value[1], self.unit))

@@ -112,7 +112,7 @@ class PhysicalLengthSocketDef(base.SocketDef):

bl_socket.value = self.default_value
if self.is_array:
bl_socket.active_kind = ct.FlowKind.LazyArrayRange
bl_socket.active_kind = ct.DataFlowKind.LazyValueRange
bl_socket.lazy_value_range = (self.min_len, self.max_len, self.steps)

Some files were not shown because too many files have changed in this diff.