feat: Completely revamped dependency system.

Branch: main
Author: Sofus Albert Høgsbro Rose, 2024-03-20 12:55:40 +01:00
Parent commit: 59a236f33a
Commit: be4eec2242
Signed by: so-rose
GPG Key ID: AD901CB0F3701434
38 changed files with 1121 additions and 412 deletions

.gitignore (vendored)

@@ -1,4 +1,5 @@
dev
+build
*.blend[0-9]
.cached-dependencies

@@ -1 +1 @@
-3.10.13
+3.11.8

@@ -6,16 +6,25 @@ authors = [
    { name = "Sofus Albert Høgsbro Rose", email = "blender-maxwell@sofusrose.com" }
]
dependencies = [
-    "tidy3d>=2.6.1",
-    "pydantic>=2.6.4",
-    "sympy>=1.12",
-    "scipy>=1.12.0",
-    "trimesh>=4.2.0",
-    "networkx>=3.2.1",
-    "rtree>=1.2.0",
+    "tidy3d~=2.6.1",
+    "pydantic~=2.6.4",
+    "sympy~=1.12",
+    "scipy~=1.12.0",
+    "trimesh~=4.2.0",
+    "networkx~=3.2.1",
+    "rtree~=1.2.0",
+    # Pin Blender 4.1.0-Compatible Versions
+    ## The dependency resolver will report if anything is wonky.
+    "urllib3==1.26.8",
+    "requests==2.27.1",
+    "numpy==1.24.3",
+    "idna==3.3",
+    "charset-normalizer==2.0.10",
+    "certifi==2021.10.8",
]
readme = "README.md"
-requires-python = "~= 3.10"
+requires-python = "~= 3.11"
license = { text = "AGPL-3.0-or-later" }
####################
@@ -26,13 +35,18 @@ managed = true
virtual = true
dev-dependencies = [
    "ruff>=0.3.2",
-    "fake-bpy-module-4-0>=20231118",
+    ## TODO: Update to Blender 4.1.0
]
+[tool.rye.scripts]
+dev = "python ./scripts/run.py"
####################
# - Tooling: Ruff
####################
[tool.ruff]
-target-version = "py312"
+target-version = "py311"
line-length = 79
[tool.ruff.lint]
@@ -77,14 +91,15 @@ select = [
    "PT", # flake8-pytest-style ## pytest-Specific Checks
]
ignore = [
-    "B008", # FastAPI uses this for Depends(), Security(), etc. .
-    "E701", # class foo(Parent): pass or if simple: return are perfectly elegant
    "COM812", # Conflicts w/Formatter
    "ISC001", # Conflicts w/Formatter
    "Q000", # Conflicts w/Formatter
    "Q001", # Conflicts w/Formatter
    "Q002", # Conflicts w/Formatter
    "Q003", # Conflicts w/Formatter
+    "B008", # FastAPI uses this for Depends(), Security(), etc. .
+    "E701", # class foo(Parent): pass or if simple: return are perfectly elegant
+    "ERA001", # 'Commented-out code' seems to be just about anything to ruff
]
####################

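The hard `==` pins above only make sense if they track what Blender 4.1's bundled Python actually ships. A minimal sketch of how one might check that by hand, assuming it is run from Blender's own Python (for example via the scripting console or `blender --python`):

# Sketch: print the versions Blender's bundled site-packages provides for the
# hard-pinned packages, so the '==' pins in pyproject.toml can be kept in sync.
import importlib.metadata

PINNED = ['urllib3', 'requests', 'numpy', 'idna', 'charset-normalizer', 'certifi']

for name in PINNED:
    try:
        print(f'{name}=={importlib.metadata.version(name)}')
    except importlib.metadata.PackageNotFoundError:
        print(f'{name}: not bundled')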
@@ -14,9 +14,9 @@ boto3==1.23.1
botocore==1.26.10
    # via boto3
    # via s3transfer
-certifi==2024.2.2
+certifi==2021.10.8
    # via requests
-charset-normalizer==3.3.2
+charset-normalizer==2.0.10
    # via requests
click==8.0.3
    # via dask
@@ -31,6 +31,7 @@ cycler==0.12.1
    # via matplotlib
dask==2023.10.1
    # via tidy3d
+fake-bpy-module-4-0==20231118
fonttools==4.49.0
    # via matplotlib
fsspec==2024.2.0
@@ -40,7 +41,7 @@ h5netcdf==1.0.2
h5py==3.10.0
    # via h5netcdf
    # via tidy3d
-idna==3.6
+idna==3.3
    # via requests
importlib-metadata==6.11.0
    # via dask
@@ -57,11 +58,10 @@ matplotlib==3.8.3
mpmath==1.3.0
    # via sympy
networkx==3.2.1
-numpy==1.26.4
+numpy==1.24.3
    # via contourpy
    # via h5py
    # via matplotlib
-    # via pandas
    # via scipy
    # via shapely
    # via trimesh
@@ -99,10 +99,10 @@ pyyaml==6.0.1
    # via dask
    # via responses
    # via tidy3d
-requests==2.31.0
+requests==2.27.1
    # via responses
    # via tidy3d
-responses==0.25.0
+responses==0.23.1
    # via tidy3d
rich==12.5.1
    # via tidy3d
@@ -124,12 +124,14 @@ toolz==0.12.1
    # via dask
    # via partd
trimesh==4.2.0
+types-pyyaml==6.0.12.20240311
+    # via responses
typing-extensions==4.10.0
    # via pydantic
    # via pydantic-core
tzdata==2024.1
    # via pandas
-urllib3==1.26.18
+urllib3==1.26.8
    # via botocore
    # via requests
    # via responses

@@ -14,9 +14,9 @@ boto3==1.23.1
botocore==1.26.10
    # via boto3
    # via s3transfer
-certifi==2024.2.2
+certifi==2021.10.8
    # via requests
-charset-normalizer==3.3.2
+charset-normalizer==2.0.10
    # via requests
click==8.0.3
    # via dask
@@ -40,7 +40,7 @@ h5netcdf==1.0.2
h5py==3.10.0
    # via h5netcdf
    # via tidy3d
-idna==3.6
+idna==3.3
    # via requests
importlib-metadata==6.11.0
    # via dask
@@ -57,11 +57,10 @@ matplotlib==3.8.3
mpmath==1.3.0
    # via sympy
networkx==3.2.1
-numpy==1.26.4
+numpy==1.24.3
    # via contourpy
    # via h5py
    # via matplotlib
-    # via pandas
    # via scipy
    # via shapely
    # via trimesh
@@ -99,10 +98,10 @@ pyyaml==6.0.1
    # via dask
    # via responses
    # via tidy3d
-requests==2.31.0
+requests==2.27.1
    # via responses
    # via tidy3d
-responses==0.25.0
+responses==0.23.1
    # via tidy3d
rich==12.5.1
    # via tidy3d
@@ -123,12 +122,14 @@ toolz==0.12.1
    # via dask
    # via partd
trimesh==4.2.0
+types-pyyaml==6.0.12.20240311
+    # via responses
typing-extensions==4.10.0
    # via pydantic
    # via pydantic-core
tzdata==2024.1
    # via pandas
-urllib3==1.26.18
+urllib3==1.26.8
    # via botocore
    # via requests
    # via responses

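The lock files above must agree with the `==` pins in pyproject.toml. A minimal cross-check sketch, assuming it is run from the repository root and that the lock file is named `requirements.lock` (the name referenced by scripts/info.py and scripts/bl_run.py in this commit):

# Sketch: report any '==' pin in pyproject.toml that the lock file resolves to a
# different version (or does not contain at all).
import tomllib
from pathlib import Path

root = Path('.')
with (root / 'pyproject.toml').open('rb') as f:
    deps = tomllib.load(f)['project']['dependencies']

pins = dict(d.split('==') for d in deps if '==' in d)

locked = {}
for line in (root / 'requirements.lock').read_text().splitlines():
    line = line.strip()
    if line and not line.startswith('#') and '==' in line:
        name, version = line.split('==')
        locked[name.lower()] = version

for name, version in pins.items():
    lock_version = locked.get(name.lower())
    if lock_version != version:
        print(f'{name}: pyproject pins {version}, lock has {lock_version}')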
run.py

@ -1,108 +0,0 @@
import zipfile
import contextlib
import shutil
import sys
from pathlib import Path
import bpy
import addon_utils
PATH_ROOT = Path(__file__).resolve().parent
####################
# - Defined Constants
####################
ADDON_NAME = "blender_maxwell"
PATH_BLEND = PATH_ROOT / "demo.blend"
PATH_ADDON_DEPS = PATH_ROOT / ".cached-dependencies"
####################
# - Computed Constants
####################
PATH_ADDON = PATH_ROOT / ADDON_NAME
PATH_ADDON_ZIP = PATH_ROOT / (ADDON_NAME + ".zip")
####################
# - Utilities
####################
@contextlib.contextmanager
def zipped_directory(path_dir: Path, path_zip: Path):
"""Context manager that exposes a zipped version of a directory,
then deletes the .zip file afterwards.
"""
# Delete Existing ZIP file (if exists)
if path_zip.is_file(): path_zip.unlink()
# Create a (new) ZIP file of the addon directory
with zipfile.ZipFile(path_zip, 'w', zipfile.ZIP_DEFLATED) as f_zip:
for file_to_zip in path_dir.rglob('*'):
f_zip.write(file_to_zip, file_to_zip.relative_to(path_dir.parent))
# Delete the ZIP
try:
yield path_zip
finally:
path_zip.unlink()
####################
# - main()
####################
if __name__ == "__main__":
# Check and uninstall the addon if it's enabled
is_loaded_by_default, is_loaded_now = addon_utils.check(ADDON_NAME)
if is_loaded_now:
# Disable the Addon
addon_utils.disable(ADDON_NAME, default_set=True, handle_error=None)
# Completey Delete the Addon
for mod in addon_utils.modules():
if mod.__name__ == ADDON_NAME:
# Delete Addon from Blender Python Tree
shutil.rmtree(Path(mod.__file__).parent)
# Reset All Addons
addon_utils.reset_all()
# Save User Preferences & Break
bpy.ops.wm.save_userpref()
break
# Quit Blender (hard-flush Python environment)
## - Python environments are not made to be partially flushed.
## - This is the only truly reliable way to avoid all bugs.
## - See https://github.com/JacquesLucke/blender_vscode
bpy.ops.wm.quit_blender()
try:
raise RuntimeError
except:
sys.exit(42)
with zipped_directory(PATH_ADDON, PATH_ADDON_ZIP) as path_zipped:
# Install the ZIPped Addon
bpy.ops.preferences.addon_install(filepath=str(path_zipped))
# Enable the Addon
addon_utils.enable(
ADDON_NAME,
default_set=True,
persistent=True,
handle_error=None,
)
# Save User Preferences
bpy.ops.wm.save_userpref()
# Load the .blend
bpy.ops.wm.open_mainfile(filepath=str(PATH_BLEND))
# Ensure Addon-Specific Dependency Cache is Importable
## - In distribution, the addon keeps this folder in the Blender script tree.
## - For testing, we need to hack sys.path here.
## - This avoids having to install all deps with every reload.
if str(PATH_ADDON_DEPS) not in sys.path:
sys.path.insert(0, str(PATH_ADDON_DEPS))
# Modify any specific settings, if needed
# Example: bpy.context.preferences.addons[addon_name].preferences.your_setting = "your_value"

run.sh

@ -1,11 +0,0 @@
#!/bin/bash
blender --python run.py
if [ $? -eq 42 ]; then
echo
echo
echo
echo
echo
blender --python run.py
fi

scripts/bl_run.py 100644

@ -0,0 +1,155 @@
"""Blender startup script ensuring correct addon installation.
See <https://github.com/dfelinto/blender/blob/master/release/scripts/modules/addon_utils.py>
"""
import shutil
import sys
import traceback
from pathlib import Path
import bpy
sys.path.insert(0, str(Path(__file__).resolve().parent))
import info
import pack
## TODO: Preferences item that allows using BLMaxwell 'starter.blend' as Blender's default starter blendfile.
####################
# - Addon Functions
####################
def delete_addon_if_loaded(addon_name: str) -> None:
"""Strongly inspired by Blender's addon_utils.py."""
should_restart_blender = False
# Check if Python Module is Loaded
mod = sys.modules.get(addon_name)
# if (mod := sys.modules.get(addon_name)) is None:
# ## It could still be loaded-by-default; then, it's in the prefs list
# is_loaded_now = False
# loads_by_default = addon_name in bpy.context.preferences.addons
# else:
# ## BL sets __addon_enabled__ on module of enabled addons.
# ## BL sets __addon_persistent__ on module of load-by-default addons.
# is_loaded_now = getattr(mod, '__addon_enabled__', False)
# loads_by_default = getattr(mod, '__addon_persistent__', False)
# Unregister Modules and Mark Disabled & Non-Persistent
## This effectively disables it
if mod is not None:
mod.__addon_enabled__ = False
mod.__addon_persistent__ = False
try:
mod.unregister()
except BaseException:
traceback.print_exc()
should_restart_blender = True
# Remove Addon
## Remove Addon from Preferences
## - Unsure why addon_utils has a while, but let's trust the process...
while addon_name in bpy.context.preferences.addons:
addon = bpy.context.preferences.addons.get(addon_name)
if addon:
bpy.context.preferences.addons.remove(addon)
## Physically Excise Addon Code
for addons_path in bpy.utils.script_paths(subdir='addons'):
addon_path = Path(addons_path) / addon_name
if addon_path.exists():
shutil.rmtree(addon_path)
should_restart_blender = True
## Save User Preferences
bpy.ops.wm.save_userpref()
# Quit (Restart) Blender - hard-flush Python environment
## - Python environments are not made to be partially flushed.
## - This is the only truly reliable way to avoid all bugs.
## - See <https://github.com/JacquesLucke/blender_vscode>
## - By passing STATUS_UNINSTALLED_ADDON, we report that it's clean now.
if should_restart_blender:
bpy.ops.wm.quit_blender()
sys.exit(info.STATUS_UNINSTALLED_ADDON)
def install_addon(addon_name: str, addon_zip: Path) -> None:
"""Strongly inspired by Blender's addon_utils.py."""
# Check if Addon is Installable
if any(
[
(mod := sys.modules.get(addon_name)) is not None,
addon_name in bpy.context.preferences.addons,
any(
(Path(addon_path) / addon_name).exists()
for addon_path in bpy.utils.script_paths(subdir='addons')
),
]
):
## TODO: Check if addon file path exists?
in_pref_addons = addon_name in bpy.context.preferences.addons
existing_files_found = {
addon_path: (Path(addon_path) / addon_name).exists()
for addon_path in bpy.utils.script_paths(subdir='addons')
if (Path(addon_path) / addon_name).exists()
}
msg = f"Addon (module = '{mod}') is not installable (in preferences.addons: {in_pref_addons}) (existing files found: {existing_files_found})"
raise ValueError(msg)
# Install Addon
bpy.ops.preferences.addon_install(filepath=str(addon_zip))
if not any(
(Path(addon_path) / addon_name).exists()
for addon_path in bpy.utils.script_paths(subdir='addons')
):
msg = f"Couldn't install addon {addon_name}"
raise RuntimeError(msg)
# Enable Addon
bpy.ops.preferences.addon_enable(module=addon_name)
if addon_name not in bpy.context.preferences.addons:
msg = f"Couldn't enable addon {addon_name}"
raise RuntimeError(msg)
# Set Dev Path for Addon Dependencies
addon_prefs = bpy.context.preferences.addons[addon_name].preferences
addon_prefs.use_default_path_addon_pydeps = False
addon_prefs.path_addon_pydeps = info.PATH_ADDON_DEV_DEPS
# Save User Preferences
bpy.ops.wm.save_userpref()
####################
# - Entrypoint
####################
if __name__ == '__main__':
# Delete Addon (maybe; possibly restart)
delete_addon_if_loaded(info.ADDON_NAME)
# Signal that Live-Printing can Start
print(info.SIGNAL_START_CLEAN_BLENDER) # noqa: T201
# Install and Enable Addon
install_failed = False
with pack.zipped_addon(
info.PATH_ADDON_PKG,
info.PATH_ADDON_ZIP,
info.PATH_ROOT / 'pyproject.toml',
info.PATH_ROOT / 'requirements.lock',
) as path_zipped:
try:
install_addon(info.ADDON_NAME, path_zipped)
except Exception as exe:
traceback.print_exc()
install_failed = True
# Load Development .blend
## TODO: We need a better (also final-deployed-compatible) solution for what happens when a user opened a .blend file without installing dependencies!
if not install_failed:
bpy.ops.wm.open_mainfile(filepath=str(info.PATH_ADDON_DEV_BLEND))
else:
bpy.ops.wm.quit_blender()
sys.exit(info.STATUS_NOINSTALL_ADDON)

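delete_addon_if_loaded() and install_addon() above reimplement pieces of Blender's stock addon_utils machinery so that stale copies can be fully excised before reinstalling. For comparison, the stock flow that the deleted run.py used (and that this script says it is "strongly inspired by") looks roughly like the sketch below; it assumes it runs inside Blender, with 'blender_maxwell' and 'my_addon.zip' as placeholders:

# Sketch of the stock addon_utils / bpy.ops flow this script replaces.
import addon_utils
import bpy

is_default, is_loaded = addon_utils.check('blender_maxwell')
if is_loaded:
    addon_utils.disable('blender_maxwell', default_set=True)

bpy.ops.preferences.addon_install(filepath='my_addon.zip')
addon_utils.enable('blender_maxwell', default_set=True, persistent=True)
bpy.ops.wm.save_userpref()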
scripts/info.py 100644

@ -0,0 +1,52 @@
import tomllib
from pathlib import Path
PATH_ROOT = Path(__file__).resolve().parent.parent
PATH_RUN = PATH_ROOT / 'scripts' / 'run.py'
PATH_BL_RUN = PATH_ROOT / 'scripts' / 'bl_run.py'
PATH_BUILD = PATH_ROOT / 'build'
PATH_BUILD.mkdir(exist_ok=True)
PATH_DEV = PATH_ROOT / 'dev'
PATH_DEV.mkdir(exist_ok=True)
####################
# - BL_RUN stdout Signals
####################
SIGNAL_START_CLEAN_BLENDER = 'SIGNAL__blender_is_clean'
####################
# - BL_RUN Exit Codes
####################
STATUS_UNINSTALLED_ADDON = 42
STATUS_NOINSTALL_ADDON = 68
####################
# - Addon Information
####################
with (PATH_ROOT / 'pyproject.toml').open('rb') as f:
PROJ_SPEC = tomllib.load(f)
ADDON_NAME = PROJ_SPEC['project']['name']
ADDON_VERSION = PROJ_SPEC['project']['version']
####################
# - Packaging Information
####################
PATH_ADDON_PKG = PATH_ROOT / 'src' / ADDON_NAME
PATH_ADDON_ZIP = (
PATH_ROOT / 'build' / (ADDON_NAME + '__' + ADDON_VERSION + '.zip')
)
PATH_ADDON_BLEND_STARTER = PATH_ADDON_PKG / 'blenders' / 'starter.blend'
# Install the ZIPped Addon
####################
# - Development Information
####################
PATH_ADDON_DEV_BLEND = PATH_DEV / 'demo.blend'
PATH_ADDON_DEV_DEPS = PATH_DEV / '.cached-dev-dependencies'
PATH_ADDON_DEV_DEPS.mkdir(exist_ok=True)

scripts/pack.py 100644

@ -0,0 +1,93 @@
import contextlib
import tempfile
import typing as typ
import zipfile
from pathlib import Path
import info
_PROJ_VERSION_STR = str(
tuple(int(el) for el in info.PROJ_SPEC['project']['version'].split('.'))
)
_PROJ_DESC_STR = info.PROJ_SPEC['project']['description']
BL_INFO_REPLACEMENTS = {
"'version': (0, 0, 0),": f"'version': {_PROJ_VERSION_STR},",
"'description': 'Placeholder',": f"'description': '{_PROJ_DESC_STR}',",
}
@contextlib.contextmanager
def zipped_addon(
path_addon_pkg: Path,
path_addon_zip: Path,
path_pyproject_toml: Path,
path_requirements_lock: Path,
replace_if_exists: bool = False,
) -> typ.Iterator[Path]:
"""Context manager exposing a folder as a (temporary) zip file.
The .zip file is deleted afterwards.
"""
# Delete Existing ZIP (maybe)
if path_addon_zip.is_file():
if replace_if_exists:
msg = 'File already exists where ZIP would be made'
raise ValueError(msg)
path_addon_zip.unlink()
# Create New ZIP file of the addon directory
with zipfile.ZipFile(path_addon_zip, 'w', zipfile.ZIP_DEFLATED) as f_zip:
# Install Addon Files @ /*
for file_to_zip in path_addon_pkg.rglob('*'):
# Dynamically Alter 'bl_info' in __init__.py
## This is the only way to propagate ex. version information
if str(file_to_zip.relative_to(path_addon_pkg)) == '__init__.py':
with (
file_to_zip.open('r') as f_init,
tempfile.NamedTemporaryFile(mode='w') as f_tmp,
):
initpy = f_init.read()
for to_replace, replacement in BL_INFO_REPLACEMENTS.items():
initpy = initpy.replace(to_replace, replacement)
f_tmp.write(initpy)
# Write to ZIP
f_zip.writestr(
str(file_to_zip.relative_to(path_addon_pkg.parent)),
initpy,
)
# Write File to Zip
else:
f_zip.write(
file_to_zip, file_to_zip.relative_to(path_addon_pkg.parent)
)
# Install pyproject.toml @ /pyproject.toml of Addon
f_zip.write(
path_pyproject_toml,
str(
(
Path(path_addon_pkg.name)
/ Path(path_pyproject_toml.name)
)
.with_suffix('')
.with_suffix('.toml')
),
)
# Install requirements.lock @ /requirements.txt of Addon
f_zip.write(
path_requirements_lock,
str(
(Path(path_addon_pkg.name) / Path(path_requirements_lock.name))
.with_suffix('')
.with_suffix('.txt')
),
)
# Delete the ZIP
try:
yield path_addon_zip
finally:
path_addon_zip.unlink()

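Since pack.py rewrites the placeholder bl_info textually, a quick sanity check is to read the packed __init__.py back out of the ZIP and parse bl_info with ast (the addon's __init__.py notes that bl_info must stay ast.parse-able). A sketch, assuming it is run from the scripts/ directory so that `info` and `pack` import:

# Sketch: pack the addon, then recover bl_info from the ZIP via ast to confirm the
# version/description placeholders were really replaced.
import ast
import zipfile

import info
import pack

with pack.zipped_addon(
    info.PATH_ADDON_PKG,
    info.PATH_ADDON_ZIP,
    info.PATH_ROOT / 'pyproject.toml',
    info.PATH_ROOT / 'requirements.lock',
) as path_zip:
    with zipfile.ZipFile(path_zip) as f_zip:
        initpy = f_zip.read(f'{info.ADDON_NAME}/__init__.py').decode('utf8')

# bl_info must remain a literal dict, so ast can recover it without importing bpy.
tree = ast.parse(initpy)
bl_info = next(
    ast.literal_eval(node.value)
    for node in ast.walk(tree)
    if isinstance(node, ast.Assign)
    and any(getattr(target, 'id', None) == 'bl_info' for target in node.targets)
)
print(bl_info['version'], bl_info['description'])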
scripts/run.py 100644

@ -0,0 +1,54 @@
import os
import subprocess
from pathlib import Path
import info
####################
# - Blender Runner
####################
def run_blender(py_script: Path, print_live: bool = False):
process = subprocess.Popen(
['blender', '--python', str(py_script)],
env = os.environ | {'PYTHONUNBUFFERED': '1'},
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
text=True,
)
output = []
printing_live = print_live
# Process Real-Time Output
for line in iter(process.stdout.readline, b''):
if not line:
break
if printing_live:
print(line, end='') # noqa: T201
elif (
info.SIGNAL_START_CLEAN_BLENDER in line
#or 'Traceback (most recent call last)' in line
):
printing_live = True
print(''.join(output)) # noqa: T201
else:
output.append(line)
# Wait for the process to finish and get the exit code
process.wait()
return process.returncode, output
####################
# - Run Blender w/Clean Addon Reinstall
####################
if __name__ == '__main__':
return_code, output = run_blender(info.PATH_BL_RUN, print_live=False)
if return_code == info.STATUS_UNINSTALLED_ADDON:
return_code, output = run_blender(info.PATH_BL_RUN, print_live=True)
if return_code == info.STATUS_NOINSTALL_ADDON:
msg = f"Couldn't install addon {info.ADDON_NAME}"
raise ValueError(msg)
elif return_code != 0:
print(''.join(output)) # noqa: T201

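run_blender() buffers Blender's output until bl_run.py prints SIGNAL_START_CLEAN_BLENDER, then switches to live printing; driving it by hand looks roughly like this sketch (assuming `blender` is on PATH and the working directory is scripts/ so `info` and `run` import):

# Usage sketch for run_blender(): first a quiet cleanup pass, then a live pass.
import info
from run import run_blender

return_code, output = run_blender(info.PATH_BL_RUN, print_live=False)
if return_code == info.STATUS_UNINSTALLED_ADDON:
    # The first pass only removed a stale install; the second pass does the real install.
    return_code, output = run_blender(info.PATH_BL_RUN, print_live=True)
print('blender exited with', return_code)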
@@ -1,84 +1,97 @@
+import tomllib
+from pathlib import Path
+import bpy
+from . import operators_nodeps, preferences, registration
+from .utils import pydeps
+from .utils import logger as _logger
+log = _logger.get()
+PATH_ADDON_ROOT = Path(__file__).resolve().parent
+with (PATH_ADDON_ROOT / 'pyproject.toml').open('rb') as f:
+PROJ_SPEC = tomllib.load(f)
+####################
+# - Addon Information
+####################
+# The following parameters are replaced when packing the addon ZIP
+## - description
+## - version
bl_info = {
-"name": "Maxwell Simulation and Visualization",
-"blender": (4, 0, 2),
-"category": "Node",
-"description": "Custom node trees for defining and visualizing Maxwell simulation.",
-"author": "Sofus Albert Høgsbro Rose",
-"version": (0, 1),
-"wiki_url": "https://git.sofus.io/dtu-courses/bsc_thesis",
-"tracker_url": "https://git.sofus.io/dtu-courses/bsc_thesis/issues",
+'name': 'Maxwell PDE Sim and Viz',
+'blender': (4, 1, 0),
+'category': 'Node',
+'description': 'Placeholder',
+'author': 'Sofus Albert Høgsbro Rose',
+'version': (0, 0, 0),
+'wiki_url': 'https://git.sofus.io/dtu-courses/bsc_thesis',
+'tracker_url': 'https://git.sofus.io/dtu-courses/bsc_thesis/issues',
}
+## bl_info MUST readable via. ast.parse
+## See scripts/pack.py::BL_INFO_REPLACEMENTS for active replacements
+## The mechanism is a 'dumb' - output of 'ruff fmt' MUST be basis for replacing
+def ADDON_PREFS():
+return bpy.context.preferences.addons[
+PROJ_SPEC['project']['name']
+].preferences
####################
-# - sys.path Library Inclusion
+# - Load and Register Addon
####################
-import sys
-sys.path.insert(0, "/home/sofus/src/college/bsc_ge/thesis/code/.cached-dependencies")
-## ^^ Placeholder
-####################
-# - Module Import
-####################
-if "bpy" not in locals():
-import bpy
-import nodeitems_utils
-try:
-from . import node_trees
-from . import operators
-from . import preferences
-except ImportError:
-import sys
-sys.path.insert(0, "/home/sofus/src/college/bsc_ge/thesis/code/blender-maxwell")
-import node_trees
-import operators
-import preferences
-else:
-import importlib
-importlib.reload(node_trees)
+BL_REGISTER__BEFORE_DEPS = [
+*operators_nodeps.BL_REGISTER,
+*preferences.BL_REGISTER,
+]
+def BL_REGISTER__AFTER_DEPS(path_deps: Path):
+with pydeps.importable_addon_deps(path_deps):
+from . import node_trees, operators
+return [
+*operators.BL_REGISTER,
+*node_trees.BL_REGISTER,
+]
+def BL_KEYMAP_ITEM_DEFS(path_deps: Path):
+with pydeps.importable_addon_deps(path_deps):
+from . import operators
+return [
+*operators.BL_KMI_REGISTER,
+]
####################
# - Registration
####################
-BL_REGISTER = [
-*node_trees.BL_REGISTER,
-*operators.BL_REGISTER,
-*preferences.BL_REGISTER,
-]
-BL_KMI_REGISTER = [
-*operators.BL_KMI_REGISTER,
-]
-BL_NODE_CATEGORIES = [
-*node_trees.BL_NODE_CATEGORIES,
-]
-km = bpy.context.window_manager.keyconfigs.addon.keymaps.new(
-name='Node Editor',
-space_type="NODE_EDITOR",
-)
-REGISTERED_KEYMAPS = []
def register():
-global REGISTERED_KEYMAPS
-for cls in BL_REGISTER:
-bpy.utils.register_class(cls)
-for kmi_def in BL_KMI_REGISTER:
-kmi = km.keymap_items.new(
-*kmi_def["_"],
-ctrl=kmi_def["ctrl"],
-shift=kmi_def["shift"],
-alt=kmi_def["alt"],
-)
-REGISTERED_KEYMAPS.append(kmi)
-def unregister():
-for cls in reversed(BL_REGISTER):
-bpy.utils.unregister_class(cls)
-for kmi in REGISTERED_KEYMAPS:
-km.keymap_items.remove(kmi)
-if __name__ == "__main__":
-register()
+# Register Barebones Addon for Dependency Installation
+registration.register_classes(BL_REGISTER__BEFORE_DEPS)
+# Retrieve PyDeps Path from Addon Preferences
+addon_prefs = ADDON_PREFS()
+path_pydeps = addon_prefs.path_addon_pydeps
+# If Dependencies are Satisfied, Register Everything
+if pydeps.check_pydeps(path_pydeps):
+registration.register_classes(BL_REGISTER__AFTER_DEPS())
+registration.register_keymap_items(BL_KEYMAP_ITEM_DEFS())
+else:
+# Delay Registration
+registration.delay_registration(
+registration.EVENT__DEPS_SATISFIED,
+classes_cb=BL_REGISTER__AFTER_DEPS,
+keymap_item_defs_cb=BL_KEYMAP_ITEM_DEFS,
+)
+# TODO: A popup before the addon fully loads or something like that?
+## TODO: Communicate that deps must be installed and all that?
+def unregister():
+registration.unregister_classes()
+registration.unregister_keymap_items()

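The split in register() above, where only operators_nodeps and preferences may import at module load and everything else is wrapped in callbacks that run once pydeps.check_pydeps() passes, is the heart of the new dependency flow. A minimal, self-contained sketch of that pattern (all names below are illustrative stand-ins, not the addon's API):

# Sketch of "register now, or delay until dependencies are installed".
from pathlib import Path

DELAYED = {}

def register_classes(classes):
    for cls in classes:
        print('registered', cls)

def full_classes(path_deps: Path):
    # Stand-in for BL_REGISTER__AFTER_DEPS: imports needing path_deps would happen here.
    return ['NodeTree', 'Operators']

def register(deps_ok: bool, path_deps: Path):
    register_classes(['Preferences', 'InstallPyDeps'])  # barebones: no third-party imports
    if deps_ok:
        register_classes(full_classes(path_deps))
    else:
        DELAYED['on_deps_satisfied'] = lambda p: register_classes(full_classes(p))

def on_deps_installed(path_deps: Path):
    # Called by the install operator once pip --target has finished.
    DELAYED.pop('on_deps_satisfied')(path_deps)

register(deps_ok=False, path_deps=Path('.cached-dev-dependencies'))
on_deps_installed(Path('.cached-dev-dependencies'))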
src/blender_maxwell/blends/starter.blend (Stored with Git LFS) 100644

Binary file not shown.

@@ -1,13 +1,9 @@
import typing as typ
-import typing as typx
-import pydantic as pyd
-import bpy
-from ..bl import ManagedObjName, SocketName
+from ..bl import ManagedObjName
from ..managed_obj_type import ManagedObjType
class ManagedObj(typ.Protocol):
managed_obj_type: ManagedObjType
@@ -30,4 +26,3 @@ class ManagedObj(typ.Protocol):
Else, do nothing.
"""
-pass

@@ -4,9 +4,9 @@ import bpy
from ..socket_types import SocketType
@typ.runtime_checkable
class SocketDef(typ.Protocol):
socket_type: SocketType
-def init(self, bl_socket: bpy.types.NodeSocket) -> None:
-...
+def init(self, bl_socket: bpy.types.NodeSocket) -> None: ...


@ -9,10 +9,12 @@ from . import contracts as ct
#################### ####################
MemAddr = int MemAddr = int
class DeltaNodeLinkCache(typ.TypedDict): class DeltaNodeLinkCache(typ.TypedDict):
added: set[MemAddr] added: set[MemAddr]
removed: set[MemAddr] removed: set[MemAddr]
class NodeLinkCache: class NodeLinkCache:
def __init__(self, node_tree: bpy.types.NodeTree): def __init__(self, node_tree: bpy.types.NodeTree):
# Initialize Parameters # Initialize Parameters
@ -21,46 +23,47 @@ class NodeLinkCache:
self.link_ptrs = set() self.link_ptrs = set()
self.link_ptrs_from_sockets = {} self.link_ptrs_from_sockets = {}
self.link_ptrs_to_sockets = {} self.link_ptrs_to_sockets = {}
# Fill Cache # Fill Cache
self.regenerate() self.regenerate()
def remove(self, link_ptrs: set[MemAddr]) -> None: def remove(self, link_ptrs: set[MemAddr]) -> None:
for link_ptr in link_ptrs: for link_ptr in link_ptrs:
self.link_ptrs.remove(link_ptr) self.link_ptrs.remove(link_ptr)
self.link_ptrs_to_links.pop(link_ptr, None) self.link_ptrs_to_links.pop(link_ptr, None)
def regenerate(self) -> DeltaNodeLinkCache: def regenerate(self) -> DeltaNodeLinkCache:
current_link_ptrs_to_links = { current_link_ptrs_to_links = {
link.as_pointer(): link for link in self._node_tree.links link.as_pointer(): link for link in self._node_tree.links
} }
current_link_ptrs = set(current_link_ptrs_to_links.keys()) current_link_ptrs = set(current_link_ptrs_to_links.keys())
# Compute Delta # Compute Delta
added_link_ptrs = current_link_ptrs - self.link_ptrs added_link_ptrs = current_link_ptrs - self.link_ptrs
removed_link_ptrs = self.link_ptrs - current_link_ptrs removed_link_ptrs = self.link_ptrs - current_link_ptrs
# Update Caches Incrementally # Update Caches Incrementally
self.remove(removed_link_ptrs) self.remove(removed_link_ptrs)
self.link_ptrs |= added_link_ptrs self.link_ptrs |= added_link_ptrs
for link_ptr in added_link_ptrs: for link_ptr in added_link_ptrs:
link = current_link_ptrs_to_links[link_ptr] link = current_link_ptrs_to_links[link_ptr]
self.link_ptrs_to_links[link_ptr] = link self.link_ptrs_to_links[link_ptr] = link
self.link_ptrs_from_sockets[link_ptr] = link.from_socket self.link_ptrs_from_sockets[link_ptr] = link.from_socket
self.link_ptrs_to_sockets[link_ptr] = link.to_socket self.link_ptrs_to_sockets[link_ptr] = link.to_socket
return {"added": added_link_ptrs, "removed": removed_link_ptrs} return {'added': added_link_ptrs, 'removed': removed_link_ptrs}
#################### ####################
# - Node Tree Definition # - Node Tree Definition
#################### ####################
class MaxwellSimTree(bpy.types.NodeTree): class MaxwellSimTree(bpy.types.NodeTree):
bl_idname = ct.TreeType.MaxwellSim.value bl_idname = ct.TreeType.MaxwellSim.value
bl_label = "Maxwell Sim Editor" bl_label = 'Maxwell Sim Editor'
bl_icon = ct.Icon.SimNodeEditor.value bl_icon = ct.Icon.SimNodeEditor.value
#################### ####################
# - Lock Methods # - Lock Methods
#################### ####################
@ -69,116 +72,117 @@ class MaxwellSimTree(bpy.types.NodeTree):
node.locked = False node.locked = False
for bl_socket in [*node.inputs, *node.outputs]: for bl_socket in [*node.inputs, *node.outputs]:
bl_socket.locked = False bl_socket.locked = False
#################### ####################
# - Init Methods # - Init Methods
#################### ####################
def on_load(self): def on_load(self):
"""Run by Blender when loading the NodeSimTree, ex. on file load, on creation, etc. . """Run by Blender when loading the NodeSimTree, ex. on file load, on creation, etc. .
It's a bit of a "fake" function - in practicality, it's triggered on the first update() function. It's a bit of a "fake" function - in practicality, it's triggered on the first update() function.
""" """
## TODO: Consider tying this to an "on_load" handler ## TODO: Consider tying this to an "on_load" handler
self._node_link_cache = NodeLinkCache(self) if hasattr(self, '_node_link_cache'):
self._node_link_cache.regenerate()
else:
self._node_link_cache = NodeLinkCache(self)
#################### ####################
# - Update Methods # - Update Methods
#################### ####################
def sync_node_removed(self, node: bpy.types.Node): def sync_node_removed(self, node: bpy.types.Node):
"""Run by `Node.free()` when a node is being removed. """Run by `Node.free()` when a node is being removed.
Removes node input links from the internal cache (so we don't attempt to update non-existant sockets). Removes node input links from the internal cache (so we don't attempt to update non-existant sockets).
""" """
for bl_socket in node.inputs.values(): for bl_socket in node.inputs.values():
# Retrieve Socket Links (if any) # Retrieve Socket Links (if any)
self._node_link_cache.remove({ self._node_link_cache.remove(
link.as_pointer() {link.as_pointer() for link in bl_socket.links}
for link in bl_socket.links )
})
## ONLY Input Socket Links are Removed from the NodeLink Cache ## ONLY Input Socket Links are Removed from the NodeLink Cache
## - update() handles link-removal from still-existing node just fine. ## - update() handles link-removal from still-existing node just fine.
## - update() does NOT handle link-removal of non-existant nodes. ## - update() does NOT handle link-removal of non-existant nodes.
def update(self): def update(self):
"""Run by Blender when 'something changes' in the node tree. """Run by Blender when 'something changes' in the node tree.
Updates an internal node link cache, then updates sockets that just lost/gained an input link. Updates an internal node link cache, then updates sockets that just lost/gained an input link.
""" """
if not hasattr(self, "_node_link_cache"): if not hasattr(self, '_node_link_cache'):
self.on_load() self.on_load()
## We presume update() is run before the first link is altered. ## We presume update() is run before the first link is altered.
## - Else, the first link of the session will not update caches. ## - Else, the first link of the session will not update caches.
## - We remain slightly unsure of the semantics. ## - We remain slightly unsure of the semantics.
## - More testing needed to prevent this 'first-link bug'. ## - Therefore, self.on_load() is also called as a load_post handler.
return return
# Compute Changes to NodeLink Cache # Compute Changes to NodeLink Cache
delta_links = self._node_link_cache.regenerate() delta_links = self._node_link_cache.regenerate()
link_alterations = { link_alterations = {
"to_remove": [], 'to_remove': [],
"to_add": [], 'to_add': [],
} }
for link_ptr in delta_links["removed"]: for link_ptr in delta_links['removed']:
from_socket = self._node_link_cache.link_ptrs_from_sockets[link_ptr] from_socket = self._node_link_cache.link_ptrs_from_sockets[
link_ptr
]
to_socket = self._node_link_cache.link_ptrs_to_sockets[link_ptr] to_socket = self._node_link_cache.link_ptrs_to_sockets[link_ptr]
# Update Socket Caches # Update Socket Caches
self._node_link_cache.link_ptrs_from_sockets.pop(link_ptr, None) self._node_link_cache.link_ptrs_from_sockets.pop(link_ptr, None)
self._node_link_cache.link_ptrs_to_sockets.pop(link_ptr, None) self._node_link_cache.link_ptrs_to_sockets.pop(link_ptr, None)
# Trigger Report Chain on Socket that Just Lost a Link # Trigger Report Chain on Socket that Just Lost a Link
## Aka. Forward-Refresh Caches Relying on Linkage ## Aka. Forward-Refresh Caches Relying on Linkage
if not ( if not (
consent_removal := to_socket.sync_link_removed(from_socket) consent_removal := to_socket.sync_link_removed(from_socket)
): ):
# Did Not Consent to Removal: Queue Add Link # Did Not Consent to Removal: Queue Add Link
link_alterations["to_add"].append((from_socket, to_socket)) link_alterations['to_add'].append((from_socket, to_socket))
for link_ptr in delta_links["added"]: for link_ptr in delta_links['added']:
link = self._node_link_cache.link_ptrs_to_links.get(link_ptr) link = self._node_link_cache.link_ptrs_to_links.get(link_ptr)
if link is None: continue if link is None:
continue
# Trigger Report Chain on Socket that Just Gained a Link # Trigger Report Chain on Socket that Just Gained a Link
## Aka. Forward-Refresh Caches Relying on Linkage ## Aka. Forward-Refresh Caches Relying on Linkage
if not ( if not (consent_added := link.to_socket.sync_link_added(link)):
consent_added := link.to_socket.sync_link_added(link)
):
# Did Not Consent to Addition: Queue Remove Link # Did Not Consent to Addition: Queue Remove Link
link_alterations["to_remove"].append(link) link_alterations['to_remove'].append(link)
# Execute Queued Operations # Execute Queued Operations
## - Especially undoing undesirable link changes. ## - Especially undoing undesirable link changes.
## - This is important for locked graphs, whose links must not change. ## - This is important for locked graphs, whose links must not change.
for link in link_alterations["to_remove"]: for link in link_alterations['to_remove']:
self.links.remove(link) self.links.remove(link)
for from_socket, to_socket in link_alterations["to_add"]: for from_socket, to_socket in link_alterations['to_add']:
self.links.new(from_socket, to_socket) self.links.new(from_socket, to_socket)
# If Queued Operations: Regenerate Cache # If Queued Operations: Regenerate Cache
## - This prevents the next update() from picking up on alterations. ## - This prevents the next update() from picking up on alterations.
if link_alterations["to_remove"] or link_alterations["to_add"]: if link_alterations['to_remove'] or link_alterations['to_add']:
self._node_link_cache.regenerate() self._node_link_cache.regenerate()
#################### ####################
# - Post-Load Handler # - Post-Load Handler
#################### ####################
def initialize_sim_tree_node_link_cache(scene: bpy.types.Scene): def initialize_sim_tree_node_link_cache(_: bpy.types.Scene):
"""Whenever a file is loaded, create/regenerate the NodeLinkCache in all trees. """Whenever a file is loaded, create/regenerate the NodeLinkCache in all trees."""
"""
for node_tree in bpy.data.node_groups: for node_tree in bpy.data.node_groups:
if node_tree.bl_idname == "MaxwellSimTree": if node_tree.bl_idname == 'MaxwellSimTree':
if not hasattr(node_tree, "_node_link_cache"): node_tree.on_load()
node_tree._node_link_cache = NodeLinkCache(node_tree)
else:
node_tree._node_link_cache.regenerate()
#################### ####################
# - Blender Registration # - Blender Registration
#################### ####################
bpy.app.handlers.load_post.append(initialize_sim_tree_node_link_cache) bpy.app.handlers.load_post.append(initialize_sim_tree_node_link_cache)
## TODO: Move to top-level registration.
BL_REGISTER = [ BL_REGISTER = [
MaxwellSimTree, MaxwellSimTree,

@@ -1,12 +1,11 @@
-import uuid
-import typing as typ
-import typing_extensions as typx
-import json
import inspect
+import json
+import typing as typ
+import uuid
import bpy
import pydantic as pyd
+import typing_extensions as typx
from .. import contracts as ct
from .. import sockets

@@ -1,10 +1,6 @@
-from . import install_deps
-from . import uninstall_deps
from . import connect_viewer
BL_REGISTER = [
-*install_deps.BL_REGISTER,
-*uninstall_deps.BL_REGISTER,
*connect_viewer.BL_REGISTER,
]
BL_KMI_REGISTER = [

@@ -1,18 +1,19 @@
import bpy
-class BlenderMaxwellConnectViewer(bpy.types.Operator):
-bl_idname = "blender_maxwell.connect_viewer"
-bl_label = "Connect Viewer to Active"
-bl_description = "Connect active node to Viewer Node"
+class ConnectViewerNode(bpy.types.Operator):
+bl_idname = 'blender_maxwell.connect_viewer_node'
+bl_label = 'Connect Viewer to Active'
+bl_description = 'Connect active node to Viewer Node'
bl_options = {'REGISTER', 'UNDO'}
@classmethod
def poll(cls, context):
space = context.space_data
return (
space.type == 'NODE_EDITOR'
and space.node_tree is not None
-and space.node_tree.bl_idname == "MaxwellSimTreeType"
+and space.node_tree.bl_idname == 'MaxwellSimTreeType'
)
def invoke(self, context, event):
@@ -24,44 +25,45 @@ class BlenderMaxwellConnectViewer(bpy.types.Operator):
location=(mlocx, mlocy),
)
select_node = context.selected_nodes[0]
for node in node_tree.nodes:
-if node.bl_idname == "ViewerNodeType":
+if node.bl_idname == 'ViewerNodeType':
viewer_node = node
break
else:
-viewer_node = node_tree.nodes.new("ViewerNodeType")
+viewer_node = node_tree.nodes.new('ViewerNodeType')
viewer_node.location.x = select_node.location.x + 250
viewer_node.location.y = select_node.location.y
select_node.select = False
new_link = True
for link in viewer_node.inputs[0].links:
if link.from_node.name == select_node.name:
new_link = False
continue
node_tree.links.remove(link)
if new_link:
node_tree.links.new(select_node.outputs[0], viewer_node.inputs[0])
return {'FINISHED'}
####################
# - Blender Registration
####################
BL_REGISTER = [
-BlenderMaxwellConnectViewer,
+ConnectViewerNode,
]
BL_KMI_REGISTER = [
-dict(
-_=(
-BlenderMaxwellConnectViewer.bl_idname,
-"LEFTMOUSE",
-"PRESS",
-),
+{
+'_': (
+ConnectViewerNode.bl_idname,
+'LEFTMOUSE',
+'PRESS',
+),
-ctrl=True, ## CTRL
-shift=True, ## Shift
-alt=False, ## Alt
-),
+'ctrl': True,
+'shift': True,
+'alt': False,
+},
]


@ -1,62 +0,0 @@
import sys
import subprocess
from pathlib import Path
import bpy
from . import types
class BlenderMaxwellInstallDependenciesOperator(bpy.types.Operator):
bl_idname = types.BlenderMaxwellInstallDependencies
bl_label = "Install Dependencies for Blender Maxwell Addon"
def execute(self, context):
addon_dir = Path(__file__).parent.parent
requirements_path = addon_dir / 'requirements.txt'
#addon_specific_folder = addon_dir / '.dependencies'
addon_specific_folder = Path("/home/sofus/src/college/bsc_ge/thesis/code/.cached-dependencies")
# Create the Addon-Specific Folder
addon_specific_folder.mkdir(parents=True, exist_ok=True)
# Determine Path to Blender's Bundled Python
python_exec = Path(sys.executable)
## bpy.app.binary_path_python was deprecated in 2.91.
## sys.executable points to the correct bundled Python.
## See <https://developer.blender.org/docs/release_notes/2.91/python_api/>
# Install Dependencies w/Bundled pip
try:
subprocess.check_call([
str(python_exec), '-m',
'pip', 'install',
'-r', str(requirements_path),
'--target', str(addon_specific_folder),
])
self.report(
{'INFO'},
"Dependencies for 'blender_maxwell' installed successfully."
)
except subprocess.CalledProcessError as e:
self.report(
{'ERROR'},
f"Failed to install dependencies: {str(e)}"
)
return {'CANCELLED'}
# Install Dependencies w/Bundled pip
if str(addon_specific_folder) not in sys.path:
sys.path.insert(0, str(addon_specific_folder))
return {'FINISHED'}
####################
# - Blender Registration
####################
BL_REGISTER = [
BlenderMaxwellInstallDependenciesOperator,
]
BL_KMI_REGISTER = []


@ -1,9 +0,0 @@
import bpy
####################
# - Blender Types
####################
BlenderMaxwellInstallDependencies = "blender_maxwell.install_dependencies"
BlenderMaxwellUninstallDependencies = "blender_maxwell.uninstall_dependencies"
BlenderMaxwellConnectViewer = "blender_maxwell.connect_viewer"
BlenderMaxwellRefreshRDAuth = "blender_maxwell.refresh_td_auth"


@ -0,0 +1,7 @@
from . import install_deps
from . import uninstall_deps
BL_REGISTER = [
*install_deps.BL_REGISTER,
*uninstall_deps.BL_REGISTER,
]


@ -0,0 +1,66 @@
import subprocess
import sys
from pathlib import Path
import bpy
from .. import registration
class InstallPyDeps(bpy.types.Operator):
bl_idname = 'blender_maxwell.nodeps__install_py_deps'
bl_label = 'Install BLMaxwell Python Deps'
path_addon_pydeps: bpy.props.StringProperty(
name='Path to Addon Python Dependencies'
)
path_addon_reqs: bpy.props.StringProperty(
name='Path to Addon Python Dependencies'
)
def execute(self, _: bpy.types.Context):
path_addon_pydeps = Path(self.path_addon_pydeps)
path_addon_reqs = Path(self.path_addon_reqs)
# Create the Addon-Specific Folder (if Needed)
## It MUST, however, have a parent already
path_addon_pydeps.mkdir(parents=False, exist_ok=True)
# Determine Path to Blender's Bundled Python
## bpy.app.binary_path_python was deprecated in 2.91.
## sys.executable points to the correct bundled Python.
## See <https://developer.blender.org/docs/release_notes/2.91/python_api/>
python_exec = Path(sys.executable)
# Install Deps w/Bundled pip
try:
subprocess.check_call(
[
str(python_exec),
'-m',
'pip',
'install',
'-r',
str(path_addon_reqs),
'--target',
str(path_addon_pydeps),
]
)
except subprocess.CalledProcessError as e:
msg = f'Failed to install dependencies: {str(e)}'
self.report({'ERROR'}, msg)
return {'CANCELLED'}
registration.run_delayed_registration(
registration.EVENT__ON_DEPS_INSTALLED,
path_addon_pydeps,
)
return {'FINISHED'}
####################
# - Blender Registration
####################
BL_REGISTER = [
InstallPyDeps,
]

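Like any Blender operator, InstallPyDeps can be driven from a script by passing its properties as keyword arguments. A short sketch, assuming the addon is enabled and both paths exist (the paths are placeholders):

# Sketch: invoke the install operator from Python, setting its StringProperty inputs.
import bpy

bpy.ops.blender_maxwell.nodeps__install_py_deps(
    path_addon_pydeps='/path/to/dev/.cached-dev-dependencies',
    path_addon_reqs='/path/to/addon/requirements.txt',
)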

@ -0,0 +1,37 @@
import shutil
import bpy
from ..utils import pydeps
from .. import registration
class UninstallPyDeps(bpy.types.Operator):
bl_idname = 'blender_maxwell.nodeps__uninstall_py_deps'
bl_label = 'Uninstall BLMaxwell Python Deps'
path_addon_pydeps: bpy.props.StringProperty(
name='Path to Addon Python Dependencies'
)
def execute(self, _: bpy.types.Context):
if (
pydeps.check_pydeps()
and self.path_addon_pydeps.exists()
and self.path_addon_pydeps.is_dir()
):
# CAREFUL!!
shutil.rmtree(self.path_addon_pydeps)
else:
msg = "Can't uninstall pydeps"
raise RuntimeError(msg)
return {'FINISHED'}
####################
# - Blender Registration
####################
BL_REGISTER = [
UninstallPyDeps,
]

@@ -1,14 +1,173 @@
import tomllib
from pathlib import Path
import bpy
-from .operators import types as operators_types
from . import registration
from .operators_nodeps import install_deps, uninstall_deps
from .utils import logger as _logger
from .utils import pydeps
####################
# - Constants
####################
log = _logger.get()
PATH_ADDON_ROOT = Path(__file__).resolve().parent
with (PATH_ADDON_ROOT / 'pyproject.toml').open('rb') as f:
PROJ_SPEC = tomllib.load(f)
####################
# - Preferences
####################
class BlenderMaxwellAddonPreferences(bpy.types.AddonPreferences):
-bl_idname = "blender_maxwell"
bl_idname = PROJ_SPEC['project']['name'] ## MUST match addon package name
-def draw(self, context):
####################
# - Properties
####################
# Default PyDeps Path
use_default_path_addon_pydeps: bpy.props.BoolProperty(
name='Use Default PyDeps Path',
description='Whether to use the default PyDeps path',
default=True,
update=lambda self, context: self.sync_use_default_path_addon_pydeps(
context
),
)
cache_path_addon_pydeps: bpy.props.StringProperty(
name='Cached Addon PyDeps Path',
default=(_default_pydeps_path := str(pydeps.DEFAULT_PATH_DEPS)),
) ## Cache for use when toggling use of default pydeps path.
## Must default to same as raw_path_* if default=True on use_default_*
# Custom PyDeps Path
raw_path_addon_pydeps: bpy.props.StringProperty(
name='Addon PyDeps Path',
description='Path to Addon Python Dependencies',
subtype='FILE_PATH',
default=_default_pydeps_path,
update=lambda self, context: self.sync_path_addon_pydeps(context),
)
prev_raw_path_addon_pydeps: bpy.props.StringProperty(
name='Previous Addon PyDeps Path',
default=_default_pydeps_path,
) ## Use to restore raw_path_addon_pydeps after non-validated change.
# TODO: LOGGING SETTINGS
####################
# - Property Sync
####################
def sync_use_default_path_addon_pydeps(self, _: bpy.types.Context):
# Switch to Default
if self.use_default_path_addon_pydeps:
self.cache_path_addon_pydeps = self.raw_path_addon_pydeps
self.raw_path_addon_pydeps = str(
pydeps.DEFAULT_PATH_DEPS.resolve()
)
# Switch from Default
else:
self.raw_path_addon_pydeps = self.cache_path_addon_pydeps
self.cache_path_addon_pydeps = ''
def sync_path_addon_pydeps(self, _: bpy.types.Context):
# Error if Default Path is in Use
if self.use_default_path_addon_pydeps:
self.raw_path_addon_pydeps = self.prev_raw_path_addon_pydeps
msg = "Can't update pydeps path while default path is being used"
raise ValueError(msg)
# Error if Dependencies are All Installed
if pydeps.DEPS_OK:
self.raw_path_addon_pydeps = self.prev_raw_path_addon_pydeps
msg = "Can't update pydeps path while dependencies are installed"
raise ValueError(msg)
# Update PyDeps
## This also updates pydeps.DEPS_OK and pydeps.DEPS_ISSUES.
## The result is used to run any delayed registrations...
## ...which might be waiting for deps to be satisfied.
if pydeps.check_pydeps(self.path_addon_pydeps):
registration.run_delayed_registration(
registration.EVENT__DEPS_SATISFIED,
self.path_addon_pydeps,
)
self.prev_raw_path_addon_pydeps = self.raw_path_addon_pydeps
####################
# - Property Methods
####################
@property
def path_addon_pydeps(self) -> Path:
return Path(bpy.path.abspath(self.raw_path_addon_pydeps))
@path_addon_pydeps.setter
def path_addon_pydeps(self, value: Path) -> None:
self.raw_path_addon_pydeps = str(value.resolve())
####################
# - UI
####################
-layout = self.layout
-layout.operator(operators_types.BlenderMaxwellInstallDependencies, text="Install Dependencies")
-layout.operator(operators_types.BlenderMaxwellUninstallDependencies, text="Uninstall Dependencies")
def draw(self, _: bpy.types.Context) -> None:
layout = self.layout
num_pydeps_issues = (
len(pydeps.DEPS_ISSUES) if pydeps.DEPS_ISSUES is not None else 0
)
# Box: Dependency Status
box = layout.box()
## Row: Header
row = box.row(align=True)
row.alignment = 'CENTER'
row.label(text='Addon-Specific Python Deps')
## Row: Toggle Default PyDeps Path
row = box.row(align=True)
row.enabled = not pydeps.DEPS_OK
row.prop(
self,
'use_default_path_addon_pydeps',
text='Use Default PyDeps Install Path',
toggle=True,
)
## Row: Current PyDeps Path
row = box.row(align=True)
row.enabled = (
not pydeps.DEPS_OK and not self.use_default_path_addon_pydeps
)
row.prop(self, 'raw_path_addon_pydeps', text='PyDeps Install Path')
## Row: More Information Panel
row = box.row(align=True)
header, panel = row.panel('pydeps_issues', default_closed=True)
header.label(text=f'Dependency Conflicts ({num_pydeps_issues})')
if panel is not None:
grid = panel.grid_flow()
for issue in pydeps.DEPS_ISSUES:
grid.label(text=issue)
## Row: Install
row = box.row(align=True)
row.enabled = not pydeps.DEPS_OK
op = row.operator(
install_deps.InstallPyDeps.bl_idname,
text='Install PyDeps',
)
op.path_addon_pydeps = str(self.path_addon_pydeps)
op.path_addon_reqs = str(pydeps.PATH_REQS)
## Row: Uninstall
row = box.row(align=True)
row.enabled = pydeps.DEPS_OK
op = row.operator(
uninstall_deps.UninstallPyDeps.bl_idname,
text='Uninstall PyDeps',
)
op.path_addon_pydeps = str(self.path_addon_pydeps)
####################
# - Blender Registration

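The preferences are keyed by the package name taken from pyproject.toml, so scripts can fetch the same preferences object (and the resolved PyDeps path) the way ADDON_PREFS() does in __init__.py. A sketch, assuming the addon is installed under the package name 'blender_maxwell':

# Sketch: read the resolved PyDeps path from the addon preferences.
import bpy

addon_prefs = bpy.context.preferences.addons['blender_maxwell'].preferences
print(addon_prefs.use_default_path_addon_pydeps)
print(addon_prefs.path_addon_pydeps)  # property wrapping raw_path_addon_pydeps via bpy.path.abspath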

@ -0,0 +1,113 @@
import typing as typ
from pathlib import Path
import bpy
from .utils import logger as _logger
log = _logger.get()
# TODO: More types for these things!
DelayedRegKey: typ.TypeAlias = str
BLClass: typ.TypeAlias = typ.Any ## TODO: Better Type
BLKeymapItem: typ.TypeAlias = typ.Any ## TODO: Better Type
KeymapItemDef: typ.TypeAlias = typ.Any ## TODO: Better Type
####################
# - Globals
####################
BL_KEYMAP: bpy.types.KeyMap | None = None
REG__CLASSES: list[BLClass] = []
REG__KEYMAP_ITEMS: list[BLKeymapItem] = []
DELAYED_REGISTRATIONS: dict[DelayedRegKey, typ.Callable[[Path], None]] = {}
####################
# - Constants
####################
EVENT__DEPS_SATISFIED: str = 'on_deps_satisfied'
####################
# - Class Registration
####################
def register_classes(bl_register: list):
for cls in bl_register:
if cls.bl_idname in REG__CLASSES:
msg = f'Skipping register of {cls.bl_idname}'
log.info(msg)
continue
bpy.utils.register_class(cls)
REG__CLASSES.append(cls)
def unregister_classes():
for cls in reversed(REG__CLASSES):
bpy.utils.unregister_class(cls)
REG__CLASSES.clear()
####################
# - Keymap Registration
####################
def register_keymap_items(keymap_item_defs: list[dict]):
# Lazy-Load BL_NODE_KEYMAP
global BL_KEYMAP # noqa: PLW0603
if BL_KEYMAP is None:
BL_KEYMAP = bpy.context.window_manager.keyconfigs.addon.keymaps.new(
name='Node Editor',
space_type='NODE_EDITOR',
)
# Register Keymaps
for keymap_item_def in keymap_item_defs:
keymap_item = BL_KEYMAP.keymap_items.new(
*keymap_item_def['_'],
ctrl=keymap_item_def['ctrl'],
shift=keymap_item_def['shift'],
alt=keymap_item_def['alt'],
)
REG__KEYMAP_ITEMS.append(keymap_item)
def unregister_keymap_items():
global BL_KEYMAP # noqa: PLW0603
# Unregister Keymaps
for keymap_item in reversed(REG__KEYMAP_ITEMS):
BL_KEYMAP.keymap_items.remove(keymap_item)
# Lazy-Unload BL_NODE_KEYMAP
if BL_KEYMAP is not None:
REG__KEYMAP_ITEMS.clear()
BL_KEYMAP = None
####################
# - Delayed Registration Semantics
####################
def delay_registration(
delayed_reg_key: DelayedRegKey,
classes_cb: typ.Callable[[Path], list[BLClass]],
keymap_item_defs_cb: typ.Callable[[Path], list[KeymapItemDef]],
) -> None:
if delayed_reg_key in DELAYED_REGISTRATIONS:
msg = f'Already delayed a registration with key {delayed_reg_key}'
raise ValueError(msg)
def register_cb(path_deps: Path):
register_classes(classes_cb(path_deps))
register_keymap_items(keymap_item_defs_cb(path_deps))
DELAYED_REGISTRATIONS[delayed_reg_key] = register_cb
def run_delayed_registration(
delayed_reg_key: DelayedRegKey, path_deps: Path
) -> None:
register_cb = DELAYED_REGISTRATIONS.pop(delayed_reg_key)
register_cb(path_deps)

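A sketch of how the two halves of this delayed-registration API pair up around a dependency install, assuming it runs inside Blender with the addon's modules importable as 'blender_maxwell' (the callback bodies here are empty stand-ins):

# Sketch: queue a delayed registration, then trigger it once deps are installed.
from pathlib import Path
from blender_maxwell import registration

def classes_cb(path_deps: Path):
    return []  # would import deps-requiring modules and return their BL_REGISTER lists

def keymap_item_defs_cb(path_deps: Path):
    return []  # would return keymap-item dicts like connect_viewer.BL_KMI_REGISTER

registration.delay_registration(
    registration.EVENT__DEPS_SATISFIED,
    classes_cb=classes_cb,
    keymap_item_defs_cb=keymap_item_defs_cb,
)
# ...later, once pip --target has populated path_deps:
registration.run_delayed_registration(
    registration.EVENT__DEPS_SATISFIED,
    Path('dev/.cached-dev-dependencies'),
)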

@ -1,7 +0,0 @@
tidy3d==2.5.2
pydantic==2.6.0
sympy==1.12
scipy==1.12.0
trimesh==4.1.4
networkx==3.2.1
Rtree==1.2.0

@@ -1,7 +1,5 @@
import typing_extensions as typx
-import bpy
INVALID_BL_SOCKET_TYPES = {
"NodeSocketGeometry",
}

@@ -1,5 +1,6 @@
import enum
class BlenderTypeEnum(str, enum.Enum):
def _generate_next_value_(name, start, count, last_values):
return name

@@ -3,6 +3,7 @@ import functools
import sympy as sp
import sympy.physics.units as spu
####################
# - Useful Methods
####################


@ -0,0 +1,26 @@
import logging
LOGGER = logging.getLogger('blender_maxwell')
def get():
if LOGGER is None:
# Set Sensible Defaults
LOGGER.setLevel(logging.DEBUG)
#FORMATTER = logging.Formatter(
# '%(asctime)-15s %(levelname)8s %(name)s %(message)s'
#)
# Add Stream Handler
STREAM_HANDLER = logging.StreamHandler()
#STREAM_HANDLER.setFormatter(FORMATTER)
LOGGER.addHandler(STREAM_HANDLER)
return LOGGER
def set_level(level):
LOGGER.setLevel(level)
def enable_logfile():
raise NotImplementedError
def disable_logfile():
raise NotImplementedError

@@ -1,10 +1,8 @@
-import typing as typ
-import typing_extensions as typx
import pydantic as pyd
-from pydantic_core import core_schema as pyd_core_schema
import sympy as sp
import sympy.physics.units as spu
+import typing_extensions as typx
+from pydantic_core import core_schema as pyd_core_schema
from . import extra_sympy_units as spux
@@ -45,8 +43,7 @@
def validate_from_expr(value: AllowedSympyExprs) -> AllowedSympyExprs:
if not (
-isinstance(value, sp.Expr)
-or isinstance(value, sp.MatrixBase)
+isinstance(value, sp.Expr | sp.MatrixBase)
):
msg = f"Value {value} is not a `sympy` expression"
raise ValueError(msg)
@@ -98,8 +95,7 @@ def ConstrSympyExpr(
## - <https://docs.sympy.org/latest/guides/assumptions.html#predicates>
def validate_expr(expr: AllowedSympyExprs):
if not (
-isinstance(expr, sp.Expr)
-or isinstance(expr, sp.MatrixBase),
+isinstance(expr, sp.Expr | sp.MatrixBase),
):
## NOTE: Must match AllowedSympyExprs union elements.
msg = f"expr '{expr}' is not an allowed Sympy expression ({AllowedSympyExprs})"
@@ -143,7 +139,7 @@ def ConstrSympyExpr(
if (
allowed_matrix_shapes
and isinstance(expr, sp.MatrixBase)
-) and not (expr.shape in allowed_matrix_shapes):
+) and expr.shape not in allowed_matrix_shapes:
msgs.add(f"allowed_matrix_shapes={allowed_matrix_shapes} does not match expression {expr} with shape {expr.shape}")
# Error or Return


@ -0,0 +1,117 @@
import contextlib
import importlib.metadata
import os
import sys
from pathlib import Path
from . import logger as _logger
log = _logger.get()
####################
# - Constants
####################
PATH_ADDON_ROOT = Path(__file__).resolve().parent.parent
PATH_REQS = PATH_ADDON_ROOT / 'requirements.txt'
DEFAULT_PATH_DEPS = PATH_ADDON_ROOT / '.addon_dependencies'
DEFAULT_PATH_DEPS.mkdir(exist_ok=True)
####################
# - Globals
####################
DEPS_OK: bool | None = None
DEPS_ISSUES: list[str] | None = None
####################
# - sys.path Context Manager
####################
@contextlib.contextmanager
def importable_addon_deps(path_deps: Path):
os_path = os.fspath(path_deps)
sys.path.insert(0, os_path)
try:
yield
finally:
sys.path.remove(os_path)
####################
# - Check PyDeps
####################
def _check_pydeps(
path_requirementstxt: Path,
path_deps: Path,
) -> dict[str, tuple[str, str]]:
"""Check if packages defined in a 'requirements.txt' file are currently installed.
Returns a list of any issues (if empty, then all dependencies are correctly satisfied).
"""
def conform_pypi_package_deplock(deplock: str):
"""Conforms a <package>==<version> de-lock to match if pypi considers them the same (PyPi is case-insensitive and considers -/_ to be the same)
See <https://peps.python.org/pep-0426/#name>"""
return deplock.lower().replace('_', '-')
with path_requirementstxt.open('r') as file:
required_depslock = {
conform_pypi_package_deplock(line)
for raw_line in file.readlines()
if (line := raw_line.strip()) and not line.startswith('#')
}
# Investigate Issues
installed_deps = importlib.metadata.distributions(
path=[str(path_deps.resolve())] ## resolve() is just-in-case
)
installed_depslock = {
conform_pypi_package_deplock(
f'{dep.metadata["Name"]}=={dep.metadata["Version"]}'
)
for dep in installed_deps
}
# Determine Missing/Superfluous/Conflicting
req_not_inst = required_depslock - installed_depslock
inst_not_req = installed_depslock - required_depslock
conflicts = {
req.split('==')[0]: (req.split('==')[1], inst.split('==')[1])
for req in req_not_inst
for inst in inst_not_req
if req.split('==')[0] == inst.split('==')[0]
}
# Assemble and Return Issues
return [
f'{name}: Have {inst_ver}, Need {req_ver}'
for name, (req_ver, inst_ver) in conflicts.items()
] + [
f'Missing {deplock}'
for deplock in req_not_inst
if deplock.split('==')[0] not in conflicts
] + [
f'Superfluous {deplock}'
for deplock in inst_not_req
if deplock.split('==')[0] not in conflicts
]
####################
# - Refresh PyDeps
####################
def check_pydeps(path_deps: Path):
global DEPS_OK # noqa: PLW0603
global DEPS_ISSUES # noqa: PLW0603
if len(_issues := _check_pydeps(PATH_REQS, path_deps)) > 0:
#log.debug('Package Check Failed:', end='\n\t')
#log.debug(*_issues, sep='\n\t')
DEPS_OK = False
DEPS_ISSUES = _issues
else:
DEPS_OK = True
DEPS_ISSUES = _issues
return DEPS_OK

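The intended call pattern for this module is "check first, then only import third-party code inside the context manager". A sketch, assuming the addon package is importable as 'blender_maxwell' and the dev deps live under dev/.cached-dev-dependencies:

# Sketch: verify the pinned dependencies, then import one of them from the target dir.
from pathlib import Path
from blender_maxwell.utils import pydeps

path_deps = Path('dev/.cached-dev-dependencies')
if pydeps.check_pydeps(path_deps):
    with pydeps.importable_addon_deps(path_deps):
        import tidy3d  # any pinned dependency resolves from path_deps inside this block
    print('all pinned dependencies satisfied')
else:
    print('issues:', *pydeps.DEPS_ISSUES, sep='\n  ')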
@@ -2,10 +2,10 @@
 - SimulationTask: <https://github.com/flexcompute/tidy3d/blob/453055e89dcff6d619597120b47817e996f1c198/tidy3d/web/core/task_core.py>
 - Tidy3D Stub: <https://github.com/flexcompute/tidy3d/blob/453055e89dcff6d619597120b47817e996f1c198/tidy3d/web/api/tidy3d_stub.py>
 """
-from dataclasses import dataclass
-import typing as typ
-import functools
 import datetime as dt
+import functools
+import typing as typ
+from dataclasses import dataclass
 import tidy3d as td
 import tidy3d.web as td_web
@@ -284,7 +284,7 @@ class TidyCloudTasks:
 raise RuntimeError(msg)
 # Upload Simulation to Cloud Task
-if not upload_progress_cb is None:
+if upload_progress_cb is not None:
 upload_progress_cb = lambda uploaded_bytes: None
 try:
 cloud_task.upload_simulation(