fix: The rabid __mp_main__ segfault.
parent
c2db40ca6d
commit
02c63e3b8f
|
@ -6,7 +6,7 @@ authors = [
|
||||||
{ name = "Sofus Albert Høgsbro Rose", email = "blender-maxwell@sofusrose.com" }
|
{ name = "Sofus Albert Høgsbro Rose", email = "blender-maxwell@sofusrose.com" }
|
||||||
]
|
]
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"tidy3d==2.6.*",
|
"tidy3d>=2.6.3",
|
||||||
"pydantic==2.6.*",
|
"pydantic==2.6.*",
|
||||||
"sympy==1.12",
|
"sympy==1.12",
|
||||||
"scipy==1.12.*",
|
"scipy==1.12.*",
|
||||||
|
|
|
@ -64,6 +64,7 @@ numpy==1.24.3
|
||||||
# via matplotlib
|
# via matplotlib
|
||||||
# via scipy
|
# via scipy
|
||||||
# via shapely
|
# via shapely
|
||||||
|
# via tidy3d
|
||||||
# via trimesh
|
# via trimesh
|
||||||
# via xarray
|
# via xarray
|
||||||
packaging==24.0
|
packaging==24.0
|
||||||
|
@ -117,7 +118,7 @@ shapely==2.0.3
|
||||||
six==1.16.0
|
six==1.16.0
|
||||||
# via python-dateutil
|
# via python-dateutil
|
||||||
sympy==1.12
|
sympy==1.12
|
||||||
tidy3d==2.6.0
|
tidy3d==2.6.3
|
||||||
toml==0.10.2
|
toml==0.10.2
|
||||||
# via tidy3d
|
# via tidy3d
|
||||||
toolz==0.12.1
|
toolz==0.12.1
|
||||||
|
|
|
@ -63,6 +63,7 @@ numpy==1.24.3
|
||||||
# via matplotlib
|
# via matplotlib
|
||||||
# via scipy
|
# via scipy
|
||||||
# via shapely
|
# via shapely
|
||||||
|
# via tidy3d
|
||||||
# via trimesh
|
# via trimesh
|
||||||
# via xarray
|
# via xarray
|
||||||
packaging==24.0
|
packaging==24.0
|
||||||
|
@ -115,7 +116,7 @@ shapely==2.0.3
|
||||||
six==1.16.0
|
six==1.16.0
|
||||||
# via python-dateutil
|
# via python-dateutil
|
||||||
sympy==1.12
|
sympy==1.12
|
||||||
tidy3d==2.6.0
|
tidy3d==2.6.3
|
||||||
toml==0.10.2
|
toml==0.10.2
|
||||||
# via tidy3d
|
# via tidy3d
|
||||||
toolz==0.12.1
|
toolz==0.12.1
|
||||||
|
|
|
@ -93,6 +93,7 @@ def register():
|
||||||
|
|
||||||
if pydeps.check_pydeps(path_pydeps):
|
if pydeps.check_pydeps(path_pydeps):
|
||||||
log.info('PyDeps Satisfied: Loading Addon %s', info.ADDON_NAME)
|
log.info('PyDeps Satisfied: Loading Addon %s', info.ADDON_NAME)
|
||||||
|
addon_prefs.sync_addon_logging()
|
||||||
registration.register_classes(BL_REGISTER__AFTER_DEPS(path_pydeps))
|
registration.register_classes(BL_REGISTER__AFTER_DEPS(path_pydeps))
|
||||||
registration.register_keymap_items(BL_KEYMAP_ITEM_DEFS__AFTER_DEPS(path_pydeps))
|
registration.register_keymap_items(BL_KEYMAP_ITEM_DEFS__AFTER_DEPS(path_pydeps))
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -221,10 +221,10 @@ def event_decorator(
|
||||||
|
|
||||||
# Set Decorated Attributes and Return
|
# Set Decorated Attributes and Return
|
||||||
## Fix Introspection + Documentation
|
## Fix Introspection + Documentation
|
||||||
decorated.__name__ = method.__name__
|
#decorated.__name__ = method.__name__
|
||||||
decorated.__module__ = method.__module__
|
#decorated.__module__ = method.__module__
|
||||||
decorated.__qualname__ = method.__qualname__
|
#decorated.__qualname__ = method.__qualname__
|
||||||
decorated.__doc__ = method.__doc__
|
#decorated.__doc__ = method.__doc__
|
||||||
|
|
||||||
## Add Spice
|
## Add Spice
|
||||||
decorated.action_type = action_type
|
decorated.action_type = action_type
|
||||||
|
|
|
@ -46,29 +46,17 @@ class WaveConstantNode(base.MaxwellSimNode):
|
||||||
####################
|
####################
|
||||||
@events.computes_output_socket(
|
@events.computes_output_socket(
|
||||||
'WL',
|
'WL',
|
||||||
input_sockets={'WL', 'Freq'},
|
input_sockets={'WL'},
|
||||||
)
|
)
|
||||||
def compute_vac_wl(self, input_sockets: dict) -> sp.Expr:
|
def compute_vacwl_from_vacwl(self, input_sockets: dict) -> sp.Expr:
|
||||||
if (vac_wl := input_sockets['WL']) is not None:
|
return input_sockets['WL']
|
||||||
return vac_wl
|
|
||||||
if (freq := input_sockets['Freq']) is not None:
|
|
||||||
return constants.vac_speed_of_light / freq
|
|
||||||
|
|
||||||
msg = 'Vac WL and Freq are both None'
|
|
||||||
raise RuntimeError(msg)
|
|
||||||
|
|
||||||
@events.computes_output_socket(
|
@events.computes_output_socket(
|
||||||
'Freq',
|
'WL',
|
||||||
input_sockets={'WL', 'Freq'},
|
input_sockets={'Freq'},
|
||||||
)
|
)
|
||||||
def compute_freq(self, input_sockets: dict) -> sp.Expr:
|
def compute_freq_from_vacwl(self, input_sockets: dict) -> sp.Expr:
|
||||||
if (vac_wl := input_sockets['WL']) is not None:
|
return constants.vac_speed_of_light / input_sockets['Freq']
|
||||||
return constants.vac_speed_of_light / vac_wl
|
|
||||||
if (freq := input_sockets['Freq']) is not None:
|
|
||||||
return freq
|
|
||||||
|
|
||||||
msg = 'Vac WL and Freq are both None'
|
|
||||||
raise RuntimeError(msg)
|
|
||||||
|
|
||||||
####################
|
####################
|
||||||
# - Event Methods: Listy Output
|
# - Event Methods: Listy Output
|
||||||
|
|
|
@ -1,12 +1,17 @@
|
||||||
import typing as typ
|
import typing as typ
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
import tidy3d as td
|
||||||
|
|
||||||
from ...... import info
|
from ...... import info
|
||||||
from ......services import tdcloud
|
from ......services import tdcloud
|
||||||
|
from ......utils import logger
|
||||||
from .... import contracts as ct
|
from .... import contracts as ct
|
||||||
from .... import sockets
|
from .... import sockets
|
||||||
from ... import base, events
|
from ... import base, events
|
||||||
|
|
||||||
|
log = logger.get(__name__)
|
||||||
|
|
||||||
|
|
||||||
def _sim_data_cache_path(task_id: str) -> Path:
|
def _sim_data_cache_path(task_id: str) -> Path:
|
||||||
"""Compute an appropriate location for caching simulations downloaded from the internet, unique to each task ID.
|
"""Compute an appropriate location for caching simulations downloaded from the internet, unique to each task ID.
|
||||||
|
@ -14,6 +19,7 @@ def _sim_data_cache_path(task_id: str) -> Path:
|
||||||
Arguments:
|
Arguments:
|
||||||
task_id: The ID of the Tidy3D cloud task.
|
task_id: The ID of the Tidy3D cloud task.
|
||||||
"""
|
"""
|
||||||
|
(info.ADDON_CACHE / task_id).mkdir(exist_ok=True)
|
||||||
return info.ADDON_CACHE / task_id / 'sim_data.hdf5'
|
return info.ADDON_CACHE / task_id / 'sim_data.hdf5'
|
||||||
|
|
||||||
|
|
||||||
|
@ -38,6 +44,19 @@ class Tidy3DWebImporterNode(base.MaxwellSimNode):
|
||||||
input_sockets={'Cloud Task'},
|
input_sockets={'Cloud Task'},
|
||||||
)
|
)
|
||||||
def compute_sim_data(self, input_sockets: dict) -> str:
|
def compute_sim_data(self, input_sockets: dict) -> str:
|
||||||
|
## TODO: REMOVE TEST
|
||||||
|
log.info('Loading SimulationData File')
|
||||||
|
import sys
|
||||||
|
for module_name, module in sys.modules.copy().items():
|
||||||
|
if module_name == '__mp_main__':
|
||||||
|
print('Problematic Module Entry', module_name)
|
||||||
|
print(module)
|
||||||
|
#print('MODULE REPR', module)
|
||||||
|
continue
|
||||||
|
#return td.SimulationData.from_file(
|
||||||
|
# fname='/home/sofus/src/blender_maxwell/dev/sim_demo.hdf5'
|
||||||
|
#)
|
||||||
|
|
||||||
# Validate Task Availability
|
# Validate Task Availability
|
||||||
if (cloud_task := input_sockets['Cloud Task']) is None:
|
if (cloud_task := input_sockets['Cloud Task']) is None:
|
||||||
msg = f'"{self.bl_label}" CloudTask doesn\'t exist'
|
msg = f'"{self.bl_label}" CloudTask doesn\'t exist'
|
||||||
|
|
|
@ -23,6 +23,8 @@ class ConsoleViewOperator(bpy.types.Operator):
|
||||||
|
|
||||||
def execute(self, context):
|
def execute(self, context):
|
||||||
node = context.node
|
node = context.node
|
||||||
|
print('Executing Operator')
|
||||||
|
|
||||||
node.print_data_to_console()
|
node.print_data_to_console()
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
|
||||||
|
@ -110,9 +112,17 @@ class ViewerNode(base.MaxwellSimNode):
|
||||||
# - Methods
|
# - Methods
|
||||||
####################
|
####################
|
||||||
def print_data_to_console(self):
|
def print_data_to_console(self):
|
||||||
if not (data := self._compute_input('Data')):
|
import sys
|
||||||
|
for module_name, module in sys.modules.copy().items():
|
||||||
|
if module_name == '__mp_main__':
|
||||||
|
print('Anything, even repr(), with this module just crashes:', module_name)
|
||||||
|
print(module) ## Crash
|
||||||
|
|
||||||
|
if not self.inputs['Data'].is_linked:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
log.info('Printing Data to Console')
|
||||||
|
data = self._compute_input('Data')
|
||||||
if isinstance(data, sp.Basic):
|
if isinstance(data, sp.Basic):
|
||||||
console.print(sp.pretty(data, use_unicode=True))
|
console.print(sp.pretty(data, use_unicode=True))
|
||||||
else:
|
else:
|
||||||
|
|
|
@ -0,0 +1,306 @@
|
||||||
|
import typing as typ
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
from ... import contracts as ct
|
||||||
|
from ... import managed_objs, sockets
|
||||||
|
from .. import base, events
|
||||||
|
|
||||||
|
CACHE = {}
|
||||||
|
|
||||||
|
|
||||||
|
class FDTDSimDataVizNode(base.MaxwellSimNode):
|
||||||
|
node_type = ct.NodeType.FDTDSimDataViz
|
||||||
|
bl_label = 'FDTD Sim Data Viz'
|
||||||
|
|
||||||
|
####################
|
||||||
|
# - Sockets
|
||||||
|
####################
|
||||||
|
input_sockets: typ.ClassVar = {
|
||||||
|
'FDTD Sim Data': sockets.MaxwellFDTDSimDataSocketDef(),
|
||||||
|
}
|
||||||
|
output_sockets: typ.ClassVar = {'Preview': sockets.AnySocketDef()}
|
||||||
|
|
||||||
|
managed_obj_defs: typ.ClassVar = {
|
||||||
|
'viz_plot': ct.schemas.ManagedObjDef(
|
||||||
|
mk=lambda name: managed_objs.ManagedBLImage(name),
|
||||||
|
name_prefix='',
|
||||||
|
),
|
||||||
|
'viz_object': ct.schemas.ManagedObjDef(
|
||||||
|
mk=lambda name: managed_objs.ManagedBLObject(name),
|
||||||
|
name_prefix='',
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
####################
|
||||||
|
# - Properties
|
||||||
|
####################
|
||||||
|
viz_monitor_name: bpy.props.EnumProperty(
|
||||||
|
name='Viz Monitor Name',
|
||||||
|
description='Monitor to visualize within the attached SimData',
|
||||||
|
items=lambda self, context: self.retrieve_monitors(context),
|
||||||
|
update=(lambda self, context: self.sync_viz_monitor_name(context)),
|
||||||
|
)
|
||||||
|
cache_viz_monitor_type: bpy.props.StringProperty(
|
||||||
|
name='Viz Monitor Type',
|
||||||
|
description='Type of the viz monitor',
|
||||||
|
default='',
|
||||||
|
)
|
||||||
|
|
||||||
|
# Field Monitor Type
|
||||||
|
field_viz_component: bpy.props.EnumProperty(
|
||||||
|
name='Field Component',
|
||||||
|
description='Field component to visualize',
|
||||||
|
items=[
|
||||||
|
('E', 'E', 'Electric'),
|
||||||
|
# ("H", "H", "Magnetic"),
|
||||||
|
# ("S", "S", "Poynting"),
|
||||||
|
('Ex', 'Ex', 'Ex'),
|
||||||
|
('Ey', 'Ey', 'Ey'),
|
||||||
|
('Ez', 'Ez', 'Ez'),
|
||||||
|
# ("Hx", "Hx", "Hx"),
|
||||||
|
# ("Hy", "Hy", "Hy"),
|
||||||
|
# ("Hz", "Hz", "Hz"),
|
||||||
|
],
|
||||||
|
default='E',
|
||||||
|
update=lambda self, context: self.sync_prop('field_viz_component', context),
|
||||||
|
)
|
||||||
|
field_viz_part: bpy.props.EnumProperty(
|
||||||
|
name='Field Part',
|
||||||
|
description='Field part to visualize',
|
||||||
|
items=[
|
||||||
|
('real', 'Real', 'Electric'),
|
||||||
|
('imag', 'Imaginary', 'Imaginary'),
|
||||||
|
('abs', 'Abs', 'Abs'),
|
||||||
|
('abs^2', 'Squared Abs', 'Square Abs'),
|
||||||
|
('phase', 'Phase', 'Phase'),
|
||||||
|
],
|
||||||
|
default='real',
|
||||||
|
update=lambda self, context: self.sync_prop('field_viz_part', context),
|
||||||
|
)
|
||||||
|
field_viz_scale: bpy.props.EnumProperty(
|
||||||
|
name='Field Scale',
|
||||||
|
description='Field scale to visualize in, Linear or Log',
|
||||||
|
items=[
|
||||||
|
('lin', 'Linear', 'Linear Scale'),
|
||||||
|
('dB', 'Log (dB)', 'Logarithmic (dB) Scale'),
|
||||||
|
],
|
||||||
|
default='lin',
|
||||||
|
update=lambda self, context: self.sync_prop('field_viz_scale', context),
|
||||||
|
)
|
||||||
|
field_viz_structure_visibility: bpy.props.FloatProperty(
|
||||||
|
name='Field Viz Plot: Structure Visibility',
|
||||||
|
description='Visibility of structes',
|
||||||
|
default=0.2,
|
||||||
|
min=0.0,
|
||||||
|
max=1.0,
|
||||||
|
update=lambda self, context: self.sync_prop('field_viz_plot_fixed_f', context),
|
||||||
|
)
|
||||||
|
|
||||||
|
field_viz_plot_fix_x: bpy.props.BoolProperty(
|
||||||
|
name='Field Viz Plot: Fix X',
|
||||||
|
description='Fix the x-coordinate on the plot',
|
||||||
|
default=False,
|
||||||
|
update=lambda self, context: self.sync_prop('field_viz_plot_fix_x', context),
|
||||||
|
)
|
||||||
|
field_viz_plot_fix_y: bpy.props.BoolProperty(
|
||||||
|
name='Field Viz Plot: Fix Y',
|
||||||
|
description='Fix the y coordinate on the plot',
|
||||||
|
default=False,
|
||||||
|
update=lambda self, context: self.sync_prop('field_viz_plot_fix_y', context),
|
||||||
|
)
|
||||||
|
field_viz_plot_fix_z: bpy.props.BoolProperty(
|
||||||
|
name='Field Viz Plot: Fix Z',
|
||||||
|
description='Fix the z coordinate on the plot',
|
||||||
|
default=False,
|
||||||
|
update=lambda self, context: self.sync_prop('field_viz_plot_fix_z', context),
|
||||||
|
)
|
||||||
|
field_viz_plot_fix_f: bpy.props.BoolProperty(
|
||||||
|
name='Field Viz Plot: Fix Freq',
|
||||||
|
description='Fix the frequency coordinate on the plot',
|
||||||
|
default=False,
|
||||||
|
update=lambda self, context: self.sync_prop('field_viz_plot_fix_f', context),
|
||||||
|
)
|
||||||
|
|
||||||
|
field_viz_plot_fixed_x: bpy.props.FloatProperty(
|
||||||
|
name='Field Viz Plot: Fix X',
|
||||||
|
description='Fix the x-coordinate on the plot',
|
||||||
|
default=0.0,
|
||||||
|
update=lambda self, context: self.sync_prop('field_viz_plot_fixed_x', context),
|
||||||
|
)
|
||||||
|
field_viz_plot_fixed_y: bpy.props.FloatProperty(
|
||||||
|
name='Field Viz Plot: Fixed Y',
|
||||||
|
description='Fix the y coordinate on the plot',
|
||||||
|
default=0.0,
|
||||||
|
update=lambda self, context: self.sync_prop('field_viz_plot_fixed_y', context),
|
||||||
|
)
|
||||||
|
field_viz_plot_fixed_z: bpy.props.FloatProperty(
|
||||||
|
name='Field Viz Plot: Fixed Z',
|
||||||
|
description='Fix the z coordinate on the plot',
|
||||||
|
default=0.0,
|
||||||
|
update=lambda self, context: self.sync_prop('field_viz_plot_fixed_z', context),
|
||||||
|
)
|
||||||
|
field_viz_plot_fixed_f: bpy.props.FloatProperty(
|
||||||
|
name='Field Viz Plot: Fixed Freq (Thz)',
|
||||||
|
description='Fix the frequency coordinate on the plot',
|
||||||
|
default=0.0,
|
||||||
|
update=lambda self, context: self.sync_prop('field_viz_plot_fixed_f', context),
|
||||||
|
)
|
||||||
|
|
||||||
|
####################
|
||||||
|
# - Derived Properties
|
||||||
|
####################
|
||||||
|
def sync_viz_monitor_name(self, context):
|
||||||
|
if (sim_data := self._compute_input('FDTD Sim Data')) is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
self.cache_viz_monitor_type = sim_data.monitor_data[self.viz_monitor_name].type
|
||||||
|
self.sync_prop('viz_monitor_name', context)
|
||||||
|
|
||||||
|
def retrieve_monitors(self, context) -> list[tuple]:
|
||||||
|
global CACHE
|
||||||
|
if not CACHE.get(self.instance_id):
|
||||||
|
sim_data = self._compute_input('FDTD Sim Data')
|
||||||
|
|
||||||
|
if sim_data is not None:
|
||||||
|
CACHE[self.instance_id] = {
|
||||||
|
'monitors': list(sim_data.monitor_data.keys())
|
||||||
|
}
|
||||||
|
else:
|
||||||
|
return [('NONE', 'None', 'No monitors')]
|
||||||
|
|
||||||
|
monitor_names = CACHE[self.instance_id]['monitors']
|
||||||
|
|
||||||
|
# Check for No Monitors
|
||||||
|
if not monitor_names:
|
||||||
|
return [('NONE', 'None', 'No monitors')]
|
||||||
|
|
||||||
|
return [
|
||||||
|
(
|
||||||
|
monitor_name,
|
||||||
|
monitor_name,
|
||||||
|
f"Monitor '{monitor_name}' recorded by the FDTD Sim",
|
||||||
|
)
|
||||||
|
for monitor_name in monitor_names
|
||||||
|
]
|
||||||
|
|
||||||
|
####################
|
||||||
|
# - UI
|
||||||
|
####################
|
||||||
|
def draw_props(self, context, layout):
|
||||||
|
row = layout.row()
|
||||||
|
row.prop(self, 'viz_monitor_name', text='')
|
||||||
|
if self.cache_viz_monitor_type == 'FieldData':
|
||||||
|
# Array Selection
|
||||||
|
split = layout.split(factor=0.45)
|
||||||
|
col = split.column(align=False)
|
||||||
|
col.label(text='Component')
|
||||||
|
col.label(text='Part')
|
||||||
|
col.label(text='Scale')
|
||||||
|
|
||||||
|
col = split.column(align=False)
|
||||||
|
col.prop(self, 'field_viz_component', text='')
|
||||||
|
col.prop(self, 'field_viz_part', text='')
|
||||||
|
col.prop(self, 'field_viz_scale', text='')
|
||||||
|
|
||||||
|
# Coordinate Fixing
|
||||||
|
split = layout.split(factor=0.45)
|
||||||
|
col = split.column(align=False)
|
||||||
|
col.prop(self, 'field_viz_plot_fix_x', text='Fix x (um)')
|
||||||
|
col.prop(self, 'field_viz_plot_fix_y', text='Fix y (um)')
|
||||||
|
col.prop(self, 'field_viz_plot_fix_z', text='Fix z (um)')
|
||||||
|
col.prop(self, 'field_viz_plot_fix_f', text='Fix f (THz)')
|
||||||
|
|
||||||
|
col = split.column(align=False)
|
||||||
|
col.prop(self, 'field_viz_plot_fixed_x', text='')
|
||||||
|
col.prop(self, 'field_viz_plot_fixed_y', text='')
|
||||||
|
col.prop(self, 'field_viz_plot_fixed_z', text='')
|
||||||
|
col.prop(self, 'field_viz_plot_fixed_f', text='')
|
||||||
|
|
||||||
|
####################
|
||||||
|
# - On Value Changed Methods
|
||||||
|
####################
|
||||||
|
@events.on_value_changed(
|
||||||
|
socket_name='FDTD Sim Data',
|
||||||
|
managed_objs={'viz_object'},
|
||||||
|
input_sockets={'FDTD Sim Data'},
|
||||||
|
)
|
||||||
|
def on_value_changed__fdtd_sim_data(
|
||||||
|
self,
|
||||||
|
managed_objs: dict[str, ct.schemas.ManagedObj],
|
||||||
|
input_sockets: dict[str, typ.Any],
|
||||||
|
) -> None:
|
||||||
|
global CACHE
|
||||||
|
|
||||||
|
if (sim_data := input_sockets['FDTD Sim Data']) is None:
|
||||||
|
CACHE.pop(self.instance_id, None)
|
||||||
|
return
|
||||||
|
|
||||||
|
CACHE[self.instance_id] = {'monitors': list(sim_data.monitor_data.keys())}
|
||||||
|
|
||||||
|
####################
|
||||||
|
# - Plotting
|
||||||
|
####################
|
||||||
|
@events.on_show_plot(
|
||||||
|
managed_objs={'viz_plot'},
|
||||||
|
props={
|
||||||
|
'viz_monitor_name',
|
||||||
|
'field_viz_component',
|
||||||
|
'field_viz_part',
|
||||||
|
'field_viz_scale',
|
||||||
|
'field_viz_structure_visibility',
|
||||||
|
'field_viz_plot_fix_x',
|
||||||
|
'field_viz_plot_fix_y',
|
||||||
|
'field_viz_plot_fix_z',
|
||||||
|
'field_viz_plot_fix_f',
|
||||||
|
'field_viz_plot_fixed_x',
|
||||||
|
'field_viz_plot_fixed_y',
|
||||||
|
'field_viz_plot_fixed_z',
|
||||||
|
'field_viz_plot_fixed_f',
|
||||||
|
},
|
||||||
|
input_sockets={'FDTD Sim Data'},
|
||||||
|
stop_propagation=True,
|
||||||
|
)
|
||||||
|
def on_show_plot(
|
||||||
|
self,
|
||||||
|
managed_objs: dict[str, ct.schemas.ManagedObj],
|
||||||
|
input_sockets: dict[str, typ.Any],
|
||||||
|
props: dict[str, typ.Any],
|
||||||
|
):
|
||||||
|
if (sim_data := input_sockets['FDTD Sim Data']) is None or (
|
||||||
|
monitor_name := props['viz_monitor_name']
|
||||||
|
) == 'NONE':
|
||||||
|
return
|
||||||
|
|
||||||
|
coord_fix = {}
|
||||||
|
for coord in ['x', 'y', 'z', 'f']:
|
||||||
|
if props[f'field_viz_plot_fix_{coord}']:
|
||||||
|
coord_fix |= {
|
||||||
|
coord: props[f'field_viz_plot_fixed_{coord}'],
|
||||||
|
}
|
||||||
|
|
||||||
|
if 'f' in coord_fix:
|
||||||
|
coord_fix['f'] *= 1e12
|
||||||
|
|
||||||
|
managed_objs['viz_plot'].mpl_plot_to_image(
|
||||||
|
lambda ax: sim_data.plot_field(
|
||||||
|
monitor_name,
|
||||||
|
props['field_viz_component'],
|
||||||
|
val=props['field_viz_part'],
|
||||||
|
scale=props['field_viz_scale'],
|
||||||
|
eps_alpha=props['field_viz_structure_visibility'],
|
||||||
|
phase=0,
|
||||||
|
**coord_fix,
|
||||||
|
ax=ax,
|
||||||
|
),
|
||||||
|
bl_select=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
####################
|
||||||
|
# - Blender Registration
|
||||||
|
####################
|
||||||
|
BL_REGISTER = [
|
||||||
|
FDTDSimDataVizNode,
|
||||||
|
]
|
||||||
|
BL_NODES = {ct.NodeType.FDTDSimDataViz: (ct.NodeCategory.MAXWELLSIM_VIZ)}
|
|
@ -66,13 +66,22 @@ class InstallPyDeps(bpy.types.Operator):
|
||||||
'Running pip w/cmdline: %s',
|
'Running pip w/cmdline: %s',
|
||||||
' '.join(cmdline),
|
' '.join(cmdline),
|
||||||
)
|
)
|
||||||
|
print("TRYING CRASH")
|
||||||
|
import sys
|
||||||
|
for module_name, module in sys.modules.copy().items():
|
||||||
|
if module_name == '__mp_main__':
|
||||||
|
print('Problematic Module Entry', module_name)
|
||||||
|
print(module)
|
||||||
|
#print('MODULE REPR', module)
|
||||||
|
continue
|
||||||
|
print("NO CRASH")
|
||||||
subprocess.check_call(cmdline)
|
subprocess.check_call(cmdline)
|
||||||
except subprocess.CalledProcessError:
|
except subprocess.CalledProcessError:
|
||||||
log.exception('Failed to install PyDeps')
|
log.exception('Failed to install PyDeps')
|
||||||
return {'CANCELLED'}
|
return {'CANCELLED'}
|
||||||
|
|
||||||
registration.run_delayed_registration(
|
registration.run_delayed_registration(
|
||||||
registration.EVENT__ON_DEPS_INSTALLED,
|
registration.EVENT__DEPS_SATISFIED,
|
||||||
path_addon_pydeps,
|
path_addon_pydeps,
|
||||||
)
|
)
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
|
|
@ -70,7 +70,7 @@ class InstallPyDeps(bpy.types.Operator):
|
||||||
return {'CANCELLED'}
|
return {'CANCELLED'}
|
||||||
|
|
||||||
registration.run_delayed_registration(
|
registration.run_delayed_registration(
|
||||||
registration.EVENT__ON_DEPS_INSTALLED,
|
registration.EVENT__DEPS_SATISFIED,
|
||||||
path_addon_pydeps,
|
path_addon_pydeps,
|
||||||
)
|
)
|
||||||
return {'FINISHED'}
|
return {'FINISHED'}
|
||||||
|
|
|
@ -23,13 +23,20 @@ DEPS_ISSUES: list[str] | None = None
|
||||||
def importable_addon_deps(path_deps: Path):
|
def importable_addon_deps(path_deps: Path):
|
||||||
os_path = os.fspath(path_deps)
|
os_path = os.fspath(path_deps)
|
||||||
|
|
||||||
log.info('Adding Path to sys.path: %s', str(os_path))
|
if os_path not in sys.path:
|
||||||
sys.path.insert(0, os_path)
|
log.info('Adding Path to sys.path: %s', str(os_path))
|
||||||
try:
|
sys.path.insert(0, os_path)
|
||||||
yield
|
try:
|
||||||
finally:
|
yield
|
||||||
log.info('Removing Path from sys.path: %s', str(os_path))
|
finally:
|
||||||
sys.path.remove(os_path)
|
pass
|
||||||
|
#log.info('Removing Path from sys.path: %s', str(os_path))
|
||||||
|
#sys.path.remove(os_path)
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
yield
|
||||||
|
finally:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
|
|
|
@ -2,6 +2,8 @@ import logging
|
||||||
import typing as typ
|
import typing as typ
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
## TODO: Hygiene; don't try to own all root loggers.
|
||||||
|
|
||||||
LogLevel: typ.TypeAlias = int
|
LogLevel: typ.TypeAlias = int
|
||||||
LogHandler: typ.TypeAlias = typ.Any ## TODO: Can we do better?
|
LogHandler: typ.TypeAlias = typ.Any ## TODO: Can we do better?
|
||||||
|
|
||||||
|
@ -35,6 +37,14 @@ CACHE = {
|
||||||
# - Logging Handlers
|
# - Logging Handlers
|
||||||
####################
|
####################
|
||||||
def console_handler(level: LogLevel) -> logging.StreamHandler:
|
def console_handler(level: LogLevel) -> logging.StreamHandler:
|
||||||
|
"""A logging handler that prints messages to the console.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
level: The log levels (debug, info, etc.) to print.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The logging handler, which can be added to a logger.
|
||||||
|
"""
|
||||||
stream_formatter = logging.Formatter(STREAM_LOG_FORMAT)
|
stream_formatter = logging.Formatter(STREAM_LOG_FORMAT)
|
||||||
stream_handler = logging.StreamHandler()
|
stream_handler = logging.StreamHandler()
|
||||||
stream_handler.setFormatter(stream_formatter)
|
stream_handler.setFormatter(stream_formatter)
|
||||||
|
@ -43,6 +53,15 @@ def console_handler(level: LogLevel) -> logging.StreamHandler:
|
||||||
|
|
||||||
|
|
||||||
def file_handler(path_log_file: Path, level: LogLevel) -> logging.FileHandler:
|
def file_handler(path_log_file: Path, level: LogLevel) -> logging.FileHandler:
|
||||||
|
"""A logging handler that prints messages to a file.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
path_log_file: The path to the log file.
|
||||||
|
level: The log levels (debug, info, etc.) to append to the file.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The logging handler, which can be added to a logger.
|
||||||
|
"""
|
||||||
file_formatter = logging.Formatter(FILE_LOG_FORMAT)
|
file_formatter = logging.Formatter(FILE_LOG_FORMAT)
|
||||||
file_handler = logging.FileHandler(path_log_file)
|
file_handler = logging.FileHandler(path_log_file)
|
||||||
file_handler.setFormatter(file_formatter)
|
file_handler.setFormatter(file_formatter)
|
||||||
|
@ -60,7 +79,22 @@ def setup_logger(
|
||||||
console_level: LogLevel | None,
|
console_level: LogLevel | None,
|
||||||
file_path: Path | None,
|
file_path: Path | None,
|
||||||
file_level: LogLevel,
|
file_level: LogLevel,
|
||||||
):
|
) -> None:
|
||||||
|
"""Configures a single logger with given console and file handlers, individualizing the log level that triggers each.
|
||||||
|
|
||||||
|
This is a lower-level function - generally, modules that want to use a well-configured logger will use the `get()` function, which retrieves the parameters for this function from the addon preferences.
|
||||||
|
This function is also used by the higher-level log setup.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
cb_console_handler: A function that takes a log level threshold (inclusive), and returns a logging handler to a console-printer.
|
||||||
|
cb_file_handler: A function that takes a log level threshold (inclusive), and returns a logging handler to a file-printer.
|
||||||
|
logger: The logger to configure.
|
||||||
|
console_level: The log level threshold to print to the console.
|
||||||
|
None deactivates file logging.
|
||||||
|
path_log_file: The path to the log file.
|
||||||
|
None deactivates file logging.
|
||||||
|
file_level: The log level threshold to print to the log file.
|
||||||
|
"""
|
||||||
# Delegate Level Semantics to Log Handlers
|
# Delegate Level Semantics to Log Handlers
|
||||||
## This lets everything through
|
## This lets everything through
|
||||||
logger.setLevel(logging.DEBUG)
|
logger.setLevel(logging.DEBUG)
|
||||||
|
@ -83,7 +117,18 @@ def setup_logger(
|
||||||
logger.addHandler(cb_file_handler(file_path, file_level))
|
logger.addHandler(cb_file_handler(file_path, file_level))
|
||||||
|
|
||||||
|
|
||||||
def get(module_name):
|
def get(module_name) -> logging.Logger:
|
||||||
|
"""Get a simple logger from the module name.
|
||||||
|
|
||||||
|
Should be used by calling ex. `LOG = simple_logger.get(__name__)` in the module wherein logging is desired.
|
||||||
|
Should **only** be used if the dependencies aren't yet available for using `blender_maxwell.utils.logger`.
|
||||||
|
|
||||||
|
Uses the global `CACHE` to store `console_level`, `file_path`, and `file_level`, since addon preferences aren't yet available.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
module_name: The name of the module to create a logger for.
|
||||||
|
Should be set to `__name__`.
|
||||||
|
"""
|
||||||
logger = logging.getLogger(SIMPLE_LOGGER_PREFIX + module_name)
|
logger = logging.getLogger(SIMPLE_LOGGER_PREFIX + module_name)
|
||||||
|
|
||||||
# Reuse Cached Arguments from Last sync_*
|
# Reuse Cached Arguments from Last sync_*
|
||||||
|
@ -106,7 +151,19 @@ def sync_bootstrap_logging(
|
||||||
console_level: LogLevel | None = None,
|
console_level: LogLevel | None = None,
|
||||||
file_path: Path | None = None,
|
file_path: Path | None = None,
|
||||||
file_level: LogLevel = logging.NOTSET,
|
file_level: LogLevel = logging.NOTSET,
|
||||||
):
|
) -> None:
|
||||||
|
"""Initialize the simple logger, including the `CACHE`, so that logging will work without dependencies / the addon preferences being started yet.
|
||||||
|
|
||||||
|
Should only be called by the addon's pre-initialization code, before `register()`.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
console_level: The console log level threshold to store in `CACHE`.
|
||||||
|
`None` deactivates console logging.
|
||||||
|
file_path: The file path to use for file logging, stored in `CACHE`.
|
||||||
|
`None` deactivates file logging.
|
||||||
|
file_level: The file log level threshold to store in `CACHE`.
|
||||||
|
Only needs to be set if `file_path` is not `None`.
|
||||||
|
"""
|
||||||
CACHE['console_level'] = console_level
|
CACHE['console_level'] = console_level
|
||||||
CACHE['file_path'] = file_path
|
CACHE['file_path'] = file_path
|
||||||
CACHE['file_level'] = file_level
|
CACHE['file_level'] = file_level
|
||||||
|
@ -125,14 +182,18 @@ def sync_bootstrap_logging(
|
||||||
logger_logger.info('Bootstrapped Logging w/Settings %s', str(CACHE))
|
logger_logger.info('Bootstrapped Logging w/Settings %s', str(CACHE))
|
||||||
|
|
||||||
|
|
||||||
def sync_loggers(
|
def sync_all_loggers(
|
||||||
cb_console_handler: typ.Callable[[LogLevel], LogHandler],
|
cb_console_handler: typ.Callable[[LogLevel], LogHandler],
|
||||||
cb_file_handler: typ.Callable[[Path, LogLevel], LogHandler],
|
cb_file_handler: typ.Callable[[Path, LogLevel], LogHandler],
|
||||||
console_level: LogLevel | None,
|
console_level: LogLevel | None,
|
||||||
file_path: Path | None,
|
file_path: Path | None,
|
||||||
file_level: LogLevel,
|
file_level: LogLevel,
|
||||||
):
|
):
|
||||||
"""Update all loggers to conform to the given per-handler on/off state and log level."""
|
"""Update all loggers to conform to the given per-handler on/off state and log level.
|
||||||
|
|
||||||
|
This runs the corresponding `setup_logger()` for all active loggers.
|
||||||
|
Thus, all parameters are identical to `setup_logger()`.
|
||||||
|
"""
|
||||||
CACHE['console_level'] = console_level
|
CACHE['console_level'] = console_level
|
||||||
CACHE['file_path'] = file_path
|
CACHE['file_path'] = file_path
|
||||||
CACHE['file_level'] = file_level
|
CACHE['file_level'] = file_level
|
||||||
|
|
|
@ -117,7 +117,13 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
|
||||||
####################
|
####################
|
||||||
# - Property Sync
|
# - Property Sync
|
||||||
####################
|
####################
|
||||||
def sync_addon_logging(self, only_sync_logger: logging.Logger | None = None):
|
def sync_addon_logging(self, logger_to_setup: logging.Logger | None = None) -> None:
|
||||||
|
"""Configure one, or all, active addon logger(s).
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
logger_to_setup:
|
||||||
|
When set to None, all addon loggers will be configured
|
||||||
|
"""
|
||||||
if pydeps.DEPS_OK:
|
if pydeps.DEPS_OK:
|
||||||
log.info('Getting Logger (DEPS_OK = %s)', str(pydeps.DEPS_OK))
|
log.info('Getting Logger (DEPS_OK = %s)', str(pydeps.DEPS_OK))
|
||||||
with pydeps.importable_addon_deps(self.pydeps_path):
|
with pydeps.importable_addon_deps(self.pydeps_path):
|
||||||
|
@ -137,19 +143,20 @@ class BLMaxwellAddonPrefs(bpy.types.AddonPreferences):
|
||||||
}
|
}
|
||||||
|
|
||||||
# Sync Single Logger / All Loggers
|
# Sync Single Logger / All Loggers
|
||||||
if only_sync_logger is not None:
|
if logger_to_setup is not None:
|
||||||
logger.setup_logger(
|
logger.setup_logger(
|
||||||
logger.console_handler,
|
logger.console_handler,
|
||||||
logger.file_handler,
|
logger.file_handler,
|
||||||
only_sync_logger,
|
logger_to_setup,
|
||||||
|
**log_setup_kwargs,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
log.info('Re-Configuring All Loggers')
|
||||||
|
logger.sync_all_loggers(
|
||||||
|
logger.console_handler,
|
||||||
|
logger.file_handler,
|
||||||
**log_setup_kwargs,
|
**log_setup_kwargs,
|
||||||
)
|
)
|
||||||
return
|
|
||||||
logger.sync_loggers(
|
|
||||||
logger.console_handler,
|
|
||||||
logger.file_handler,
|
|
||||||
**log_setup_kwargs,
|
|
||||||
)
|
|
||||||
|
|
||||||
def sync_use_default_pydeps_path(self, _: bpy.types.Context):
|
def sync_use_default_pydeps_path(self, _: bpy.types.Context):
|
||||||
# Switch to Default
|
# Switch to Default
|
||||||
|
|
|
@ -1,3 +1,12 @@
|
||||||
|
"""Manages the registration of Blender classes, including delayed registrations that require access to Python dependencies.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
BL_KEYMAP: Addon-specific keymap used to register operator hotkeys. REG__CLASSES: Currently registered Blender classes.
|
||||||
|
REG__KEYMAP_ITEMS: Currently registered Blender keymap items.
|
||||||
|
DELAYED_REGISTRATIONS: Currently pending registration operations, which can be realized with `run_delayed_registration()`.
|
||||||
|
EVENT__DEPS_SATISFIED: A constant representing a semantic choice of key for `DELAYED_REGISTRATIONS`.
|
||||||
|
"""
|
||||||
|
|
||||||
import typing as typ
|
import typing as typ
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
|
@ -9,7 +18,17 @@ log = simple_logger.get(__name__)
|
||||||
|
|
||||||
# TODO: More types for these things!
|
# TODO: More types for these things!
|
||||||
DelayedRegKey: typ.TypeAlias = str
|
DelayedRegKey: typ.TypeAlias = str
|
||||||
BLClass: typ.TypeAlias = typ.Any ## TODO: Better Type
|
BLClass: typ.TypeAlias = (
|
||||||
|
bpy.types.Panel
|
||||||
|
| bpy.types.UIList
|
||||||
|
| bpy.types.Menu
|
||||||
|
| bpy.types.Header
|
||||||
|
| bpy.types.Operator
|
||||||
|
| bpy.types.KeyingSetInfo
|
||||||
|
| bpy.types.RenderEngine
|
||||||
|
| bpy.types.AssetShelf
|
||||||
|
| bpy.types.FileHandler
|
||||||
|
)
|
||||||
BLKeymapItem: typ.TypeAlias = typ.Any ## TODO: Better Type
|
BLKeymapItem: typ.TypeAlias = typ.Any ## TODO: Better Type
|
||||||
KeymapItemDef: typ.TypeAlias = typ.Any ## TODO: Better Type
|
KeymapItemDef: typ.TypeAlias = typ.Any ## TODO: Better Type
|
||||||
|
|
||||||
|
@ -24,15 +43,22 @@ REG__KEYMAP_ITEMS: list[BLKeymapItem] = []
|
||||||
DELAYED_REGISTRATIONS: dict[DelayedRegKey, typ.Callable[[Path], None]] = {}
|
DELAYED_REGISTRATIONS: dict[DelayedRegKey, typ.Callable[[Path], None]] = {}
|
||||||
|
|
||||||
####################
|
####################
|
||||||
# - Constants
|
# - Delayed Registration Keys
|
||||||
####################
|
####################
|
||||||
EVENT__DEPS_SATISFIED: str = 'on_deps_satisfied'
|
EVENT__DEPS_SATISFIED: DelayedRegKey = 'on_deps_satisfied'
|
||||||
|
|
||||||
|
|
||||||
####################
|
####################
|
||||||
# - Class Registration
|
# - Class Registration
|
||||||
####################
|
####################
|
||||||
def register_classes(bl_register: list):
|
def register_classes(bl_register: list[BLClass]) -> None:
|
||||||
|
"""Registers a Blender class, allowing it to hook into relevant Blender features.
|
||||||
|
|
||||||
|
Caches registered classes in the module global `REG__CLASSES`.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
bl_register: List of Blender classes to register.
|
||||||
|
"""
|
||||||
log.info('Registering %s Classes', len(bl_register))
|
log.info('Registering %s Classes', len(bl_register))
|
||||||
for cls in bl_register:
|
for cls in bl_register:
|
||||||
if cls.bl_idname in REG__CLASSES:
|
if cls.bl_idname in REG__CLASSES:
|
||||||
|
@ -48,7 +74,11 @@ def register_classes(bl_register: list):
|
||||||
REG__CLASSES.append(cls)
|
REG__CLASSES.append(cls)
|
||||||
|
|
||||||
|
|
||||||
def unregister_classes():
|
def unregister_classes() -> None:
|
||||||
|
"""Unregisters all previously registered Blender classes.
|
||||||
|
|
||||||
|
All previously registered Blender classes can be found in the module global variable `REG__CLASSES`.
|
||||||
|
"""
|
||||||
log.info('Unregistering %s Classes', len(REG__CLASSES))
|
log.info('Unregistering %s Classes', len(REG__CLASSES))
|
||||||
for cls in reversed(REG__CLASSES):
|
for cls in reversed(REG__CLASSES):
|
||||||
log.debug(
|
log.debug(
|
||||||
|
@ -123,22 +153,44 @@ def delay_registration(
|
||||||
classes_cb: typ.Callable[[Path], list[BLClass]],
|
classes_cb: typ.Callable[[Path], list[BLClass]],
|
||||||
keymap_item_defs_cb: typ.Callable[[Path], list[KeymapItemDef]],
|
keymap_item_defs_cb: typ.Callable[[Path], list[KeymapItemDef]],
|
||||||
) -> None:
|
) -> None:
|
||||||
|
"""Delays the registration of Blender classes that depend on certain Python dependencies, for which neither the location nor validity is yet known.
|
||||||
|
|
||||||
|
The function that registers is stored in the module global `DELAYED_REGISTRATIONS`, indexed by `delayed_reg_key`.
|
||||||
|
Once the PyDeps location and validity is determined, `run_delayed_registration()` can be used as a shorthand for accessing `DELAYED_REGISTRATIONS[delayed_reg_key]`.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
delayed_reg_key: The identifier with which to index the registration callback.
|
||||||
|
Module-level constants like `EVENT__DEPS_SATISFIED` are a good choice.
|
||||||
|
classes_cb: A function that takes a `sys.path`-compatible path to Python dependencies needed by the Blender classes in question, and returns a list of Blender classes to import.
|
||||||
|
`register_classes()` will be used to actually register the returned Blender classes.
|
||||||
|
keymap_item_defs_cb: Similar, except for addon keymap items.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A function that takes a `sys.path`-compatible path to the Python dependencies needed to import the given Blender classes.
|
||||||
|
"""
|
||||||
if delayed_reg_key in DELAYED_REGISTRATIONS:
|
if delayed_reg_key in DELAYED_REGISTRATIONS:
|
||||||
msg = f'Already delayed a registration with key {delayed_reg_key}'
|
msg = f'Already delayed a registration with key {delayed_reg_key}'
|
||||||
raise ValueError(msg)
|
raise ValueError(msg)
|
||||||
|
|
||||||
def register_cb(path_deps: Path):
|
def register_cb(path_pydeps: Path):
|
||||||
log.info(
|
log.info(
|
||||||
'Running Delayed Registration (key %s) with PyDeps: %s',
|
'Running Delayed Registration (key %s) with PyDeps: %s',
|
||||||
delayed_reg_key,
|
delayed_reg_key,
|
||||||
path_deps,
|
path_pydeps,
|
||||||
)
|
)
|
||||||
register_classes(classes_cb(path_deps))
|
register_classes(classes_cb(path_pydeps))
|
||||||
register_keymap_items(keymap_item_defs_cb(path_deps))
|
register_keymap_items(keymap_item_defs_cb(path_pydeps))
|
||||||
|
|
||||||
DELAYED_REGISTRATIONS[delayed_reg_key] = register_cb
|
DELAYED_REGISTRATIONS[delayed_reg_key] = register_cb
|
||||||
|
|
||||||
|
|
||||||
def run_delayed_registration(delayed_reg_key: DelayedRegKey, path_deps: Path) -> None:
|
def run_delayed_registration(delayed_reg_key: DelayedRegKey, path_pydeps: Path) -> None:
|
||||||
|
"""Run a delayed registration, by using `delayed_reg_key` to lookup the correct path, passing `path_pydeps` to the registration.
|
||||||
|
|
||||||
|
Parameters:
|
||||||
|
delayed_reg_key: The identifier with which to index the registration callback.
|
||||||
|
Must match the parameter with which the delayed registration was first declared.
|
||||||
|
path_pydeps: The `sys.path`-compatible path to the Python dependencies that the classes need to have available in order to register.
|
||||||
|
"""
|
||||||
register_cb = DELAYED_REGISTRATIONS.pop(delayed_reg_key)
|
register_cb = DELAYED_REGISTRATIONS.pop(delayed_reg_key)
|
||||||
register_cb(path_deps)
|
register_cb(path_pydeps)
|
||||||
|
|
|
@ -14,6 +14,10 @@ from pathlib import Path
|
||||||
import tidy3d as td
|
import tidy3d as td
|
||||||
import tidy3d.web as td_web
|
import tidy3d.web as td_web
|
||||||
|
|
||||||
|
from ..utils import logger
|
||||||
|
|
||||||
|
log = logger.get(__name__)
|
||||||
|
|
||||||
CloudFolderID = str
|
CloudFolderID = str
|
||||||
CloudFolderName = str
|
CloudFolderName = str
|
||||||
CloudFolder = td_web.core.task_core.Folder
|
CloudFolder = td_web.core.task_core.Folder
|
||||||
|
@ -101,6 +105,7 @@ class TidyCloudFolders:
|
||||||
cloud_folder.folder_id: cloud_folder for cloud_folder in cloud_folders
|
cloud_folder.folder_id: cloud_folder for cloud_folder in cloud_folders
|
||||||
}
|
}
|
||||||
cls.cache_folders = folders
|
cls.cache_folders = folders
|
||||||
|
log.info("Retrieved Folders: %s", str(cls.cache_folders))
|
||||||
return folders
|
return folders
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
|
@ -238,6 +243,7 @@ class TidyCloudTasks:
|
||||||
## Task by-Folder Cache
|
## Task by-Folder Cache
|
||||||
cls.cache_folder_tasks[cloud_folder.folder_id] = set(cloud_tasks)
|
cls.cache_folder_tasks[cloud_folder.folder_id] = set(cloud_tasks)
|
||||||
|
|
||||||
|
log.info('Retrieved Tasks (folder="%s"): %s)', cloud_folder.folder_id, str(set(cloud_tasks)))
|
||||||
return cloud_tasks
|
return cloud_tasks
|
||||||
|
|
||||||
####################
|
####################
|
||||||
|
@ -251,18 +257,26 @@ class TidyCloudTasks:
|
||||||
if download_sim_path is None:
|
if download_sim_path is None:
|
||||||
with tempfile.NamedTemporaryFile(delete=False) as f:
|
with tempfile.NamedTemporaryFile(delete=False) as f:
|
||||||
_path_tmp = Path(f.name)
|
_path_tmp = Path(f.name)
|
||||||
_path_tmp.rename(f.name + '.hdf5')
|
_path_tmp.rename(f.name + '.hdf5.gz')
|
||||||
path_sim = Path(f.name + '.hdf5')
|
path_sim = Path(f.name)
|
||||||
else:
|
else:
|
||||||
path_sim = download_sim_path
|
path_sim = download_sim_path
|
||||||
|
|
||||||
# Get Sim Data (from file and/or download)
|
# Get Sim Data (from file and/or download)
|
||||||
if path_sim.is_file():
|
if path_sim.is_file():
|
||||||
sim_data = td.SimulationData.from_file(str(download_sim_path))
|
log.info('Loading Cloud Task "%s" from "%s"', cloud_task.cloud_id, path_sim)
|
||||||
|
sim_data = td.SimulationData.from_file(str(path_sim))
|
||||||
else:
|
else:
|
||||||
|
log.info(
|
||||||
|
'Downloading & Loading Cloud Task "%s" to "%s"',
|
||||||
|
cloud_task.task_id,
|
||||||
|
path_sim,
|
||||||
|
)
|
||||||
sim_data = td_web.api.webapi.load(
|
sim_data = td_web.api.webapi.load(
|
||||||
cloud_task.task_id,
|
cloud_task.task_id,
|
||||||
path=str(download_sim_path),
|
path=str(path_sim),
|
||||||
|
replace_existing=True,
|
||||||
|
verbose=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Delete Temporary File (if used)
|
# Delete Temporary File (if used)
|
||||||
|
@ -404,10 +418,7 @@ class TidyCloudTasks:
|
||||||
## By deleting the folder ID, all tasks within will be reloaded
|
## By deleting the folder ID, all tasks within will be reloaded
|
||||||
del cls.cache_folder_tasks[folder_id]
|
del cls.cache_folder_tasks[folder_id]
|
||||||
|
|
||||||
return {
|
return dict(cls.tasks(cloud_folder).items())
|
||||||
task_id: cls.tasks(cloud_folder)[task_id]
|
|
||||||
for task_id in cls.cache_folder_tasks[folder_id]
|
|
||||||
}
|
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def abort_task(cls, cloud_task: CloudTask) -> CloudTask:
|
def abort_task(cls, cloud_task: CloudTask) -> CloudTask:
|
||||||
|
|
|
@ -13,7 +13,7 @@ from ..nodeps.utils.simple_logger import (
|
||||||
loggers, # noqa: F401
|
loggers, # noqa: F401
|
||||||
setup_logger, # noqa: F401
|
setup_logger, # noqa: F401
|
||||||
simple_loggers, # noqa: F401
|
simple_loggers, # noqa: F401
|
||||||
sync_loggers, # noqa: F401
|
sync_all_loggers, # noqa: F401
|
||||||
)
|
)
|
||||||
|
|
||||||
OUTPUT_CONSOLE = rich.console.Console(
|
OUTPUT_CONSOLE = rich.console.Console(
|
||||||
|
@ -59,7 +59,7 @@ def get(module_name):
|
||||||
if (addon_prefs := info.addon_prefs()) is None:
|
if (addon_prefs := info.addon_prefs()) is None:
|
||||||
msg = 'Addon preferences not defined'
|
msg = 'Addon preferences not defined'
|
||||||
raise RuntimeError(msg)
|
raise RuntimeError(msg)
|
||||||
addon_prefs.sync_addon_logging(only_sync_logger=logger)
|
addon_prefs.sync_addon_logging(logger_to_setup=logger)
|
||||||
|
|
||||||
return logger
|
return logger
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,78 @@
|
||||||
|
import logging
|
||||||
|
import shutil
|
||||||
|
import sys
|
||||||
|
import traceback
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
PATH_SCRIPT = str(Path(__file__).resolve().parent)
|
||||||
|
sys.path.insert(0, str(PATH_SCRIPT))
|
||||||
|
import info # noqa: E402
|
||||||
|
import pack # noqa: E402
|
||||||
|
|
||||||
|
sys.path.remove(str(PATH_SCRIPT))
|
||||||
|
|
||||||
|
# Set Bootstrap Log Level
|
||||||
|
## This will be the log-level of both console and file logs, at first...
|
||||||
|
## ...until the addon preferences have been loaded.
|
||||||
|
BOOTSTRAP_LOG_LEVEL = logging.DEBUG
|
||||||
|
|
||||||
|
|
||||||
|
def delete_addon_if_loaded(addon_name: str) -> bool:
|
||||||
|
"""Strongly inspired by Blender's addon_utils.py."""
|
||||||
|
removed_addon = False
|
||||||
|
|
||||||
|
# Check if Python Module is Loaded
|
||||||
|
mod = sys.modules.get(addon_name)
|
||||||
|
# if (mod := sys.modules.get(addon_name)) is None:
|
||||||
|
# ## It could still be loaded-by-default; then, it's in the prefs list
|
||||||
|
# is_loaded_now = False
|
||||||
|
# loads_by_default = addon_name in bpy.context.preferences.addons
|
||||||
|
# else:
|
||||||
|
# ## BL sets __addon_enabled__ on module of enabled addons.
|
||||||
|
# ## BL sets __addon_persistent__ on module of load-by-default addons.
|
||||||
|
# is_loaded_now = getattr(mod, '__addon_enabled__', False)
|
||||||
|
# loads_by_default = getattr(mod, '__addon_persistent__', False)
|
||||||
|
|
||||||
|
# Unregister Modules and Mark Disabled & Non-Persistent
|
||||||
|
## This effectively disables it
|
||||||
|
if mod is not None:
|
||||||
|
removed_addon = True
|
||||||
|
mod.__addon_enabled__ = False
|
||||||
|
mod.__addon_persistent__ = False
|
||||||
|
try:
|
||||||
|
mod.unregister()
|
||||||
|
except BaseException:
|
||||||
|
traceback.print_exc()
|
||||||
|
|
||||||
|
# Remove Addon
|
||||||
|
## Remove Addon from Preferences
|
||||||
|
## - Unsure why addon_utils has a while, but let's trust the process...
|
||||||
|
while addon_name in bpy.context.preferences.addons:
|
||||||
|
addon = bpy.context.preferences.addons.get(addon_name)
|
||||||
|
if addon:
|
||||||
|
bpy.context.preferences.addons.remove(addon)
|
||||||
|
|
||||||
|
## Physically Excise Addon Code
|
||||||
|
for addons_path in bpy.utils.script_paths(subdir='addons'):
|
||||||
|
addon_path = Path(addons_path) / addon_name
|
||||||
|
if addon_path.is_dir():
|
||||||
|
shutil.rmtree(addon_path)
|
||||||
|
|
||||||
|
## Save User Preferences
|
||||||
|
bpy.ops.wm.save_userpref()
|
||||||
|
|
||||||
|
return removed_addon
|
||||||
|
|
||||||
|
|
||||||
|
####################
|
||||||
|
# - Main
|
||||||
|
####################
|
||||||
|
if __name__ == '__main__':
|
||||||
|
if delete_addon_if_loaded(info.ADDON_NAME):
|
||||||
|
bpy.ops.wm.quit_blender()
|
||||||
|
sys.exit(info.STATUS_UNINSTALLED_ADDON)
|
||||||
|
else:
|
||||||
|
bpy.ops.wm.quit_blender()
|
||||||
|
sys.exit(info.STATUS_NOCHANGE_ADDON)
|
|
@ -0,0 +1,83 @@
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import bpy
|
||||||
|
|
||||||
|
PATH_SCRIPT = str(Path(__file__).resolve().parent)
|
||||||
|
sys.path.insert(0, str(PATH_SCRIPT))
|
||||||
|
import info # noqa: E402
|
||||||
|
import pack # noqa: E402
|
||||||
|
|
||||||
|
sys.path.remove(str(PATH_SCRIPT))
|
||||||
|
|
||||||
|
|
||||||
|
def install_and_enable_addon(addon_name: str, addon_zip: Path) -> None:
|
||||||
|
"""Strongly inspired by Blender's addon_utils.py."""
|
||||||
|
# Check if Addon is Installable
|
||||||
|
if any(
|
||||||
|
[
|
||||||
|
(mod := sys.modules.get(addon_name)) is not None,
|
||||||
|
addon_name in bpy.context.preferences.addons,
|
||||||
|
any(
|
||||||
|
(Path(addon_path) / addon_name).exists()
|
||||||
|
for addon_path in bpy.utils.script_paths(subdir='addons')
|
||||||
|
),
|
||||||
|
]
|
||||||
|
):
|
||||||
|
## TODO: Check if addon file path exists?
|
||||||
|
in_pref_addons = addon_name in bpy.context.preferences.addons
|
||||||
|
existing_files_found = {
|
||||||
|
addon_path: (Path(addon_path) / addon_name).exists()
|
||||||
|
for addon_path in bpy.utils.script_paths(subdir='addons')
|
||||||
|
if (Path(addon_path) / addon_name).exists()
|
||||||
|
}
|
||||||
|
msg = f"Addon (module = '{mod}') is not installable (in preferences.addons: {in_pref_addons}) (existing files found: {existing_files_found})"
|
||||||
|
raise ValueError(msg)
|
||||||
|
|
||||||
|
# Install Addon
|
||||||
|
bpy.ops.preferences.addon_install(filepath=str(addon_zip))
|
||||||
|
if not any(
|
||||||
|
(Path(addon_path) / addon_name).exists()
|
||||||
|
for addon_path in bpy.utils.script_paths(subdir='addons')
|
||||||
|
):
|
||||||
|
msg = f"Couldn't install addon {addon_name}"
|
||||||
|
raise RuntimeError(msg)
|
||||||
|
|
||||||
|
# Enable Addon
|
||||||
|
bpy.ops.preferences.addon_enable(module=addon_name)
|
||||||
|
if addon_name not in bpy.context.preferences.addons:
|
||||||
|
msg = f"Couldn't enable addon {addon_name}"
|
||||||
|
raise RuntimeError(msg)
|
||||||
|
|
||||||
|
# Save User Preferences
|
||||||
|
bpy.ops.wm.save_userpref()
|
||||||
|
|
||||||
|
|
||||||
|
def setup_for_development(addon_name: str, path_addon_dev_deps: Path) -> None:
|
||||||
|
addon_prefs = bpy.context.preferences.addons[addon_name].preferences
|
||||||
|
|
||||||
|
# PyDeps Path
|
||||||
|
addon_prefs.use_default_pydeps_path = False
|
||||||
|
addon_prefs.pydeps_path = path_addon_dev_deps
|
||||||
|
|
||||||
|
# Save User Preferences
|
||||||
|
bpy.ops.wm.save_userpref()
|
||||||
|
|
||||||
|
|
||||||
|
####################
|
||||||
|
# - Main
|
||||||
|
####################
|
||||||
|
if __name__ == '__main__':
|
||||||
|
with pack.zipped_addon(
|
||||||
|
info.PATH_ADDON_PKG,
|
||||||
|
info.PATH_ADDON_ZIP,
|
||||||
|
info.PATH_ROOT / 'pyproject.toml',
|
||||||
|
info.PATH_ROOT / 'requirements.lock',
|
||||||
|
initial_log_level=info.BOOTSTRAP_LOG_LEVEL,
|
||||||
|
) as path_zipped:
|
||||||
|
install_and_enable_addon(info.ADDON_NAME, path_zipped)
|
||||||
|
|
||||||
|
setup_for_development(info.ADDON_NAME, info.PATH_ADDON_DEV_DEPS)
|
||||||
|
|
||||||
|
bpy.ops.wm.quit_blender()
|
||||||
|
sys.exit(info.STATUS_INSTALLED_ADDON)
|
|
@ -1,175 +0,0 @@
|
||||||
"""Blender startup script ensuring correct addon installation.
|
|
||||||
|
|
||||||
See <https://github.com/dfelinto/blender/blob/master/release/scripts/modules/addon_utils.py>
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import shutil
|
|
||||||
import sys
|
|
||||||
import traceback
|
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
import bpy
|
|
||||||
|
|
||||||
PATH_SCRIPT = str(Path(__file__).resolve().parent)
|
|
||||||
sys.path.insert(0, str(PATH_SCRIPT))
|
|
||||||
import info # noqa: E402
|
|
||||||
import pack # noqa: E402
|
|
||||||
|
|
||||||
sys.path.remove(str(PATH_SCRIPT))
|
|
||||||
|
|
||||||
# Set Bootstrap Log Level
|
|
||||||
## This will be the log-level of both console and file logs, at first...
|
|
||||||
## ...until the addon preferences have been loaded.
|
|
||||||
BOOTSTRAP_LOG_LEVEL = logging.DEBUG
|
|
||||||
|
|
||||||
## TODO: Preferences item that allows using BLMaxwell 'starter.blend' as Blender's default starter blendfile.
|
|
||||||
|
|
||||||
|
|
||||||
####################
|
|
||||||
# - Addon Functions
|
|
||||||
####################
|
|
||||||
def delete_addon_if_loaded(addon_name: str) -> None:
|
|
||||||
"""Strongly inspired by Blender's addon_utils.py."""
|
|
||||||
should_restart_blender = False
|
|
||||||
|
|
||||||
# Check if Python Module is Loaded
|
|
||||||
mod = sys.modules.get(addon_name)
|
|
||||||
# if (mod := sys.modules.get(addon_name)) is None:
|
|
||||||
# ## It could still be loaded-by-default; then, it's in the prefs list
|
|
||||||
# is_loaded_now = False
|
|
||||||
# loads_by_default = addon_name in bpy.context.preferences.addons
|
|
||||||
# else:
|
|
||||||
# ## BL sets __addon_enabled__ on module of enabled addons.
|
|
||||||
# ## BL sets __addon_persistent__ on module of load-by-default addons.
|
|
||||||
# is_loaded_now = getattr(mod, '__addon_enabled__', False)
|
|
||||||
# loads_by_default = getattr(mod, '__addon_persistent__', False)
|
|
||||||
|
|
||||||
# Unregister Modules and Mark Disabled & Non-Persistent
|
|
||||||
## This effectively disables it
|
|
||||||
if mod is not None:
|
|
||||||
mod.__addon_enabled__ = False
|
|
||||||
mod.__addon_persistent__ = False
|
|
||||||
try:
|
|
||||||
mod.unregister()
|
|
||||||
except BaseException:
|
|
||||||
traceback.print_exc()
|
|
||||||
should_restart_blender = True
|
|
||||||
|
|
||||||
# Remove Addon
|
|
||||||
## Remove Addon from Preferences
|
|
||||||
## - Unsure why addon_utils has a while, but let's trust the process...
|
|
||||||
while addon_name in bpy.context.preferences.addons:
|
|
||||||
addon = bpy.context.preferences.addons.get(addon_name)
|
|
||||||
if addon:
|
|
||||||
bpy.context.preferences.addons.remove(addon)
|
|
||||||
|
|
||||||
## Physically Excise Addon Code
|
|
||||||
for addons_path in bpy.utils.script_paths(subdir='addons'):
|
|
||||||
addon_path = Path(addons_path) / addon_name
|
|
||||||
if addon_path.exists():
|
|
||||||
shutil.rmtree(addon_path)
|
|
||||||
should_restart_blender = True
|
|
||||||
|
|
||||||
    ## Save User Preferences
    bpy.ops.wm.save_userpref()

    # Quit (Restart) Blender - hard-flush Python environment
    ## - Python environments are not made to be partially flushed.
    ## - This is the only truly reliable way to avoid all bugs.
    ## - See <https://github.com/JacquesLucke/blender_vscode>
    ## - By passing STATUS_UNINSTALLED_ADDON, we report that it's clean now.
    if should_restart_blender:
        bpy.ops.wm.quit_blender()
        sys.exit(info.STATUS_UNINSTALLED_ADDON)


def install_addon(addon_name: str, addon_zip: Path) -> None:
    """Strongly inspired by Blender's addon_utils.py."""
    # Check if Addon is Installable
    if any(
        [
            (mod := sys.modules.get(addon_name)) is not None,
            addon_name in bpy.context.preferences.addons,
            any(
                (Path(addon_path) / addon_name).exists()
                for addon_path in bpy.utils.script_paths(subdir='addons')
            ),
        ]
    ):
        ## TODO: Check if addon file path exists?
        in_pref_addons = addon_name in bpy.context.preferences.addons
        existing_files_found = {
            addon_path: (Path(addon_path) / addon_name).exists()
            for addon_path in bpy.utils.script_paths(subdir='addons')
            if (Path(addon_path) / addon_name).exists()
        }
        msg = f"Addon (module = '{mod}') is not installable (in preferences.addons: {in_pref_addons}) (existing files found: {existing_files_found})"
        raise ValueError(msg)

    # Install Addon
    bpy.ops.preferences.addon_install(filepath=str(addon_zip))
    if not any(
        (Path(addon_path) / addon_name).exists()
        for addon_path in bpy.utils.script_paths(subdir='addons')
    ):
        msg = f"Couldn't install addon {addon_name}"
        raise RuntimeError(msg)

    # Enable Addon
    bpy.ops.preferences.addon_enable(module=addon_name)
    if addon_name not in bpy.context.preferences.addons:
        msg = f"Couldn't enable addon {addon_name}"
        raise RuntimeError(msg)

    # Save User Preferences
    bpy.ops.wm.save_userpref()


def setup_for_development(addon_name: str, path_addon_dev_deps: Path) -> None:
    addon_prefs = bpy.context.preferences.addons[addon_name].preferences

    # PyDeps Path
    addon_prefs.use_default_pydeps_path = False
    addon_prefs.pydeps_path = path_addon_dev_deps


####################
# - Entrypoint
####################
def main():
    # Delete Addon (maybe; possibly restart)
    delete_addon_if_loaded(info.ADDON_NAME)

    # Signal that Live-Printing can Start
    print(info.SIGNAL_START_CLEAN_BLENDER)  # noqa: T201

    # Install and Enable Addon
    install_failed = False
    with pack.zipped_addon(
        info.PATH_ADDON_PKG,
        info.PATH_ADDON_ZIP,
        info.PATH_ROOT / 'pyproject.toml',
        info.PATH_ROOT / 'requirements.lock',
        initial_log_level=BOOTSTRAP_LOG_LEVEL,
    ) as path_zipped:
        try:
            install_addon(info.ADDON_NAME, path_zipped)
        except Exception:
            traceback.print_exc()
            install_failed = True

    # Setup Addon for Development Use
    setup_for_development(info.ADDON_NAME, info.PATH_ADDON_DEV_DEPS)

    # Load Development .blend
    ## TODO: We need a better (also final-deployed-compatible) solution for what happens when a user opened a .blend file without installing dependencies!
    if not install_failed:
        bpy.ops.wm.open_mainfile(filepath=str(info.PATH_ADDON_DEV_BLEND))
    else:
        bpy.ops.wm.quit_blender()
        sys.exit(info.STATUS_NOINSTALL_ADDON)


if __name__ == '__main__':
    main()
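The install/enable sequence above leans on three stock Blender operators: preferences.addon_install, preferences.addon_enable, and wm.save_userpref. As a minimal sketch of that same sequence outside this project's helpers (the zip path, module name, and script name below are placeholders, not values from this repository):

    # Minimal sketch: install and enable an arbitrary addon .zip from headless Blender.
    # Run with:  blender --background --python install_any_addon.py   (script name is illustrative)
    import sys
    from pathlib import Path

    import bpy

    ADDON_ZIP = Path('/tmp/my_addon.zip')  # hypothetical zip path
    ADDON_NAME = 'my_addon'                # hypothetical top-level module name

    bpy.ops.preferences.addon_install(filepath=str(ADDON_ZIP))  # unpack into scripts/addons
    bpy.ops.preferences.addon_enable(module=ADDON_NAME)         # register the addon
    bpy.ops.wm.save_userpref()                                  # persist the enabled state

    # Fail loudly if the addon never showed up in the enabled set.
    if ADDON_NAME not in bpy.context.preferences.addons:
        sys.exit(1)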
@@ -1,6 +1,7 @@
 # noqa: INP001
 import os
 import subprocess
+import sys
 from pathlib import Path

 import info
@@ -9,16 +10,33 @@ import info
 ####################
 # - Blender Runner
 ####################
-def run_blender(py_script: Path, print_live: bool = False):
+def run_blender(
+    py_script: Path | None,
+    load_devfile: bool = False,
+    headless: bool = True,
+    monitor: bool = False,
+):
     process = subprocess.Popen(
-        ['blender', '--python', str(py_script)],
+        [
+            'blender',
+            *(['--background'] if headless else []),
+            *(
+                [
+                    '--python',
+                    str(py_script),
+                ]
+                if py_script is not None
+                else []
+            ),
+            *([info.PATH_ADDON_DEV_BLEND] if load_devfile else []),
+        ],
         env=os.environ | {'PYTHONUNBUFFERED': '1'},
         stdout=subprocess.PIPE,
         stderr=subprocess.STDOUT,
         text=True,
     )
     output = []
-    printing_live = print_live
+    printing_live = monitor

     # Process Real-Time Output
     for line in iter(process.stdout.readline, b''):
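To make the new argument-list construction easier to follow, here is a rough sketch of the argv it produces in the two modes used below; the paths are illustrative stand-ins, not the real info constants:

    from pathlib import Path

    def build_blender_argv(
        py_script: Path | None,
        load_devfile: bool = False,
        headless: bool = True,
        devfile: Path = Path('dev/addon_dev.blend'),  # illustrative stand-in for info.PATH_ADDON_DEV_BLEND
    ) -> list[str]:
        """Mirror of run_blender()'s argv construction, for illustration only."""
        return [
            'blender',
            *(['--background'] if headless else []),
            *(['--python', str(py_script)] if py_script is not None else []),
            *([str(devfile)] if load_devfile else []),
        ]

    # Headless scripted run:   ['blender', '--background', '--python', 'scripts/install.py']
    print(build_blender_argv(Path('scripts/install.py')))
    # Interactive dev session: ['blender', 'dev/addon_dev.blend']
    print(build_blender_argv(None, headless=False, load_devfile=True))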
@@ -42,18 +60,30 @@ def run_blender(py_script: Path, print_live: bool = False):


 ####################
-# - Run Blender w/Clean Addon Reinstall
+# - Main
 ####################
-def main():
-    return_code, output = run_blender(info.PATH_BL_RUN, print_live=False)
-    if return_code == info.STATUS_UNINSTALLED_ADDON:
-        return_code, output = run_blender(info.PATH_BL_RUN, print_live=True)
-    if return_code == info.STATUS_NOINSTALL_ADDON:
-        msg = f"Couldn't install addon {info.ADDON_NAME}"
-        raise ValueError(msg)
-    elif return_code != 0:
-        print(''.join(output))  # noqa: T201


 if __name__ == '__main__':
-    main()
+    # Uninstall Addon
+    print(f'Blender: Uninstalling "{info.ADDON_NAME}"...')
+    return_code, output = run_blender(info.PATH_BL_DELETE_ADDON, monitor=False)
+    if return_code == info.STATUS_UNINSTALLED_ADDON:
+        print(f'\tBlender: Uninstalled "{info.ADDON_NAME}"')
+    elif return_code == info.STATUS_NOCHANGE_ADDON:
+        print(f'\tBlender: "{info.ADDON_NAME}" Not Installed')
+
+    # Install Addon
+    print(f'Blender: Installing & Enabling "{info.ADDON_NAME}"...')
+    return_code, output = run_blender(info.PATH_BL_INSTALL_ADDON, monitor=False)
+    if return_code == info.STATUS_INSTALLED_ADDON:
+        print(f'\tBlender: Install & Enable "{info.ADDON_NAME}"')
+    else:
+        print(f'\tBlender: "{info.ADDON_NAME}" Not Installed')
+        print(output)
+        sys.exit(1)
+
+    # Run Addon
+    print(f'Blender: Running "{info.ADDON_NAME}"...')
+    subprocess.run
+    return_code, output = run_blender(
+        None, headless=False, load_devfile=True, monitor=True
+    )
@@ -1,13 +1,20 @@
+import logging
 import tomllib
 from pathlib import Path

 PATH_ROOT = Path(__file__).resolve().parent.parent.parent
 PATH_SRC = PATH_ROOT / 'src'
-PATH_BL_RUN = PATH_SRC / 'scripts' / 'bl_run.py'

+# Scripts
+PATH_BL_DELETE_ADDON = PATH_SRC / 'scripts' / 'bl_delete_addon.py'
+PATH_BL_INSTALL_ADDON = PATH_SRC / 'scripts' / 'bl_install_addon.py'
+PATH_BL_RUN_DEV = PATH_SRC / 'scripts' / 'bl_run_dev.py'
+
+# Build Dir
 PATH_BUILD = PATH_ROOT / 'build'
 PATH_BUILD.mkdir(exist_ok=True)

+# Dev Dir
 PATH_DEV = PATH_ROOT / 'dev'
 PATH_DEV.mkdir(exist_ok=True)

@@ -19,7 +26,9 @@ SIGNAL_START_CLEAN_BLENDER = 'SIGNAL__blender_is_clean'
 ####################
 # - BL_RUN Exit Codes
 ####################
+STATUS_NOCHANGE_ADDON = 42
 STATUS_UNINSTALLED_ADDON = 42
+STATUS_INSTALLED_ADDON = 69
 STATUS_NOINSTALL_ADDON = 68

 ####################
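Note that, as committed, STATUS_NOCHANGE_ADDON and STATUS_UNINSTALLED_ADDON share the value 42, so a caller cannot tell "uninstalled" from "nothing to uninstall" by exit code alone. A small illustrative mapping of how the dev runner above appears to interpret these codes; only the constants come from info.py, the dict and helper are a sketch:

    # Illustrative summary of the exit-code protocol defined above.
    STATUS_DESCRIPTIONS = {
        42: 'addon uninstalled, or there was nothing to uninstall',  # STATUS_UNINSTALLED_ADDON / STATUS_NOCHANGE_ADDON
        68: 'addon could not be installed',                          # STATUS_NOINSTALL_ADDON
        69: 'addon installed and enabled',                           # STATUS_INSTALLED_ADDON
    }

    def describe_exit(code: int) -> str:
        return STATUS_DESCRIPTIONS.get(code, f'unexpected exit code: {code}')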
@@ -39,6 +48,10 @@ PATH_ADDON_ZIP = PATH_ROOT / 'build' / (ADDON_NAME + '__' + ADDON_VERSION + '.zi

 PATH_ADDON_BLEND_STARTER = PATH_ADDON_PKG / 'blenders' / 'starter.blend'

+# Set Bootstrap Log Level
+## This will be the log-level of both console and file logs, at first...
+## ...until the addon preferences have been loaded.
+BOOTSTRAP_LOG_LEVEL = logging.DEBUG
 BOOTSTRAP_LOG_LEVEL_FILENAME = '.bootstrap_log_level'

 # Install the ZIPped Addon
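BOOTSTRAP_LOG_LEVEL pairs with the existing BOOTSTRAP_LOG_LEVEL_FILENAME; presumably the packer writes the level into that file inside the zipped addon (compare initial_log_level=BOOTSTRAP_LOG_LEVEL in the install script above), so the addon can log at that level before its preferences are loaded. A minimal sketch under that assumption; the helper names and the plain-text file format are guesses, not this project's API:

    import logging
    from pathlib import Path

    BOOTSTRAP_LOG_LEVEL_FILENAME = '.bootstrap_log_level'

    def write_bootstrap_log_level(addon_dir: Path, level: int = logging.DEBUG) -> None:
        # Hypothetical writer: store the numeric level as plain text next to __init__.py.
        (addon_dir / BOOTSTRAP_LOG_LEVEL_FILENAME).write_text(str(level))

    def read_bootstrap_log_level(addon_dir: Path, default: int = logging.INFO) -> int:
        # Hypothetical reader: fall back to a default when the marker file is missing.
        path = addon_dir / BOOTSTRAP_LOG_LEVEL_FILENAME
        return int(path.read_text().strip()) if path.is_file() else default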
@@ -33,7 +33,13 @@ def zipped_addon(  # noqa: PLR0913
     remove_after_close: bool = True,
 ) -> typ.Iterator[Path]:
     """Context manager exposing a folder as a (temporary) zip file.
-    The .zip file is deleted afterwards.
+
+    Parameters:
+        path_addon_pkg: Path to the folder containing __init__.py of the Blender addon.
+        path_addon_zip: Path to the Addon ZIP to generate.
+        path_pyproject_toml: Path to the `pyproject.toml` of the project.
+            This is made available to the addon, to de-duplicate definition of name,
+        The .zip file is deleted afterwards, unless `remove_after_close` is specified.
     """
     # Delete Existing ZIP (maybe)
     if path_addon_zip.is_file():