Skip to content

Commit 1039d0b

Browse files
authored
Merge pull request #55 from zasexton/main
Fix reported telemetry issues and [accel] extra uploads
2 parents e7fc630 + 7c115f6 commit 1039d0b

14 files changed

Lines changed: 441 additions & 87 deletions

File tree

.github/workflows/test-upload.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -106,15 +106,15 @@ jobs:
106106
TWINE_USERNAME: __token__
107107
TWINE_PASSWORD: ${{ secrets.TEST_PYPI_PASSWORD_SVV }}
108108
- name: Validate TestPyPI token (svv-accelerated)
109-
if: ${{ hashFiles('dist/svv-accelerated-*.whl') != '' }}
109+
if: ${{ hashFiles('dist/svv_accelerated-*.whl') != '' }}
110110
run: |
111111
if [ -z "${{ secrets.TEST_PYPI_PASSWORD_ACCEL }}" ]; then
112112
echo "::error title=Missing secret::The repository secret 'TEST_PYPI_PASSWORD_ACCEL' is not configured. Set it to a TestPyPI API token with access to the 'svv-accelerated' project.";
113113
exit 1;
114114
fi
115115
- name: Upload accelerated wheels (svv-accelerated) to TestPyPI
116-
if: ${{ hashFiles('dist/svv-accelerated-*.whl') != '' }}
117-
run: twine upload --non-interactive --skip-existing --verbose --repository testpypi dist/svv-accelerated-*.whl
116+
if: ${{ hashFiles('dist/svv_accelerated-*.whl') != '' }}
117+
run: twine upload --non-interactive --skip-existing --verbose --repository testpypi dist/svv_accelerated-*.whl
118118
env:
119119
TWINE_USERNAME: __token__
120120
TWINE_PASSWORD: ${{ secrets.TEST_PYPI_PASSWORD_ACCEL }}

.github/workflows/upload.yml

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -117,7 +117,8 @@ jobs:
117117
- name: Upload accelerated wheels (svv-accelerated) to PyPI
118118
run: |
119119
set -euo pipefail
120-
files=$(ls dist/svv-accelerated-*.whl 2>/dev/null || true)
120+
# Wheel filenames normalize '-' to '_' (PEP 427), e.g. svv_accelerated-0.0.42-...
121+
files=$(ls dist/svv[-_]accelerated-*.whl 2>/dev/null || true)
121122
if [ -z "$files" ]; then
122123
echo "No svv-accelerated artifacts found; skipping upload."
123124
exit 0
@@ -159,4 +160,5 @@ jobs:
159160
files: |
160161
dist/svv-*.whl
161162
dist/svv-*.tar.gz
163+
dist/svv_accelerated-*.whl
162164
dist/svv-accelerated-*.whl

MANIFEST.in

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ include README.md
55
recursive-include svv/utils/spatial *.pyx
66
recursive-include svv/tree/utils *.pyx
77
recursive-include svv/domain/routines *.pyx
8+
recursive-include svv/simulation/utils *.pyx
89

910
# Include all remesher executables
1011
include svv/utils/remeshing/Linux/mmg2d_O3
@@ -20,5 +21,8 @@ include svv/utils/remeshing/Windows/mmg2d_O3.exe
2021
include svv/utils/remeshing/Windows/mmg3d_O3.exe
2122
include svv/utils/remeshing/Windows/mmgs_O3.exe
2223

24+
# Include GUI assets (theme tokens, icons, etc.)
25+
recursive-include svv/visualize/gui *.json *.qss *.png *.svg
26+
2327
# Exclude any compiled extension artifacts from source distribution
2428
global-exclude *.so *.pyd *.dylib *.dll

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
[build-system]
22
requires = [
33
"setuptools>=45.0",
4-
"wheel>=0.36; python_version < '3.12' ",
4+
"wheel>=0.36",
55
# Ensure PEP 517 build environments have the headers/tools needed to compile extensions
66
"Cython>=3.0.7",
77
"numpy>=1.24; python_version < '3.12'",

setup.py

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -612,7 +612,19 @@ def parse_requirements(path="requirements.txt"):
612612
long_description=DESCRIPTION,
613613
long_description_content_type="text/markdown",
614614
ext_modules=cythonize(extensions) if (extensions and HAS_CYTHON) else [],
615-
package_data=( {'svv.bin': ['*']} if not ACCEL_COMPANION else {} ),
615+
package_data=(
616+
{
617+
'svv.bin': ['*'],
618+
'svv.visualize.gui': [
619+
'design_tokens.json',
620+
'theme.qss',
621+
'svIcon.png',
622+
'icons/*.svg',
623+
],
624+
}
625+
if not ACCEL_COMPANION
626+
else {}
627+
),
616628
exclude_package_data=(
617629
{"svv": ["*.so", "*.pyd", "*.dylib", "*.dll"]} if not ACCEL_COMPANION else {}
618630
),

svv/domain/domain.py

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -92,10 +92,20 @@ def save(self, path, include_boundary=False, include_mesh=False, include_patch_n
9292
boundary/mesh. Ensure `create()`, `solve()`, and `build()` have run so
9393
these arrays exist; otherwise save() will raise an error.
9494
- Boundary and mesh persistence are optional to keep files compact.
95+
96+
Returns
97+
-------
98+
str
99+
The path written to (with the ``.dmn`` extension enforced).
95100
"""
96101
from svv.domain.io.dmn import write_dmn
97-
write_dmn(self, path, include_boundary=include_boundary, include_mesh=include_mesh,
98-
include_patch_normals=include_patch_normals)
102+
return write_dmn(
103+
self,
104+
path,
105+
include_boundary=include_boundary,
106+
include_mesh=include_mesh,
107+
include_patch_normals=include_patch_normals,
108+
)
99109

100110
@classmethod
101111
def load(cls, path):

svv/domain/io/dmn.py

Lines changed: 53 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import os
22
import json
3-
from typing import Optional
3+
from typing import Optional, Union
44

55
import numpy as np
66
from scipy.spatial import cKDTree
@@ -14,10 +14,30 @@
1414
DMN_FORMAT = "svv.domain/1.0"
1515

1616

17-
def _ensure_ext(path: str) -> str:
18-
if not path.lower().endswith(".dmn"):
19-
return path + ".dmn"
20-
return path
17+
def ensure_dmn_path(path: Union[str, os.PathLike]) -> str:
18+
"""
19+
Normalize an output path for .dmn persistence.
20+
21+
- Accepts ``str`` or ``PathLike`` objects.
22+
- Ensures the returned path ends with a lowercase ``.dmn`` extension.
23+
- Collapses legacy ``.dmn.npz`` filenames (from NumPy ``savez`` behavior)
24+
to ``.dmn``.
25+
"""
26+
path_str = os.fsdecode(os.fspath(path))
27+
lower = path_str.lower()
28+
29+
# Legacy: np.savez_compressed("name.dmn", ...) writes "name.dmn.npz"
30+
if lower.endswith(".npz") and lower[:-4].endswith(".dmn"):
31+
path_str = path_str[:-4]
32+
lower = lower[:-4]
33+
34+
if lower.endswith(".dmn"):
35+
return path_str[:-4] + ".dmn"
36+
return path_str + ".dmn"
37+
38+
39+
def _ensure_ext(path: Union[str, os.PathLike]) -> str:
40+
return ensure_dmn_path(path)
2141

2242

2343
def _compute_firsts_from_pts(PTS: np.ndarray) -> np.ndarray:
@@ -39,8 +59,8 @@ def _compute_firsts_from_pts(PTS: np.ndarray) -> np.ndarray:
3959
return firsts
4060

4161

42-
def write_dmn(domain, path: str, include_boundary: bool = False, include_mesh: bool = False,
43-
include_patch_normals: bool = True) -> None:
62+
def write_dmn(domain, path: Union[str, os.PathLike], include_boundary: bool = False, include_mesh: bool = False,
63+
include_patch_normals: bool = True) -> str:
4464
"""
4565
Serialize a Domain instance into a .dmn file.
4666
@@ -188,9 +208,33 @@ def write_dmn(domain, path: str, include_boundary: bool = False, include_mesh: b
188208
# Use a file handle to avoid NumPy forcing a .npz extension
189209
with open(out_path, "wb") as fh:
190210
np.savez_compressed(fh, **arrays)
211+
return out_path
212+
213+
214+
def resolve_dmn_read_path(path: Union[str, os.PathLike]) -> str:
215+
"""
216+
Resolve a path for reading a Domain from disk.
217+
218+
Accepts filenames both with and without the ``.dmn`` suffix. Also attempts
219+
to load legacy ``.dmn.npz`` files created by passing a ``.dmn`` filename
220+
directly to NumPy ``savez`` routines.
221+
"""
222+
path_str = os.fsdecode(os.fspath(path))
223+
if os.path.isfile(path_str):
224+
return path_str
225+
226+
candidate = ensure_dmn_path(path_str)
227+
if os.path.isfile(candidate):
228+
return candidate
229+
230+
legacy_candidate = candidate + ".npz"
231+
if os.path.isfile(legacy_candidate):
232+
return legacy_candidate
233+
234+
return candidate
191235

192236

193-
def read_dmn(path: str):
237+
def read_dmn(path: Union[str, os.PathLike]):
194238
"""
195239
Deserialize a Domain from a .dmn file.
196240
@@ -213,7 +257,7 @@ def read_dmn(path: str):
213257
from svv.domain.domain import Domain
214258
from svv.domain.patch import Patch
215259

216-
file_path = _ensure_ext(path)
260+
file_path = resolve_dmn_read_path(path)
217261
data = np.load(file_path, allow_pickle=False)
218262

219263
# Parse metadata

svv/domain/io/read.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,9 @@ def read(data, **kwargs):
77
Read and process mesh data. Supports PyVista objects and file paths to mesh files.
88
99
Parameters:
10-
data: pyvista.PolyData or str
10+
data: pyvista.PolyData, pyvista.UnstructuredGrid, or str
1111
Input mesh data as a PyVista object or a file path to a supported mesh file.
12+
For UnstructuredGrid objects (e.g., from .vtu files), the surface is extracted.
1213
**kwargs:
1314
feature_angle: float
1415
Angle used to determine sharp edges for normal computation.
@@ -29,8 +30,12 @@ def read(data, **kwargs):
2930
if isinstance(data, str):
3031
data = pyvista.read(data)
3132

33+
# Handle UnstructuredGrid by extracting surface (e.g., from .vtu files)
34+
if isinstance(data, pyvista.UnstructuredGrid):
35+
data = data.extract_surface()
36+
3237
if not isinstance(data, pyvista.PolyData):
33-
raise TypeError("Input data must be a PyVista PolyData object or a valid file path.")
38+
raise TypeError("Input data must be a PyVista PolyData, UnstructuredGrid, or a valid file path.")
3439

3540
data = data.compute_normals(split_vertices=True, feature_angle=feature_angle)
3641

svv/tree/branch/root.py

Lines changed: 22 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -139,18 +139,30 @@ def set_root(tree, **kwargs):
139139
lower = set(tree.domain.mesh_tree.query_ball_point(_start, threshold)[0])
140140
cells = list(upper.difference(lower))
141141
if tree.domain.points.shape[1] == 2:
142-
cells = tree.domain.random_generator.choice(cells, min(attempts, len(cells)),
143-
p=(tree.domain.mesh['Area'][cells] /
144-
tree.domain.mesh['Area'][cells].sum()),
142+
areas = tree.domain.mesh['Area'][cells]
143+
valid_mask = areas > 0
144+
if not numpy.any(valid_mask):
145+
count += 1
146+
continue
147+
valid_indices = numpy.array(cells)[valid_mask]
148+
valid_areas = areas[valid_mask]
149+
p = valid_areas / valid_areas.sum()
150+
p = p / p.sum() # Ensure exact sum of 1.0 for numerical stability
151+
cells = tree.domain.random_generator.choice(valid_indices, min(attempts, len(valid_indices)),
152+
p=p,
145153
replace=False)
146154
elif tree.domain.points.shape[1] == 3:
147-
print("Cells: ", cells)
148-
print("Mesh Volume: ", tree.domain.mesh['Volume'][cells])
149-
print("Mesh Volume Sum: ", tree.domain.mesh['Volume'][cells].sum())
150-
print("Mesh Volume Fraction: ", tree.domain.mesh['Volume'][cells] / tree.domain.mesh['Volume'][cells].sum())
151-
cells = tree.domain.random_generator.choice(cells, min(attempts, len(cells)),
152-
p=(tree.domain.mesh['Volume'][cells] /
153-
tree.domain.mesh['Volume'][cells].sum()),
155+
volumes = tree.domain.mesh['Volume'][cells]
156+
valid_mask = volumes > 0
157+
if not numpy.any(valid_mask):
158+
count += 1
159+
continue
160+
valid_indices = numpy.array(cells)[valid_mask]
161+
valid_volumes = volumes[valid_mask]
162+
p = valid_volumes / valid_volumes.sum()
163+
p = p / p.sum() # Ensure exact sum of 1.0 for numerical stability
164+
cells = tree.domain.random_generator.choice(valid_indices, min(attempts, len(valid_indices)),
165+
p=p,
154166
replace=False)
155167
else:
156168
raise ValueError("Only 2D and 3D domains are supported.")

0 commit comments

Comments (0)