diff --git a/.cursor/rules/project-standards.mdc b/.cursor/rules/project-standards.mdc
new file mode 100644
index 0000000..be3d6f4
--- /dev/null
+++ b/.cursor/rules/project-standards.mdc
@@ -0,0 +1,80 @@
+---
+description: PhysioMotion4D project standards and workflow preferences
+alwaysApply: true
+---
+
+# PhysioMotion4D Project Standards
+
+## File Operations
+
+**ALWAYS use git commands for file operations in this repository:**
+
+```bash
+# Moving files
+git mv old_path new_path
+
+# Deleting files
+git rm file_path
+
+# Renaming files
+git mv old_name.py new_name.py
+```
+
+❌ **Don't use**: `mv`, `rm`, `cp` directly
+✅ **Do use**: `git mv`, `git rm` to maintain git history
+
+## Documentation
+
+**Do NOT create extra documentation files** describing what was done:
+
+❌ **Don't create**:
+- `MIGRATION.md`
+- `CHANGES.md`
+- `UPDATE_SUMMARY.md`
+- `MODERNIZATION_*.md`
+- Similar meta-documentation files
+
+✅ **Do document**:
+- In-code docstrings
+- README files for new modules
+- Inline comments for complex logic
+- API documentation in existing docs
+
+## Backward Compatibility
+
+**Backward compatibility is NOT a priority** for this project:
+
+- Feel free to make breaking changes to improve code quality
+- Remove deprecated code without extensive migration paths
+- Update APIs for clarity and consistency
+- Prioritize modern, clean design over legacy support
+
+## Code Style
+
+- Use descriptive variable and function names
+- Add type hints to Python functions
+- Keep functions focused and small
+- Use `logging` module instead of `print` statements
+- Follow PEP 8 for Python code
+
+## Testing
+
+- Test new functionality with Jupyter notebooks in `experiments/`
+- Update existing tests when changing APIs
+- Use meaningful test names that describe what is being tested
+
+## Git Workflow
+
+**Do NOT stage files automatically:**
+
+❌ **Don't use**: `git add`, `git stage`
+✅ **Do use**: `git status` to show what changed
+✅ **User will**: Stage files themselves when ready
+
+The user prefers to review and stage changes manually.
+
+**Other git guidelines:**
+- Use `git rm` and `git mv` for file operations
+- Make atomic commits with clear messages
+- Don't commit large binary files (add to `.gitignore`)
+- Use `git status` to verify changes before committing
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2cc134e..a4de808 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -37,4 +37,4 @@ repos:
pass_filenames: false
always_run: false
files: ^(src/physiomotion4d/|tests/)
- stages: [pre-push]
\ No newline at end of file
+ stages: [pre-push]
diff --git a/README.md b/README.md
index 563c47e..57a8ab1 100644
--- a/README.md
+++ b/README.md
@@ -107,8 +107,16 @@ print(f"PhysioMotion4D version: {physiomotion4d.__version__}")
- **Utility Classes**: Tools for data manipulation and conversion
- `TransformTools`: Comprehensive transform manipulation utilities
- `USDTools`: USD file manipulation for Omniverse integration
+ - `USDAnatomyTools`: Apply surgical materials to anatomy meshes
- `ImageTools`: Medical image processing utilities
- `ContourTools`: Mesh extraction and contour manipulation
+- **USD Conversion**: VTK to USD conversion for Omniverse visualization
+ - `ConvertVTKToUSD`: High-level converter for PyVista/VTK objects with colormap support
+ - `vtk_to_usd` module: File-based conversion library
+ - `VTKToUSDConverter`: Core converter with time-series support
+ - `read_vtk_file()`: Read VTK/VTP/VTU files into MeshData
+ - `ConversionSettings`: Configurable conversion parameters
+ - `MaterialData`: USD material definitions
### Key Dependencies
@@ -261,6 +269,80 @@ inverse_transform = results["inverse_transform"] # Fixed to moving
forward_transform = results["forward_transform"] # Moving to fixed
```
+### VTK to USD Conversion
+
+PhysioMotion4D provides two APIs for converting VTK data to USD for NVIDIA Omniverse visualization:
+
+#### Option 1: High-Level ConvertVTKToUSD (for PyVista/VTK objects)
+
+```python
+from physiomotion4d import ConvertVTKToUSD
+import pyvista as pv
+
+# Load VTK data
+meshes = [pv.read(f"cardiac_frame_{i:03d}.vtp") for i in range(20)]
+
+# Convert to animated USD with anatomical labels
+converter = ConvertVTKToUSD(
+ data_basename='CardiacModel',
+ input_polydata=meshes,
+ mask_ids={1: 'ventricle', 2: 'atrium', 3: 'vessels'},
+ compute_normals=True
+)
+
+# Optional: Apply colormap visualization
+converter.set_colormap(
+ color_by_array='transmembrane_potential',
+ colormap='rainbow',
+ intensity_range=(-80.0, 20.0)
+)
+
+stage = converter.convert('cardiac_motion.usd')
+```
+
+#### Option 2: File-Based vtk_to_usd Library
+
+```python
+from physiomotion4d.vtk_to_usd import (
+ VTKToUSDConverter,
+ ConversionSettings,
+ MaterialData,
+ convert_vtk_file,
+)
+
+# Simple single-file conversion
+stage = convert_vtk_file('mesh.vtp', 'output.usd')
+
+# Advanced: Custom settings and materials
+settings = ConversionSettings(
+ triangulate_meshes=True,
+ compute_normals=True,
+ meters_per_unit=0.001, # mm to meters
+ times_per_second=60.0,
+)
+
+material = MaterialData(
+ name="cardiac_tissue",
+ diffuse_color=(0.9, 0.3, 0.3),
+ roughness=0.4,
+)
+
+converter = VTKToUSDConverter(settings)
+stage = converter.convert_file('heart.vtp', 'heart.usd', material=material)
+
+# Time-series conversion
+files = ['frame_000.vtp', 'frame_001.vtp', 'frame_002.vtp']
+time_codes = [0.0, 0.1, 0.2]
+stage = converter.convert_sequence(files, 'animated.usd', time_codes=time_codes)
+```
+
+Features:
+- Automatic coordinate system conversion (RAS to Y-up)
+- Material system with UsdPreviewSurface
+- Preserves all VTK data arrays as USD primvars
+- Time-series animation support
+- Supports VTP, VTK, and VTU file formats
+
### Logging and Debug Control
PhysioMotion4D provides standardized logging through the `PhysioMotion4DBase` class, which is inherited by workflow and registration classes.
diff --git a/data/CHOP-Valve4D/.gitignore b/data/CHOP-Valve4D/.gitignore
new file mode 100644
index 0000000..b689538
--- /dev/null
+++ b/data/CHOP-Valve4D/.gitignore
@@ -0,0 +1,2 @@
+Alterra*
+TPV*
diff --git a/data/KCL-Heart-Model/README.md b/data/KCL-Heart-Model/README.md
index b3aedc3..f681bba 100644
--- a/data/KCL-Heart-Model/README.md
+++ b/data/KCL-Heart-Model/README.md
@@ -1,5 +1,17 @@
# KCL Heart Model Dataset
+## ⚠️ Manual Download Required
+
+**This data is NOT automatically downloaded.** Users must manually download and preprocess the required files.
+
+### Required Files for PhysioMotion4D
+
+The following files must be present in this directory for VTK to USD conversion:
+- `average_surface.vtp` - Mean heart surface mesh (VTK PolyData format)
+- `average_mesh.vtk` - Mean heart volume mesh (VTK UnstructuredGrid format)
+
+These files can be generated from the KCL dataset using conversion tools provided in this directory.
+
## Overview
This directory contains data from the King's College London (KCL) four-chamber heart model dataset, which provides a virtual cohort of adult healthy heart meshes derived from CT images.
@@ -74,4 +86,3 @@ For questions about the dataset, contact the original authors:
- Pablo Lamata, King's College London
For questions about SlicerSALT, contact: beatriz.paniagua@kitware.com
-
diff --git a/data/README.md b/data/README.md
index 1b68e3c..e423128 100644
--- a/data/README.md
+++ b/data/README.md
@@ -6,14 +6,28 @@ This directory contains sample datasets used for experiments, testing, and devel
```
data/
-├── Slicer-Heart-CT/ # 4D cardiac CT with gated cardiac phases
-├── DirLab-4DCT/ # 4D lung CT benchmark dataset (respiratory motion)
-├── KCL-Heart-Model/ # Statistical shape model of the heart
+├── Slicer-Heart-CT/ # 4D cardiac CT with gated cardiac phases (AUTO-DOWNLOAD)
+├── DirLab-4DCT/ # 4D lung CT benchmark dataset (MANUAL)
+├── KCL-Heart-Model/ # Statistical shape model of the heart (MANUAL)
+├── CHOP-Valve4D/ # 4D valve models (MANUAL)
```
+## 📥 Data Download Methods
+
+### Automatic Download (Only Slicer-Heart-CT)
+Only the **Slicer-Heart-CT** dataset can be automatically downloaded by running the appropriate notebook.
+
+### Manual Download (All Others)
+The following datasets must be **manually downloaded and preprocessed** by the user:
+- **DirLab-4DCT**: Respiratory motion benchmark data
+- **KCL-Heart-Model**: Statistical cardiac shape models
+- **CHOP-Valve4D**: Time-varying valve reconstructions
+
+See individual dataset sections below for download instructions and preprocessing requirements.
+
---
-## 🫀 Slicer-Heart-CT
+## 🫀 Slicer-Heart-CT ✅ AUTO-DOWNLOAD
### Description
4D cardiac CT dataset with temporal gating showing complete cardiac cycle motion. Pediatric cardiac CT with truncal valve visualization.
@@ -38,7 +52,7 @@ Data provided by Jolley Lab at CHOP (Children's Hospital of Philadelphia):
# experiments/Heart-GatedCT_To_USD/0-download_and_convert_4d_to_3d.ipynb
```
-**Manual download**:
+**Manual download** (alternative):
```bash
# Direct download link:
wget https://github.com/Slicer-Heart-CT/Slicer-Heart-CT/releases/download/TestingData/TruncalValve_4DCT.seq.nrrd -P data/Slicer-Heart-CT/
@@ -51,7 +65,7 @@ wget https://github.com/Slicer-Heart-CT/Slicer-Heart-CT/releases/download/Testin
---
-## 🫁 DirLab-4DCT
+## 🫁 DirLab-4DCT ⚠️ MANUAL DOWNLOAD
### Description
Benchmark dataset for 4D CT respiratory motion analysis. Contains 10 cases of lung CT scans at different respiratory phases with annotated landmark points for registration validation.
@@ -72,25 +86,23 @@ Data provided by the DIR-Lab at MD Anderson Cancer Center:
### Downloading the Data
-**Python API** (recommended):
-```python
-from physiomotion4d.lung_gatedct_to_usd import data_dirlab_4d_ct
+⚠️ **MANUAL DOWNLOAD REQUIRED**
-# Download specific case
-downloader = data_dirlab_4d_ct.DirLab4DCT()
-downloader.download_case(1) # Downloads Case 1
+Users must manually download and preprocess this dataset. Follow these steps:
-# Or download multiple cases
-for case_num in [1, 2, 3]:
- downloader.download_case(case_num)
-```
-
-**Automatic download via notebooks**:
+**Step 1: Download (helper utilities available)**
```python
+# Using provided utilities in experiments notebooks
# See: experiments/Lung-GatedCT_To_USD/0-register_dirlab_4dct.ipynb
-# The notebook includes automatic download functionality
+# The notebook includes download utilities; you must run it manually to fetch the data
```
+**Step 2: User Preprocessing**
+Users are responsible for:
+- Downloading data from DIR-Lab website
+- Extracting and organizing files in the proper directory structure
+- Running preprocessing notebooks if needed
+
### Directory Structure
```
DirLab-4DCT/
@@ -113,15 +125,15 @@ DirLab-4DCT/
---
-## 🧠 KCL-Heart-Model
+## 🧠 KCL-Heart-Model ⚠️ MANUAL DOWNLOAD
### Description
Statistical shape model (SSM) of the human heart derived from cardiac imaging data. Includes principal component analysis (PCA) modes of shape variation.
### Specifications
-- **Format**: `.vtk` (VTK PolyData format)
-- **Content**:
- - Average heart mesh
+- **Format**: `.vtk`, `.vtp` (VTK PolyData formats)
+- **Content**:
+ - Average heart surface and mesh
- Individual heart models
- PCA eigenvectors
- Mode standard deviations
@@ -129,7 +141,8 @@ Statistical shape model (SSM) of the human heart derived from cardiac imaging da
- **Components**: Full heart mesh with chambers and vessels
### Files
-- `average.vtk` - Mean heart shape
+- `average_surface.vtp` - Mean heart surface (PolyData)
+- `average_mesh.vtk` - Mean heart volume mesh (UnstructuredGrid)
- `Full_Heart_Mesh_1.vtk` - Example individual heart
- `Eigenvectors.csv` - PCA eigenvectors (shape modes)
- `Mode_standard_deviations.csv` - Standard deviation for each mode
@@ -140,20 +153,86 @@ Statistical shape model (SSM) of the human heart derived from cardiac imaging da
### Acknowledgement
Data from King's College London (KCL):
-- **Repository**: Likely from cardiac imaging research group
+- **Repository**: Cardiac imaging research group
- **License**: Check `citation.txt` for proper attribution
### Downloading the Data
-Data is typically obtained from published research repositories or upon request from the authors. Check the included PDFs for source information.
+
+⚠️ **MANUAL DOWNLOAD REQUIRED**
+
+Users must manually obtain this data and place it as follows:
+1. Obtain data from published research repositories or contact authors
+2. Place files in `data/KCL-Heart-Model/` directory
+3. Required files: `average_surface.vtp`, `average_mesh.vtk`
+
+Check the included PDFs (if available) for source information and proper citation.
### Usage
-- Model-to-image registration experiments (`Heart-Model_To_Image_Registration/`)
+- VTK to USD conversion experiments (`experiments/convert_vtk_to_usd_lib/`)
+- Model-to-image registration experiments
- Shape-based cardiac analysis
- Atlas-based segmentation initialization
- Statistical shape analysis
---
+## 🫀 CHOP-Valve4D ⚠️ MANUAL DOWNLOAD
+
+### Description
+Time-varying 4D valve reconstruction models showing valve motion over the cardiac cycle. These datasets represent dynamic valve geometries reconstructed from medical imaging data.
+
+### Specifications
+- **Format**: `.vtk` (VTK PolyData files)
+- **Content**: Time series of valve surface meshes
+- **Valves**: Alterra, TPV25, and other valve types
+- **Phases**: Multiple time points per cardiac cycle (200+ frames)
+- **Resolution**: High-resolution surface meshes with anatomical features
+
+### Directory Structure
+```
+CHOP-Valve4D/
+├── Alterra/
+│ ├── frame_0000.vtk
+│ ├── frame_0001.vtk
+│ └── ... (232 frames)
+├── TPV25/
+│ ├── frame_0000.vtk
+│ ├── frame_0001.vtk
+│ └── ... (265 frames)
+```
+
+### Acknowledgement
+Data provided by Jolley Lab at CHOP (Children's Hospital of Philadelphia):
+- https://www.linkedin.com/company/jolleylab
+
+### Downloading the Data
+
+⚠️ **MANUAL DOWNLOAD REQUIRED**
+
+**Availability**: This dataset will soon be publicly available for download from the **FEBio website** under the **Creative Commons Attribution (CC-BY) license**.
+
+- **Source**: https://febio.org/ (coming soon)
+- **License**: CC-BY (Creative Commons Attribution)
+- **Citation**: Please cite the Jolley Lab and FEBio when using this data
+
+**Setup Instructions**:
+1. Download valve reconstruction data from FEBio website when available
+2. Place files in `data/CHOP-Valve4D/` with proper subdirectory structure
+3. Ensure files are named sequentially for time-series processing (e.g., `frame_0000.vtk`, `frame_0001.vtk`, ...)
+4. Organize by valve type in subdirectories (e.g., `Alterra/`, `TPV25/`)
+
+### Usage
+- Time-series VTK to USD conversion (`experiments/convert_vtk_to_usd_lib/valve4d_time_series.ipynb`)
+- 4D valve motion visualization in NVIDIA Omniverse
+- Temporal cardiac mechanics analysis
+- Valve dynamics studies and surgical planning
+
+### Related Resources
+- **FEBio**: Finite Element Biomechanics software suite (https://febio.org/)
+- **Jolley Lab**: Cardiac imaging and computational modeling research
+
+---
+
## 📝 Data Usage Guidelines
### For Testing
@@ -175,12 +254,21 @@ Data is typically obtained from published research repositories or upon request
## 🔒 Data Access and Licensing
-- **Slicer-Heart-CT**: Public release from GitHub
-- **DirLab-4DCT**: Public benchmark dataset (may require registration)
-- **KCL-Heart-Model**: Check included citation and license files
+- **Slicer-Heart-CT** ✅: Public release from GitHub (auto-download available)
+- **DirLab-4DCT** ⚠️: Public benchmark dataset (manual download required, may require registration)
+- **KCL-Heart-Model** ⚠️: Requires manual download from research repositories
+- **CHOP-Valve4D** ⚠️: Soon available from FEBio website under CC-BY license (manual download)
⚠️ **Important**: Always cite the original data sources in publications and respect any usage restrictions.
+### Summary of Download Methods
+| Dataset | Auto-Download | Manual Required | License | Source | Used in Tests |
+| --------------- | ------------- | --------------- | --------------- | ------------------- | ------------------------ |
+| Slicer-Heart-CT | ✅ Yes | No | Public | GitHub | Yes |
+| DirLab-4DCT | ❌ No | Yes | Public/Academic | DIR-Lab | No |
+| KCL-Heart-Model | ❌ No | Yes | Check citation | Zenodo/KCL | Yes (skipped if missing) |
+| CHOP-Valve4D | ❌ No | Yes | CC-BY | FEBio (coming soon) | No |
+
---
## 📚 References
@@ -194,7 +282,15 @@ Data is typically obtained from published research repositories or upon request
- DIR-Lab: https://med.emory.edu/departments/radiation-oncology/research-laboratories/deformable-image-registration/
### KCL-Heart-Model
-- See included PDFs and citation.txt for proper attribution
+- Rodero et al. (2021), "Linking statistical shape models and simulated function in the healthy adult human heart", *PLOS Computational Biology*
+- DOI: [10.1371/journal.pcbi.1008851](https://doi.org/10.1371/journal.pcbi.1008851)
+- Zenodo: https://zenodo.org/records/4590294
+
+### CHOP-Valve4D
+- Jolley Lab (CHOP): https://www.linkedin.com/company/jolleylab
+- FEBio Project: https://febio.org/ (dataset coming soon)
+- License: Creative Commons Attribution (CC-BY)
+- Citation: Please acknowledge Jolley Lab at CHOP and the FEBio Project
---
diff --git a/experiments/Colormap-VTK_To_USD/colormap_vtk_to_usd.ipynb b/experiments/Colormap-VTK_To_USD/colormap_vtk_to_usd.ipynb
index a20bf17..61ee966 100644
--- a/experiments/Colormap-VTK_To_USD/colormap_vtk_to_usd.ipynb
+++ b/experiments/Colormap-VTK_To_USD/colormap_vtk_to_usd.ipynb
@@ -34,10 +34,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:07:54.229564Z",
- "iopub.status.busy": "2026-01-28T01:07:54.229190Z",
- "iopub.status.idle": "2026-01-28T01:08:10.221917Z",
- "shell.execute_reply": "2026-01-28T01:08:10.220909Z"
+ "iopub.execute_input": "2026-01-30T04:17:59.994030Z",
+ "iopub.status.busy": "2026-01-30T04:17:59.994030Z",
+ "iopub.status.idle": "2026-01-30T04:18:15.079053Z",
+ "shell.execute_reply": "2026-01-30T04:18:15.078550Z"
}
},
"outputs": [],
@@ -72,10 +72,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:10.232929Z",
- "iopub.status.busy": "2026-01-28T01:08:10.232929Z",
- "iopub.status.idle": "2026-01-28T01:08:10.252021Z",
- "shell.execute_reply": "2026-01-28T01:08:10.251013Z"
+ "iopub.execute_input": "2026-01-30T04:18:15.096446Z",
+ "iopub.status.busy": "2026-01-30T04:18:15.096446Z",
+ "iopub.status.idle": "2026-01-30T04:18:15.110477Z",
+ "shell.execute_reply": "2026-01-30T04:18:15.109315Z"
}
},
"outputs": [],
@@ -130,10 +130,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:10.254170Z",
- "iopub.status.busy": "2026-01-28T01:08:10.254170Z",
- "iopub.status.idle": "2026-01-28T01:08:12.342965Z",
- "shell.execute_reply": "2026-01-28T01:08:12.341674Z"
+ "iopub.execute_input": "2026-01-30T04:18:15.112289Z",
+ "iopub.status.busy": "2026-01-30T04:18:15.112289Z",
+ "iopub.status.idle": "2026-01-30T04:18:15.275065Z",
+ "shell.execute_reply": "2026-01-30T04:18:15.274165Z"
}
},
"outputs": [],
@@ -153,7 +153,7 @@
"print(\"Available point data arrays:\")\n",
"available = converter.list_available_arrays()\n",
"for name, info in available.items():\n",
- " print(f\" - {name}: range={info.range}, dtype={info.dtype}\")\n",
+ " print(f\" - {name}: range={info['range']}, dtype={info['dtype']}\")\n",
"\n",
"# Set colormap (automatic range)\n",
"converter.set_colormap(\n",
@@ -182,10 +182,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:12.345424Z",
- "iopub.status.busy": "2026-01-28T01:08:12.345424Z",
- "iopub.status.idle": "2026-01-28T01:08:13.940763Z",
- "shell.execute_reply": "2026-01-28T01:08:13.940256Z"
+ "iopub.execute_input": "2026-01-30T04:18:15.276916Z",
+ "iopub.status.busy": "2026-01-30T04:18:15.276916Z",
+ "iopub.status.idle": "2026-01-30T04:18:15.425881Z",
+ "shell.execute_reply": "2026-01-30T04:18:15.424995Z"
}
},
"outputs": [],
@@ -224,10 +224,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:13.943785Z",
- "iopub.status.busy": "2026-01-28T01:08:13.942769Z",
- "iopub.status.idle": "2026-01-28T01:08:16.443870Z",
- "shell.execute_reply": "2026-01-28T01:08:16.442878Z"
+ "iopub.execute_input": "2026-01-30T04:18:15.428422Z",
+ "iopub.status.busy": "2026-01-30T04:18:15.428422Z",
+ "iopub.status.idle": "2026-01-30T04:18:15.560816Z",
+ "shell.execute_reply": "2026-01-30T04:18:15.560052Z"
}
},
"outputs": [],
@@ -243,7 +243,7 @@
"\n",
"converter.set_colormap(\n",
" color_by_array=\"temperature\",\n",
- " colormap=\"heat\",\n",
+ " colormap=\"hot\", # 'heat' alias maps to 'hot' colormap\n",
" intensity_range=(15.0, 40.0), # Temperature range in Celsius\n",
")\n",
"\n",
@@ -266,10 +266,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:16.446870Z",
- "iopub.status.busy": "2026-01-28T01:08:16.445877Z",
- "iopub.status.idle": "2026-01-28T01:08:19.333449Z",
- "shell.execute_reply": "2026-01-28T01:08:19.332465Z"
+ "iopub.execute_input": "2026-01-30T04:18:15.562378Z",
+ "iopub.status.busy": "2026-01-30T04:18:15.562378Z",
+ "iopub.status.idle": "2026-01-30T04:18:15.696063Z",
+ "shell.execute_reply": "2026-01-30T04:18:15.695660Z"
}
},
"outputs": [],
@@ -308,10 +308,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:19.336369Z",
- "iopub.status.busy": "2026-01-28T01:08:19.335370Z",
- "iopub.status.idle": "2026-01-28T01:08:21.885779Z",
- "shell.execute_reply": "2026-01-28T01:08:21.884780Z"
+ "iopub.execute_input": "2026-01-30T04:18:15.697980Z",
+ "iopub.status.busy": "2026-01-30T04:18:15.697980Z",
+ "iopub.status.idle": "2026-01-30T04:18:15.832131Z",
+ "shell.execute_reply": "2026-01-30T04:18:15.831378Z"
}
},
"outputs": [],
@@ -348,10 +348,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:21.887781Z",
- "iopub.status.busy": "2026-01-28T01:08:21.887781Z",
- "iopub.status.idle": "2026-01-28T01:08:22.263163Z",
- "shell.execute_reply": "2026-01-28T01:08:22.262286Z"
+ "iopub.execute_input": "2026-01-30T04:18:15.834005Z",
+ "iopub.status.busy": "2026-01-30T04:18:15.834005Z",
+ "iopub.status.idle": "2026-01-30T04:18:15.967891Z",
+ "shell.execute_reply": "2026-01-30T04:18:15.967135Z"
}
},
"outputs": [],
@@ -394,10 +394,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:22.265487Z",
- "iopub.status.busy": "2026-01-28T01:08:22.264177Z",
- "iopub.status.idle": "2026-01-28T01:08:24.416345Z",
- "shell.execute_reply": "2026-01-28T01:08:24.415504Z"
+ "iopub.execute_input": "2026-01-30T04:18:15.969564Z",
+ "iopub.status.busy": "2026-01-30T04:18:15.969564Z",
+ "iopub.status.idle": "2026-01-30T04:18:16.104940Z",
+ "shell.execute_reply": "2026-01-30T04:18:16.103947Z"
}
},
"outputs": [],
@@ -458,10 +458,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:24.417994Z",
- "iopub.status.busy": "2026-01-28T01:08:24.417994Z",
- "iopub.status.idle": "2026-01-28T01:08:24.431917Z",
- "shell.execute_reply": "2026-01-28T01:08:24.430667Z"
+ "iopub.execute_input": "2026-01-30T04:18:16.106943Z",
+ "iopub.status.busy": "2026-01-30T04:18:16.105951Z",
+ "iopub.status.idle": "2026-01-30T04:18:16.119940Z",
+ "shell.execute_reply": "2026-01-30T04:18:16.118939Z"
}
},
"outputs": [],
diff --git a/experiments/Heart-GatedCT_To_USD/0-download_and_convert_4d_to_3d.ipynb b/experiments/Heart-GatedCT_To_USD/0-download_and_convert_4d_to_3d.ipynb
index 3da6865..64d4112 100644
--- a/experiments/Heart-GatedCT_To_USD/0-download_and_convert_4d_to_3d.ipynb
+++ b/experiments/Heart-GatedCT_To_USD/0-download_and_convert_4d_to_3d.ipynb
@@ -5,10 +5,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:20:44.628453Z",
- "iopub.status.busy": "2026-01-28T03:20:44.628453Z",
- "iopub.status.idle": "2026-01-28T03:21:01.117771Z",
- "shell.execute_reply": "2026-01-28T03:21:01.116763Z"
+ "iopub.execute_input": "2026-01-30T06:15:02.020711Z",
+ "iopub.status.busy": "2026-01-30T06:15:02.020188Z",
+ "iopub.status.idle": "2026-01-30T06:15:16.576256Z",
+ "shell.execute_reply": "2026-01-30T06:15:16.575254Z"
}
},
"outputs": [],
@@ -25,10 +25,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:21:01.120033Z",
- "iopub.status.busy": "2026-01-28T03:21:01.119007Z",
- "iopub.status.idle": "2026-01-28T03:21:01.133471Z",
- "shell.execute_reply": "2026-01-28T03:21:01.131905Z"
+ "iopub.execute_input": "2026-01-30T06:15:16.578313Z",
+ "iopub.status.busy": "2026-01-30T06:15:16.577359Z",
+ "iopub.status.idle": "2026-01-30T06:15:16.590352Z",
+ "shell.execute_reply": "2026-01-30T06:15:16.590352Z"
}
},
"outputs": [],
@@ -48,10 +48,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:21:01.135270Z",
- "iopub.status.busy": "2026-01-28T03:21:01.135270Z",
- "iopub.status.idle": "2026-01-28T03:21:01.148453Z",
- "shell.execute_reply": "2026-01-28T03:21:01.146851Z"
+ "iopub.execute_input": "2026-01-30T06:15:16.593075Z",
+ "iopub.status.busy": "2026-01-30T06:15:16.593075Z",
+ "iopub.status.idle": "2026-01-30T06:15:16.606796Z",
+ "shell.execute_reply": "2026-01-30T06:15:16.605787Z"
}
},
"outputs": [],
@@ -68,10 +68,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:21:01.150036Z",
- "iopub.status.busy": "2026-01-28T03:21:01.150036Z",
- "iopub.status.idle": "2026-01-28T03:21:45.375343Z",
- "shell.execute_reply": "2026-01-28T03:21:45.374343Z"
+ "iopub.execute_input": "2026-01-30T06:15:16.608780Z",
+ "iopub.status.busy": "2026-01-30T06:15:16.608780Z",
+ "iopub.status.idle": "2026-01-30T06:16:00.035266Z",
+ "shell.execute_reply": "2026-01-30T06:16:00.034413Z"
}
},
"outputs": [],
diff --git a/experiments/Heart-GatedCT_To_USD/1-register_images.ipynb b/experiments/Heart-GatedCT_To_USD/1-register_images.ipynb
index 3569c00..ee8eab5 100644
--- a/experiments/Heart-GatedCT_To_USD/1-register_images.ipynb
+++ b/experiments/Heart-GatedCT_To_USD/1-register_images.ipynb
@@ -6,10 +6,10 @@
"id": "3ce61753-11ad-4ade-9afe-6ad1bc748e25",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T14:31:31.975124Z",
- "iopub.status.busy": "2026-01-26T14:31:31.975124Z",
- "iopub.status.idle": "2026-01-26T14:31:51.797766Z",
- "shell.execute_reply": "2026-01-26T14:31:51.796503Z"
+ "iopub.execute_input": "2026-01-30T06:16:06.255697Z",
+ "iopub.status.busy": "2026-01-30T06:16:06.255697Z",
+ "iopub.status.idle": "2026-01-30T06:16:21.047282Z",
+ "shell.execute_reply": "2026-01-30T06:16:21.046456Z"
}
},
"outputs": [],
@@ -29,10 +29,10 @@
"id": "9d2e5d21",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T14:31:51.800134Z",
- "iopub.status.busy": "2026-01-26T14:31:51.800134Z",
- "iopub.status.idle": "2026-01-26T14:31:52.537077Z",
- "shell.execute_reply": "2026-01-26T14:31:52.535067Z"
+ "iopub.execute_input": "2026-01-30T06:16:21.049042Z",
+ "iopub.status.busy": "2026-01-30T06:16:21.048415Z",
+ "iopub.status.idle": "2026-01-30T06:16:21.501763Z",
+ "shell.execute_reply": "2026-01-30T06:16:21.500886Z"
}
},
"outputs": [],
@@ -53,10 +53,10 @@
"id": "b35f90c6",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T14:31:52.541875Z",
- "iopub.status.busy": "2026-01-26T14:31:52.541875Z",
- "iopub.status.idle": "2026-01-26T14:33:37.663045Z",
- "shell.execute_reply": "2026-01-26T14:33:37.661850Z"
+ "iopub.execute_input": "2026-01-30T06:16:21.503686Z",
+ "iopub.status.busy": "2026-01-30T06:16:21.503686Z",
+ "iopub.status.idle": "2026-01-30T06:17:45.778181Z",
+ "shell.execute_reply": "2026-01-30T06:17:45.777190Z"
}
},
"outputs": [],
@@ -111,10 +111,10 @@
"id": "10ffbbf6",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T14:33:37.665344Z",
- "iopub.status.busy": "2026-01-26T14:33:37.664521Z",
- "iopub.status.idle": "2026-01-26T14:33:37.677478Z",
- "shell.execute_reply": "2026-01-26T14:33:37.676588Z"
+ "iopub.execute_input": "2026-01-30T06:17:45.781189Z",
+ "iopub.status.busy": "2026-01-30T06:17:45.781189Z",
+ "iopub.status.idle": "2026-01-30T06:17:45.793719Z",
+ "shell.execute_reply": "2026-01-30T06:17:45.792726Z"
}
},
"outputs": [],
@@ -129,10 +129,10 @@
"id": "cc9418da",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T14:33:37.679521Z",
- "iopub.status.busy": "2026-01-26T14:33:37.678464Z",
- "iopub.status.idle": "2026-01-26T15:43:26.205290Z",
- "shell.execute_reply": "2026-01-26T15:43:26.203289Z"
+ "iopub.execute_input": "2026-01-30T06:17:45.796726Z",
+ "iopub.status.busy": "2026-01-30T06:17:45.795718Z",
+ "iopub.status.idle": "2026-01-30T06:46:33.144835Z",
+ "shell.execute_reply": "2026-01-30T06:46:33.143832Z"
}
},
"outputs": [],
diff --git a/experiments/Heart-GatedCT_To_USD/2-generate_segmentation.ipynb b/experiments/Heart-GatedCT_To_USD/2-generate_segmentation.ipynb
index 732383a..343b3ec 100644
--- a/experiments/Heart-GatedCT_To_USD/2-generate_segmentation.ipynb
+++ b/experiments/Heart-GatedCT_To_USD/2-generate_segmentation.ipynb
@@ -6,10 +6,10 @@
"id": "3ce61753-11ad-4ade-9afe-6ad1bc748e25",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:43:36.112468Z",
- "iopub.status.busy": "2026-01-26T15:43:36.111467Z",
- "iopub.status.idle": "2026-01-26T15:43:56.479687Z",
- "shell.execute_reply": "2026-01-26T15:43:56.478870Z"
+ "iopub.execute_input": "2026-01-30T06:46:42.549565Z",
+ "iopub.status.busy": "2026-01-30T06:46:42.548542Z",
+ "iopub.status.idle": "2026-01-30T06:46:58.740412Z",
+ "shell.execute_reply": "2026-01-30T06:46:58.739580Z"
}
},
"outputs": [],
@@ -30,10 +30,10 @@
"id": "b60954cf",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:43:56.483752Z",
- "iopub.status.busy": "2026-01-26T15:43:56.483248Z",
- "iopub.status.idle": "2026-01-26T15:43:56.495540Z",
- "shell.execute_reply": "2026-01-26T15:43:56.494111Z"
+ "iopub.execute_input": "2026-01-30T06:46:58.742701Z",
+ "iopub.status.busy": "2026-01-30T06:46:58.742168Z",
+ "iopub.status.idle": "2026-01-30T06:46:58.755490Z",
+ "shell.execute_reply": "2026-01-30T06:46:58.754541Z"
}
},
"outputs": [],
@@ -50,10 +50,10 @@
"id": "5c2f5e00",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:43:56.497330Z",
- "iopub.status.busy": "2026-01-26T15:43:56.497330Z",
- "iopub.status.idle": "2026-01-26T15:43:56.510544Z",
- "shell.execute_reply": "2026-01-26T15:43:56.509544Z"
+ "iopub.execute_input": "2026-01-30T06:46:58.757387Z",
+ "iopub.status.busy": "2026-01-30T06:46:58.756493Z",
+ "iopub.status.idle": "2026-01-30T06:46:58.770530Z",
+ "shell.execute_reply": "2026-01-30T06:46:58.769653Z"
}
},
"outputs": [],
@@ -99,10 +99,10 @@
"id": "9438634d",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:43:56.512544Z",
- "iopub.status.busy": "2026-01-26T15:43:56.512544Z",
- "iopub.status.idle": "2026-01-26T15:45:32.753051Z",
- "shell.execute_reply": "2026-01-26T15:45:32.752045Z"
+ "iopub.execute_input": "2026-01-30T06:46:58.771679Z",
+ "iopub.status.busy": "2026-01-30T06:46:58.771679Z",
+ "iopub.status.idle": "2026-01-30T06:48:21.803293Z",
+ "shell.execute_reply": "2026-01-30T06:48:21.801972Z"
}
},
"outputs": [],
@@ -143,10 +143,10 @@
"id": "a2325199",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:45:32.754556Z",
- "iopub.status.busy": "2026-01-26T15:45:32.754556Z",
- "iopub.status.idle": "2026-01-26T15:45:47.419509Z",
- "shell.execute_reply": "2026-01-26T15:45:47.419005Z"
+ "iopub.execute_input": "2026-01-30T06:48:21.805534Z",
+ "iopub.status.busy": "2026-01-30T06:48:21.804528Z",
+ "iopub.status.idle": "2026-01-30T06:48:35.145622Z",
+ "shell.execute_reply": "2026-01-30T06:48:35.145622Z"
}
},
"outputs": [],
@@ -162,10 +162,10 @@
"id": "10db4bfa",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:45:47.421364Z",
- "iopub.status.busy": "2026-01-26T15:45:47.421364Z",
- "iopub.status.idle": "2026-01-26T15:45:47.495982Z",
- "shell.execute_reply": "2026-01-26T15:45:47.494973Z"
+ "iopub.execute_input": "2026-01-30T06:48:35.147732Z",
+ "iopub.status.busy": "2026-01-30T06:48:35.147732Z",
+ "iopub.status.idle": "2026-01-30T06:48:35.206708Z",
+ "shell.execute_reply": "2026-01-30T06:48:35.205753Z"
}
},
"outputs": [],
@@ -186,10 +186,10 @@
"id": "4d19335d",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:45:47.497982Z",
- "iopub.status.busy": "2026-01-26T15:45:47.497982Z",
- "iopub.status.idle": "2026-01-26T15:45:51.053077Z",
- "shell.execute_reply": "2026-01-26T15:45:51.052072Z"
+ "iopub.execute_input": "2026-01-30T06:48:35.208709Z",
+ "iopub.status.busy": "2026-01-30T06:48:35.208709Z",
+ "iopub.status.idle": "2026-01-30T06:48:38.514826Z",
+ "shell.execute_reply": "2026-01-30T06:48:38.513835Z"
}
},
"outputs": [],
@@ -215,10 +215,10 @@
"id": "e4601d28",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:45:51.055075Z",
- "iopub.status.busy": "2026-01-26T15:45:51.055075Z",
- "iopub.status.idle": "2026-01-26T15:46:05.088920Z",
- "shell.execute_reply": "2026-01-26T15:46:05.087536Z"
+ "iopub.execute_input": "2026-01-30T06:48:38.516746Z",
+ "iopub.status.busy": "2026-01-30T06:48:38.516746Z",
+ "iopub.status.idle": "2026-01-30T06:48:51.170394Z",
+ "shell.execute_reply": "2026-01-30T06:48:51.169484Z"
}
},
"outputs": [],
@@ -243,10 +243,10 @@
"id": "b9abb28c",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:46:05.093079Z",
- "iopub.status.busy": "2026-01-26T15:46:05.092540Z",
- "iopub.status.idle": "2026-01-26T15:46:09.868953Z",
- "shell.execute_reply": "2026-01-26T15:46:09.868174Z"
+ "iopub.execute_input": "2026-01-30T06:48:51.172391Z",
+ "iopub.status.busy": "2026-01-30T06:48:51.172391Z",
+ "iopub.status.idle": "2026-01-30T06:48:56.013497Z",
+ "shell.execute_reply": "2026-01-30T06:48:56.012614Z"
}
},
"outputs": [],
@@ -300,7 +300,25 @@
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {
- "0e6c56b5543f4eafb347593eae89d5b7": {
+ "3e45c91a8b3d433ba6cac19ef0542074": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HTMLStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HTMLStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "StyleView",
+ "background": null,
+ "description_width": "",
+ "font_size": null,
+ "text_color": null
+ }
+ },
+ "7db82c190a0b49e2b5432b4efa5ad260": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "2.0.0",
"model_name": "LayoutModel",
@@ -353,25 +371,7 @@
"width": null
}
},
- "37a8219d5eab470cb4a1afb9395dfbe7": {
- "model_module": "@jupyter-widgets/controls",
- "model_module_version": "2.0.0",
- "model_name": "HTMLStyleModel",
- "state": {
- "_model_module": "@jupyter-widgets/controls",
- "_model_module_version": "2.0.0",
- "_model_name": "HTMLStyleModel",
- "_view_count": null,
- "_view_module": "@jupyter-widgets/base",
- "_view_module_version": "2.0.0",
- "_view_name": "StyleView",
- "background": null,
- "description_width": "",
- "font_size": null,
- "text_color": null
- }
- },
- "3a44a5c5ee5c4b96850a89c761d0c7ca": {
+ "ed9f33f51f074911864f875b24b7eb50": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "HTMLModel",
@@ -386,12 +386,12 @@
"_view_name": "HTMLView",
"description": "",
"description_allow_html": false,
- "layout": "IPY_MODEL_0e6c56b5543f4eafb347593eae89d5b7",
+ "layout": "IPY_MODEL_7db82c190a0b49e2b5432b4efa5ad260",
"placeholder": "",
- "style": "IPY_MODEL_37a8219d5eab470cb4a1afb9395dfbe7",
+ "style": "IPY_MODEL_3e45c91a8b3d433ba6cac19ef0542074",
"tabbable": null,
"tooltip": null,
- "value": ""
+ "value": ""
}
}
},
diff --git a/experiments/Heart-GatedCT_To_USD/3-transform_dynamic_and_static_contours.ipynb b/experiments/Heart-GatedCT_To_USD/3-transform_dynamic_and_static_contours.ipynb
index 8547a33..13f726b 100644
--- a/experiments/Heart-GatedCT_To_USD/3-transform_dynamic_and_static_contours.ipynb
+++ b/experiments/Heart-GatedCT_To_USD/3-transform_dynamic_and_static_contours.ipynb
@@ -6,10 +6,10 @@
"id": "3ce61753-11ad-4ade-9afe-6ad1bc748e25",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:46:19.341128Z",
- "iopub.status.busy": "2026-01-26T15:46:19.341128Z",
- "iopub.status.idle": "2026-01-26T15:46:35.977101Z",
- "shell.execute_reply": "2026-01-26T15:46:35.975949Z"
+ "iopub.execute_input": "2026-01-30T06:49:03.805357Z",
+ "iopub.status.busy": "2026-01-30T06:49:03.805357Z",
+ "iopub.status.idle": "2026-01-30T06:49:18.920492Z",
+ "shell.execute_reply": "2026-01-30T06:49:18.919499Z"
}
},
"outputs": [],
@@ -20,7 +20,7 @@
"import pyvista as pv\n",
"\n",
"from physiomotion4d.contour_tools import ContourTools\n",
- "from physiomotion4d.convert_vtk_to_usd import ConvertVTKToUSD\n",
+ "from physiomotion4d import ConvertVTKToUSD\n",
"from physiomotion4d.segment_chest_total_segmentator import SegmentChestTotalSegmentator\n",
"from physiomotion4d.usd_anatomy_tools import USDAnatomyTools"
]
@@ -31,10 +31,10 @@
"id": "240f1d14",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:46:35.979348Z",
- "iopub.status.busy": "2026-01-26T15:46:35.978752Z",
- "iopub.status.idle": "2026-01-26T15:46:35.991923Z",
- "shell.execute_reply": "2026-01-26T15:46:35.990925Z"
+ "iopub.execute_input": "2026-01-30T06:49:18.922490Z",
+ "iopub.status.busy": "2026-01-30T06:49:18.922490Z",
+ "iopub.status.idle": "2026-01-30T06:49:18.935492Z",
+ "shell.execute_reply": "2026-01-30T06:49:18.934505Z"
}
},
"outputs": [],
@@ -56,10 +56,10 @@
"id": "ef6002e8",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:46:35.994164Z",
- "iopub.status.busy": "2026-01-26T15:46:35.993581Z",
- "iopub.status.idle": "2026-01-26T15:46:36.007050Z",
- "shell.execute_reply": "2026-01-26T15:46:36.005776Z"
+ "iopub.execute_input": "2026-01-30T06:49:18.937504Z",
+ "iopub.status.busy": "2026-01-30T06:49:18.937504Z",
+ "iopub.status.idle": "2026-01-30T06:49:18.950493Z",
+ "shell.execute_reply": "2026-01-30T06:49:18.949490Z"
}
},
"outputs": [],
@@ -99,11 +99,14 @@
"\n",
" polydata = [pv.read(f) for f in files]\n",
"\n",
+ " # For cardiac gated CT data with 21 time points (0-20), each frame = 1 second\n",
+ " # so we use times_per_second=1.0 instead of the default 24.0\n",
" converter = ConvertVTKToUSD(\n",
" project_name,\n",
" polydata,\n",
" all_mask_ids,\n",
" compute_normals=compute_normals,\n",
+ " times_per_second=1.0,\n",
" )\n",
" stage = converter.convert(\n",
" os.path.join(output_dir, f\"{project_name}.{base_name}.usd\"),\n",
@@ -124,10 +127,10 @@
"id": "90c291bb",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:46:36.009421Z",
- "iopub.status.busy": "2026-01-26T15:46:36.009421Z",
- "iopub.status.idle": "2026-01-26T15:46:38.218265Z",
- "shell.execute_reply": "2026-01-26T15:46:38.217273Z"
+ "iopub.execute_input": "2026-01-30T06:49:18.951492Z",
+ "iopub.status.busy": "2026-01-30T06:49:18.951492Z",
+ "iopub.status.idle": "2026-01-30T06:49:21.049372Z",
+ "shell.execute_reply": "2026-01-30T06:49:21.048307Z"
}
},
"outputs": [],
@@ -159,10 +162,10 @@
"id": "e3d48ddc",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T15:46:38.220274Z",
- "iopub.status.busy": "2026-01-26T15:46:38.220274Z",
- "iopub.status.idle": "2026-01-26T16:08:38.200894Z",
- "shell.execute_reply": "2026-01-26T16:08:38.199899Z"
+ "iopub.execute_input": "2026-01-30T06:49:21.051005Z",
+ "iopub.status.busy": "2026-01-30T06:49:21.050001Z",
+ "iopub.status.idle": "2026-01-30T07:10:23.436307Z",
+ "shell.execute_reply": "2026-01-30T07:10:23.435312Z"
}
},
"outputs": [],
@@ -192,10 +195,10 @@
"id": "06d06123",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T16:08:38.202893Z",
- "iopub.status.busy": "2026-01-26T16:08:38.202893Z",
- "iopub.status.idle": "2026-01-26T17:16:33.324822Z",
- "shell.execute_reply": "2026-01-26T17:16:33.320299Z"
+ "iopub.execute_input": "2026-01-30T07:10:23.438307Z",
+ "iopub.status.busy": "2026-01-30T07:10:23.438307Z",
+ "iopub.status.idle": "2026-01-30T07:22:25.093692Z",
+ "shell.execute_reply": "2026-01-30T07:22:25.092774Z"
}
},
"outputs": [],
diff --git a/experiments/Heart-GatedCT_To_USD/4-merge_dynamic_and_static_usd.ipynb b/experiments/Heart-GatedCT_To_USD/4-merge_dynamic_and_static_usd.ipynb
index b36155a..e8d4cca 100644
--- a/experiments/Heart-GatedCT_To_USD/4-merge_dynamic_and_static_usd.ipynb
+++ b/experiments/Heart-GatedCT_To_USD/4-merge_dynamic_and_static_usd.ipynb
@@ -2,14 +2,14 @@
"cells": [
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": null,
"id": "3ce61753-11ad-4ade-9afe-6ad1bc748e25",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T17:16:51.961127Z",
- "iopub.status.busy": "2026-01-26T17:16:51.960134Z",
- "iopub.status.idle": "2026-01-26T17:17:25.063841Z",
- "shell.execute_reply": "2026-01-26T17:17:25.062413Z"
+ "iopub.execute_input": "2026-01-30T07:22:32.942422Z",
+ "iopub.status.busy": "2026-01-30T07:22:32.942422Z",
+ "iopub.status.idle": "2026-01-30T07:22:48.458227Z",
+ "shell.execute_reply": "2026-01-30T07:22:48.457302Z"
}
},
"outputs": [],
@@ -21,36 +21,17 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": null,
"id": "4c0ece8e",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-26T17:17:25.068335Z",
- "iopub.status.busy": "2026-01-26T17:17:25.067236Z",
- "iopub.status.idle": "2026-01-26T17:18:11.504444Z",
- "shell.execute_reply": "2026-01-26T17:18:11.503440Z"
+ "iopub.execute_input": "2026-01-30T07:22:48.459867Z",
+ "iopub.status.busy": "2026-01-30T07:22:48.459867Z",
+ "iopub.status.idle": "2026-01-30T07:22:57.527010Z",
+ "shell.execute_reply": "2026-01-30T07:22:57.526212Z"
}
},
- "outputs": [
- {
- "name": "stderr",
- "output_type": "stream",
- "text": [
- "2026-01-28 13:47:29 INFO USDTools Copying /World to /World\n",
- "2026-01-28 13:47:31 INFO USDTools Copying /World to /World\n",
- "2026-01-28 13:47:31 INFO USDTools Set stage time range: 0.0 to 20.0\n",
- "2026-01-28 13:47:31 INFO USDTools Time codes per second: 1.0, Frames per second: 24.0\n",
- "2026-01-28 13:47:45 INFO USDTools Referencing files: 1/2 (50.0%)\n",
- "2026-01-28 13:47:45 INFO USDTools Referencing files: 2/2 (100.0%)\n",
- "2026-01-28 13:47:45 INFO USDTools Time range: 0.0 to 20.0\n",
- "2026-01-28 13:47:45 INFO USDTools Time codes per second: 1.0, Frames per second: 24.0\n",
- "2026-01-28 13:47:45 INFO USDTools Flattening composed stage...\n",
- "2026-01-28 13:47:46 INFO USDTools Set output TimeCodesPerSecond: 1.0\n",
- "2026-01-28 13:47:46 INFO USDTools Set output FramesPerSecond: 24.0\n",
- "2026-01-28 13:47:46 INFO USDTools Exporting to results/Slicer_CardiacGatedCT.flattened_merged_painted.usd\n"
- ]
- }
- ],
+ "outputs": [],
"source": [
"usd_tools = USDTools()\n",
"\n",
diff --git a/experiments/Heart-GatedCT_To_USD/test_vista3d_class.ipynb b/experiments/Heart-GatedCT_To_USD/test_vista3d_class.ipynb
index 03f07fb..c748c92 100644
--- a/experiments/Heart-GatedCT_To_USD/test_vista3d_class.ipynb
+++ b/experiments/Heart-GatedCT_To_USD/test_vista3d_class.ipynb
@@ -4,7 +4,14 @@
"cell_type": "code",
"execution_count": null,
"id": "726856a2",
- "metadata": {},
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2026-01-30T07:23:03.574155Z",
+ "iopub.status.busy": "2026-01-30T07:23:03.574155Z",
+ "iopub.status.idle": "2026-01-30T07:23:18.672872Z",
+ "shell.execute_reply": "2026-01-30T07:23:18.672086Z"
+ }
+ },
"outputs": [],
"source": [
"import os\n",
@@ -22,7 +29,14 @@
"cell_type": "code",
"execution_count": null,
"id": "ab698a69",
- "metadata": {},
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2026-01-30T07:23:18.675085Z",
+ "iopub.status.busy": "2026-01-30T07:23:18.674471Z",
+ "iopub.status.idle": "2026-01-30T07:25:34.446133Z",
+ "shell.execute_reply": "2026-01-30T07:25:34.445140Z"
+ }
+ },
"outputs": [],
"source": [
"seg = SegmentChestVista3D()\n",
@@ -60,6 +74,374 @@
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.11"
+ },
+ "widgets": {
+ "application/vnd.jupyter.widget-state+json": {
+ "state": {
+ "1c9455ed442c42779d7b5d849ea93c19": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "2.0.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_allow_html": false,
+ "layout": "IPY_MODEL_3cb7bcc2544e4040a8335525502efd56",
+ "placeholder": "",
+ "style": "IPY_MODEL_8040237a8fbc4166acd5e9b52b2faf40",
+ "tabbable": null,
+ "tooltip": null,
+ "value": "Fetching 18 files: 100%"
+ }
+ },
+ "3cb7bcc2544e4040a8335525502efd56": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "2.0.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "2.0.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border_bottom": null,
+ "border_left": null,
+ "border_right": null,
+ "border_top": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "4307cec0a95d47c28bf0853f84ba67eb": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "FloatProgressModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "2.0.0",
+ "_view_name": "ProgressView",
+ "bar_style": "success",
+ "description": "",
+ "description_allow_html": false,
+ "layout": "IPY_MODEL_b362dcf8b3b0404e8dc8d52ad1bb880e",
+ "max": 18.0,
+ "min": 0.0,
+ "orientation": "horizontal",
+ "style": "IPY_MODEL_71842a4bd8824ac39ef26c29acd0c5da",
+ "tabbable": null,
+ "tooltip": null,
+ "value": 18.0
+ }
+ },
+ "58ca1681c8b14b72bede8bf01cd13f3b": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "2.0.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_allow_html": false,
+ "layout": "IPY_MODEL_69d487b72ed046fc9264761b3aee84dc",
+ "placeholder": "",
+ "style": "IPY_MODEL_892fd92c31df4d01a9aff22f5d4ece2d",
+ "tabbable": null,
+ "tooltip": null,
+ "value": " 18/18 [00:00<00:00, 802.84it/s]"
+ }
+ },
+ "69d487b72ed046fc9264761b3aee84dc": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "2.0.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "2.0.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border_bottom": null,
+ "border_left": null,
+ "border_right": null,
+ "border_top": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "71842a4bd8824ac39ef26c29acd0c5da": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "ProgressStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "StyleView",
+ "bar_color": null,
+ "description_width": ""
+ }
+ },
+ "8040237a8fbc4166acd5e9b52b2faf40": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HTMLStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HTMLStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "StyleView",
+ "background": null,
+ "description_width": "",
+ "font_size": null,
+ "text_color": null
+ }
+ },
+ "892fd92c31df4d01a9aff22f5d4ece2d": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HTMLStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HTMLStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "StyleView",
+ "background": null,
+ "description_width": "",
+ "font_size": null,
+ "text_color": null
+ }
+ },
+ "b362dcf8b3b0404e8dc8d52ad1bb880e": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "2.0.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "2.0.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border_bottom": null,
+ "border_left": null,
+ "border_right": null,
+ "border_top": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "c7657ead7ac841ed97cf8d01192b1710": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "2.0.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "2.0.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border_bottom": null,
+ "border_left": null,
+ "border_right": null,
+ "border_top": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "dba9447e62024101ab0a9a136d47c84f": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HBoxModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HBoxModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "2.0.0",
+ "_view_name": "HBoxView",
+ "box_style": "",
+ "children": [
+ "IPY_MODEL_1c9455ed442c42779d7b5d849ea93c19",
+ "IPY_MODEL_4307cec0a95d47c28bf0853f84ba67eb",
+ "IPY_MODEL_58ca1681c8b14b72bede8bf01cd13f3b"
+ ],
+ "layout": "IPY_MODEL_c7657ead7ac841ed97cf8d01192b1710",
+ "tabbable": null,
+ "tooltip": null
+ }
+ }
+ },
+ "version_major": 2,
+ "version_minor": 0
+ }
}
},
"nbformat": 4,
diff --git a/experiments/Heart-GatedCT_To_USD/test_vista3d_inMem.ipynb b/experiments/Heart-GatedCT_To_USD/test_vista3d_inMem.ipynb
index 3ae5bb3..e54ca89 100644
--- a/experiments/Heart-GatedCT_To_USD/test_vista3d_inMem.ipynb
+++ b/experiments/Heart-GatedCT_To_USD/test_vista3d_inMem.ipynb
@@ -4,7 +4,14 @@
"cell_type": "code",
"execution_count": null,
"id": "30066e92",
- "metadata": {},
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2026-01-30T07:25:43.179447Z",
+ "iopub.status.busy": "2026-01-30T07:25:43.179447Z",
+ "iopub.status.idle": "2026-01-30T07:25:44.868390Z",
+ "shell.execute_reply": "2026-01-30T07:25:44.866770Z"
+ }
+ },
"outputs": [],
"source": [
"import numpy as np\n",
@@ -176,7 +183,14 @@
"cell_type": "code",
"execution_count": null,
"id": "c0e5a477",
- "metadata": {},
+ "metadata": {
+ "execution": {
+ "iopub.execute_input": "2026-01-30T07:25:44.870332Z",
+ "iopub.status.busy": "2026-01-30T07:25:44.869712Z",
+ "iopub.status.idle": "2026-01-30T07:25:56.909477Z",
+ "shell.execute_reply": "2026-01-30T07:25:56.907477Z"
+ }
+ },
"outputs": [],
"source": [
"import itk\n",
@@ -210,6 +224,374 @@
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.11"
+ },
+ "widgets": {
+ "application/vnd.jupyter.widget-state+json": {
+ "state": {
+ "1db286c9d1b146749ee41e223f114dca": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "2.0.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "2.0.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border_bottom": null,
+ "border_left": null,
+ "border_right": null,
+ "border_top": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "24e6acb9f38c4484a0395183243e2922": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "2.0.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_allow_html": false,
+ "layout": "IPY_MODEL_1db286c9d1b146749ee41e223f114dca",
+ "placeholder": "",
+ "style": "IPY_MODEL_86fba89c14c541b084ca788a47ff7659",
+ "tabbable": null,
+ "tooltip": null,
+ "value": "Fetching 22 files: 100%"
+ }
+ },
+ "2b30d6db4e9e4ca7af9604586daa3b81": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "ProgressStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "StyleView",
+ "bar_color": null,
+ "description_width": ""
+ }
+ },
+ "4e3ecbf102dd4bc7a674706e23f0cbe2": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "2.0.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "2.0.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border_bottom": null,
+ "border_left": null,
+ "border_right": null,
+ "border_top": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "86fba89c14c541b084ca788a47ff7659": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HTMLStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HTMLStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "StyleView",
+ "background": null,
+ "description_width": "",
+ "font_size": null,
+ "text_color": null
+ }
+ },
+ "96f98272868f419fbbad655457917984": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HTMLStyleModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HTMLStyleModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "StyleView",
+ "background": null,
+ "description_width": "",
+ "font_size": null,
+ "text_color": null
+ }
+ },
+ "c7465905a44c46888f36b0ee14b7be93": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HBoxModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HBoxModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "2.0.0",
+ "_view_name": "HBoxView",
+ "box_style": "",
+ "children": [
+ "IPY_MODEL_24e6acb9f38c4484a0395183243e2922",
+ "IPY_MODEL_d49926c8b1844d8f971dfe74aeb48c76",
+ "IPY_MODEL_e8950a6331e44844a74926566196c3ec"
+ ],
+ "layout": "IPY_MODEL_4e3ecbf102dd4bc7a674706e23f0cbe2",
+ "tabbable": null,
+ "tooltip": null
+ }
+ },
+ "cf948bf3f6a3402aa03d3983682e9ce9": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "2.0.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "2.0.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border_bottom": null,
+ "border_left": null,
+ "border_right": null,
+ "border_top": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ },
+ "d49926c8b1844d8f971dfe74aeb48c76": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "FloatProgressModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "2.0.0",
+ "_view_name": "ProgressView",
+ "bar_style": "success",
+ "description": "",
+ "description_allow_html": false,
+ "layout": "IPY_MODEL_fad7bca382a34f0ea8d4a2a4c3c86f73",
+ "max": 22.0,
+ "min": 0.0,
+ "orientation": "horizontal",
+ "style": "IPY_MODEL_2b30d6db4e9e4ca7af9604586daa3b81",
+ "tabbable": null,
+ "tooltip": null,
+ "value": 22.0
+ }
+ },
+ "e8950a6331e44844a74926566196c3ec": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "2.0.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_allow_html": false,
+ "layout": "IPY_MODEL_cf948bf3f6a3402aa03d3983682e9ce9",
+ "placeholder": "",
+ "style": "IPY_MODEL_96f98272868f419fbbad655457917984",
+ "tabbable": null,
+ "tooltip": null,
+ "value": " 22/22 [00:00<00:00, 814.78it/s]"
+ }
+ },
+ "fad7bca382a34f0ea8d4a2a4c3c86f73": {
+ "model_module": "@jupyter-widgets/base",
+ "model_module_version": "2.0.0",
+ "model_name": "LayoutModel",
+ "state": {
+ "_model_module": "@jupyter-widgets/base",
+ "_model_module_version": "2.0.0",
+ "_model_name": "LayoutModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/base",
+ "_view_module_version": "2.0.0",
+ "_view_name": "LayoutView",
+ "align_content": null,
+ "align_items": null,
+ "align_self": null,
+ "border_bottom": null,
+ "border_left": null,
+ "border_right": null,
+ "border_top": null,
+ "bottom": null,
+ "display": null,
+ "flex": null,
+ "flex_flow": null,
+ "grid_area": null,
+ "grid_auto_columns": null,
+ "grid_auto_flow": null,
+ "grid_auto_rows": null,
+ "grid_column": null,
+ "grid_gap": null,
+ "grid_row": null,
+ "grid_template_areas": null,
+ "grid_template_columns": null,
+ "grid_template_rows": null,
+ "height": null,
+ "justify_content": null,
+ "justify_items": null,
+ "left": null,
+ "margin": null,
+ "max_height": null,
+ "max_width": null,
+ "min_height": null,
+ "min_width": null,
+ "object_fit": null,
+ "object_position": null,
+ "order": null,
+ "overflow": null,
+ "padding": null,
+ "right": null,
+ "top": null,
+ "visibility": null,
+ "width": null
+ }
+ }
+ },
+ "version_major": 2,
+ "version_minor": 0
+ }
}
},
"nbformat": 4,
diff --git a/experiments/Heart-Model_To_Patient/heart_model_to_model_icp_itk.ipynb b/experiments/Heart-Model_To_Patient/heart_model_to_model_icp_itk.ipynb
index 3796249..63e4002 100644
--- a/experiments/Heart-Model_To_Patient/heart_model_to_model_icp_itk.ipynb
+++ b/experiments/Heart-Model_To_Patient/heart_model_to_model_icp_itk.ipynb
@@ -27,10 +27,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:53:53.742202Z",
- "iopub.status.busy": "2026-01-28T04:53:53.742202Z",
- "iopub.status.idle": "2026-01-28T04:54:10.211821Z",
- "shell.execute_reply": "2026-01-28T04:54:10.210829Z"
+ "iopub.execute_input": "2026-01-30T07:26:03.582244Z",
+ "iopub.status.busy": "2026-01-30T07:26:03.581672Z",
+ "iopub.status.idle": "2026-01-30T07:26:18.705325Z",
+ "shell.execute_reply": "2026-01-30T07:26:18.704318Z"
}
},
"outputs": [],
@@ -65,10 +65,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:54:10.213827Z",
- "iopub.status.busy": "2026-01-28T04:54:10.213827Z",
- "iopub.status.idle": "2026-01-28T04:54:10.226834Z",
- "shell.execute_reply": "2026-01-28T04:54:10.225840Z"
+ "iopub.execute_input": "2026-01-30T07:26:18.707318Z",
+ "iopub.status.busy": "2026-01-30T07:26:18.707318Z",
+ "iopub.status.idle": "2026-01-30T07:26:18.720312Z",
+ "shell.execute_reply": "2026-01-30T07:26:18.719310Z"
}
},
"outputs": [],
@@ -102,10 +102,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:54:10.242361Z",
- "iopub.status.busy": "2026-01-28T04:54:10.242361Z",
- "iopub.status.idle": "2026-01-28T04:54:15.606185Z",
- "shell.execute_reply": "2026-01-28T04:54:15.605225Z"
+ "iopub.execute_input": "2026-01-30T07:26:18.733315Z",
+ "iopub.status.busy": "2026-01-30T07:26:18.733315Z",
+ "iopub.status.idle": "2026-01-30T07:26:23.864146Z",
+ "shell.execute_reply": "2026-01-30T07:26:23.863131Z"
}
},
"outputs": [],
@@ -143,10 +143,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:54:15.608742Z",
- "iopub.status.busy": "2026-01-28T04:54:15.608190Z",
- "iopub.status.idle": "2026-01-28T04:54:15.636899Z",
- "shell.execute_reply": "2026-01-28T04:54:15.636394Z"
+ "iopub.execute_input": "2026-01-30T07:26:23.865853Z",
+ "iopub.status.busy": "2026-01-30T07:26:23.865853Z",
+ "iopub.status.idle": "2026-01-30T07:26:23.894051Z",
+ "shell.execute_reply": "2026-01-30T07:26:23.893212Z"
}
},
"outputs": [],
@@ -164,10 +164,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:54:15.639140Z",
- "iopub.status.busy": "2026-01-28T04:54:15.639140Z",
- "iopub.status.idle": "2026-01-28T04:54:15.983405Z",
- "shell.execute_reply": "2026-01-28T04:54:15.982901Z"
+ "iopub.execute_input": "2026-01-30T07:26:23.895882Z",
+ "iopub.status.busy": "2026-01-30T07:26:23.895882Z",
+ "iopub.status.idle": "2026-01-30T07:26:24.227242Z",
+ "shell.execute_reply": "2026-01-30T07:26:24.225960Z"
}
},
"outputs": [],
@@ -223,10 +223,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:54:15.985490Z",
- "iopub.status.busy": "2026-01-28T04:54:15.985490Z",
- "iopub.status.idle": "2026-01-28T04:54:16.485426Z",
- "shell.execute_reply": "2026-01-28T04:54:16.484217Z"
+ "iopub.execute_input": "2026-01-30T07:26:24.228975Z",
+ "iopub.status.busy": "2026-01-30T07:26:24.227964Z",
+ "iopub.status.idle": "2026-01-30T07:26:24.682363Z",
+ "shell.execute_reply": "2026-01-30T07:26:24.681509Z"
}
},
"outputs": [],
@@ -255,10 +255,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:54:16.487035Z",
- "iopub.status.busy": "2026-01-28T04:54:16.487035Z",
- "iopub.status.idle": "2026-01-28T04:58:37.962064Z",
- "shell.execute_reply": "2026-01-28T04:58:37.961151Z"
+ "iopub.execute_input": "2026-01-30T07:26:24.684241Z",
+ "iopub.status.busy": "2026-01-30T07:26:24.684241Z",
+ "iopub.status.idle": "2026-01-30T07:30:37.003624Z",
+ "shell.execute_reply": "2026-01-30T07:30:37.002635Z"
}
},
"outputs": [],
@@ -303,10 +303,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:58:37.964061Z",
- "iopub.status.busy": "2026-01-28T04:58:37.964061Z",
- "iopub.status.idle": "2026-01-28T04:58:41.925393Z",
- "shell.execute_reply": "2026-01-28T04:58:41.924315Z"
+ "iopub.execute_input": "2026-01-30T07:30:37.005630Z",
+ "iopub.status.busy": "2026-01-30T07:30:37.005630Z",
+ "iopub.status.idle": "2026-01-30T07:30:41.058467Z",
+ "shell.execute_reply": "2026-01-30T07:30:41.057471Z"
}
},
"outputs": [],
@@ -333,10 +333,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:58:41.927133Z",
- "iopub.status.busy": "2026-01-28T04:58:41.927133Z",
- "iopub.status.idle": "2026-01-28T04:58:44.346432Z",
- "shell.execute_reply": "2026-01-28T04:58:44.345436Z"
+ "iopub.execute_input": "2026-01-30T07:30:41.059979Z",
+ "iopub.status.busy": "2026-01-30T07:30:41.059979Z",
+ "iopub.status.idle": "2026-01-30T07:30:43.413972Z",
+ "shell.execute_reply": "2026-01-30T07:30:43.413972Z"
}
},
"outputs": [],
@@ -376,30 +376,7 @@
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {
- "5652c700abd24c5ba4fd978202aa93d0": {
- "model_module": "@jupyter-widgets/controls",
- "model_module_version": "2.0.0",
- "model_name": "HTMLModel",
- "state": {
- "_dom_classes": [],
- "_model_module": "@jupyter-widgets/controls",
- "_model_module_version": "2.0.0",
- "_model_name": "HTMLModel",
- "_view_count": null,
- "_view_module": "@jupyter-widgets/controls",
- "_view_module_version": "2.0.0",
- "_view_name": "HTMLView",
- "description": "",
- "description_allow_html": false,
- "layout": "IPY_MODEL_b1d4b84b44c141cbbb4e543180e75873",
- "placeholder": "",
- "style": "IPY_MODEL_7f099bbf876a4689bb37f515331a40db",
- "tabbable": null,
- "tooltip": null,
- "value": ""
- }
- },
- "7f099bbf876a4689bb37f515331a40db": {
+ "169c0c434aa84ef5a8098aeba4941992": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "HTMLStyleModel",
@@ -417,7 +394,7 @@
"text_color": null
}
},
- "b1d4b84b44c141cbbb4e543180e75873": {
+ "a4ee709f69e8423ab6abc2ad2207ca3d": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "2.0.0",
"model_name": "LayoutModel",
@@ -469,6 +446,29 @@
"visibility": null,
"width": null
}
+ },
+ "db31c555fc4043ada79e6afded189dae": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "2.0.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_allow_html": false,
+ "layout": "IPY_MODEL_a4ee709f69e8423ab6abc2ad2207ca3d",
+ "placeholder": "",
+ "style": "IPY_MODEL_169c0c434aa84ef5a8098aeba4941992",
+ "tabbable": null,
+ "tooltip": null,
+ "value": ""
+ }
}
},
"version_major": 2,
diff --git a/experiments/Heart-Model_To_Patient/heart_model_to_model_registration_pca.ipynb b/experiments/Heart-Model_To_Patient/heart_model_to_model_registration_pca.ipynb
index bd856da..040e38e 100644
--- a/experiments/Heart-Model_To_Patient/heart_model_to_model_registration_pca.ipynb
+++ b/experiments/Heart-Model_To_Patient/heart_model_to_model_registration_pca.ipynb
@@ -28,10 +28,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:58:51.452983Z",
- "iopub.status.busy": "2026-01-28T04:58:51.452983Z",
- "iopub.status.idle": "2026-01-28T04:59:07.507777Z",
- "shell.execute_reply": "2026-01-28T04:59:07.506545Z"
+ "iopub.execute_input": "2026-01-30T07:30:51.390829Z",
+ "iopub.status.busy": "2026-01-30T07:30:51.390829Z",
+ "iopub.status.idle": "2026-01-30T07:31:07.324798Z",
+ "shell.execute_reply": "2026-01-30T07:31:07.323806Z"
}
},
"outputs": [],
@@ -67,10 +67,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:59:07.510391Z",
- "iopub.status.busy": "2026-01-28T04:59:07.509377Z",
- "iopub.status.idle": "2026-01-28T04:59:07.522327Z",
- "shell.execute_reply": "2026-01-28T04:59:07.521437Z"
+ "iopub.execute_input": "2026-01-30T07:31:07.326798Z",
+ "iopub.status.busy": "2026-01-30T07:31:07.326798Z",
+ "iopub.status.idle": "2026-01-30T07:31:07.339814Z",
+ "shell.execute_reply": "2026-01-30T07:31:07.338813Z"
}
},
"outputs": [],
@@ -111,10 +111,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:59:07.536643Z",
- "iopub.status.busy": "2026-01-28T04:59:07.536227Z",
- "iopub.status.idle": "2026-01-28T04:59:12.862400Z",
- "shell.execute_reply": "2026-01-28T04:59:12.861388Z"
+ "iopub.execute_input": "2026-01-30T07:31:07.354322Z",
+ "iopub.status.busy": "2026-01-30T07:31:07.354322Z",
+ "iopub.status.idle": "2026-01-30T07:31:12.486959Z",
+ "shell.execute_reply": "2026-01-30T07:31:12.485970Z"
}
},
"outputs": [],
@@ -152,10 +152,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:59:12.864413Z",
- "iopub.status.busy": "2026-01-28T04:59:12.863405Z",
- "iopub.status.idle": "2026-01-28T04:59:12.893198Z",
- "shell.execute_reply": "2026-01-28T04:59:12.892189Z"
+ "iopub.execute_input": "2026-01-30T07:31:12.488959Z",
+ "iopub.status.busy": "2026-01-30T07:31:12.488959Z",
+ "iopub.status.idle": "2026-01-30T07:31:12.500959Z",
+ "shell.execute_reply": "2026-01-30T07:31:12.500959Z"
}
},
"outputs": [],
@@ -173,10 +173,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:59:12.895205Z",
- "iopub.status.busy": "2026-01-28T04:59:12.895205Z",
- "iopub.status.idle": "2026-01-28T04:59:13.240466Z",
- "shell.execute_reply": "2026-01-28T04:59:13.239436Z"
+ "iopub.execute_input": "2026-01-30T07:31:12.502959Z",
+ "iopub.status.busy": "2026-01-30T07:31:12.502959Z",
+ "iopub.status.idle": "2026-01-30T07:31:12.834190Z",
+ "shell.execute_reply": "2026-01-30T07:31:12.833196Z"
}
},
"outputs": [],
@@ -232,10 +232,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:59:13.242448Z",
- "iopub.status.busy": "2026-01-28T04:59:13.242448Z",
- "iopub.status.idle": "2026-01-28T04:59:13.709334Z",
- "shell.execute_reply": "2026-01-28T04:59:13.708342Z"
+ "iopub.execute_input": "2026-01-30T07:31:12.836191Z",
+ "iopub.status.busy": "2026-01-30T07:31:12.835203Z",
+ "iopub.status.idle": "2026-01-30T07:31:13.287902Z",
+ "shell.execute_reply": "2026-01-30T07:31:13.286992Z"
}
},
"outputs": [],
@@ -264,10 +264,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:59:13.711338Z",
- "iopub.status.busy": "2026-01-28T04:59:13.711338Z",
- "iopub.status.idle": "2026-01-28T04:59:41.708720Z",
- "shell.execute_reply": "2026-01-28T04:59:41.707726Z"
+ "iopub.execute_input": "2026-01-30T07:31:13.289993Z",
+ "iopub.status.busy": "2026-01-30T07:31:13.289005Z",
+ "iopub.status.idle": "2026-01-30T07:31:41.526605Z",
+ "shell.execute_reply": "2026-01-30T07:31:41.525611Z"
}
},
"outputs": [],
@@ -309,10 +309,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:59:41.710720Z",
- "iopub.status.busy": "2026-01-28T04:59:41.710720Z",
- "iopub.status.idle": "2026-01-28T04:59:45.590398Z",
- "shell.execute_reply": "2026-01-28T04:59:45.589453Z"
+ "iopub.execute_input": "2026-01-30T07:31:41.528604Z",
+ "iopub.status.busy": "2026-01-30T07:31:41.528604Z",
+ "iopub.status.idle": "2026-01-30T07:31:45.580949Z",
+ "shell.execute_reply": "2026-01-30T07:31:45.579956Z"
}
},
"outputs": [],
@@ -339,10 +339,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:59:45.591907Z",
- "iopub.status.busy": "2026-01-28T04:59:45.591907Z",
- "iopub.status.idle": "2026-01-28T04:59:55.952578Z",
- "shell.execute_reply": "2026-01-28T04:59:55.951525Z"
+ "iopub.execute_input": "2026-01-30T07:31:45.582968Z",
+ "iopub.status.busy": "2026-01-30T07:31:45.581949Z",
+ "iopub.status.idle": "2026-01-30T07:31:55.851841Z",
+ "shell.execute_reply": "2026-01-30T07:31:55.850787Z"
}
},
"outputs": [],
@@ -383,10 +383,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T04:59:55.955305Z",
- "iopub.status.busy": "2026-01-28T04:59:55.955305Z",
- "iopub.status.idle": "2026-01-28T05:01:06.260685Z",
- "shell.execute_reply": "2026-01-28T05:01:06.259363Z"
+ "iopub.execute_input": "2026-01-30T07:31:55.853890Z",
+ "iopub.status.busy": "2026-01-30T07:31:55.853890Z",
+ "iopub.status.idle": "2026-01-30T07:33:04.382164Z",
+ "shell.execute_reply": "2026-01-30T07:33:04.381512Z"
}
},
"outputs": [],
@@ -420,10 +420,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:01:06.262391Z",
- "iopub.status.busy": "2026-01-28T05:01:06.261826Z",
- "iopub.status.idle": "2026-01-28T05:01:06.274944Z",
- "shell.execute_reply": "2026-01-28T05:01:06.274440Z"
+ "iopub.execute_input": "2026-01-30T07:33:04.383931Z",
+ "iopub.status.busy": "2026-01-30T07:33:04.383931Z",
+ "iopub.status.idle": "2026-01-30T07:33:04.397865Z",
+ "shell.execute_reply": "2026-01-30T07:33:04.396469Z"
}
},
"outputs": [],
@@ -455,10 +455,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:01:06.276609Z",
- "iopub.status.busy": "2026-01-28T05:01:06.276609Z",
- "iopub.status.idle": "2026-01-28T05:01:06.545574Z",
- "shell.execute_reply": "2026-01-28T05:01:06.544204Z"
+ "iopub.execute_input": "2026-01-30T07:33:04.399532Z",
+ "iopub.status.busy": "2026-01-30T07:33:04.398970Z",
+ "iopub.status.idle": "2026-01-30T07:33:04.640270Z",
+ "shell.execute_reply": "2026-01-30T07:33:04.639292Z"
}
},
"outputs": [],
@@ -490,10 +490,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:01:06.549117Z",
- "iopub.status.busy": "2026-01-28T05:01:06.548567Z",
- "iopub.status.idle": "2026-01-28T05:01:08.798687Z",
- "shell.execute_reply": "2026-01-28T05:01:08.797605Z"
+ "iopub.execute_input": "2026-01-30T07:33:04.642144Z",
+ "iopub.status.busy": "2026-01-30T07:33:04.641841Z",
+ "iopub.status.idle": "2026-01-30T07:33:07.139818Z",
+ "shell.execute_reply": "2026-01-30T07:33:07.138818Z"
}
},
"outputs": [],
@@ -544,7 +544,30 @@
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {
- "3f6e61466def44298a2fc538775464f5": {
+ "0cd4f78993b346ffb24c701c02137862": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "2.0.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_allow_html": false,
+ "layout": "IPY_MODEL_40ff400f5a654f1ea99dfe9cc7f098fb",
+ "placeholder": "",
+ "style": "IPY_MODEL_af440de5249f47b1a69aeecf13a13627",
+ "tabbable": null,
+ "tooltip": null,
+ "value": ""
+ }
+ },
+ "40ff400f5a654f1ea99dfe9cc7f098fb": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "2.0.0",
"model_name": "LayoutModel",
@@ -597,7 +620,7 @@
"width": null
}
},
- "ae666f4536a34d4b803b08ac02261a2b": {
+ "af440de5249f47b1a69aeecf13a13627": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "HTMLStyleModel",
@@ -614,29 +637,6 @@
"font_size": null,
"text_color": null
}
- },
- "ecadb909a1de4dd4abc239c7b312c969": {
- "model_module": "@jupyter-widgets/controls",
- "model_module_version": "2.0.0",
- "model_name": "HTMLModel",
- "state": {
- "_dom_classes": [],
- "_model_module": "@jupyter-widgets/controls",
- "_model_module_version": "2.0.0",
- "_model_name": "HTMLModel",
- "_view_count": null,
- "_view_module": "@jupyter-widgets/controls",
- "_view_module_version": "2.0.0",
- "_view_name": "HTMLView",
- "description": "",
- "description_allow_html": false,
- "layout": "IPY_MODEL_3f6e61466def44298a2fc538775464f5",
- "placeholder": "",
- "style": "IPY_MODEL_ae666f4536a34d4b803b08ac02261a2b",
- "tabbable": null,
- "tooltip": null,
- "value": ""
- }
}
},
"version_major": 2,
diff --git a/experiments/Heart-Model_To_Patient/heart_model_to_patient.ipynb b/experiments/Heart-Model_To_Patient/heart_model_to_patient.ipynb
index f14e929..c379e28 100644
--- a/experiments/Heart-Model_To_Patient/heart_model_to_patient.ipynb
+++ b/experiments/Heart-Model_To_Patient/heart_model_to_patient.ipynb
@@ -12,10 +12,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:01:15.881411Z",
- "iopub.status.busy": "2026-01-28T05:01:15.881411Z",
- "iopub.status.idle": "2026-01-28T05:01:31.910491Z",
- "shell.execute_reply": "2026-01-28T05:01:31.909641Z"
+ "iopub.execute_input": "2026-01-30T07:33:14.476386Z",
+ "iopub.status.busy": "2026-01-30T07:33:14.476386Z",
+ "iopub.status.idle": "2026-01-30T07:33:29.505238Z",
+ "shell.execute_reply": "2026-01-30T07:33:29.504402Z"
}
},
"outputs": [],
@@ -47,10 +47,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:01:31.912529Z",
- "iopub.status.busy": "2026-01-28T05:01:31.912529Z",
- "iopub.status.idle": "2026-01-28T05:01:31.925486Z",
- "shell.execute_reply": "2026-01-28T05:01:31.924541Z"
+ "iopub.execute_input": "2026-01-30T07:33:29.506845Z",
+ "iopub.status.busy": "2026-01-30T07:33:29.506845Z",
+ "iopub.status.idle": "2026-01-30T07:33:29.521185Z",
+ "shell.execute_reply": "2026-01-30T07:33:29.520016Z"
}
},
"outputs": [],
@@ -81,10 +81,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:01:31.927533Z",
- "iopub.status.busy": "2026-01-28T05:01:31.927533Z",
- "iopub.status.idle": "2026-01-28T05:01:32.377510Z",
- "shell.execute_reply": "2026-01-28T05:01:32.376511Z"
+ "iopub.execute_input": "2026-01-30T07:33:29.523259Z",
+ "iopub.status.busy": "2026-01-30T07:33:29.522513Z",
+ "iopub.status.idle": "2026-01-30T07:33:29.960509Z",
+ "shell.execute_reply": "2026-01-30T07:33:29.960004Z"
}
},
"outputs": [],
@@ -98,10 +98,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:01:32.379493Z",
- "iopub.status.busy": "2026-01-28T05:01:32.379493Z",
- "iopub.status.idle": "2026-01-28T05:01:32.408082Z",
- "shell.execute_reply": "2026-01-28T05:01:32.407029Z"
+ "iopub.execute_input": "2026-01-30T07:33:29.962544Z",
+ "iopub.status.busy": "2026-01-30T07:33:29.962544Z",
+ "iopub.status.idle": "2026-01-30T07:33:29.976329Z",
+ "shell.execute_reply": "2026-01-30T07:33:29.975504Z"
}
},
"outputs": [],
@@ -145,10 +145,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:01:32.410091Z",
- "iopub.status.busy": "2026-01-28T05:01:32.410091Z",
- "iopub.status.idle": "2026-01-28T05:01:32.754828Z",
- "shell.execute_reply": "2026-01-28T05:01:32.753925Z"
+ "iopub.execute_input": "2026-01-30T07:33:29.978061Z",
+ "iopub.status.busy": "2026-01-30T07:33:29.977312Z",
+ "iopub.status.idle": "2026-01-30T07:33:30.488828Z",
+ "shell.execute_reply": "2026-01-30T07:33:30.487557Z"
}
},
"outputs": [],
@@ -183,10 +183,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:01:32.756828Z",
- "iopub.status.busy": "2026-01-28T05:01:32.756828Z",
- "iopub.status.idle": "2026-01-28T05:01:44.256331Z",
- "shell.execute_reply": "2026-01-28T05:01:44.255302Z"
+ "iopub.execute_input": "2026-01-30T07:33:30.502130Z",
+ "iopub.status.busy": "2026-01-30T07:33:30.501529Z",
+ "iopub.status.idle": "2026-01-30T07:33:41.765624Z",
+ "shell.execute_reply": "2026-01-30T07:33:41.763915Z"
}
},
"outputs": [],
@@ -207,10 +207,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:01:44.259221Z",
- "iopub.status.busy": "2026-01-28T05:01:44.259221Z",
- "iopub.status.idle": "2026-01-28T05:01:51.224517Z",
- "shell.execute_reply": "2026-01-28T05:01:51.223247Z"
+ "iopub.execute_input": "2026-01-30T07:33:41.767650Z",
+ "iopub.status.busy": "2026-01-30T07:33:41.767146Z",
+ "iopub.status.idle": "2026-01-30T07:33:48.600301Z",
+ "shell.execute_reply": "2026-01-30T07:33:48.598794Z"
}
},
"outputs": [],
@@ -242,10 +242,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:01:51.226530Z",
- "iopub.status.busy": "2026-01-28T05:01:51.226530Z",
- "iopub.status.idle": "2026-01-28T05:06:05.306555Z",
- "shell.execute_reply": "2026-01-28T05:06:05.305412Z"
+ "iopub.execute_input": "2026-01-30T07:33:48.602313Z",
+ "iopub.status.busy": "2026-01-30T07:33:48.602313Z",
+ "iopub.status.idle": "2026-01-30T07:37:52.770298Z",
+ "shell.execute_reply": "2026-01-30T07:37:52.769310Z"
}
},
"outputs": [],
@@ -266,10 +266,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:06:05.308063Z",
- "iopub.status.busy": "2026-01-28T05:06:05.308063Z",
- "iopub.status.idle": "2026-01-28T05:07:43.163672Z",
- "shell.execute_reply": "2026-01-28T05:07:43.162768Z"
+ "iopub.execute_input": "2026-01-30T07:37:52.772298Z",
+ "iopub.status.busy": "2026-01-30T07:37:52.772298Z",
+ "iopub.status.idle": "2026-01-30T07:39:28.692496Z",
+ "shell.execute_reply": "2026-01-30T07:39:28.691506Z"
}
},
"outputs": [],
@@ -295,10 +295,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:07:43.165673Z",
- "iopub.status.busy": "2026-01-28T05:07:43.165673Z",
- "iopub.status.idle": "2026-01-28T05:09:52.930697Z",
- "shell.execute_reply": "2026-01-28T05:09:52.929287Z"
+ "iopub.execute_input": "2026-01-30T07:39:28.694383Z",
+ "iopub.status.busy": "2026-01-30T07:39:28.694383Z",
+ "iopub.status.idle": "2026-01-30T07:41:36.819527Z",
+ "shell.execute_reply": "2026-01-30T07:41:36.819527Z"
}
},
"outputs": [],
@@ -323,10 +323,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:09:52.932334Z",
- "iopub.status.busy": "2026-01-28T05:09:52.932334Z",
- "iopub.status.idle": "2026-01-28T05:10:28.133249Z",
- "shell.execute_reply": "2026-01-28T05:10:28.132335Z"
+ "iopub.execute_input": "2026-01-30T07:41:36.821528Z",
+ "iopub.status.busy": "2026-01-30T07:41:36.821528Z",
+ "iopub.status.idle": "2026-01-30T07:42:13.494257Z",
+ "shell.execute_reply": "2026-01-30T07:42:13.493421Z"
}
},
"outputs": [],
@@ -351,10 +351,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:10:28.135327Z",
- "iopub.status.busy": "2026-01-28T05:10:28.135327Z",
- "iopub.status.idle": "2026-01-28T05:10:28.997087Z",
- "shell.execute_reply": "2026-01-28T05:10:28.996191Z"
+ "iopub.execute_input": "2026-01-30T07:42:13.496082Z",
+ "iopub.status.busy": "2026-01-30T07:42:13.496082Z",
+ "iopub.status.idle": "2026-01-30T07:42:14.370681Z",
+ "shell.execute_reply": "2026-01-30T07:42:14.369746Z"
}
},
"outputs": [],
@@ -396,10 +396,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:10:28.999096Z",
- "iopub.status.busy": "2026-01-28T05:10:28.999096Z",
- "iopub.status.idle": "2026-01-28T05:10:45.787422Z",
- "shell.execute_reply": "2026-01-28T05:10:45.786534Z"
+ "iopub.execute_input": "2026-01-30T07:42:14.372682Z",
+ "iopub.status.busy": "2026-01-30T07:42:14.372682Z",
+ "iopub.status.idle": "2026-01-30T07:42:30.722849Z",
+ "shell.execute_reply": "2026-01-30T07:42:30.721978Z"
}
},
"outputs": [],
@@ -424,10 +424,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:10:45.789422Z",
- "iopub.status.busy": "2026-01-28T05:10:45.789422Z",
- "iopub.status.idle": "2026-01-28T05:10:48.149250Z",
- "shell.execute_reply": "2026-01-28T05:10:48.148258Z"
+ "iopub.execute_input": "2026-01-30T07:42:30.724761Z",
+ "iopub.status.busy": "2026-01-30T07:42:30.724761Z",
+ "iopub.status.idle": "2026-01-30T07:42:32.994105Z",
+ "shell.execute_reply": "2026-01-30T07:42:32.992754Z"
}
},
"outputs": [],
@@ -471,10 +471,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:10:48.151250Z",
- "iopub.status.busy": "2026-01-28T05:10:48.151250Z",
- "iopub.status.idle": "2026-01-28T05:10:48.420499Z",
- "shell.execute_reply": "2026-01-28T05:10:48.419505Z"
+ "iopub.execute_input": "2026-01-30T07:42:32.996772Z",
+ "iopub.status.busy": "2026-01-30T07:42:32.995763Z",
+ "iopub.status.idle": "2026-01-30T07:42:33.357195Z",
+ "shell.execute_reply": "2026-01-30T07:42:33.356408Z"
}
},
"outputs": [],
@@ -525,25 +525,53 @@
"widgets": {
"application/vnd.jupyter.widget-state+json": {
"state": {
- "2093ac4075e6409ea5710e134d6d6ce7": {
+ "1c9b7e465d0040e38ea1ec21659f659c": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
- "model_name": "HTMLStyleModel",
+ "model_name": "HTMLModel",
"state": {
+ "_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "2.0.0",
- "_model_name": "HTMLStyleModel",
+ "_model_name": "HTMLModel",
"_view_count": null,
- "_view_module": "@jupyter-widgets/base",
+ "_view_module": "@jupyter-widgets/controls",
"_view_module_version": "2.0.0",
- "_view_name": "StyleView",
- "background": null,
- "description_width": "",
- "font_size": null,
- "text_color": null
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_allow_html": false,
+ "layout": "IPY_MODEL_6f30edc41f7b403a821d6cbea90f60df",
+ "placeholder": "",
+ "style": "IPY_MODEL_7a69578ef1024a88ba292ab6f477b69d",
+ "tabbable": null,
+ "tooltip": null,
+ "value": ""
+ }
+ },
+ "221469af883c45ab9e0020defefef604": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_module_version": "2.0.0",
+ "model_name": "HTMLModel",
+ "state": {
+ "_dom_classes": [],
+ "_model_module": "@jupyter-widgets/controls",
+ "_model_module_version": "2.0.0",
+ "_model_name": "HTMLModel",
+ "_view_count": null,
+ "_view_module": "@jupyter-widgets/controls",
+ "_view_module_version": "2.0.0",
+ "_view_name": "HTMLView",
+ "description": "",
+ "description_allow_html": false,
+ "layout": "IPY_MODEL_7a238364a1cf45adba9ba3da3da6d67b",
+ "placeholder": "",
+ "style": "IPY_MODEL_abad8b5f83494f2d9e12f143bb56c806",
+ "tabbable": null,
+ "tooltip": null,
+ "value": ""
}
},
- "491867dd74e340e39f039d73e5348a7d": {
+ "6f30edc41f7b403a821d6cbea90f60df": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "2.0.0",
"model_name": "LayoutModel",
@@ -596,30 +624,7 @@
"width": null
}
},
- "71fede1064a74135a22884ee12ec714e": {
- "model_module": "@jupyter-widgets/controls",
- "model_module_version": "2.0.0",
- "model_name": "HTMLModel",
- "state": {
- "_dom_classes": [],
- "_model_module": "@jupyter-widgets/controls",
- "_model_module_version": "2.0.0",
- "_model_name": "HTMLModel",
- "_view_count": null,
- "_view_module": "@jupyter-widgets/controls",
- "_view_module_version": "2.0.0",
- "_view_name": "HTMLView",
- "description": "",
- "description_allow_html": false,
- "layout": "IPY_MODEL_491867dd74e340e39f039d73e5348a7d",
- "placeholder": "",
- "style": "IPY_MODEL_2093ac4075e6409ea5710e134d6d6ce7",
- "tabbable": null,
- "tooltip": null,
- "value": ""
- }
- },
- "bfb58405421943f2b0be365536a1f655": {
+ "7a238364a1cf45adba9ba3da3da6d67b": {
"model_module": "@jupyter-widgets/base",
"model_module_version": "2.0.0",
"model_name": "LayoutModel",
@@ -672,30 +677,25 @@
"width": null
}
},
- "c0ff603527a746358621b8055640de71": {
+ "7a69578ef1024a88ba292ab6f477b69d": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
- "model_name": "HTMLModel",
+ "model_name": "HTMLStyleModel",
"state": {
- "_dom_classes": [],
"_model_module": "@jupyter-widgets/controls",
"_model_module_version": "2.0.0",
- "_model_name": "HTMLModel",
+ "_model_name": "HTMLStyleModel",
"_view_count": null,
- "_view_module": "@jupyter-widgets/controls",
+ "_view_module": "@jupyter-widgets/base",
"_view_module_version": "2.0.0",
- "_view_name": "HTMLView",
- "description": "",
- "description_allow_html": false,
- "layout": "IPY_MODEL_bfb58405421943f2b0be365536a1f655",
- "placeholder": "",
- "style": "IPY_MODEL_cb7e363f20e14fa78ae04aa06b7c13a6",
- "tabbable": null,
- "tooltip": null,
- "value": ""
+ "_view_name": "StyleView",
+ "background": null,
+ "description_width": "",
+ "font_size": null,
+ "text_color": null
}
},
- "cb7e363f20e14fa78ae04aa06b7c13a6": {
+ "abad8b5f83494f2d9e12f143bb56c806": {
"model_module": "@jupyter-widgets/controls",
"model_module_version": "2.0.0",
"model_name": "HTMLStyleModel",
diff --git a/experiments/Heart-VTKSeries_To_USD/0-download_and_convert_4d_to_3d.ipynb b/experiments/Heart-VTKSeries_To_USD/0-download_and_convert_4d_to_3d.ipynb
index 264957a..6b769b2 100644
--- a/experiments/Heart-VTKSeries_To_USD/0-download_and_convert_4d_to_3d.ipynb
+++ b/experiments/Heart-VTKSeries_To_USD/0-download_and_convert_4d_to_3d.ipynb
@@ -5,10 +5,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:10:32.956639Z",
- "iopub.status.busy": "2026-01-28T03:10:32.956639Z",
- "iopub.status.idle": "2026-01-28T03:10:52.527875Z",
- "shell.execute_reply": "2026-01-28T03:10:52.526864Z"
+ "iopub.execute_input": "2026-01-30T06:12:04.307570Z",
+ "iopub.status.busy": "2026-01-30T06:12:04.307018Z",
+ "iopub.status.idle": "2026-01-30T06:12:22.260048Z",
+ "shell.execute_reply": "2026-01-30T06:12:22.260048Z"
}
},
"outputs": [],
@@ -24,10 +24,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:10:52.529863Z",
- "iopub.status.busy": "2026-01-28T03:10:52.529863Z",
- "iopub.status.idle": "2026-01-28T03:10:52.542863Z",
- "shell.execute_reply": "2026-01-28T03:10:52.541871Z"
+ "iopub.execute_input": "2026-01-30T06:12:22.263043Z",
+ "iopub.status.busy": "2026-01-30T06:12:22.262041Z",
+ "iopub.status.idle": "2026-01-30T06:12:22.275569Z",
+ "shell.execute_reply": "2026-01-30T06:12:22.275569Z"
}
},
"outputs": [],
@@ -47,10 +47,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:10:52.544885Z",
- "iopub.status.busy": "2026-01-28T03:10:52.544885Z",
- "iopub.status.idle": "2026-01-28T03:10:52.557879Z",
- "shell.execute_reply": "2026-01-28T03:10:52.556885Z"
+ "iopub.execute_input": "2026-01-30T06:12:22.277564Z",
+ "iopub.status.busy": "2026-01-30T06:12:22.277564Z",
+ "iopub.status.idle": "2026-01-30T06:12:22.291565Z",
+ "shell.execute_reply": "2026-01-30T06:12:22.290571Z"
}
},
"outputs": [],
@@ -67,10 +67,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:10:52.559877Z",
- "iopub.status.busy": "2026-01-28T03:10:52.559877Z",
- "iopub.status.idle": "2026-01-28T03:10:52.573876Z",
- "shell.execute_reply": "2026-01-28T03:10:52.571757Z"
+ "iopub.execute_input": "2026-01-30T06:12:22.293569Z",
+ "iopub.status.busy": "2026-01-30T06:12:22.293569Z",
+ "iopub.status.idle": "2026-01-30T06:12:22.306566Z",
+ "shell.execute_reply": "2026-01-30T06:12:22.305570Z"
}
},
"outputs": [],
diff --git a/experiments/Heart-VTKSeries_To_USD/1-heart_vtkseries_to_usd.ipynb b/experiments/Heart-VTKSeries_To_USD/1-heart_vtkseries_to_usd.ipynb
index b7df2ec..81077ef 100644
--- a/experiments/Heart-VTKSeries_To_USD/1-heart_vtkseries_to_usd.ipynb
+++ b/experiments/Heart-VTKSeries_To_USD/1-heart_vtkseries_to_usd.ipynb
@@ -6,10 +6,10 @@
"id": "0e42811d",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:11:00.648640Z",
- "iopub.status.busy": "2026-01-28T03:11:00.647590Z",
- "iopub.status.idle": "2026-01-28T03:11:16.707171Z",
- "shell.execute_reply": "2026-01-28T03:11:16.706256Z"
+ "iopub.execute_input": "2026-01-30T06:12:28.685890Z",
+ "iopub.status.busy": "2026-01-30T06:12:28.685890Z",
+ "iopub.status.idle": "2026-01-30T06:12:43.552186Z",
+ "shell.execute_reply": "2026-01-30T06:12:43.551178Z"
}
},
"outputs": [],
@@ -19,7 +19,7 @@
"\n",
"import pyvista as pv\n",
"\n",
- "from physiomotion4d.convert_vtk_to_usd import ConvertVTKToUSD"
+ "from physiomotion4d import ConvertVTKToUSD"
]
},
{
@@ -28,10 +28,10 @@
"id": "689c2ca8",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:11:16.709241Z",
- "iopub.status.busy": "2026-01-28T03:11:16.709241Z",
- "iopub.status.idle": "2026-01-28T03:11:16.722251Z",
- "shell.execute_reply": "2026-01-28T03:11:16.721251Z"
+ "iopub.execute_input": "2026-01-30T06:12:43.554178Z",
+ "iopub.status.busy": "2026-01-30T06:12:43.553181Z",
+ "iopub.status.idle": "2026-01-30T06:12:43.566178Z",
+ "shell.execute_reply": "2026-01-30T06:12:43.566178Z"
}
},
"outputs": [],
@@ -77,10 +77,10 @@
"id": "3403756a",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:11:16.724246Z",
- "iopub.status.busy": "2026-01-28T03:11:16.724246Z",
- "iopub.status.idle": "2026-01-28T03:20:37.260399Z",
- "shell.execute_reply": "2026-01-28T03:20:37.259344Z"
+ "iopub.execute_input": "2026-01-30T06:12:43.568176Z",
+ "iopub.status.busy": "2026-01-30T06:12:43.568176Z",
+ "iopub.status.idle": "2026-01-30T06:14:55.555365Z",
+ "shell.execute_reply": "2026-01-30T06:14:55.555365Z"
}
},
"outputs": [],
diff --git a/experiments/Lung-GatedCT_To_USD/0-register_dirlab_4dct.ipynb b/experiments/Lung-GatedCT_To_USD/0-register_dirlab_4dct.ipynb
index 44aae3c..c6e5b16 100644
--- a/experiments/Lung-GatedCT_To_USD/0-register_dirlab_4dct.ipynb
+++ b/experiments/Lung-GatedCT_To_USD/0-register_dirlab_4dct.ipynb
@@ -5,10 +5,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:10:57.845147Z",
- "iopub.status.busy": "2026-01-28T05:10:57.845147Z",
- "iopub.status.idle": "2026-01-28T05:11:15.035220Z",
- "shell.execute_reply": "2026-01-28T05:11:15.034221Z"
+ "iopub.execute_input": "2026-01-30T07:42:39.966659Z",
+ "iopub.status.busy": "2026-01-30T07:42:39.966659Z",
+ "iopub.status.idle": "2026-01-30T07:42:54.752236Z",
+ "shell.execute_reply": "2026-01-30T07:42:54.751563Z"
}
},
"outputs": [],
@@ -42,10 +42,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:11:15.037311Z",
- "iopub.status.busy": "2026-01-28T05:11:15.037311Z",
- "iopub.status.idle": "2026-01-28T05:11:15.050359Z",
- "shell.execute_reply": "2026-01-28T05:11:15.049366Z"
+ "iopub.execute_input": "2026-01-30T07:42:54.754377Z",
+ "iopub.status.busy": "2026-01-30T07:42:54.754377Z",
+ "iopub.status.idle": "2026-01-30T07:42:54.767636Z",
+ "shell.execute_reply": "2026-01-30T07:42:54.766765Z"
}
},
"outputs": [],
@@ -122,10 +122,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T05:11:15.051369Z",
- "iopub.status.busy": "2026-01-28T05:11:15.051369Z",
- "iopub.status.idle": "2026-01-28T06:06:40.330576Z",
- "shell.execute_reply": "2026-01-28T06:06:40.329585Z"
+ "iopub.execute_input": "2026-01-30T07:42:54.769254Z",
+ "iopub.status.busy": "2026-01-30T07:42:54.769254Z",
+ "iopub.status.idle": "2026-01-30T08:37:31.177831Z",
+ "shell.execute_reply": "2026-01-30T08:37:31.176515Z"
}
},
"outputs": [],
diff --git a/experiments/Lung-GatedCT_To_USD/1-make_dirlab_models.ipynb b/experiments/Lung-GatedCT_To_USD/1-make_dirlab_models.ipynb
index dbb5fad..94e15bb 100644
--- a/experiments/Lung-GatedCT_To_USD/1-make_dirlab_models.ipynb
+++ b/experiments/Lung-GatedCT_To_USD/1-make_dirlab_models.ipynb
@@ -6,10 +6,10 @@
"id": "3ce61753-11ad-4ade-9afe-6ad1bc748e25",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:06:49.266434Z",
- "iopub.status.busy": "2026-01-28T06:06:49.266434Z",
- "iopub.status.idle": "2026-01-28T06:07:06.547812Z",
- "shell.execute_reply": "2026-01-28T06:07:06.546812Z"
+ "iopub.execute_input": "2026-01-30T08:37:39.612415Z",
+ "iopub.status.busy": "2026-01-30T08:37:39.611404Z",
+ "iopub.status.idle": "2026-01-30T08:37:55.829862Z",
+ "shell.execute_reply": "2026-01-30T08:37:55.829862Z"
}
},
"outputs": [],
@@ -20,7 +20,7 @@
"from data_dirlab_4d_ct import DataDirLab4DCT\n",
"\n",
"from physiomotion4d.contour_tools import ContourTools\n",
- "from physiomotion4d.convert_vtk_to_usd import ConvertVTKToUSD\n",
+ "from physiomotion4d import ConvertVTKToUSD\n",
"from physiomotion4d.segment_chest_total_segmentator import SegmentChestTotalSegmentator\n",
"\n",
"\n",
@@ -38,10 +38,10 @@
"id": "240f1d14",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:07:06.550830Z",
- "iopub.status.busy": "2026-01-28T06:07:06.549788Z",
- "iopub.status.idle": "2026-01-28T06:07:06.562706Z",
- "shell.execute_reply": "2026-01-28T06:07:06.561801Z"
+ "iopub.execute_input": "2026-01-30T08:37:55.831875Z",
+ "iopub.status.busy": "2026-01-30T08:37:55.831875Z",
+ "iopub.status.idle": "2026-01-30T08:37:55.844855Z",
+ "shell.execute_reply": "2026-01-30T08:37:55.844855Z"
}
},
"outputs": [],
@@ -71,10 +71,10 @@
"id": "4e3e9fa5",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:07:06.564706Z",
- "iopub.status.busy": "2026-01-28T06:07:06.564706Z",
- "iopub.status.idle": "2026-01-28T06:07:06.577709Z",
- "shell.execute_reply": "2026-01-28T06:07:06.576813Z"
+ "iopub.execute_input": "2026-01-30T08:37:55.847133Z",
+ "iopub.status.busy": "2026-01-30T08:37:55.847133Z",
+ "iopub.status.idle": "2026-01-30T08:37:55.861515Z",
+ "shell.execute_reply": "2026-01-30T08:37:55.860519Z"
}
},
"outputs": [],
@@ -131,10 +131,10 @@
"id": "61830d5f",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:07:06.580766Z",
- "iopub.status.busy": "2026-01-28T06:07:06.580766Z",
- "iopub.status.idle": "2026-01-28T06:20:33.496446Z",
- "shell.execute_reply": "2026-01-28T06:20:33.495447Z"
+ "iopub.execute_input": "2026-01-30T08:37:55.863513Z",
+ "iopub.status.busy": "2026-01-30T08:37:55.863513Z",
+ "iopub.status.idle": "2026-01-30T08:43:57.744011Z",
+ "shell.execute_reply": "2026-01-30T08:43:57.743010Z"
}
},
"outputs": [],
diff --git a/experiments/Lung-GatedCT_To_USD/2-paint_dirlab_models.ipynb b/experiments/Lung-GatedCT_To_USD/2-paint_dirlab_models.ipynb
index 37346e1..cf3603c 100644
--- a/experiments/Lung-GatedCT_To_USD/2-paint_dirlab_models.ipynb
+++ b/experiments/Lung-GatedCT_To_USD/2-paint_dirlab_models.ipynb
@@ -6,10 +6,10 @@
"id": "3ce61753-11ad-4ade-9afe-6ad1bc748e25",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:20:45.190874Z",
- "iopub.status.busy": "2026-01-28T06:20:45.190874Z",
- "iopub.status.idle": "2026-01-28T06:21:02.083178Z",
- "shell.execute_reply": "2026-01-28T06:21:02.082265Z"
+ "iopub.execute_input": "2026-01-30T08:44:08.480991Z",
+ "iopub.status.busy": "2026-01-30T08:44:08.480991Z",
+ "iopub.status.idle": "2026-01-30T08:44:24.437398Z",
+ "shell.execute_reply": "2026-01-30T08:44:24.436354Z"
}
},
"outputs": [],
@@ -34,10 +34,10 @@
"id": "3cc90c5c",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:21:02.085264Z",
- "iopub.status.busy": "2026-01-28T06:21:02.085264Z",
- "iopub.status.idle": "2026-01-28T06:21:03.484858Z",
- "shell.execute_reply": "2026-01-28T06:21:03.482705Z"
+ "iopub.execute_input": "2026-01-30T08:44:24.439766Z",
+ "iopub.status.busy": "2026-01-30T08:44:24.439172Z",
+ "iopub.status.idle": "2026-01-30T08:44:25.883711Z",
+ "shell.execute_reply": "2026-01-30T08:44:25.882799Z"
}
},
"outputs": [],
diff --git a/experiments/Lung-GatedCT_To_USD/Experiment_ArrangeOnStage.ipynb b/experiments/Lung-GatedCT_To_USD/Experiment_ArrangeOnStage.ipynb
index 64e9180..0772cf4 100644
--- a/experiments/Lung-GatedCT_To_USD/Experiment_ArrangeOnStage.ipynb
+++ b/experiments/Lung-GatedCT_To_USD/Experiment_ArrangeOnStage.ipynb
@@ -5,10 +5,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:21:09.961444Z",
- "iopub.status.busy": "2026-01-28T06:21:09.960762Z",
- "iopub.status.idle": "2026-01-28T06:21:26.113875Z",
- "shell.execute_reply": "2026-01-28T06:21:26.112758Z"
+ "iopub.execute_input": "2026-01-30T08:44:32.038875Z",
+ "iopub.status.busy": "2026-01-30T08:44:32.038875Z",
+ "iopub.status.idle": "2026-01-30T08:44:47.655595Z",
+ "shell.execute_reply": "2026-01-30T08:44:47.654682Z"
}
},
"outputs": [],
@@ -25,10 +25,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:21:26.115881Z",
- "iopub.status.busy": "2026-01-28T06:21:26.114881Z",
- "iopub.status.idle": "2026-01-28T06:21:26.129326Z",
- "shell.execute_reply": "2026-01-28T06:21:26.128332Z"
+ "iopub.execute_input": "2026-01-30T08:44:47.657595Z",
+ "iopub.status.busy": "2026-01-30T08:44:47.657595Z",
+ "iopub.status.idle": "2026-01-30T08:44:47.671281Z",
+ "shell.execute_reply": "2026-01-30T08:44:47.670401Z"
}
},
"outputs": [],
@@ -41,10 +41,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:21:26.130327Z",
- "iopub.status.busy": "2026-01-28T06:21:26.130327Z",
- "iopub.status.idle": "2026-01-28T06:21:26.538058Z",
- "shell.execute_reply": "2026-01-28T06:21:26.537084Z"
+ "iopub.execute_input": "2026-01-30T08:44:47.673512Z",
+ "iopub.status.busy": "2026-01-30T08:44:47.673512Z",
+ "iopub.status.idle": "2026-01-30T08:44:48.215223Z",
+ "shell.execute_reply": "2026-01-30T08:44:48.214311Z"
}
},
"outputs": [],
diff --git a/experiments/Lung-GatedCT_To_USD/Experiment_CombineModels.ipynb b/experiments/Lung-GatedCT_To_USD/Experiment_CombineModels.ipynb
index bc4c8ef..c4b1f42 100644
--- a/experiments/Lung-GatedCT_To_USD/Experiment_CombineModels.ipynb
+++ b/experiments/Lung-GatedCT_To_USD/Experiment_CombineModels.ipynb
@@ -5,10 +5,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:21:34.595236Z",
- "iopub.status.busy": "2026-01-28T06:21:34.595236Z",
- "iopub.status.idle": "2026-01-28T06:21:51.245476Z",
- "shell.execute_reply": "2026-01-28T06:21:51.244557Z"
+ "iopub.execute_input": "2026-01-30T08:44:56.423412Z",
+ "iopub.status.busy": "2026-01-30T08:44:56.423412Z",
+ "iopub.status.idle": "2026-01-30T08:45:11.476358Z",
+ "shell.execute_reply": "2026-01-30T08:45:11.475365Z"
}
},
"outputs": [],
@@ -25,10 +25,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:21:51.248570Z",
- "iopub.status.busy": "2026-01-28T06:21:51.247477Z",
- "iopub.status.idle": "2026-01-28T06:21:51.260536Z",
- "shell.execute_reply": "2026-01-28T06:21:51.259572Z"
+ "iopub.execute_input": "2026-01-30T08:45:11.478358Z",
+ "iopub.status.busy": "2026-01-30T08:45:11.477358Z",
+ "iopub.status.idle": "2026-01-30T08:45:11.491358Z",
+ "shell.execute_reply": "2026-01-30T08:45:11.490365Z"
}
},
"outputs": [],
@@ -41,10 +41,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:21:51.263590Z",
- "iopub.status.busy": "2026-01-28T06:21:51.263590Z",
- "iopub.status.idle": "2026-01-28T06:21:52.258962Z",
- "shell.execute_reply": "2026-01-28T06:21:52.258962Z"
+ "iopub.execute_input": "2026-01-30T08:45:11.492357Z",
+ "iopub.status.busy": "2026-01-30T08:45:11.492357Z",
+ "iopub.status.idle": "2026-01-30T08:45:12.404341Z",
+ "shell.execute_reply": "2026-01-30T08:45:12.402994Z"
}
},
"outputs": [],
@@ -66,10 +66,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:21:52.260965Z",
- "iopub.status.busy": "2026-01-28T06:21:52.260965Z",
- "iopub.status.idle": "2026-01-28T06:21:52.607773Z",
- "shell.execute_reply": "2026-01-28T06:21:52.605773Z"
+ "iopub.execute_input": "2026-01-30T08:45:12.406653Z",
+ "iopub.status.busy": "2026-01-30T08:45:12.406653Z",
+ "iopub.status.idle": "2026-01-30T08:45:12.739690Z",
+ "shell.execute_reply": "2026-01-30T08:45:12.737593Z"
}
},
"outputs": [],
@@ -92,10 +92,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:21:52.610798Z",
- "iopub.status.busy": "2026-01-28T06:21:52.610798Z",
- "iopub.status.idle": "2026-01-28T06:21:52.622882Z",
- "shell.execute_reply": "2026-01-28T06:21:52.620893Z"
+ "iopub.execute_input": "2026-01-30T08:45:12.741672Z",
+ "iopub.status.busy": "2026-01-30T08:45:12.741672Z",
+ "iopub.status.idle": "2026-01-30T08:45:12.754044Z",
+ "shell.execute_reply": "2026-01-30T08:45:12.752719Z"
}
},
"outputs": [],
@@ -108,10 +108,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:21:52.627599Z",
- "iopub.status.busy": "2026-01-28T06:21:52.627599Z",
- "iopub.status.idle": "2026-01-28T06:21:58.637615Z",
- "shell.execute_reply": "2026-01-28T06:21:58.637615Z"
+ "iopub.execute_input": "2026-01-30T08:45:12.756926Z",
+ "iopub.status.busy": "2026-01-30T08:45:12.756926Z",
+ "iopub.status.idle": "2026-01-30T08:45:18.997656Z",
+ "shell.execute_reply": "2026-01-30T08:45:18.996663Z"
}
},
"outputs": [],
diff --git a/experiments/Lung-GatedCT_To_USD/Experiment_SegReg.ipynb b/experiments/Lung-GatedCT_To_USD/Experiment_SegReg.ipynb
index 52c946b..9bec704 100644
--- a/experiments/Lung-GatedCT_To_USD/Experiment_SegReg.ipynb
+++ b/experiments/Lung-GatedCT_To_USD/Experiment_SegReg.ipynb
@@ -5,10 +5,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:22:07.933354Z",
- "iopub.status.busy": "2026-01-28T06:22:07.933354Z",
- "iopub.status.idle": "2026-01-28T06:22:23.810941Z",
- "shell.execute_reply": "2026-01-28T06:22:23.809983Z"
+ "iopub.execute_input": "2026-01-30T08:45:25.170581Z",
+ "iopub.status.busy": "2026-01-30T08:45:25.170031Z",
+ "iopub.status.idle": "2026-01-30T08:45:40.288626Z",
+ "shell.execute_reply": "2026-01-30T08:45:40.287631Z"
}
},
"outputs": [],
@@ -29,10 +29,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:22:23.813189Z",
- "iopub.status.busy": "2026-01-28T06:22:23.812762Z",
- "iopub.status.idle": "2026-01-28T06:22:24.053600Z",
- "shell.execute_reply": "2026-01-28T06:22:24.052481Z"
+ "iopub.execute_input": "2026-01-30T08:45:40.290625Z",
+ "iopub.status.busy": "2026-01-30T08:45:40.290625Z",
+ "iopub.status.idle": "2026-01-30T08:45:40.529152Z",
+ "shell.execute_reply": "2026-01-30T08:45:40.528152Z"
}
},
"outputs": [],
@@ -50,10 +50,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:22:24.055595Z",
- "iopub.status.busy": "2026-01-28T06:22:24.055237Z",
- "iopub.status.idle": "2026-01-28T06:23:25.813330Z",
- "shell.execute_reply": "2026-01-28T06:23:25.812458Z"
+ "iopub.execute_input": "2026-01-30T08:45:40.532153Z",
+ "iopub.status.busy": "2026-01-30T08:45:40.532153Z",
+ "iopub.status.idle": "2026-01-30T08:46:40.582649Z",
+ "shell.execute_reply": "2026-01-30T08:46:40.581655Z"
}
},
"outputs": [],
@@ -74,10 +74,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:23:25.816423Z",
- "iopub.status.busy": "2026-01-28T06:23:25.815408Z",
- "iopub.status.idle": "2026-01-28T06:24:44.780078Z",
- "shell.execute_reply": "2026-01-28T06:24:44.779086Z"
+ "iopub.execute_input": "2026-01-30T08:46:40.584647Z",
+ "iopub.status.busy": "2026-01-30T08:46:40.583660Z",
+ "iopub.status.idle": "2026-01-30T08:47:55.357554Z",
+ "shell.execute_reply": "2026-01-30T08:47:55.356568Z"
}
},
"outputs": [],
@@ -95,10 +95,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:24:44.782079Z",
- "iopub.status.busy": "2026-01-28T06:24:44.782079Z",
- "iopub.status.idle": "2026-01-28T06:24:44.795154Z",
- "shell.execute_reply": "2026-01-28T06:24:44.794079Z"
+ "iopub.execute_input": "2026-01-30T08:47:55.359675Z",
+ "iopub.status.busy": "2026-01-30T08:47:55.359675Z",
+ "iopub.status.idle": "2026-01-30T08:47:55.371645Z",
+ "shell.execute_reply": "2026-01-30T08:47:55.371645Z"
}
},
"outputs": [],
diff --git a/experiments/Lung-GatedCT_To_USD/Experiment_SubSurfaceScatter.ipynb b/experiments/Lung-GatedCT_To_USD/Experiment_SubSurfaceScatter.ipynb
index be26213..0b20f3f 100644
--- a/experiments/Lung-GatedCT_To_USD/Experiment_SubSurfaceScatter.ipynb
+++ b/experiments/Lung-GatedCT_To_USD/Experiment_SubSurfaceScatter.ipynb
@@ -5,10 +5,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:24:53.147223Z",
- "iopub.status.busy": "2026-01-28T06:24:53.147223Z",
- "iopub.status.idle": "2026-01-28T06:24:53.278166Z",
- "shell.execute_reply": "2026-01-28T06:24:53.277157Z"
+ "iopub.execute_input": "2026-01-30T08:48:02.007890Z",
+ "iopub.status.busy": "2026-01-30T08:48:02.007890Z",
+ "iopub.status.idle": "2026-01-30T08:48:02.109660Z",
+ "shell.execute_reply": "2026-01-30T08:48:02.108681Z"
}
},
"outputs": [],
@@ -23,10 +23,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:24:53.280641Z",
- "iopub.status.busy": "2026-01-28T06:24:53.280641Z",
- "iopub.status.idle": "2026-01-28T06:24:53.293065Z",
- "shell.execute_reply": "2026-01-28T06:24:53.292194Z"
+ "iopub.execute_input": "2026-01-30T08:48:02.111673Z",
+ "iopub.status.busy": "2026-01-30T08:48:02.111673Z",
+ "iopub.status.idle": "2026-01-30T08:48:02.124556Z",
+ "shell.execute_reply": "2026-01-30T08:48:02.123642Z"
}
},
"outputs": [],
@@ -39,10 +39,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T06:24:53.295069Z",
- "iopub.status.busy": "2026-01-28T06:24:53.295069Z",
- "iopub.status.idle": "2026-01-28T06:24:53.338447Z",
- "shell.execute_reply": "2026-01-28T06:24:53.337546Z"
+ "iopub.execute_input": "2026-01-30T08:48:02.126647Z",
+ "iopub.status.busy": "2026-01-30T08:48:02.126647Z",
+ "iopub.status.idle": "2026-01-30T08:48:02.170469Z",
+ "shell.execute_reply": "2026-01-30T08:48:02.169466Z"
}
},
"outputs": [],
diff --git a/experiments/Reconstruct4DCT/reconstruct_4d_ct.ipynb b/experiments/Reconstruct4DCT/reconstruct_4d_ct.ipynb
index 567d24c..95aa37c 100644
--- a/experiments/Reconstruct4DCT/reconstruct_4d_ct.ipynb
+++ b/experiments/Reconstruct4DCT/reconstruct_4d_ct.ipynb
@@ -6,10 +6,10 @@
"id": "e22cbc66",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:30.856632Z",
- "iopub.status.busy": "2026-01-28T01:08:30.856632Z",
- "iopub.status.idle": "2026-01-28T01:08:45.835184Z",
- "shell.execute_reply": "2026-01-28T01:08:45.834192Z"
+ "iopub.execute_input": "2026-01-30T04:18:23.770478Z",
+ "iopub.status.busy": "2026-01-30T04:18:23.770478Z",
+ "iopub.status.idle": "2026-01-30T04:18:38.190666Z",
+ "shell.execute_reply": "2026-01-30T04:18:38.189669Z"
}
},
"outputs": [],
@@ -28,10 +28,10 @@
"id": "bce36d34",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:45.837185Z",
- "iopub.status.busy": "2026-01-28T01:08:45.837185Z",
- "iopub.status.idle": "2026-01-28T01:08:54.394121Z",
- "shell.execute_reply": "2026-01-28T01:08:54.393133Z"
+ "iopub.execute_input": "2026-01-30T04:18:38.192663Z",
+ "iopub.status.busy": "2026-01-30T04:18:38.191837Z",
+ "iopub.status.idle": "2026-01-30T04:18:45.824308Z",
+ "shell.execute_reply": "2026-01-30T04:18:45.823092Z"
}
},
"outputs": [],
@@ -89,10 +89,10 @@
"id": "18b6e62a",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:54.396121Z",
- "iopub.status.busy": "2026-01-28T01:08:54.396121Z",
- "iopub.status.idle": "2026-01-28T01:08:54.408645Z",
- "shell.execute_reply": "2026-01-28T01:08:54.408645Z"
+ "iopub.execute_input": "2026-01-30T04:18:45.825991Z",
+ "iopub.status.busy": "2026-01-30T04:18:45.825991Z",
+ "iopub.status.idle": "2026-01-30T04:18:45.839439Z",
+ "shell.execute_reply": "2026-01-30T04:18:45.838446Z"
}
},
"outputs": [],
@@ -310,10 +310,10 @@
"id": "c1f34f87",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T01:08:54.411156Z",
- "iopub.status.busy": "2026-01-28T01:08:54.411156Z",
- "iopub.status.idle": "2026-01-28T02:23:55.854599Z",
- "shell.execute_reply": "2026-01-28T02:23:55.836115Z"
+ "iopub.execute_input": "2026-01-30T04:18:45.841201Z",
+ "iopub.status.busy": "2026-01-30T04:18:45.841201Z",
+ "iopub.status.idle": "2026-01-30T05:26:51.444160Z",
+ "shell.execute_reply": "2026-01-30T05:26:51.439168Z"
}
},
"outputs": [],
@@ -343,10 +343,10 @@
"id": "3bb5099c",
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T02:23:55.868225Z",
- "iopub.status.busy": "2026-01-28T02:23:55.868225Z",
- "iopub.status.idle": "2026-01-28T02:27:47.365606Z",
- "shell.execute_reply": "2026-01-28T02:27:47.364605Z"
+ "iopub.execute_input": "2026-01-30T05:26:51.467161Z",
+ "iopub.status.busy": "2026-01-30T05:26:51.467161Z",
+ "iopub.status.idle": "2026-01-30T05:30:30.701483Z",
+ "shell.execute_reply": "2026-01-30T05:30:30.700491Z"
}
},
"outputs": [],
diff --git a/experiments/Reconstruct4DCT/reconstruct_4d_ct_class.ipynb b/experiments/Reconstruct4DCT/reconstruct_4d_ct_class.ipynb
index 0e148da..95746ae 100644
--- a/experiments/Reconstruct4DCT/reconstruct_4d_ct_class.ipynb
+++ b/experiments/Reconstruct4DCT/reconstruct_4d_ct_class.ipynb
@@ -19,10 +19,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T02:27:58.753365Z",
- "iopub.status.busy": "2026-01-28T02:27:58.753365Z",
- "iopub.status.idle": "2026-01-28T02:28:24.604923Z",
- "shell.execute_reply": "2026-01-28T02:28:24.603868Z"
+ "iopub.execute_input": "2026-01-30T05:30:40.531804Z",
+ "iopub.status.busy": "2026-01-30T05:30:40.530805Z",
+ "iopub.status.idle": "2026-01-30T05:30:58.672594Z",
+ "shell.execute_reply": "2026-01-30T05:30:58.671605Z"
}
},
"outputs": [],
@@ -49,10 +49,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T02:28:24.607503Z",
- "iopub.status.busy": "2026-01-28T02:28:24.606992Z",
- "iopub.status.idle": "2026-01-28T02:28:24.619692Z",
- "shell.execute_reply": "2026-01-28T02:28:24.618754Z"
+ "iopub.execute_input": "2026-01-30T05:30:58.674142Z",
+ "iopub.status.busy": "2026-01-30T05:30:58.674142Z",
+ "iopub.status.idle": "2026-01-30T05:30:58.687762Z",
+ "shell.execute_reply": "2026-01-30T05:30:58.686664Z"
}
},
"outputs": [],
@@ -73,10 +73,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T02:28:24.630197Z",
- "iopub.status.busy": "2026-01-28T02:28:24.629184Z",
- "iopub.status.idle": "2026-01-28T02:28:24.634457Z",
- "shell.execute_reply": "2026-01-28T02:28:24.633702Z"
+ "iopub.execute_input": "2026-01-30T05:30:58.703987Z",
+ "iopub.status.busy": "2026-01-30T05:30:58.703987Z",
+ "iopub.status.idle": "2026-01-30T05:30:58.718882Z",
+ "shell.execute_reply": "2026-01-30T05:30:58.717396Z"
}
},
"outputs": [],
@@ -137,10 +137,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T02:28:24.636172Z",
- "iopub.status.busy": "2026-01-28T02:28:24.636172Z",
- "iopub.status.idle": "2026-01-28T02:28:27.563046Z",
- "shell.execute_reply": "2026-01-28T02:28:27.562056Z"
+ "iopub.execute_input": "2026-01-30T05:30:58.720894Z",
+ "iopub.status.busy": "2026-01-30T05:30:58.720391Z",
+ "iopub.status.idle": "2026-01-30T05:31:01.369428Z",
+ "shell.execute_reply": "2026-01-30T05:31:01.369428Z"
}
},
"outputs": [],
@@ -167,10 +167,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T02:28:27.565236Z",
- "iopub.status.busy": "2026-01-28T02:28:27.564217Z",
- "iopub.status.idle": "2026-01-28T02:28:27.578230Z",
- "shell.execute_reply": "2026-01-28T02:28:27.577229Z"
+ "iopub.execute_input": "2026-01-30T05:31:01.371427Z",
+ "iopub.status.busy": "2026-01-30T05:31:01.371427Z",
+ "iopub.status.idle": "2026-01-30T05:31:01.385441Z",
+ "shell.execute_reply": "2026-01-30T05:31:01.384447Z"
}
},
"outputs": [],
@@ -198,10 +198,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T02:28:27.581229Z",
- "iopub.status.busy": "2026-01-28T02:28:27.580228Z",
- "iopub.status.idle": "2026-01-28T02:54:39.563351Z",
- "shell.execute_reply": "2026-01-28T02:54:39.562350Z"
+ "iopub.execute_input": "2026-01-30T05:31:01.386441Z",
+ "iopub.status.busy": "2026-01-30T05:31:01.386441Z",
+ "iopub.status.idle": "2026-01-30T05:56:33.019788Z",
+ "shell.execute_reply": "2026-01-30T05:56:33.018784Z"
}
},
"outputs": [],
@@ -278,10 +278,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T02:54:39.570420Z",
- "iopub.status.busy": "2026-01-28T02:54:39.570420Z",
- "iopub.status.idle": "2026-01-28T03:06:02.874064Z",
- "shell.execute_reply": "2026-01-28T03:06:02.872057Z"
+ "iopub.execute_input": "2026-01-30T05:56:33.022804Z",
+ "iopub.status.busy": "2026-01-30T05:56:33.022804Z",
+ "iopub.status.idle": "2026-01-30T06:07:43.092299Z",
+ "shell.execute_reply": "2026-01-30T06:07:43.091298Z"
}
},
"outputs": [],
@@ -375,10 +375,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:06:02.877072Z",
- "iopub.status.busy": "2026-01-28T03:06:02.877072Z",
- "iopub.status.idle": "2026-01-28T03:07:19.325787Z",
- "shell.execute_reply": "2026-01-28T03:07:19.324787Z"
+ "iopub.execute_input": "2026-01-30T06:07:43.094810Z",
+ "iopub.status.busy": "2026-01-30T06:07:43.094810Z",
+ "iopub.status.idle": "2026-01-30T06:08:56.143047Z",
+ "shell.execute_reply": "2026-01-30T06:08:56.142072Z"
}
},
"outputs": [],
@@ -436,10 +436,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:07:19.327786Z",
- "iopub.status.busy": "2026-01-28T03:07:19.327786Z",
- "iopub.status.idle": "2026-01-28T03:07:19.341663Z",
- "shell.execute_reply": "2026-01-28T03:07:19.339906Z"
+ "iopub.execute_input": "2026-01-30T06:08:56.144782Z",
+ "iopub.status.busy": "2026-01-30T06:08:56.143778Z",
+ "iopub.status.idle": "2026-01-30T06:08:56.158247Z",
+ "shell.execute_reply": "2026-01-30T06:08:56.157442Z"
}
},
"outputs": [],
@@ -474,10 +474,10 @@
"execution_count": null,
"metadata": {
"execution": {
- "iopub.execute_input": "2026-01-28T03:07:19.344450Z",
- "iopub.status.busy": "2026-01-28T03:07:19.344450Z",
- "iopub.status.idle": "2026-01-28T03:10:22.657425Z",
- "shell.execute_reply": "2026-01-28T03:10:22.656436Z"
+ "iopub.execute_input": "2026-01-30T06:08:56.159843Z",
+ "iopub.status.busy": "2026-01-30T06:08:56.159843Z",
+ "iopub.status.idle": "2026-01-30T06:11:55.366054Z",
+ "shell.execute_reply": "2026-01-30T06:11:55.365129Z"
}
},
"outputs": [],
diff --git a/experiments/convert_vtk_to_usd_lib/convert_chop_valve_to_usd.ipynb b/experiments/convert_vtk_to_usd_lib/convert_chop_valve_to_usd.ipynb
new file mode 100644
index 0000000..9744cdb
--- /dev/null
+++ b/experiments/convert_vtk_to_usd_lib/convert_chop_valve_to_usd.ipynb
@@ -0,0 +1,1958 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Cardiac Valve 4D Time-Series Conversion to USD\n",
+ "\n",
+ "This notebook demonstrates converting time-varying cardiac valve simulation data from VTK format to animated USD.\n",
+ "\n",
+ "## Dataset: CHOP-Valve4D\n",
+ "\n",
+ "Two cardiac valve models with time-varying geometry:\n",
+ "\n",
+ "- **Alterra**: 232 time steps (cardiac cycle simulation)\n",
+ "- **TPV25**: 265 time steps (cardiac cycle simulation)\n",
+ "\n",
+ "These datasets represent 4D (3D + time) simulations of prosthetic heart valves during a cardiac cycle.\n",
+ "\n",
+ "## Goals\n",
+ "\n",
+ "1. Load and inspect time-varying VTK data\n",
+ "2. Convert entire time series to animated USD\n",
+ "3. Handle large datasets efficiently\n",
+ "4. Preserve all simulation data as USD primvars\n",
+ "5. Create multiple variations (full resolution, subsampled, etc.)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Project root: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\n",
+ "Source path: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\src\n"
+ ]
+ }
+ ],
+ "source": [
+ "import sys\n",
+ "from pathlib import Path\n",
+ "import re\n",
+ "import time as time_module\n",
+ "\n",
+ "# Add src to path\n",
+ "project_root = Path.cwd().parent.parent\n",
+ "src_path = project_root / \"src\"\n",
+ "if str(src_path) not in sys.path:\n",
+ " sys.path.insert(0, str(src_path))\n",
+ "\n",
+ "print(f\"Project root: {project_root}\")\n",
+ "print(f\"Source path: {src_path}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Configuration\n",
+ "\n",
+ "Control which time series conversions to compute."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Time Series Configuration:\n",
+ " - Compute Full Time Series: False\n",
+ " - Compute Subsampled Time Series: Always enabled\n",
+ "\n",
+ "⚠️ Full time series conversion is DISABLED for faster execution.\n",
+ " Set COMPUTE_FULL_TIME_SERIES = True to enable full conversion.\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Configuration: Control which conversions to run\n",
+ "# Set to True to compute full time series (all frames) - takes longer\n",
+ "# Set to False to only compute subsampled time series (faster, for preview)\n",
+ "COMPUTE_FULL_TIME_SERIES = False # Default: only subsampled\n",
+ "\n",
+ "print(\"Time Series Configuration:\")\n",
+ "print(f\" - Compute Full Time Series: {COMPUTE_FULL_TIME_SERIES}\")\n",
+ "print(\" - Compute Subsampled Time Series: Always enabled\")\n",
+ "print()\n",
+ "if not COMPUTE_FULL_TIME_SERIES:\n",
+ " print(\"⚠️ Full time series conversion is DISABLED for faster execution.\")\n",
+ " print(\" Set COMPUTE_FULL_TIME_SERIES = True to enable full conversion.\")\n",
+ "else:\n",
+ " print(\"✓ Full time series conversion is ENABLED (this will take longer).\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import logging\n",
+ "import numpy as np\n",
+ "from pxr import Usd, UsdGeom\n",
+ "\n",
+ "# Import the vtk_to_usd library\n",
+ "from physiomotion4d.vtk_to_usd import (\n",
+ " VTKToUSDConverter,\n",
+ " ConversionSettings,\n",
+ " MaterialData,\n",
+ " read_vtk_file,\n",
+ " validate_time_series_topology,\n",
+ ")\n",
+ "\n",
+ "# Import USDTools for post-processing colormap\n",
+ "from physiomotion4d.usd_tools import USDTools\n",
+ "\n",
+ "# Configure logging\n",
+ "logging.basicConfig(level=logging.INFO, format=\"%(levelname)s: %(message)s\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 1. Discover and Organize Time-Series Files"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Data directory: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\n",
+ "Output directory: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\n",
+ "\n",
+ "Directory status:\n",
+ " Alterra: ✓ c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\n",
+ " TPV25: ✓ c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Define data directories\n",
+ "data_dir = project_root / \"data\" / \"CHOP-Valve4D\"\n",
+ "alterra_dir = data_dir / \"Alterra\"\n",
+ "tpv25_dir = data_dir / \"TPV25\"\n",
+ "output_dir = Path.cwd() / \"output\" / \"valve4d\"\n",
+ "output_dir.mkdir(parents=True, exist_ok=True)\n",
+ "\n",
+ "print(f\"Data directory: {data_dir}\")\n",
+ "print(f\"Output directory: {output_dir}\")\n",
+ "print(\"\\nDirectory status:\")\n",
+ "print(f\" Alterra: {'✓' if alterra_dir.exists() else '✗'} {alterra_dir}\")\n",
+ "print(f\" TPV25: {'✓' if tpv25_dir.exists() else '✗'} {tpv25_dir}\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "============================================================\n",
+ "Time-Series Discovery\n",
+ "============================================================\n",
+ "\n",
+ "Alterra:\n",
+ " Files found: 232\n",
+ " Time range: t0 to t231\n",
+ " First file: Alterra_output.t000.vtk\n",
+ " Last file: Alterra_output.t231.vtk\n",
+ "\n",
+ "TPV25:\n",
+ " Files found: 265\n",
+ " Time range: t0 to t264\n",
+ " First file: TPV25_output.t000.vtk\n",
+ " Last file: TPV25_output.t264.vtk\n"
+ ]
+ }
+ ],
+ "source": [
+ "def discover_time_series(directory, pattern=r\"\\.t(\\d+)\\.vtk$\"):\n",
+ " \"\"\"Discover and sort time-series VTK files.\n",
+ "\n",
+ " Args:\n",
+ " directory: Directory containing VTK files\n",
+ " pattern: Regex pattern to extract time step number\n",
+ "\n",
+ " Returns:\n",
+ " list: Sorted list of (time_step, file_path) tuples\n",
+ " \"\"\"\n",
+ " vtk_files = list(Path(directory).glob(\"*.vtk\"))\n",
+ "\n",
+ " # Extract time step numbers and pair with files\n",
+ " time_series = []\n",
+ " for vtk_file in vtk_files:\n",
+ " match = re.search(pattern, vtk_file.name)\n",
+ " if match:\n",
+ " time_step = int(match.group(1))\n",
+ " time_series.append((time_step, vtk_file))\n",
+ "\n",
+ " # Sort by time step\n",
+ " time_series.sort(key=lambda x: x[0])\n",
+ "\n",
+ " return time_series\n",
+ "\n",
+ "\n",
+ "# Discover both datasets\n",
+ "alterra_series = discover_time_series(alterra_dir)\n",
+ "tpv25_series = discover_time_series(tpv25_dir)\n",
+ "\n",
+ "print(\"=\" * 60)\n",
+ "print(\"Time-Series Discovery\")\n",
+ "print(\"=\" * 60)\n",
+ "print(\"\\nAlterra:\")\n",
+ "print(f\" Files found: {len(alterra_series)}\")\n",
+ "if alterra_series:\n",
+ " print(f\" Time range: t{alterra_series[0][0]} to t{alterra_series[-1][0]}\")\n",
+ " print(f\" First file: {alterra_series[0][1].name}\")\n",
+ " print(f\" Last file: {alterra_series[-1][1].name}\")\n",
+ "\n",
+ "print(\"\\nTPV25:\")\n",
+ "print(f\" Files found: {len(tpv25_series)}\")\n",
+ "if tpv25_series:\n",
+ " print(f\" Time range: t{tpv25_series[0][0]} to t{tpv25_series[-1][0]}\")\n",
+ " print(f\" First file: {tpv25_series[0][1].name}\")\n",
+ " print(f\" Last file: {tpv25_series[-1][1].name}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 2. Inspect First Frame\n",
+ "\n",
+ "Examine the first time step to understand the data structure."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t000.vtk\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "============================================================\n",
+ "Alterra - First Frame Analysis\n",
+ "============================================================\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "File: Alterra_output.t000.vtk\n",
+ "\n",
+ "Geometry:\n",
+ " Points: 48,482\n",
+ " Faces: 83,634\n",
+ " Normals: Yes\n",
+ " Colors: No\n",
+ "\n",
+ "Bounding Box:\n",
+ " Min: [-28.819, -29.731, -72.836]\n",
+ " Max: [39.019, 51.354, 65.425]\n",
+ " Size: [67.837, 81.085, 138.261]\n",
+ "\n",
+ "Data Arrays (4):\n",
+ " 1. displacement:\n",
+ " - Type: float\n",
+ " - Components: 3\n",
+ " - Interpolation: vertex\n",
+ " - Elements: 48,482\n",
+ " - Range: [0.000000, 0.000000]\n",
+ " 2. shell_thickness:\n",
+ " - Type: float\n",
+ " - Components: 1\n",
+ " - Interpolation: vertex\n",
+ " - Elements: 48,482\n",
+ " - Range: [0.000000, 2.000000]\n",
+ " 3. stress:\n",
+ " - Type: float\n",
+ " - Components: 9\n",
+ " - Interpolation: vertex\n",
+ " - Elements: 48,482\n",
+ " - Range: [0.000000, 0.000000]\n",
+ " 4. relative_volume:\n",
+ " - Type: float\n",
+ " - Components: 1\n",
+ " - Interpolation: vertex\n",
+ " - Elements: 48,482\n",
+ " - Range: [1.000000, 1.000000]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Read first frame of Alterra\n",
+ "if alterra_series:\n",
+ " print(\"=\" * 60)\n",
+ " print(\"Alterra - First Frame Analysis\")\n",
+ " print(\"=\" * 60)\n",
+ "\n",
+ " first_file = alterra_series[0][1]\n",
+ " mesh_data = read_vtk_file(first_file, extract_surface=True)\n",
+ "\n",
+ " print(f\"\\nFile: {first_file.name}\")\n",
+ " print(\"\\nGeometry:\")\n",
+ " print(f\" Points: {len(mesh_data.points):,}\")\n",
+ " print(f\" Faces: {len(mesh_data.face_vertex_counts):,}\")\n",
+ " print(f\" Normals: {'Yes' if mesh_data.normals is not None else 'No'}\")\n",
+ " print(f\" Colors: {'Yes' if mesh_data.colors is not None else 'No'}\")\n",
+ "\n",
+ " # Bounding box\n",
+ " bbox_min = np.min(mesh_data.points, axis=0)\n",
+ " bbox_max = np.max(mesh_data.points, axis=0)\n",
+ " bbox_size = bbox_max - bbox_min\n",
+ " print(\"\\nBounding Box:\")\n",
+ " print(f\" Min: [{bbox_min[0]:.3f}, {bbox_min[1]:.3f}, {bbox_min[2]:.3f}]\")\n",
+ " print(f\" Max: [{bbox_max[0]:.3f}, {bbox_max[1]:.3f}, {bbox_max[2]:.3f}]\")\n",
+ " print(f\" Size: [{bbox_size[0]:.3f}, {bbox_size[1]:.3f}, {bbox_size[2]:.3f}]\")\n",
+ "\n",
+ " print(f\"\\nData Arrays ({len(mesh_data.generic_arrays)}):\")\n",
+ " for i, array in enumerate(mesh_data.generic_arrays, 1):\n",
+ " print(f\" {i}. {array.name}:\")\n",
+ " print(f\" - Type: {array.data_type.value}\")\n",
+ " print(f\" - Components: {array.num_components}\")\n",
+ " print(f\" - Interpolation: {array.interpolation}\")\n",
+ " print(f\" - Elements: {len(array.data):,}\")\n",
+ " if array.data.size > 0:\n",
+ " print(f\" - Range: [{np.min(array.data):.6f}, {np.max(array.data):.6f}]\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t000.vtk\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "============================================================\n",
+ "TPV25 - First Frame Analysis\n",
+ "============================================================\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "File: TPV25_output.t000.vtk\n",
+ "\n",
+ "Geometry:\n",
+ " Points: 38,301\n",
+ " Faces: 63,332\n",
+ " Normals: Yes\n",
+ " Colors: No\n",
+ "\n",
+ "Bounding Box:\n",
+ " Min: [-30.500, -31.550, -75.453]\n",
+ " Max: [38.519, 48.022, 57.744]\n",
+ " Size: [69.018, 79.573, 133.198]\n",
+ "\n",
+ "Data Arrays (4):\n",
+ " 1. displacement:\n",
+ " - Type: float\n",
+ " - Components: 3\n",
+ " - Interpolation: vertex\n",
+ " - Elements: 38,301\n",
+ " - Range: [0.000000, 0.000000]\n",
+ " 2. shell_thickness:\n",
+ " - Type: float\n",
+ " - Components: 1\n",
+ " - Interpolation: vertex\n",
+ " - Elements: 38,301\n",
+ " - Range: [0.000000, 2.000000]\n",
+ " 3. stress:\n",
+ " - Type: float\n",
+ " - Components: 9\n",
+ " - Interpolation: vertex\n",
+ " - Elements: 38,301\n",
+ " - Range: [0.000000, 0.000000]\n",
+ " 4. relative_volume:\n",
+ " - Type: float\n",
+ " - Components: 1\n",
+ " - Interpolation: vertex\n",
+ " - Elements: 38,301\n",
+ " - Range: [1.000000, 1.000000]\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Read first frame of TPV25\n",
+ "if tpv25_series:\n",
+ " print(\"=\" * 60)\n",
+ " print(\"TPV25 - First Frame Analysis\")\n",
+ " print(\"=\" * 60)\n",
+ "\n",
+ " first_file = tpv25_series[0][1]\n",
+ " mesh_data = read_vtk_file(first_file, extract_surface=True)\n",
+ "\n",
+ " print(f\"\\nFile: {first_file.name}\")\n",
+ " print(\"\\nGeometry:\")\n",
+ " print(f\" Points: {len(mesh_data.points):,}\")\n",
+ " print(f\" Faces: {len(mesh_data.face_vertex_counts):,}\")\n",
+ " print(f\" Normals: {'Yes' if mesh_data.normals is not None else 'No'}\")\n",
+ " print(f\" Colors: {'Yes' if mesh_data.colors is not None else 'No'}\")\n",
+ "\n",
+ " # Bounding box\n",
+ " bbox_min = np.min(mesh_data.points, axis=0)\n",
+ " bbox_max = np.max(mesh_data.points, axis=0)\n",
+ " bbox_size = bbox_max - bbox_min\n",
+ " print(\"\\nBounding Box:\")\n",
+ " print(f\" Min: [{bbox_min[0]:.3f}, {bbox_min[1]:.3f}, {bbox_min[2]:.3f}]\")\n",
+ " print(f\" Max: [{bbox_max[0]:.3f}, {bbox_max[1]:.3f}, {bbox_max[2]:.3f}]\")\n",
+ " print(f\" Size: [{bbox_size[0]:.3f}, {bbox_size[1]:.3f}, {bbox_size[2]:.3f}]\")\n",
+ "\n",
+ " print(f\"\\nData Arrays ({len(mesh_data.generic_arrays)}):\")\n",
+ " for i, array in enumerate(mesh_data.generic_arrays, 1):\n",
+ " print(f\" {i}. {array.name}:\")\n",
+ " print(f\" - Type: {array.data_type.value}\")\n",
+ " print(f\" - Components: {array.num_components}\")\n",
+ " print(f\" - Interpolation: {array.interpolation}\")\n",
+ " print(f\" - Elements: {len(array.data):,}\")\n",
+ " if array.data.size > 0:\n",
+ " print(f\" - Range: [{np.min(array.data):.6f}, {np.max(array.data):.6f}]\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Colorization will be applied after USD conversion using USDTools methods\n",
+ " - USDTools.list_mesh_primvars() for inspection\n",
+ " - USDTools.pick_color_primvar() for selection\n",
+ " - USDTools.apply_colormap_from_primvar() for coloring\n",
+ " - Colormap: plasma\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Note: Helper functions removed - now using USDTools for primvar inspection and colorization\n",
+ "# The workflow has changed to: convert to USD first, then apply colormap post-processing\n",
+ "\n",
+ "# Configuration: choose colormap for visualization\n",
+ "DEFAULT_COLORMAP = \"plasma\" # matplotlib colormap name\n",
+ "\n",
+ "# Enable automatic colorization (will pick strain/stress primvars if available)\n",
+ "ENABLE_AUTO_COLORIZATION = True\n",
+ "\n",
+ "print(\"Colorization will be applied after USD conversion using USDTools methods\")\n",
+ "print(\" - USDTools.list_mesh_primvars() for inspection\")\n",
+ "print(\" - USDTools.pick_color_primvar() for selection\")\n",
+ "print(\" - USDTools.apply_colormap_from_primvar() for coloring\")\n",
+ "print(f\" - Colormap: {DEFAULT_COLORMAP}\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Conversion settings configured\n",
+ " - Triangulate: True\n",
+ " - FPS: 60.0\n",
+ " - Up axis: Y\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Configure conversion settings\n",
+ "\n",
+ "# Create converter settings\n",
+ "settings = ConversionSettings(\n",
+ " triangulate_meshes=True,\n",
+ " compute_normals=False, # Use existing normals if available\n",
+ " preserve_point_arrays=True,\n",
+ " preserve_cell_arrays=True,\n",
+ " up_axis=\"Y\",\n",
+ " times_per_second=60.0, # 60 FPS for smooth animation\n",
+ " use_time_samples=True,\n",
+ ")\n",
+ "\n",
+ "print(\"Conversion settings configured\")\n",
+ "print(f\" - Triangulate: {settings.triangulate_meshes}\")\n",
+ "print(f\" - FPS: {settings.times_per_second}\")\n",
+ "print(f\" - Up axis: {settings.up_axis}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 3. Convert Full Time Series - Alterra\n",
+ "\n",
+ "Convert the complete Alterra dataset to animated USD."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "============================================================\n",
+ "Converting Alterra Time Series\n",
+ "============================================================\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Create material for Alterra\n",
+ "# Note: Vertex colors will be applied post-conversion by USDTools\n",
+ "alterra_material = MaterialData(\n",
+ " name=\"alterra_valve\",\n",
+ " diffuse_color=(0.4, 0.5, 0.8),\n",
+ " roughness=0.3,\n",
+ " metallic=0.1,\n",
+ " use_vertex_colors=False, # USDTools will bind vertex color material during colorization\n",
+ ")\n",
+ "\n",
+ "print(\"=\" * 60)\n",
+ "print(\"Converting Alterra Time Series\")\n",
+ "print(\"=\" * 60)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "⏭️ Skipping Alterra full time series (COMPUTE_FULL_TIME_SERIES = False)\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Convert Alterra (full resolution)\n",
+ "if COMPUTE_FULL_TIME_SERIES and alterra_series:\n",
+ " converter = VTKToUSDConverter(settings)\n",
+ "\n",
+ " # Extract file paths and time codes\n",
+ " alterra_files = [file_path for _, file_path in alterra_series]\n",
+ " alterra_times = [float(time_step) for time_step, _ in alterra_series]\n",
+ "\n",
+ " output_usd = output_dir / \"alterra_full.usd\"\n",
+ "\n",
+ " print(f\"\\nConverting to: {output_usd}\")\n",
+ " print(f\"Time codes: {alterra_times[0]:.1f} to {alterra_times[-1]:.1f}\")\n",
+ " print(\"\\nThis may take several minutes...\\n\")\n",
+ "\n",
+ " start_time = time_module.time()\n",
+ "\n",
+ " # Read MeshData\n",
+ " mesh_data_sequence = [read_vtk_file(f, extract_surface=True) for f in alterra_files]\n",
+ "\n",
+ " # Validate topology consistency across time series\n",
+ " validation_report = validate_time_series_topology(\n",
+ " mesh_data_sequence, filenames=alterra_files\n",
+ " )\n",
+ " if not validation_report[\"is_consistent\"]:\n",
+ " print(\n",
+ " f\"Warning: Found {len(validation_report['warnings'])} topology/primvar issues\"\n",
+ " )\n",
+ " if validation_report[\"topology_changes\"]:\n",
+ " print(\n",
+ " f\" Topology changes in {len(validation_report['topology_changes'])} frames\"\n",
+ " )\n",
+ "\n",
+ " # Convert to USD (preserves all primvars from VTK)\n",
+ " stage = converter.convert_mesh_data_sequence(\n",
+ " mesh_data_sequence=mesh_data_sequence,\n",
+ " output_usd=output_usd,\n",
+ " mesh_name=\"AlterraValve\",\n",
+ " time_codes=alterra_times,\n",
+ " material=alterra_material,\n",
+ " )\n",
+ "\n",
+ " # Repair elementSize for multi-component primvars (e.g. 9-component stress tensor)\n",
+ " usd_tools = USDTools()\n",
+ " mesh_path = \"/World/Meshes/AlterraValve\"\n",
+ " repair_report = usd_tools.repair_mesh_primvar_element_sizes(\n",
+ " str(output_usd), mesh_path\n",
+ " )\n",
+ " if repair_report[\"updated\"]:\n",
+ " print(f\"Repaired elementSize for {len(repair_report['updated'])} primvar(s)\")\n",
+ "\n",
+ " # Post-process: apply colormap visualization using USDTools\n",
+ " if ENABLE_AUTO_COLORIZATION:\n",
+ " # Inspect and select primvar for coloring\n",
+ " primvars = usd_tools.list_mesh_primvars(str(output_usd), mesh_path)\n",
+ " color_primvar = usd_tools.pick_color_primvar(\n",
+ " primvars, keywords=(\"strain\", \"stress\")\n",
+ " )\n",
+ "\n",
+ " if color_primvar:\n",
+ " print(f\"\\nApplying colormap to '{color_primvar}' using {DEFAULT_COLORMAP}\")\n",
+ " usd_tools.apply_colormap_from_primvar(\n",
+ " str(output_usd),\n",
+ " mesh_path,\n",
+ " color_primvar,\n",
+ " cmap=DEFAULT_COLORMAP,\n",
+ " bind_vertex_color_material=True,\n",
+ " )\n",
+ " else:\n",
+ " print(\"\\nNo strain/stress primvar found for coloring\")\n",
+ "\n",
+ " elapsed = time_module.time() - start_time\n",
+ "\n",
+ " print(f\"\\n✓ Conversion completed in {elapsed:.1f} seconds\")\n",
+ " print(f\" Output: {output_usd}\")\n",
+ " print(f\" Size: {output_usd.stat().st_size / (1024 * 1024):.2f} MB\")\n",
+ " print(f\" Time range: {stage.GetStartTimeCode()} - {stage.GetEndTimeCode()}\")\n",
+ " print(\n",
+ " f\" Duration: {(stage.GetEndTimeCode() - stage.GetStartTimeCode()) / settings.times_per_second:.2f} seconds @ {settings.times_per_second} FPS\"\n",
+ " )\n",
+ "elif not COMPUTE_FULL_TIME_SERIES:\n",
+ " print(\"⏭️ Skipping Alterra full time series (COMPUTE_FULL_TIME_SERIES = False)\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 4. Convert Subsampled Time Series - Alterra\n",
+ "\n",
+ "For faster previews, create a subsampled version (every Nth frame)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t000.vtk\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "============================================================\n",
+ "Converting Subsampled Alterra (every 5th frame)\n",
+ "============================================================\n",
+ "Frames: 232 → 47\n",
+ "\n",
+ "Converting to: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\alterra_subsample_5x.usd\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t005.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t010.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t015.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t020.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t025.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t030.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t035.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t040.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t045.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t050.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t055.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t060.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t065.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t070.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t075.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t080.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t085.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t090.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t095.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t100.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t105.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t110.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t115.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t120.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t125.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t130.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t135.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t140.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t145.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t150.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t155.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t160.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t165.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t170.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t175.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t180.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t185.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t190.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t195.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t200.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t205.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t210.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t215.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t220.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t225.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t230.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Converting sequence of 47 MeshData to c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\alterra_subsample_5x.usd\n",
+ "INFO: Creating time-varying mesh at: /World/Meshes/AlterraValve with 47 time steps\n",
+ "INFO: Creating USD mesh at: /World/Meshes/AlterraValve\n",
+ "INFO: Created mesh with 48482 points, 96888 faces\n",
+ "INFO: Created time-varying mesh with 47 time samples\n",
+ "INFO: Saved USD file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\alterra_subsample_5x.usd\n",
+ "2026-01-29 21:22:25 INFO USDTools Saved USD file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\alterra_subsample_5x.usd\n",
+ "2026-01-29 21:22:25 INFO USDTools Processing 47 time samples for primvar 'vtk_point_stress_c0'\n",
+ "2026-01-29 21:22:25 INFO USDTools Value range: 0 to 8.5216e+07\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "Applying colormap to 'vtk_point_stress_c0' using plasma\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2026-01-29 21:22:28 INFO USDTools Wrote displayColor primvar with 47 time samples\n",
+ "2026-01-29 21:22:28 INFO USDTools Created vertex color material: /World/Looks/VertexColorMaterial\n",
+ "2026-01-29 21:22:28 INFO USDTools Saved USD file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\alterra_subsample_5x.usd\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "✓ Conversion completed in 64.8 seconds\n",
+ " Output: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\alterra_subsample_5x.usd\n",
+ " Size: 198.03 MB\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Subsample Alterra (every 5th frame)\n",
+ "if alterra_series:\n",
+ " subsample_rate = 5\n",
+ " alterra_subsampled = alterra_series[::subsample_rate]\n",
+ "\n",
+ " print(\"=\" * 60)\n",
+ " print(f\"Converting Subsampled Alterra (every {subsample_rate}th frame)\")\n",
+ " print(\"=\" * 60)\n",
+ " print(f\"Frames: {len(alterra_series)} → {len(alterra_subsampled)}\")\n",
+ "\n",
+ " converter = VTKToUSDConverter(settings)\n",
+ "\n",
+ " alterra_files_sub = [file_path for _, file_path in alterra_subsampled]\n",
+ " alterra_times_sub = [float(time_step) for time_step, _ in alterra_subsampled]\n",
+ "\n",
+ " output_usd_sub = output_dir / f\"alterra_subsample_{subsample_rate}x.usd\"\n",
+ "\n",
+ " print(f\"\\nConverting to: {output_usd_sub}\")\n",
+ "\n",
+ " start_time = time_module.time()\n",
+ "\n",
+ " # Read MeshData\n",
+ " mesh_data_sequence = [\n",
+ " read_vtk_file(f, extract_surface=True) for f in alterra_files_sub\n",
+ " ]\n",
+ "\n",
+ " # Validate topology consistency across time series\n",
+ " validation_report = validate_time_series_topology(\n",
+ " mesh_data_sequence, filenames=alterra_files_sub\n",
+ " )\n",
+ " if not validation_report[\"is_consistent\"]:\n",
+ " print(\n",
+ " f\"Warning: Found {len(validation_report['warnings'])} topology/primvar issues\"\n",
+ " )\n",
+ " if validation_report[\"topology_changes\"]:\n",
+ " print(\n",
+ " f\" Topology changes in {len(validation_report['topology_changes'])} frames\"\n",
+ " )\n",
+ "\n",
+ " # Convert to USD (preserves all primvars from VTK)\n",
+ " stage_sub = converter.convert_mesh_data_sequence(\n",
+ " mesh_data_sequence=mesh_data_sequence,\n",
+ " output_usd=output_usd_sub,\n",
+ " mesh_name=\"AlterraValve\",\n",
+ " time_codes=alterra_times_sub,\n",
+ " material=alterra_material,\n",
+ " )\n",
+ "\n",
+ " # Repair elementSize for multi-component primvars (e.g. 9-component stress tensor)\n",
+ " usd_tools = USDTools()\n",
+ " mesh_path = \"/World/Meshes/AlterraValve\"\n",
+ " repair_report = usd_tools.repair_mesh_primvar_element_sizes(\n",
+ " str(output_usd_sub), mesh_path\n",
+ " )\n",
+ " if repair_report[\"updated\"]:\n",
+ " print(f\"Repaired elementSize for {len(repair_report['updated'])} primvar(s)\")\n",
+ "\n",
+ " # Post-process: apply colormap visualization using USDTools\n",
+ " if ENABLE_AUTO_COLORIZATION:\n",
+ " # Inspect and select primvar for coloring\n",
+ " primvars = usd_tools.list_mesh_primvars(str(output_usd_sub), mesh_path)\n",
+ " color_primvar = usd_tools.pick_color_primvar(\n",
+ " primvars, keywords=(\"strain\", \"stress\")\n",
+ " )\n",
+ "\n",
+ " if color_primvar:\n",
+ " print(f\"\\nApplying colormap to '{color_primvar}' using {DEFAULT_COLORMAP}\")\n",
+ " usd_tools.apply_colormap_from_primvar(\n",
+ " str(output_usd_sub),\n",
+ " mesh_path,\n",
+ " color_primvar,\n",
+ " cmap=DEFAULT_COLORMAP,\n",
+ " bind_vertex_color_material=True,\n",
+ " )\n",
+ " else:\n",
+ " print(\"\\nNo strain/stress primvar found for coloring\")\n",
+ "\n",
+ " elapsed = time_module.time() - start_time\n",
+ "\n",
+ " print(f\"\\n✓ Conversion completed in {elapsed:.1f} seconds\")\n",
+ " print(f\" Output: {output_usd_sub}\")\n",
+ " print(f\" Size: {output_usd_sub.stat().st_size / (1024 * 1024):.2f} MB\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 5. Convert Full Time Series - TPV25"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "============================================================\n",
+ "Converting TPV25 Time Series\n",
+ "============================================================\n",
+ "Dataset: 265 frames\n",
+ "⏭️ Skipping TPV25 full time series (COMPUTE_FULL_TIME_SERIES = False)\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Create material for TPV25\n",
+ "# Note: Vertex colors will be applied post-conversion by USDTools\n",
+ "tpv25_material = MaterialData(\n",
+ " name=\"tpv25_valve\",\n",
+ " diffuse_color=(0.85, 0.4, 0.4),\n",
+ " roughness=0.4,\n",
+ " metallic=0.0,\n",
+ " use_vertex_colors=False, # USDTools will bind vertex color material during colorization\n",
+ ")\n",
+ "\n",
+ "print(\"=\" * 60)\n",
+ "print(\"Converting TPV25 Time Series\")\n",
+ "print(\"=\" * 60)\n",
+ "print(f\"Dataset: {len(tpv25_series)} frames\")\n",
+ "\n",
+ "# Convert TPV25 (full resolution)\n",
+ "if COMPUTE_FULL_TIME_SERIES and tpv25_series:\n",
+ " converter = VTKToUSDConverter(settings)\n",
+ "\n",
+ " tpv25_files = [file_path for _, file_path in tpv25_series]\n",
+ " tpv25_times = [float(time_step) for time_step, _ in tpv25_series]\n",
+ "\n",
+ " output_usd = output_dir / \"tpv25_full.usd\"\n",
+ "\n",
+ " print(f\"\\nConverting to: {output_usd}\")\n",
+ " print(f\"Time codes: {tpv25_times[0]:.1f} to {tpv25_times[-1]:.1f}\")\n",
+ " print(\"\\nThis may take several minutes...\\n\")\n",
+ "\n",
+ " start_time = time_module.time()\n",
+ "\n",
+ " # Read MeshData\n",
+ " mesh_data_sequence = [read_vtk_file(f, extract_surface=True) for f in tpv25_files]\n",
+ "\n",
+ " # Validate topology consistency across time series\n",
+ " validation_report = validate_time_series_topology(\n",
+ " mesh_data_sequence, filenames=tpv25_files\n",
+ " )\n",
+ " if not validation_report[\"is_consistent\"]:\n",
+ " print(\n",
+ " f\"Warning: Found {len(validation_report['warnings'])} topology/primvar issues\"\n",
+ " )\n",
+ " if validation_report[\"topology_changes\"]:\n",
+ " print(\n",
+ " f\" Topology changes in {len(validation_report['topology_changes'])} frames\"\n",
+ " )\n",
+ "\n",
+ " # Convert to USD (preserves all primvars from VTK)\n",
+ " stage = converter.convert_mesh_data_sequence(\n",
+ " mesh_data_sequence=mesh_data_sequence,\n",
+ " output_usd=output_usd,\n",
+ " mesh_name=\"TPV25Valve\",\n",
+ " time_codes=tpv25_times,\n",
+ " material=tpv25_material,\n",
+ " )\n",
+ "\n",
+ " # Post-process: apply colormap visualization using USDTools\n",
+ " if ENABLE_AUTO_COLORIZATION:\n",
+ " usd_tools = USDTools()\n",
+ " mesh_path = \"/World/Meshes/TPV25Valve\"\n",
+ "\n",
+ " # Inspect and select primvar for coloring\n",
+ " primvars = usd_tools.list_mesh_primvars(str(output_usd), mesh_path)\n",
+ " color_primvar = usd_tools.pick_color_primvar(\n",
+ " primvars, keywords=(\"strain\", \"stress\")\n",
+ " )\n",
+ "\n",
+ " if color_primvar:\n",
+ " print(f\"\\nApplying colormap to '{color_primvar}' using {DEFAULT_COLORMAP}\")\n",
+ " usd_tools.apply_colormap_from_primvar(\n",
+ " str(output_usd),\n",
+ " mesh_path,\n",
+ " color_primvar,\n",
+ " cmap=DEFAULT_COLORMAP,\n",
+ " bind_vertex_color_material=True,\n",
+ " )\n",
+ " else:\n",
+ " print(\"\\nNo strain/stress primvar found for coloring\")\n",
+ "\n",
+ " elapsed = time_module.time() - start_time\n",
+ "\n",
+ " print(f\"\\n✓ Conversion completed in {elapsed:.1f} seconds\")\n",
+ " print(f\" Output: {output_usd}\")\n",
+ " print(f\" Size: {output_usd.stat().st_size / (1024 * 1024):.2f} MB\")\n",
+ " print(f\" Time range: {stage.GetStartTimeCode()} - {stage.GetEndTimeCode()}\")\n",
+ " print(\n",
+ " f\" Duration: {(stage.GetEndTimeCode() - stage.GetStartTimeCode()) / settings.times_per_second:.2f} seconds @ {settings.times_per_second} FPS\"\n",
+ " )\n",
+ "elif not COMPUTE_FULL_TIME_SERIES:\n",
+ " print(\"⏭️ Skipping TPV25 full time series (COMPUTE_FULL_TIME_SERIES = False)\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 6. Convert Subsampled Time Series - TPV25"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t000.vtk\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "============================================================\n",
+ "Converting Subsampled TPV25 (every 5th frame)\n",
+ "============================================================\n",
+ "Frames: 265 → 53\n",
+ "\n",
+ "Converting to: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\tpv25_subsample_5x.usd\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t005.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t010.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t015.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t020.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t025.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t030.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t035.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t040.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t045.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t050.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t055.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t060.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t065.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t070.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t075.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t080.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t085.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t090.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t095.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t100.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t105.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t110.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t115.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t120.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t125.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t130.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t135.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t140.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t145.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t150.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t155.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t160.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t165.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t170.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t175.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t180.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t185.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t190.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t195.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t200.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t205.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t210.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t215.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t220.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t225.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t230.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t235.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t240.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t245.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t250.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t255.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\TPV25\\TPV25_output.t260.vtk\n",
+ "INFO: Loaded mesh: 38301 points, 63332 faces, 4 data arrays\n",
+ "INFO: Converting sequence of 53 MeshData to c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\tpv25_subsample_5x.usd\n",
+ "INFO: Creating time-varying mesh at: /World/Meshes/TPV25Valve with 53 time steps\n",
+ "INFO: Creating USD mesh at: /World/Meshes/TPV25Valve\n",
+ "INFO: Created mesh with 38301 points, 76586 faces\n",
+ "INFO: Created time-varying mesh with 53 time samples\n",
+ "INFO: Saved USD file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\tpv25_subsample_5x.usd\n",
+ "2026-01-29 21:23:28 INFO USDTools Processing 53 time samples for primvar 'vtk_point_stress_c0'\n",
+ "2026-01-29 21:23:28 INFO USDTools Value range: 0 to 1.64397e+08\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "Applying colormap to 'vtk_point_stress_c0' using plasma\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2026-01-29 21:23:31 INFO USDTools Wrote displayColor primvar with 53 time samples\n",
+ "2026-01-29 21:23:31 INFO USDTools Created vertex color material: /World/Looks/VertexColorMaterial\n",
+ "2026-01-29 21:23:31 INFO USDTools Saved USD file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\tpv25_subsample_5x.usd\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "✓ Conversion completed in 62.4 seconds\n",
+ " Output: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\tpv25_subsample_5x.usd\n",
+ " Size: 176.81 MB\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Subsample TPV25 (every 5th frame)\n",
+ "if tpv25_series:\n",
+ " subsample_rate = 5\n",
+ " tpv25_subsampled = tpv25_series[::subsample_rate]\n",
+ "\n",
+ " print(\"=\" * 60)\n",
+ " print(f\"Converting Subsampled TPV25 (every {subsample_rate}th frame)\")\n",
+ " print(\"=\" * 60)\n",
+ " print(f\"Frames: {len(tpv25_series)} → {len(tpv25_subsampled)}\")\n",
+ "\n",
+ " converter = VTKToUSDConverter(settings)\n",
+ "\n",
+ " tpv25_files_sub = [file_path for _, file_path in tpv25_subsampled]\n",
+ " tpv25_times_sub = [float(time_step) for time_step, _ in tpv25_subsampled]\n",
+ "\n",
+ " output_usd_sub = output_dir / f\"tpv25_subsample_{subsample_rate}x.usd\"\n",
+ "\n",
+ " print(f\"\\nConverting to: {output_usd_sub}\")\n",
+ "\n",
+ " start_time = time_module.time()\n",
+ "\n",
+ " # Read MeshData\n",
+ " mesh_data_sequence = [\n",
+ " read_vtk_file(f, extract_surface=True) for f in tpv25_files_sub\n",
+ " ]\n",
+ "\n",
+ " # Validate topology consistency across time series\n",
+ " validation_report = validate_time_series_topology(\n",
+ " mesh_data_sequence, filenames=tpv25_files_sub\n",
+ " )\n",
+ " if not validation_report[\"is_consistent\"]:\n",
+ " print(\n",
+ " f\"Warning: Found {len(validation_report['warnings'])} topology/primvar issues\"\n",
+ " )\n",
+ " if validation_report[\"topology_changes\"]:\n",
+ " print(\n",
+ " f\" Topology changes in {len(validation_report['topology_changes'])} frames\"\n",
+ " )\n",
+ "\n",
+ " # Convert to USD (preserves all primvars from VTK)\n",
+ " stage_sub = converter.convert_mesh_data_sequence(\n",
+ " mesh_data_sequence=mesh_data_sequence,\n",
+ " output_usd=output_usd_sub,\n",
+ " mesh_name=\"TPV25Valve\",\n",
+ " time_codes=tpv25_times_sub,\n",
+ " material=tpv25_material,\n",
+ " )\n",
+ "\n",
+ " # Post-process: apply colormap visualization using USDTools\n",
+ " if ENABLE_AUTO_COLORIZATION:\n",
+ " usd_tools = USDTools()\n",
+ " mesh_path = \"/World/Meshes/TPV25Valve\"\n",
+ "\n",
+ " # Inspect and select primvar for coloring\n",
+ " primvars = usd_tools.list_mesh_primvars(str(output_usd_sub), mesh_path)\n",
+ " color_primvar = usd_tools.pick_color_primvar(\n",
+ " primvars, keywords=(\"strain\", \"stress\")\n",
+ " )\n",
+ "\n",
+ " if color_primvar:\n",
+ " print(f\"\\nApplying colormap to '{color_primvar}' using {DEFAULT_COLORMAP}\")\n",
+ " usd_tools.apply_colormap_from_primvar(\n",
+ " str(output_usd_sub),\n",
+ " mesh_path,\n",
+ " color_primvar,\n",
+ " cmap=DEFAULT_COLORMAP,\n",
+ " bind_vertex_color_material=True,\n",
+ " )\n",
+ " else:\n",
+ " print(\"\\nNo strain/stress primvar found for coloring\")\n",
+ "\n",
+ " elapsed = time_module.time() - start_time\n",
+ "\n",
+ " print(f\"\\n✓ Conversion completed in {elapsed:.1f} seconds\")\n",
+ " print(f\" Output: {output_usd_sub}\")\n",
+ " print(f\" Size: {output_usd_sub.stat().st_size / (1024 * 1024):.2f} MB\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 7. Create Combined Scene\n",
+ "\n",
+ "Create a single USD file with both valves side-by-side for comparison."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2026-01-29 21:23:31 INFO USDTools Grid layout: 1 rows x 2 cols\n",
+ "2026-01-29 21:23:31 INFO USDTools Copying c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\alterra_subsample_5x.usd:/World/Looks\n",
+ "2026-01-29 21:23:31 WARNING USDTools No valid bounding box found for prim: /World/Looks/VertexColorMaterial\n",
+ "2026-01-29 21:23:31 INFO USDTools Copying c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\alterra_subsample_5x.usd:/World/Meshes\n",
+ "2026-01-29 21:23:31 WARNING USDTools No valid bounding box found for prim: /World/Meshes/AlterraValve\n",
+ "2026-01-29 21:23:31 INFO USDTools Copying c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\tpv25_subsample_5x.usd:/World/Looks\n",
+ "2026-01-29 21:23:31 WARNING USDTools No valid bounding box found for prim: /World/Looks/VertexColorMaterial\n",
+ "2026-01-29 21:23:31 INFO USDTools Copying c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\tpv25_subsample_5x.usd:/World/Meshes\n",
+ "2026-01-29 21:23:31 WARNING USDTools No valid bounding box found for prim: /World/Meshes/TPV25Valve\n",
+ "2026-01-29 21:23:31 INFO USDTools Exporting stage...\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "============================================================\n",
+ "Creating Combined Scene\n",
+ "============================================================\n",
+ "Input files:\n",
+ " - alterra_subsample_5x.usd\n",
+ " - tpv25_subsample_5x.usd\n",
+ "Output: valves_combined.usd\n",
+ "\n",
+ "✓ Combined scene created: valves_combined.usd\n",
+ " - Both valves arranged in a spatial grid\n",
+ " - Ready to view in Omniverse or USDView\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Create combined scene with both valves using USDTools\n",
+ "if alterra_series and tpv25_series:\n",
+ " print(\"=\" * 60)\n",
+ " print(\"Creating Combined Scene\")\n",
+ " print(\"=\" * 60)\n",
+ "\n",
+ " # Use the subsampled USD files created earlier\n",
+ " subsample_rate = 5\n",
+ " alterra_usd = output_dir / f\"alterra_subsample_{subsample_rate}x.usd\"\n",
+ " tpv25_usd = output_dir / f\"tpv25_subsample_{subsample_rate}x.usd\"\n",
+ "\n",
+ " # Check if the files exist\n",
+ " if alterra_usd.exists() and tpv25_usd.exists():\n",
+ " combined_usd = output_dir / \"valves_combined.usd\"\n",
+ "\n",
+ " print(\"Input files:\")\n",
+ " print(f\" - {alterra_usd.name}\")\n",
+ " print(f\" - {tpv25_usd.name}\")\n",
+ " print(f\"Output: {combined_usd.name}\")\n",
+ "\n",
+ " # Use USDTools to arrange the valves side-by-side\n",
+ " from physiomotion4d.usd_tools import USDTools\n",
+ "\n",
+ " usd_tools = USDTools()\n",
+ "\n",
+ " usd_tools.save_usd_file_arrangement(\n",
+ " str(combined_usd), [str(alterra_usd), str(tpv25_usd)]\n",
+ " )\n",
+ "\n",
+ " print(f\"\\n✓ Combined scene created: {combined_usd.name}\")\n",
+ " print(\" - Both valves arranged in a spatial grid\")\n",
+ " print(\" - Ready to view in Omniverse or USDView\")\n",
+ " else:\n",
+ " print(\"\\n⚠ Subsampled USD files not found.\")\n",
+ " print(\"Run the conversion cells above first to create:\")\n",
+ " print(f\" - {alterra_usd.name}\")\n",
+ " print(f\" - {tpv25_usd.name}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 8. Summary and File Inspection"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "============================================================\n",
+ "Conversion Summary\n",
+ "============================================================\n",
+ "\n",
+ "alterra_subsample_5x.usd:\n",
+ " Size: 198.03 MB\n",
+ " Time range: 0 - 230\n",
+ " Duration: 3.83 seconds @ 60 FPS\n",
+ " Frames: 231\n",
+ " Meshes: 1\n",
+ "\n",
+ "alterra_subsample_5x_colored.usd:\n",
+ " Size: 224.11 MB\n",
+ " Time range: 0 - 230\n",
+ " Duration: 3.83 seconds @ 60 FPS\n",
+ " Frames: 231\n",
+ " Meshes: 1\n",
+ "\n",
+ "tpv25_subsample_5x.usd:\n",
+ " Size: 176.81 MB\n",
+ " Time range: 0 - 260\n",
+ " Duration: 4.33 seconds @ 60 FPS\n",
+ " Frames: 261\n",
+ " Meshes: 1\n",
+ "\n",
+ "valves_combined.usd:\n",
+ " Size: 374.84 MB\n",
+ " Time range: 0 - 230\n",
+ " Duration: 3.83 seconds @ 60 FPS\n",
+ " Frames: 231\n",
+ " Meshes: 2\n",
+ "\n",
+ "============================================================\n",
+ "Total size: 973.80 MB\n",
+ "Total files: 4\n",
+ "Output directory: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\n",
+ "============================================================\n"
+ ]
+ }
+ ],
+ "source": [
+ "import os\n",
+ "\n",
+ "print(\"=\" * 60)\n",
+ "print(\"Conversion Summary\")\n",
+ "print(\"=\" * 60)\n",
+ "\n",
+ "# List all generated USD files\n",
+ "usd_files = list(output_dir.glob(\"*.usd\"))\n",
+ "usd_files.extend(output_dir.glob(\"*.usda\"))\n",
+ "usd_files.extend(output_dir.glob(\"*.usdc\"))\n",
+ "\n",
+ "total_size = 0\n",
+ "\n",
+ "for usd_file in sorted(usd_files):\n",
+ " size_mb = os.path.getsize(usd_file) / (1024 * 1024)\n",
+ " total_size += size_mb\n",
+ "\n",
+ " print(f\"\\n{usd_file.name}:\")\n",
+ " print(f\" Size: {size_mb:.2f} MB\")\n",
+ "\n",
+ " # Open and inspect\n",
+ " stage = Usd.Stage.Open(str(usd_file))\n",
+ " if stage:\n",
+ " if stage.HasAuthoredTimeCodeRange():\n",
+ " duration = (\n",
+ " stage.GetEndTimeCode() - stage.GetStartTimeCode()\n",
+ " ) / stage.GetTimeCodesPerSecond()\n",
+ " print(\n",
+ " f\" Time range: {stage.GetStartTimeCode():.0f} - {stage.GetEndTimeCode():.0f}\"\n",
+ " )\n",
+ " print(\n",
+ " f\" Duration: {duration:.2f} seconds @ {stage.GetTimeCodesPerSecond():.0f} FPS\"\n",
+ " )\n",
+ " print(\n",
+ " f\" Frames: {int(stage.GetEndTimeCode() - stage.GetStartTimeCode() + 1)}\"\n",
+ " )\n",
+ "\n",
+ " # Count meshes\n",
+ " mesh_count = 0\n",
+ " for prim in stage.Traverse():\n",
+ " if prim.IsA(UsdGeom.Mesh):\n",
+ " mesh_count += 1\n",
+ " print(f\" Meshes: {mesh_count}\")\n",
+ "\n",
+ "print(f\"\\n{'=' * 60}\")\n",
+ "print(f\"Total size: {total_size:.2f} MB\")\n",
+ "print(f\"Total files: {len(usd_files)}\")\n",
+ "print(f\"Output directory: {output_dir}\")\n",
+ "print(f\"{'=' * 60}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 9. Detailed USD Inspection\n",
+ "\n",
+ "Examine the converted USD files to verify data preservation."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "============================================================\n",
+ "Detailed Inspection: alterra_subsample_5x.usd\n",
+ "============================================================\n",
+ "\n",
+ "Mesh: /World/Meshes/AlterraValve\n",
+ "\n",
+ "Geometry (at t=0):\n",
+ " Points: 48,482\n",
+ " Faces: 96,888\n",
+ "\n",
+ "Time-Varying Attributes:\n",
+ " Points: 47 time samples\n",
+ "\n",
+ "Primvars (8):\n",
+ " - displayColor:\n",
+ " Type: color3f[]\n",
+ " Interpolation: vertex\n",
+ " Elements: 48,482\n",
+ " Time samples: 47\n",
+ " - displayOpacity:\n",
+ " Type: float[]\n",
+ " Interpolation: constant\n",
+ " Elements: 0\n",
+ " - vtk_point_displacement:\n",
+ " Type: float3[]\n",
+ " Interpolation: vertex\n",
+ " Elements: 48,482\n",
+ " Time samples: 47\n",
+ " - vtk_point_relative_volume:\n",
+ " Type: float[]\n",
+ " Interpolation: vertex\n",
+ " Elements: 48,482\n",
+ " Time samples: 47\n",
+ " - vtk_point_shell_thickness:\n",
+ " Type: float[]\n",
+ " Interpolation: vertex\n",
+ " Elements: 48,482\n",
+ " Time samples: 47\n",
+ " - vtk_point_stress_c0:\n",
+ " Type: float3[]\n",
+ " Interpolation: vertex\n",
+ " Elements: 48,482\n",
+ " Time samples: 47\n",
+ " - vtk_point_stress_c1:\n",
+ " Type: float3[]\n",
+ " Interpolation: vertex\n",
+ " Elements: 48,482\n",
+ " Time samples: 47\n",
+ " - vtk_point_stress_c2:\n",
+ " Type: float3[]\n",
+ " Interpolation: vertex\n",
+ " Elements: 48,482\n",
+ " Time samples: 47\n",
+ "\n",
+ "Material: /World/Looks/VertexColorMaterial\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Inspect one of the converted files in detail\n",
+ "inspect_file = output_dir / \"alterra_subsample_5x.usd\"\n",
+ "\n",
+ "if inspect_file.exists():\n",
+ " print(\"=\" * 60)\n",
+ " print(f\"Detailed Inspection: {inspect_file.name}\")\n",
+ " print(\"=\" * 60)\n",
+ "\n",
+ " stage = Usd.Stage.Open(str(inspect_file))\n",
+ "\n",
+ " # Find mesh prim\n",
+ " mesh_prim = None\n",
+ " for prim in stage.Traverse():\n",
+ " if prim.IsA(UsdGeom.Mesh):\n",
+ " mesh_prim = prim\n",
+ " break\n",
+ "\n",
+ " if mesh_prim:\n",
+ " mesh = UsdGeom.Mesh(mesh_prim)\n",
+ "\n",
+ " print(f\"\\nMesh: {mesh_prim.GetPath()}\")\n",
+ "\n",
+ " # Geometry at first frame\n",
+ " first_time = stage.GetStartTimeCode()\n",
+ " points = mesh.GetPointsAttr().Get(first_time)\n",
+ " faces = mesh.GetFaceVertexCountsAttr().Get()\n",
+ "\n",
+ " print(f\"\\nGeometry (at t={first_time:.0f}):\")\n",
+ " print(f\" Points: {len(points):,}\")\n",
+ " print(f\" Faces: {len(faces):,}\")\n",
+ "\n",
+ " # Check time-varying attributes\n",
+ " print(\"\\nTime-Varying Attributes:\")\n",
+ " points_attr = mesh.GetPointsAttr()\n",
+ " if points_attr.GetNumTimeSamples() > 0:\n",
+ " print(f\" Points: {points_attr.GetNumTimeSamples()} time samples\")\n",
+ "\n",
+ " # List primvars\n",
+ " primvars_api = UsdGeom.PrimvarsAPI(mesh)\n",
+ " primvars = primvars_api.GetPrimvars()\n",
+ "\n",
+ " print(f\"\\nPrimvars ({len(primvars)}):\")\n",
+ " for primvar in primvars:\n",
+ " name = primvar.GetPrimvarName()\n",
+ " interpolation = primvar.GetInterpolation()\n",
+ " type_name = primvar.GetTypeName()\n",
+ " value = primvar.Get(first_time)\n",
+ " size = len(value) if value else 0\n",
+ "\n",
+ " print(f\" - {name}:\")\n",
+ " print(f\" Type: {type_name}\")\n",
+ " print(f\" Interpolation: {interpolation}\")\n",
+ " print(f\" Elements: {size:,}\")\n",
+ "\n",
+ " # Check if time-varying\n",
+ " if primvar.GetAttr().GetNumTimeSamples() > 0:\n",
+ " print(f\" Time samples: {primvar.GetAttr().GetNumTimeSamples()}\")\n",
+ "\n",
+ " # Material binding\n",
+ " from pxr import UsdShade\n",
+ "\n",
+ " binding_api = UsdShade.MaterialBindingAPI(mesh)\n",
+ " material_binding = binding_api.GetDirectBinding()\n",
+ " if material_binding:\n",
+ " print(f\"\\nMaterial: {material_binding.GetMaterialPath()}\")\n",
+ "else:\n",
+ " print(f\"File not found: {inspect_file}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 9.5. Post-Process USD with USDTools\n",
+ "\n",
+ "Demonstrate using the new `USDTools` methods to inspect primvars and apply colormap visualization to existing USD files."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2026-01-29 21:23:31 INFO USDTools Processing 47 time samples for primvar 'vtk_point_stress_c0'\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "============================================================\n",
+ "Post-Processing: alterra_subsample_5x.usd\n",
+ "============================================================\n",
+ "\n",
+ "Available primvars on /World/Meshes/AlterraValve:\n",
+ " - displayColor: vertex, 48482 elements, 47 time samples, range=0.0298..0.528\n",
+ " - displayOpacity: constant, 0 elements\n",
+ " - vtk_point_displacement: vertex, 0 elements, 47 time samples\n",
+ " - vtk_point_relative_volume: vertex, 0 elements, 47 time samples\n",
+ " - vtk_point_shell_thickness: vertex, 0 elements, 47 time samples\n",
+ " - vtk_point_stress_c0: vertex, 0 elements, 47 time samples\n",
+ " - vtk_point_stress_c1: vertex, 0 elements, 47 time samples\n",
+ " - vtk_point_stress_c2: vertex, 0 elements, 47 time samples\n",
+ "\n",
+ "Auto-selected for coloring: vtk_point_stress_c0\n",
+ "\n",
+ "Applying 'plasma' colormap to 'vtk_point_stress_c0'...\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "2026-01-29 21:23:32 INFO USDTools Value range: 0 to 8.5216e+07\n",
+ "2026-01-29 21:23:35 INFO USDTools Wrote displayColor primvar with 47 time samples\n",
+ "2026-01-29 21:23:35 INFO USDTools Saved USD file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\\alterra_subsample_5x_colored.usd\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "✓ Created colored visualization: alterra_subsample_5x_colored.usd\n",
+ " - displayColor primvar added with colormap from vtk_point_stress_c0\n",
+ " - Vertex color material bound for immediate visualization\n",
+ " - Ready to open in Omniverse with default coloring\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Example: Post-process an existing USD file to add colormap visualization\n",
+ "from physiomotion4d.usd_tools import USDTools\n",
+ "\n",
+ "usd_tools = USDTools()\n",
+ "\n",
+ "# Pick a USD file to post-process\n",
+ "postprocess_file = output_dir / \"alterra_subsample_5x.usd\"\n",
+ "\n",
+ "if postprocess_file.exists():\n",
+ " print(\"=\" * 60)\n",
+ " print(f\"Post-Processing: {postprocess_file.name}\")\n",
+ " print(\"=\" * 60)\n",
+ "\n",
+ " # 1. List available primvars on the mesh\n",
+ " mesh_path = \"/World/Meshes/AlterraValve\"\n",
+ " primvars = usd_tools.list_mesh_primvars(str(postprocess_file), mesh_path)\n",
+ "\n",
+ " print(f\"\\nAvailable primvars on {mesh_path}:\")\n",
+ " for pv in primvars:\n",
+ " time_info = (\n",
+ " f\", {pv['num_time_samples']} time samples\"\n",
+ " if pv[\"num_time_samples\"] > 0\n",
+ " else \"\"\n",
+ " )\n",
+ " range_info = (\n",
+ " f\", range={pv['range'][0]:.3g}..{pv['range'][1]:.3g}\" if pv[\"range\"] else \"\"\n",
+ " )\n",
+ " print(\n",
+ " f\" - {pv['name']}: {pv['interpolation']}, {pv['elements']} elements{time_info}{range_info}\"\n",
+ " )\n",
+ "\n",
+ " # 2. Pick best primvar for coloring (prefer strain/stress)\n",
+ " color_primvar = usd_tools.pick_color_primvar(primvars)\n",
+ " print(f\"\\nAuto-selected for coloring: {color_primvar}\")\n",
+ "\n",
+ " # 3. Apply colormap to create displayColor visualization\n",
+ " # Note: This modifies the USD file in-place\n",
+ " if color_primvar:\n",
+ " print(f\"\\nApplying 'plasma' colormap to '{color_primvar}'...\")\n",
+ "\n",
+ " # Create a copy for demonstration (optional)\n",
+ " demo_file = output_dir / f\"{postprocess_file.stem}_colored.usd\"\n",
+ " import shutil\n",
+ "\n",
+ " shutil.copy(postprocess_file, demo_file)\n",
+ "\n",
+ " usd_tools.apply_colormap_from_primvar(\n",
+ " str(demo_file),\n",
+ " mesh_path,\n",
+ " color_primvar,\n",
+ " cmap=\"plasma\",\n",
+ " write_default_at_t0=True,\n",
+ " bind_vertex_color_material=True,\n",
+ " )\n",
+ "\n",
+ " print(f\"\\n✓ Created colored visualization: {demo_file.name}\")\n",
+ " print(f\" - displayColor primvar added with colormap from {color_primvar}\")\n",
+ " print(\" - Vertex color material bound for immediate visualization\")\n",
+ " print(\" - Ready to open in Omniverse with default coloring\")\n",
+ " else:\n",
+ " print(\"\\n⚠️ No suitable primvar found for coloring\")\n",
+ "else:\n",
+ " print(f\"File not found: {postprocess_file}\")\n",
+ " print(\"Run the conversion cells first to generate USD files.\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 10. Performance Analysis"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t000.vtk\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "============================================================\n",
+ "Performance Analysis\n",
+ "============================================================\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t116.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n",
+ "INFO: Reading legacy VTK file: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\data\\CHOP-Valve4D\\Alterra\\Alterra_output.t231.vtk\n",
+ "INFO: Loaded mesh: 48482 points, 83634 faces, 4 data arrays\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "Alterra Dataset:\n",
+ " Average points per frame: 48,482\n",
+ " Average faces per frame: 83,634\n",
+ " Average data arrays per frame: 4\n",
+ " Total frames: 232\n",
+ " Estimated total points: 11,247,824\n",
+ " Estimated total faces: 19,403,088\n",
+ "\n",
+ "============================================================\n",
+ "\n",
+ "✓ All conversions completed!\n",
+ "\n",
+ "View the results:\n",
+ " - USDView: usdview .usd\n",
+ " - Omniverse: Open in Create/View/Composer\n",
+ "\n",
+ "Output files: c:\\src\\Projects\\PhysioMotion\\physiomotion4d\\experiments\\convert_vtk_to_usd_lib\\output\\valve4d\n",
+ "============================================================\n"
+ ]
+ }
+ ],
+ "source": [
+ "# Analyze conversion performance\n",
+ "print(\"=\" * 60)\n",
+ "print(\"Performance Analysis\")\n",
+ "print(\"=\" * 60)\n",
+ "\n",
+ "# Read a few frames to estimate per-frame metrics\n",
+ "if alterra_series:\n",
+ " sample_files = [\n",
+ " alterra_series[0][1],\n",
+ " alterra_series[len(alterra_series) // 2][1],\n",
+ " alterra_series[-1][1],\n",
+ " ]\n",
+ "\n",
+ " total_points = 0\n",
+ " total_faces = 0\n",
+ " total_arrays = 0\n",
+ "\n",
+ " for sample_file in sample_files:\n",
+ " mesh_data = read_vtk_file(sample_file, extract_surface=True)\n",
+ " total_points += len(mesh_data.points)\n",
+ " total_faces += len(mesh_data.face_vertex_counts)\n",
+ " total_arrays += len(mesh_data.generic_arrays)\n",
+ "\n",
+ " avg_points = total_points / len(sample_files)\n",
+ " avg_faces = total_faces / len(sample_files)\n",
+ " avg_arrays = total_arrays / len(sample_files)\n",
+ "\n",
+ " print(\"\\nAlterra Dataset:\")\n",
+ " print(f\" Average points per frame: {avg_points:,.0f}\")\n",
+ " print(f\" Average faces per frame: {avg_faces:,.0f}\")\n",
+ " print(f\" Average data arrays per frame: {avg_arrays:.0f}\")\n",
+ " print(f\" Total frames: {len(alterra_series)}\")\n",
+ " print(f\" Estimated total points: {avg_points * len(alterra_series):,.0f}\")\n",
+ " print(f\" Estimated total faces: {avg_faces * len(alterra_series):,.0f}\")\n",
+ "\n",
+ "print(f\"\\n{'=' * 60}\")\n",
+ "print(\"\\n✓ All conversions completed!\")\n",
+ "print(\"\\nView the results:\")\n",
+ "print(\" - USDView: usdview <filename>.usd\")\n",
+ "print(\" - Omniverse: Open in Create/View/Composer\")\n",
+ "print(f\"\\nOutput files: {output_dir}\")\n",
+ "print(\"=\" * 60)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Conclusion\n",
+ "\n",
+ "This notebook demonstrated converting large-scale time-varying cardiac valve simulation data to USD:\n",
+ "\n",
+ "### Key Accomplishments\n",
+ "\n",
+ "1. **Discovered and organized** 200+ frame time-series datasets\n",
+ "2. **Converted full-resolution** datasets to animated USD\n",
+ "3. **Created subsampled versions** for faster preview\n",
+ "4. **Preserved all simulation data** as USD primvars\n",
+ "5. **Applied custom materials** for visualization\n",
+ "6. **Handled coordinate systems** (RAS → Y-up)\n",
+ "\n",
+ "### File Outputs\n",
+ "\n",
+ "- `alterra_full.usd` - Complete 232-frame animation\n",
+ "- `alterra_subsample_5x.usd` - Subsampled for preview\n",
+ "- `tpv25_full.usd` - Complete 265-frame animation\n",
+ "- `tpv25_subsample_5x.usd` - Subsampled for preview\n",
+ "\n",
+ "### Performance Notes\n",
+ "\n",
+ "- Full conversions may take several minutes due to large frame counts\n",
+ "- Subsampling provides faster iteration during development\n",
+ "- All VTK point and cell data arrays are preserved as primvars\n",
+ "- Time-sampled attributes enable efficient animation\n",
+ "\n",
+ "### Next Steps\n",
+ "\n",
+ "1. **View animations** in USDView or Omniverse\n",
+ "2. **Analyze primvars** to visualize simulation data\n",
+ "3. **Create custom materials** based on data arrays\n",
+ "4. **Compose scenes** with multiple valves for comparison\n",
+ "5. **Add cameras and lighting** for publication-quality renders"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "venv",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.11"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/experiments/convert_vtk_to_usd_lib/convert_vtk_to_usd_lib.ipynb b/experiments/convert_vtk_to_usd_lib/convert_vtk_to_usd_lib.ipynb
new file mode 100644
index 0000000..f7a6675
--- /dev/null
+++ b/experiments/convert_vtk_to_usd_lib/convert_vtk_to_usd_lib.ipynb
@@ -0,0 +1,607 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# VTK to USD Converter Test Notebook\n",
+ "\n",
+ "This notebook demonstrates the usage of the new `vtk_to_usd` library for converting VTK files to USD format.\n",
+ "\n",
+ "The library is based on the ParaViewConnector architecture from Omniverse but simplified for file-based conversion only.\n",
+ "\n",
+ "## Features\n",
+ "\n",
+ "- **File Format Support**: VTK legacy (.vtk), XML PolyData (.vtp), XML UnstructuredGrid (.vtu)\n",
+ "- **Geometry Preservation**: Points, faces, normals, colors\n",
+ "- **Data Arrays**: VTK point and cell data arrays → USD primvars\n",
+ "- **Time-Series**: Support for animated/time-varying data\n",
+ "- **Materials**: UsdPreviewSurface materials with customizable properties\n",
+ "- **Coordinate System**: Automatic conversion from RAS (medical imaging) to USD Y-up\n",
+ "\n",
+ "## Test Data\n",
+ "\n",
+ "We'll use the KCL Heart Model data:\n",
+ "- `average_surface.vtp`: Surface mesh of the heart\n",
+ "- `average_mesh.vtk`: Volumetric mesh of the heart"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import sys\n",
+ "from pathlib import Path\n",
+ "\n",
+ "# Add src to path\n",
+ "project_root = Path.cwd().parent.parent\n",
+ "src_path = project_root / \"src\"\n",
+ "if str(src_path) not in sys.path:\n",
+ " sys.path.insert(0, str(src_path))\n",
+ "\n",
+ "print(f\"Project root: {project_root}\")\n",
+ "print(f\"Source path: {src_path}\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import logging\n",
+ "import numpy as np\n",
+ "from pxr import Usd, UsdGeom\n",
+ "\n",
+ "# Import the new vtk_to_usd library\n",
+ "from physiomotion4d.vtk_to_usd import (\n",
+ " VTKToUSDConverter,\n",
+ " ConversionSettings,\n",
+ " MaterialData,\n",
+ " convert_vtk_file,\n",
+ " read_vtk_file,\n",
+ ")\n",
+ "\n",
+ "# Configure logging\n",
+ "logging.basicConfig(level=logging.INFO, format=\"%(levelname)s: %(message)s\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 1. Basic Conversion: VTP to USD\n",
+ "\n",
+ "Let's start with the simplest use case: converting a single VTP file to USD."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Define file paths\n",
+ "data_dir = project_root / \"data\" / \"KCL-Heart-Model\"\n",
+ "output_dir = Path.cwd() / \"output\"\n",
+ "output_dir.mkdir(exist_ok=True)\n",
+ "\n",
+ "vtp_file = data_dir / \"average_surface.vtp\"\n",
+ "vtk_file = data_dir / \"average_mesh.vtk\"\n",
+ "\n",
+ "print(\"Input files:\")\n",
+ "print(f\" VTP: {vtp_file.exists()} - {vtp_file}\")\n",
+ "print(f\" VTK: {vtk_file.exists()} - {vtk_file}\")\n",
+ "print(f\"\\nOutput directory: {output_dir}\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Simple conversion using convenience function\n",
+ "output_usd = output_dir / \"heart_surface_simple.usd\"\n",
+ "\n",
+ "stage = convert_vtk_file(\n",
+ " vtk_file=vtp_file, output_usd=output_usd, mesh_name=\"HeartSurface\"\n",
+ ")\n",
+ "\n",
+ "print(f\"\\nCreated USD file: {output_usd}\")\n",
+ "print(\"Stage info:\")\n",
+ "print(f\" Root layer: {stage.GetRootLayer().identifier}\")\n",
+ "print(f\" Default prim: {stage.GetDefaultPrim().GetPath()}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 2. Inspect VTK File Data\n",
+ "\n",
+ "Let's examine what data arrays are present in the VTK files."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Read and inspect the VTP file\n",
+ "mesh_data = read_vtk_file(vtp_file)\n",
+ "\n",
+ "print(\"=\" * 60)\n",
+ "print(\"VTP File (average_surface.vtp) Contents:\")\n",
+ "print(\"=\" * 60)\n",
+ "print(\"\\nGeometry:\")\n",
+ "print(f\" Points: {len(mesh_data.points)}\")\n",
+ "print(f\" Faces: {len(mesh_data.face_vertex_counts)}\")\n",
+ "print(f\" Normals: {'Yes' if mesh_data.normals is not None else 'No'}\")\n",
+ "print(f\" Colors: {'Yes' if mesh_data.colors is not None else 'No'}\")\n",
+ "\n",
+ "print(f\"\\nData Arrays ({len(mesh_data.generic_arrays)}):\")\n",
+ "for i, array in enumerate(mesh_data.generic_arrays, 1):\n",
+ " print(f\" {i}. {array.name}:\")\n",
+ " print(f\" - Components: {array.num_components}\")\n",
+ " print(f\" - Type: {array.data_type.value}\")\n",
+ " print(f\" - Interpolation: {array.interpolation}\")\n",
+ " print(f\" - Shape: {array.data.shape}\")\n",
+ " if array.data.size > 0:\n",
+ " print(f\" - Range: [{np.min(array.data):.6f}, {np.max(array.data):.6f}]\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Read and inspect the VTK file\n",
+ "mesh_data_vtk = read_vtk_file(vtk_file, extract_surface=True)\n",
+ "\n",
+ "print(\"=\" * 60)\n",
+ "print(\"VTK File (average_mesh.vtk) Contents:\")\n",
+ "print(\"=\" * 60)\n",
+ "print(\"\\nGeometry:\")\n",
+ "print(f\" Points: {len(mesh_data_vtk.points)}\")\n",
+ "print(f\" Faces: {len(mesh_data_vtk.face_vertex_counts)}\")\n",
+ "print(f\" Normals: {'Yes' if mesh_data_vtk.normals is not None else 'No'}\")\n",
+ "print(f\" Colors: {'Yes' if mesh_data_vtk.colors is not None else 'No'}\")\n",
+ "\n",
+ "print(f\"\\nData Arrays ({len(mesh_data_vtk.generic_arrays)}):\")\n",
+ "for i, array in enumerate(mesh_data_vtk.generic_arrays, 1):\n",
+ " print(f\" {i}. {array.name}:\")\n",
+ " print(f\" - Components: {array.num_components}\")\n",
+ " print(f\" - Type: {array.data_type.value}\")\n",
+ " print(f\" - Interpolation: {array.interpolation}\")\n",
+ " print(f\" - Shape: {array.data.shape}\")\n",
+ " if array.data.size > 0:\n",
+ " print(f\" - Range: [{np.min(array.data):.6f}, {np.max(array.data):.6f}]\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 3. Advanced Conversion with Custom Settings\n",
+ "\n",
+ "Now let's use custom settings to control the conversion process."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Create custom conversion settings\n",
+ "settings = ConversionSettings(\n",
+ " triangulate_meshes=True, # Ensure all faces are triangles\n",
+ " compute_normals=True, # Compute normals if not present\n",
+ " preserve_point_arrays=True, # Keep all point data as primvars\n",
+ " preserve_cell_arrays=True, # Keep all cell data as primvars\n",
+ " meters_per_unit=0.001, # Assume VTK data is in millimeters\n",
+ " up_axis=\"Y\", # Use Y-up (USD standard)\n",
+ ")\n",
+ "\n",
+ "# Create custom material\n",
+ "material = MaterialData(\n",
+ " name=\"heart_material\",\n",
+ " diffuse_color=(0.9, 0.3, 0.3), # Reddish color for heart\n",
+ " roughness=0.4,\n",
+ " metallic=0.0,\n",
+ ")\n",
+ "\n",
+ "# Create converter\n",
+ "converter = VTKToUSDConverter(settings)\n",
+ "\n",
+ "# Convert with custom settings\n",
+ "output_usd_custom = output_dir / \"heart_surface_custom.usd\"\n",
+ "stage_custom = converter.convert_file(\n",
+ " vtk_file=vtp_file,\n",
+ " output_usd=output_usd_custom,\n",
+ " mesh_name=\"HeartSurface\",\n",
+ " material=material,\n",
+ ")\n",
+ "\n",
+ "print(f\"\\nCreated custom USD file: {output_usd_custom}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 4. Convert VTK Legacy Format\n",
+ "\n",
+ "Now let's convert the legacy VTK format file."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Convert VTK file with custom material\n",
+ "output_usd_vtk = output_dir / \"heart_mesh.usd\"\n",
+ "\n",
+ "material_mesh = MaterialData(\n",
+ " name=\"heart_mesh_material\",\n",
+ " diffuse_color=(0.8, 0.4, 0.4),\n",
+ " roughness=0.5,\n",
+ " metallic=0.0,\n",
+ ")\n",
+ "\n",
+ "stage_vtk = converter.convert_file(\n",
+ " vtk_file=vtk_file,\n",
+ " output_usd=output_usd_vtk,\n",
+ " mesh_name=\"HeartMesh\",\n",
+ " material=material_mesh,\n",
+ " extract_surface=True, # Extract surface from volumetric mesh\n",
+ ")\n",
+ "\n",
+ "print(f\"\\nCreated VTK USD file: {output_usd_vtk}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 5. Inspect USD Output\n",
+ "\n",
+ "Let's examine the created USD file to verify all data was preserved."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Open the USD file for inspection\n",
+ "inspect_stage = Usd.Stage.Open(str(output_usd_custom))\n",
+ "\n",
+ "print(\"=\" * 60)\n",
+ "print(\"USD File Inspection\")\n",
+ "print(\"=\" * 60)\n",
+ "\n",
+ "# Get the mesh prim\n",
+ "mesh_path = \"/World/Meshes/HeartSurface\"\n",
+ "mesh_prim = inspect_stage.GetPrimAtPath(mesh_path)\n",
+ "\n",
+ "if mesh_prim:\n",
+ " mesh = UsdGeom.Mesh(mesh_prim)\n",
+ "\n",
+ " print(f\"\\nMesh: {mesh_path}\")\n",
+ " print(f\" Type: {mesh_prim.GetTypeName()}\")\n",
+ "\n",
+ " # Geometry attributes\n",
+ " points = mesh.GetPointsAttr().Get()\n",
+ " face_counts = mesh.GetFaceVertexCountsAttr().Get()\n",
+ " face_indices = mesh.GetFaceVertexIndicesAttr().Get()\n",
+ "\n",
+ " print(\"\\nGeometry:\")\n",
+ " print(f\" Points: {len(points) if points else 0}\")\n",
+ " print(f\" Faces: {len(face_counts) if face_counts else 0}\")\n",
+ " print(f\" Face indices: {len(face_indices) if face_indices else 0}\")\n",
+ "\n",
+ " # Check normals\n",
+ " normals_attr = mesh.GetNormalsAttr()\n",
+ " if normals_attr:\n",
+ " normals = normals_attr.Get()\n",
+ " print(f\" Normals: {len(normals) if normals else 0}\")\n",
+ "\n",
+ " # List primvars\n",
+ " primvars_api = UsdGeom.PrimvarsAPI(mesh)\n",
+ " primvars = primvars_api.GetPrimvars()\n",
+ "\n",
+ " print(f\"\\nPrimvars ({len(primvars)}):\")\n",
+ " for primvar in primvars:\n",
+ " name = primvar.GetPrimvarName()\n",
+ " interpolation = primvar.GetInterpolation()\n",
+ " type_name = primvar.GetTypeName()\n",
+ " value = primvar.Get()\n",
+ " size = len(value) if value else 0\n",
+ " print(f\" - {name}: {type_name} ({interpolation}), {size} elements\")\n",
+ "\n",
+ " # Check material binding\n",
+ " from pxr import UsdShade\n",
+ "\n",
+ " binding_api = UsdShade.MaterialBindingAPI(mesh)\n",
+ " material_binding = binding_api.GetDirectBinding()\n",
+ " if material_binding:\n",
+ " material_path = material_binding.GetMaterialPath()\n",
+ " print(f\"\\nMaterial Binding: {material_path}\")\n",
+ "else:\n",
+ " print(f\"\\nMesh not found at path: {mesh_path}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 6. Create Time-Series Data (Simulated)\n",
+ "\n",
+ "Demonstrate time-series conversion by creating synthetic deformation of the mesh."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Create a simple time-series by deforming the mesh\n",
+ "from physiomotion4d.vtk_to_usd import GenericArray, DataType\n",
+ "\n",
+ "\n",
+ "def create_deformed_mesh(base_mesh_data, time_step, num_steps=10):\n",
+ " \"\"\"Create a deformed version of the mesh for animation.\"\"\"\n",
+ " # Clone the mesh data\n",
+ " import copy\n",
+ "\n",
+ " deformed_mesh = copy.deepcopy(base_mesh_data)\n",
+ "\n",
+ " # Apply sinusoidal deformation\n",
+ " t = time_step / num_steps * 2 * np.pi\n",
+ " scale_factor = 1.0 + 0.1 * np.sin(t) # 10% amplitude\n",
+ "\n",
+ " # Scale points radially from centroid\n",
+ " centroid = np.mean(deformed_mesh.points, axis=0)\n",
+ " deformed_mesh.points = centroid + (deformed_mesh.points - centroid) * scale_factor\n",
+ "\n",
+ " # Add a time-varying scalar field (simulated pressure)\n",
+ " num_points = len(deformed_mesh.points)\n",
+ " pressure = np.sin(t + np.linspace(0, 2 * np.pi, num_points))\n",
+ " pressure_array = GenericArray(\n",
+ " name=\"pressure\",\n",
+ " data=pressure,\n",
+ " num_components=1,\n",
+ " data_type=DataType.FLOAT,\n",
+ " interpolation=\"vertex\",\n",
+ " )\n",
+ "\n",
+ " # Add to generic arrays if not already present\n",
+ " array_names = [arr.name for arr in deformed_mesh.generic_arrays]\n",
+ " if \"pressure\" not in array_names:\n",
+ " deformed_mesh.generic_arrays.append(pressure_array)\n",
+ " else:\n",
+ " # Replace existing pressure array\n",
+ " for i, arr in enumerate(deformed_mesh.generic_arrays):\n",
+ " if arr.name == \"pressure\":\n",
+ " deformed_mesh.generic_arrays[i] = pressure_array\n",
+ " break\n",
+ "\n",
+ " return deformed_mesh\n",
+ "\n",
+ "\n",
+ "# Create sequence of deformed meshes\n",
+ "num_time_steps = 10\n",
+ "mesh_sequence = []\n",
+ "time_codes = list(range(num_time_steps))\n",
+ "\n",
+ "for t in range(num_time_steps):\n",
+ " deformed = create_deformed_mesh(mesh_data, t, num_time_steps)\n",
+ " mesh_sequence.append(deformed)\n",
+ " print(f\"Created time step {t + 1}/{num_time_steps}\")\n",
+ "\n",
+ "print(f\"\\nCreated {len(mesh_sequence)} time steps\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# Convert time series to USD\n",
+ "output_usd_anim = output_dir / \"heart_surface_animated.usd\"\n",
+ "\n",
+ "material_anim = MaterialData(\n",
+ " name=\"heart_animated_material\",\n",
+ " diffuse_color=(0.9, 0.2, 0.2),\n",
+ " roughness=0.3,\n",
+ " metallic=0.0,\n",
+ ")\n",
+ "\n",
+ "stage_anim = converter.convert_mesh_data_sequence(\n",
+ " mesh_data_sequence=mesh_sequence,\n",
+ " output_usd=output_usd_anim,\n",
+ " mesh_name=\"HeartAnimated\",\n",
+ " time_codes=time_codes,\n",
+ " material=material_anim,\n",
+ ")\n",
+ "\n",
+ "print(f\"\\nCreated animated USD file: {output_usd_anim}\")\n",
+ "print(f\"Time range: {stage_anim.GetStartTimeCode()} to {stage_anim.GetEndTimeCode()}\")\n",
+ "print(f\"Time codes per second: {stage_anim.GetTimeCodesPerSecond()}\")"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 7. Summary\n",
+ "\n",
+ "Let's summarize what we've created."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import os\n",
+ "\n",
+ "print(\"=\" * 60)\n",
+ "print(\"Generated USD Files\")\n",
+ "print(\"=\" * 60)\n",
+ "\n",
+ "usd_files = list(output_dir.glob(\"*.usd\"))\n",
+ "usd_files.extend(output_dir.glob(\"*.usda\"))\n",
+ "usd_files.extend(output_dir.glob(\"*.usdc\"))\n",
+ "\n",
+ "for usd_file in sorted(usd_files):\n",
+ " size_kb = os.path.getsize(usd_file) / 1024\n",
+ " print(f\"\\n{usd_file.name}:\")\n",
+ " print(f\" Size: {size_kb:.2f} KB\")\n",
+ " print(f\" Path: {usd_file}\")\n",
+ "\n",
+ " # Quick inspection\n",
+ " stage = Usd.Stage.Open(str(usd_file))\n",
+ " if stage:\n",
+ " print(f\" Up axis: {UsdGeom.GetStageUpAxis(stage)}\")\n",
+ " print(f\" Meters per unit: {UsdGeom.GetStageMetersPerUnit(stage)}\")\n",
+ " if stage.HasAuthoredTimeCodeRange():\n",
+ " print(\n",
+ " f\" Time range: {stage.GetStartTimeCode()} - {stage.GetEndTimeCode()}\"\n",
+ " )\n",
+ "\n",
+ "print(\"\\n\" + \"=\" * 60)\n",
+ "print(\"✓ All conversions completed successfully!\")\n",
+ "print(\"=\" * 60)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## 8. Verification\n",
+ "\n",
+ "Verify that the USD files can be opened and contain the expected data."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "def verify_usd_file(usd_path):\n",
+ " \"\"\"Verify USD file integrity.\"\"\"\n",
+ " print(f\"\\nVerifying: {usd_path.name}\")\n",
+ " print(\"-\" * 40)\n",
+ "\n",
+ " stage = Usd.Stage.Open(str(usd_path))\n",
+ " if not stage:\n",
+ " print(\" ✗ Failed to open stage\")\n",
+ " return False\n",
+ "\n",
+ " # Check default prim\n",
+ " default_prim = stage.GetDefaultPrim()\n",
+ " if not default_prim:\n",
+ " print(\" ✗ No default prim\")\n",
+ " return False\n",
+ " print(f\" ✓ Default prim: {default_prim.GetPath()}\")\n",
+ "\n",
+ " # Find mesh prims\n",
+ " mesh_count = 0\n",
+ " for prim in stage.Traverse():\n",
+ " if prim.IsA(UsdGeom.Mesh):\n",
+ " mesh_count += 1\n",
+ " mesh = UsdGeom.Mesh(prim)\n",
+ " points = mesh.GetPointsAttr().Get()\n",
+ " if points:\n",
+ " print(f\" ✓ Mesh '{prim.GetName()}': {len(points)} points\")\n",
+ "\n",
+ " if mesh_count == 0:\n",
+ " print(\" ✗ No meshes found\")\n",
+ " return False\n",
+ "\n",
+ " print(f\" ✓ Total meshes: {mesh_count}\")\n",
+ " return True\n",
+ "\n",
+ "\n",
+ "# Verify all generated files\n",
+ "print(\"=\" * 60)\n",
+ "print(\"USD File Verification\")\n",
+ "print(\"=\" * 60)\n",
+ "\n",
+ "all_valid = True\n",
+ "for usd_file in sorted(usd_files):\n",
+ " valid = verify_usd_file(usd_file)\n",
+ " all_valid = all_valid and valid\n",
+ "\n",
+ "print(\"\\n\" + \"=\" * 60)\n",
+ "if all_valid:\n",
+ " print(\"✓ All USD files are valid!\")\n",
+ "else:\n",
+ " print(\"✗ Some USD files have issues\")\n",
+ "print(\"=\" * 60)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Conclusion\n",
+ "\n",
+ "This notebook demonstrated the comprehensive features of the new `vtk_to_usd` library:\n",
+ "\n",
+ "1. **Simple Conversion**: One-line conversion of VTK files\n",
+ "2. **Data Inspection**: Reading and analyzing VTK data arrays\n",
+ "3. **Custom Settings**: Fine-grained control over conversion\n",
+ "4. **Multiple Formats**: Support for VTP, VTK, VTU files\n",
+ "5. **Material System**: Custom materials with UsdPreviewSurface\n",
+ "6. **Time-Series**: Animated meshes with time-varying attributes\n",
+ "7. **Data Preservation**: All VTK arrays preserved as USD primvars\n",
+ "8. **Coordinate Systems**: Automatic RAS to Y-up conversion\n",
+ "\n",
+ "The library is production-ready and can be used for converting medical imaging data, simulation results, and other VTK-based datasets to USD for visualization in Omniverse, USDView, or other USD-compatible applications.\n",
+ "\n",
+ "### Next Steps\n",
+ "\n",
+ "- View the generated USD files in USDView or Omniverse\n",
+ "- Experiment with different conversion settings\n",
+ "- Test with your own VTK datasets\n",
+ "- Explore advanced features like custom colormaps and transfer functions"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "venv",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.11"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}
diff --git a/src/physiomotion4d/__init__.py b/src/physiomotion4d/__init__.py
index 9748451..df48af7 100644
--- a/src/physiomotion4d/__init__.py
+++ b/src/physiomotion4d/__init__.py
@@ -19,14 +19,14 @@
__version__ = "2025.05.0"
+# VTK to USD library
+# VTK to USD library (new modular implementation)
+from . import vtk_to_usd
from .contour_tools import ContourTools
# Data processing utilities
from .convert_nrrd_4d_to_3d import ConvertNRRD4DTo3D
from .convert_vtk_to_usd import ConvertVTKToUSD
-from .convert_vtk_to_usd_base import ConvertVTKToUSDBase
-from .convert_vtk_to_usd_polymesh import ConvertVTKToUSDPolyMesh
-from .convert_vtk_to_usd_tetmesh import ConvertVTKToUSDTetMesh
# Utility classes
from .image_tools import ImageTools
@@ -61,7 +61,6 @@
WorkflowRegisterHeartModelToPatient,
)
-
__all__ = [
# Workflow classes
"WorkflowConvertHeartGatedCTToUSD",
@@ -93,7 +92,6 @@
# Data processing utilities
"ConvertNRRD4DTo3D",
"ConvertVTKToUSD",
- "ConvertVTKToUSDBase",
- "ConvertVTKToUSDPolyMesh",
- "ConvertVTKToUSDTetMesh",
+ # VTK to USD library
+ "vtk_to_usd",
]
diff --git a/src/physiomotion4d/convert_vtk_to_usd.py b/src/physiomotion4d/convert_vtk_to_usd.py
index 25fdfb4..b58fd89 100644
--- a/src/physiomotion4d/convert_vtk_to_usd.py
+++ b/src/physiomotion4d/convert_vtk_to_usd.py
@@ -1,60 +1,65 @@
-"""Unified facade for VTK to USD conversion supporting both PolyData and UnstructuredGrid."""
+"""Unified VTK to USD converter with advanced features.
+
+This module provides a high-level interface for converting VTK/PyVista meshes to USD,
+with support for:
+- Time-series animation
+- Anatomical region labeling (mask_ids)
+- Colormap visualization
+- Automatic topology change detection
+- Both surface and volumetric meshes
+
+Uses the vtk_to_usd library internally for core conversion functionality.
+"""
import logging
from collections.abc import Sequence
-from typing import Optional
+from pathlib import Path
+from typing import Any, Optional, cast
+import numpy as np
import pyvista as pv
import vtk
-from pxr import Usd
+from pxr import Usd, UsdGeom
-from .convert_vtk_to_usd_base import ConvertVTKToUSDBase
-from .convert_vtk_to_usd_polymesh import ConvertVTKToUSDPolyMesh
-from .convert_vtk_to_usd_tetmesh import ConvertVTKToUSDTetMesh
from .physiomotion4d_base import PhysioMotion4DBase
+from .vtk_to_usd import (
+ ConversionSettings,
+ DataType,
+ GenericArray,
+ MaterialData,
+ MaterialManager,
+ MeshData,
+ UsdMeshConverter,
+)
class ConvertVTKToUSD(PhysioMotion4DBase):
"""
- Unified converter supporting both PolyData and UnstructuredGrid.
-
- Automatically routes meshes to appropriate specialized converter:
- - PolyData → ConvertVTKToUSDPolyMesh
- - UnstructuredGrid (volumetric) → ConvertVTKToUSDTetMesh
- - UnstructuredGrid (surface) → ConvertVTKToUSDPolyMesh
-
- Maintains API compatibility with original ConvertVTKToUSD class.
-
- Supports:
- - PolyData: Surface meshes exported as UsdGeomMesh
- - UnstructuredGrid:
- * By default: Volumetric tetrahedral meshes exported as UsdGeomTetMesh
- (requires OpenUSD v24.03+)
- * With convert_to_surface=True: Extracted surface exported as UsdGeomMesh
- (compatible with all USD versions)
- - Time-varying topology: Automatically detects and handles topology changes
- across time steps (varying number of points/faces).
- When detected, creates separate mesh prims per timestep
- with visibility control instead of time-sampled attributes.
+ Advanced VTK to USD converter with colormap and anatomical labeling support.
+
+ This class extends the basic vtk_to_usd library with:
+ - Support for VTK/PyVista objects (not just files)
+ - Anatomical region labeling via mask_ids
+ - Colormap-based visualization
+ - Automatic topology change detection
+ - Time-series animation
Example Usage:
- >>> # Create converter with meshes
+ >>> # Create converter with time-series meshes
>>> converter = ConvertVTKToUSD(
- ... data_basename='CardiacModel', input_polydata=meshes, mask_ids=None
+ ... data_basename='CardiacModel',
+ ... input_polydata=meshes, # List of PyVista/VTK meshes
+ ... mask_ids={1: 'ventricle', 2: 'atrium'}
... )
>>>
- >>> # List available point data arrays
- >>> arrays = converter.list_available_arrays()
- >>> print(arrays.keys()) # ['transmembrane_potential', 'temperature', ...]
- >>>
- >>> # Configure colormap
+ >>> # Configure colormap visualization
>>> converter.set_colormap(
... color_by_array='transmembrane_potential',
... colormap='rainbow',
- ... intensity_range=(-80.0, 20.0),
+ ... intensity_range=(-80.0, 20.0)
... )
>>>
- >>> # Convert to USD (automatically handles topology changes)
+ >>> # Convert to USD
>>> stage = converter.convert('output.usd')
"""
@@ -64,52 +69,126 @@ def __init__(
input_polydata: Sequence[pv.DataSet | vtk.vtkDataSet],
mask_ids: Optional[dict[int, str]] = None,
compute_normals: bool = False,
- convert_to_surface: bool = False,
+ convert_to_surface: bool = True,
+ times_per_second: float = 24.0,
log_level: int | str = logging.INFO,
) -> None:
"""
- Initialize converter and store parameters for later routing.
+ Initialize converter.
Args:
- data_basename (str): Base name for the USD data
- input_polydata (list): List of PyVista PolyData or UnstructuredGrid meshes,
- one per time step. For UnstructuredGrid, tetrahedral
- cells will be exported as UsdGeomTetMesh by default.
- mask_ids (dict or None): Optional mapping of label IDs to label names for
- organizing meshes by anatomical regions.
- Default: None
- log_level: Logging level (default: logging.INFO)
+ data_basename: Base name for USD data (used in prim paths)
+ input_polydata: Sequence of PyVista/VTK meshes (one per time step)
+ mask_ids: Optional mapping of label IDs to anatomical region names.
+ If provided, meshes will be split by labeled regions.
+ compute_normals: Whether to compute vertex normals
+ convert_to_surface: If True, extract surface from volumetric meshes
+ times_per_second: Time codes per second (default 24.0).
+ For medical imaging time series where each frame = 1 second, use 1.0.
+ log_level: Logging level
"""
super().__init__(class_name=self.__class__.__name__, log_level=log_level)
self.data_basename = data_basename
- self.input_polydata = input_polydata
+ self.input_polydata = list(input_polydata)
self.mask_ids = mask_ids
-
self.compute_normals = compute_normals
self.convert_to_surface = convert_to_surface
- # Colormap settings (will be applied to specialized converter)
+ # Colormap settings
self.color_by_array: Optional[str] = None
self.colormap: str = "plasma"
self.intensity_range: Optional[tuple[float, float]] = None
- def list_available_arrays(self) -> dict:
+ # Conversion settings
+ self.settings = ConversionSettings(
+ triangulate_meshes=True,
+ compute_normals=compute_normals,
+ preserve_point_arrays=True,
+ preserve_cell_arrays=True,
+ meters_per_unit=1.0,
+ up_axis="Y",
+ times_per_second=times_per_second,
+ )
+
+ self.logger.info(
+ f"Initialized converter with {len(input_polydata)} time steps, "
+ f"mask_ids={'enabled' if mask_ids else 'disabled'}"
+ )
+
+ def supports_mesh_type(self, mesh: pv.DataSet | vtk.vtkDataSet) -> bool:
"""
- List all point data arrays available for coloring across all time steps.
+ Check if mesh type is supported for conversion.
- Creates a temporary PolyMesh converter to analyze arrays since the
- method is common to both mesh types.
+ Args:
+ mesh: PyVista or VTK mesh to check
Returns:
- dict: Dictionary with array names as keys and dict of metadata as values.
- Metadata includes: 'n_components', 'dtype', 'range', 'present_in_steps'
+ bool: True if mesh type is supported
"""
- # Create temporary converter to analyze arrays
- temp_converter = ConvertVTKToUSDPolyMesh(
- self.data_basename, self.input_polydata, self.mask_ids
+ # Wrap VTK objects
+ if isinstance(
+ mesh, (vtk.vtkPolyData, vtk.vtkUnstructuredGrid, vtk.vtkImageData)
+ ):
+ mesh = pv.wrap(mesh)
+
+ # Support most PyVista types
+ return isinstance(
+ mesh,
+ (
+ pv.PolyData,
+ pv.UnstructuredGrid,
+ pv.StructuredGrid,
+ pv.ImageData,
+ pv.RectilinearGrid,
+ ),
)
- return temp_converter.list_available_arrays()
+
+ def list_available_arrays(self) -> dict:
+ """
+ List all point data arrays available across all time steps.
+
+ Returns:
+ dict: Dictionary with array names as keys and metadata as values.
+ Metadata includes: 'n_components', 'dtype', 'range', 'present_in_steps'
+ """
+ available_arrays: dict[str, dict[str, Any]] = {}
+
+ for time_idx, mesh in enumerate(self.input_polydata):
+ # Wrap VTK objects
+ if isinstance(mesh, (vtk.vtkPolyData, vtk.vtkUnstructuredGrid)):
+ mesh = pv.wrap(mesh)
+
+ # Get point data arrays
+ if hasattr(mesh, "point_data"):
+ for array_name in mesh.point_data.keys():
+ if array_name not in available_arrays:
+ array_data = mesh.point_data[array_name]
+ available_arrays[array_name] = {
+ "n_components": int(
+ array_data.shape[1] if array_data.ndim > 1 else 1
+ ),
+ "dtype": str(array_data.dtype),
+ "range": (
+ float(np.min(array_data)),
+ float(np.max(array_data)),
+ ),
+ "present_in_steps": [time_idx],
+ }
+ else:
+ array_data = mesh.point_data[array_name]
+ meta = available_arrays[array_name]
+ current_min, current_max = cast(
+ tuple[float, float], meta["range"]
+ )
+ meta["range"] = (
+ min(current_min, float(np.min(array_data))),
+ max(current_max, float(np.max(array_data))),
+ )
+ cast(list[int], meta["present_in_steps"]).append(time_idx)
+
+ self.logger.debug(f"Found {len(available_arrays)} data arrays")
+ return available_arrays
def set_colormap(
self,
@@ -118,29 +197,31 @@ def set_colormap(
intensity_range: Optional[tuple[float, float]] = None,
) -> "ConvertVTKToUSD":
"""
- Configure colormap settings for vertex coloring.
-
- Settings are stored and will be applied to the specialized converter
- during convert().
+ Configure colormap for visualization.
Args:
- color_by_array (str or None): Name of point data array to use for
- vertex colors. If None, uses fixed label
- colors. Use list_available_arrays() to see
- available options.
- colormap (str): Colormap to use for color_by_array visualization.
- Available options: 'plasma', 'viridis', 'rainbow', 'heat',
- 'coolwarm', 'grayscale', 'random'
- intensity_range (tuple or None): Manual intensity range (vmin, vmax) for
- colormap. If None, uses automatic range
- from data.
+ color_by_array: Name of point data array to visualize. If None, uses solid colors.
+ colormap: Colormap name. Supports all matplotlib colormaps plus aliases:
+ - 'plasma', 'viridis', 'inferno', 'magma' (perceptually uniform)
+ - 'rainbow', 'jet' (spectral)
+ - 'hot', 'heat' (heat map, 'heat' is alias for 'hot')
+ - 'coolwarm', 'seismic' (diverging)
+ - 'gray', 'grayscale', 'grey', 'greyscale' (grayscale)
+ - 'random', 'tab20' (categorical/discrete colors)
+ intensity_range: Manual (vmin, vmax) range. If None, auto-computed from data.
Returns:
- self: Returns self for method chaining
+ self: For method chaining
"""
self.color_by_array = color_by_array
self.colormap = colormap
self.intensity_range = intensity_range
+
+ self.logger.info(
+ f"Colormap configured: array='{color_by_array}', "
+ f"colormap='{colormap}', range={intensity_range}"
+ )
+
return self
def convert(
@@ -150,88 +231,343 @@ def convert(
compute_normals: Optional[bool] = None,
) -> Usd.Stage:
"""
- Convert meshes to USD, automatically routing by mesh type.
-
- Analyzes input meshes and routes to appropriate specialized converter:
- 1. Only PolyData → ConvertVTKToUSDPolyMesh
- 2. Only UnstructuredGrid (volumetric) → ConvertVTKToUSDTetMesh
- 3. Only UnstructuredGrid (surface mode) → ConvertVTKToUSDPolyMesh
- 4. Mixed types → NotImplementedError (use original class)
+ Convert VTK meshes to USD.
Args:
- output_usd_file (str): Path to output USD file
- convert_to_surface (bool): If True, convert UnstructuredGrid to surface
- PolyData before processing. Useful for compatibility
- with older USD versions or when volumetric data is
- not needed. Default: False (preserve volumetric data)
+ output_usd_file: Path to output USD file
+ convert_to_surface: Override convert_to_surface setting
+ compute_normals: Override compute_normals setting
Returns:
- Usd.Stage: The created USD stage
+ Usd.Stage: Created USD stage
Raises:
- NotImplementedError: If mixed mesh types are detected
- ValueError: If no valid mesh data found
+ ValueError: If no valid meshes found
"""
if convert_to_surface is not None:
self.convert_to_surface = convert_to_surface
if compute_normals is not None:
- self.compute_normals = compute_normals
-
- # Analyze mesh types in input
- has_polydata = False
- has_ugrid = False
-
- for mesh in self.input_polydata:
- if isinstance(mesh, (pv.PolyData, vtk.vtkPolyData)):
- has_polydata = True
- elif isinstance(mesh, (pv.UnstructuredGrid, vtk.vtkUnstructuredGrid)):
- if convert_to_surface:
- has_polydata = True
- else:
- has_ugrid = True
-
- # Case 1: Only PolyData (or surface-converted UGrid)
- if has_polydata and not has_ugrid:
- self.log_info("Routing to PolyMesh converter (surface meshes)")
- poly_converter = ConvertVTKToUSDPolyMesh(
- self.data_basename,
- self.input_polydata,
- self.mask_ids,
- convert_to_surface=self.convert_to_surface,
- compute_normals=self.compute_normals,
- log_level=self.log_level,
+ self.settings.compute_normals = compute_normals
+
+ self.logger.info(
+ f"Converting {len(self.input_polydata)} meshes to {output_usd_file}"
+ )
+
+ # Remove existing file
+ output_path = Path(output_usd_file)
+ if output_path.exists():
+ output_path.unlink()
+ self.logger.debug(f"Removed existing file: {output_path}")
+
+ # Create USD stage
+ stage = Usd.Stage.CreateNew(str(output_path))
+ UsdGeom.SetStageMetersPerUnit(stage, self.settings.meters_per_unit)
+        UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)  # NOTE(review): hardcoded Y-up; settings.up_axis is ignored here — wire it through or drop the setting
+
+ # Create root
+ root_path = f"/World/{self.data_basename}"
+ UsdGeom.Xform.Define(stage, root_path)
+ root_prim = stage.DefinePrim("/World", "Xform")
+ stage.SetDefaultPrim(root_prim)
+
+ # Set time range for animation
+ if len(self.input_polydata) > 1:
+ stage.SetStartTimeCode(0)
+ stage.SetEndTimeCode(len(self.input_polydata) - 1)
+ stage.SetTimeCodesPerSecond(self.settings.times_per_second)
+
+ # Initialize managers
+ material_mgr = MaterialManager(stage)
+ mesh_converter = UsdMeshConverter(stage, self.settings, material_mgr)
+
+ # Process meshes
+ if self.mask_ids:
+ # Split by anatomical regions
+ self._convert_with_labels(stage, root_path, material_mgr, mesh_converter)
+ else:
+ # Single mesh conversion
+ self._convert_unified(stage, root_path, material_mgr, mesh_converter)
+
+ # Save stage
+ stage.Save()
+ self.logger.info(f"Saved USD file: {output_path}")
+
+ return stage
+
+ def _convert_unified(
+ self,
+ stage: Usd.Stage,
+ root_path: str,
+ material_mgr: MaterialManager,
+ mesh_converter: UsdMeshConverter,
+ ) -> None:
+ """Convert all meshes as a single unified mesh."""
+ self.logger.debug("Converting as unified mesh (no label splitting)")
+
+ # Convert meshes to MeshData
+ mesh_data_sequence = []
+ for time_idx, vtk_mesh in enumerate(self.input_polydata):
+ mesh_data = self._vtk_to_mesh_data(vtk_mesh, time_idx)
+ mesh_data_sequence.append(mesh_data)
+
+ # Create material
+ material = self._create_material_from_colormap("unified_material")
+
+ # Convert to USD
+ mesh_path = f"{root_path}/Mesh"
+ if len(mesh_data_sequence) == 1:
+ # Single frame
+ mesh_data_sequence[0].material_id = material.name
+ material_mgr.get_or_create_material(material)
+ mesh_converter.create_mesh(
+ mesh_data_sequence[0], mesh_path, bind_material=True
)
- poly_converter.set_colormap(
- self.color_by_array, self.colormap, self.intensity_range
+ else:
+ # Time series
+ time_codes = [float(i) for i in range(len(mesh_data_sequence))]
+ for md in mesh_data_sequence:
+ md.material_id = material.name
+ material_mgr.get_or_create_material(material)
+ mesh_converter.create_time_varying_mesh(
+ mesh_data_sequence, mesh_path, time_codes, bind_material=True
)
- return poly_converter.convert(output_usd_file)
-
- # Case 2: Only UnstructuredGrid (tetmesh)
- if has_ugrid and not has_polydata:
- self.log_info("Routing to TetMesh converter (volumetric meshes)")
- tet_converter: ConvertVTKToUSDBase = ConvertVTKToUSDTetMesh(
- self.data_basename,
- self.input_polydata,
- self.mask_ids,
- convert_to_surface=self.convert_to_surface,
- compute_normals=self.compute_normals,
- log_level=self.log_level,
+
+ def _convert_with_labels(
+ self,
+ stage: Usd.Stage,
+ root_path: str,
+ material_mgr: MaterialManager,
+ mesh_converter: UsdMeshConverter,
+ ) -> None:
+ """Convert meshes split by anatomical labels."""
+ mask_ids = self.mask_ids
+ assert mask_ids is not None
+ self.logger.debug(f"Converting with {len(mask_ids)} anatomical labels")
+
+ # Extract labeled meshes for each time step
+ labeled_meshes_by_time = []
+ for time_idx, vtk_mesh in enumerate(self.input_polydata):
+ labeled_meshes = self._split_by_labels(vtk_mesh, time_idx)
+ labeled_meshes_by_time.append(labeled_meshes)
+
+ # Get all unique labels
+ all_labels: set[str] = set()
+ for labeled_meshes in labeled_meshes_by_time:
+ all_labels.update(labeled_meshes.keys())
+
+ # Convert each label separately
+ for label_name in sorted(all_labels):
+ self.logger.debug(f"Processing label: {label_name}")
+
+ # Collect mesh data for this label across time
+ label_mesh_sequence = []
+ for labeled_meshes in labeled_meshes_by_time:
+ if label_name in labeled_meshes:
+ label_mesh_sequence.append(labeled_meshes[label_name])
+ else:
+                # Label absent at this time step: skip it (no placeholder mesh is emitted)
+ self.logger.warning(f"Label '{label_name}' missing in time step")
+
+ if not label_mesh_sequence:
+ continue
+
+ # Create material for this label
+ material = self._create_material_from_colormap(f"{label_name}_material")
+
+ # Convert to USD
+ mesh_path = f"{root_path}/{label_name}"
+ if len(label_mesh_sequence) == 1:
+ label_mesh_sequence[0].material_id = material.name
+ material_mgr.get_or_create_material(material)
+ mesh_converter.create_mesh(
+ label_mesh_sequence[0], mesh_path, bind_material=True
+ )
+ else:
+ time_codes = [float(i) for i in range(len(label_mesh_sequence))]
+ for md in label_mesh_sequence:
+ md.material_id = material.name
+ material_mgr.get_or_create_material(material)
+ mesh_converter.create_time_varying_mesh(
+ label_mesh_sequence, mesh_path, time_codes, bind_material=True
+ )
+
+ def _vtk_to_mesh_data(
+ self, vtk_mesh: pv.DataSet | vtk.vtkDataSet, time_idx: int
+ ) -> MeshData:
+ """Convert VTK/PyVista mesh to MeshData."""
+ # Wrap VTK objects
+ if isinstance(vtk_mesh, (vtk.vtkPolyData, vtk.vtkUnstructuredGrid)):
+ vtk_mesh = pv.wrap(vtk_mesh)
+
+ # Extract surface if needed
+ if isinstance(vtk_mesh, pv.UnstructuredGrid) and self.convert_to_surface:
+ vtk_mesh = vtk_mesh.extract_surface()
+
+ # Get points
+ points = np.array(vtk_mesh.points, dtype=np.float64)
+
+ # Get faces
+ if hasattr(vtk_mesh, "faces"):
+ faces = vtk_mesh.faces
+ # Parse VTK face format: [n_points, i0, i1, ..., n_points, j0, j1, ...]
+ face_counts_list: list[int] = []
+ face_indices_list: list[int] = []
+ idx = 0
+ while idx < len(faces):
+ n = int(faces[idx])
+ face_counts_list.append(n)
+ face_indices_list.extend([int(v) for v in faces[idx + 1 : idx + 1 + n]])
+ idx += n + 1
+ face_counts = np.array(face_counts_list, dtype=np.int32)
+ face_indices = np.array(face_indices_list, dtype=np.int32)
+ else:
+ # No faces - might be point cloud or volumetric
+ raise ValueError("Mesh has no faces - surface extraction may be needed")
+
+ # Get normals
+ normals = None
+ if "Normals" in vtk_mesh.point_data:
+ normals = np.array(vtk_mesh.point_data["Normals"], dtype=np.float64)
+
+ # Get colors if using colormap
+ colors = None
+ if self.color_by_array and self.color_by_array in vtk_mesh.point_data:
+ colors = self._apply_colormap(vtk_mesh.point_data[self.color_by_array])
+
+ # Extract generic arrays
+ generic_arrays = []
+ for array_name in vtk_mesh.point_data.keys():
+ array_data = vtk_mesh.point_data[array_name]
+ num_components = int(array_data.shape[1] if array_data.ndim > 1 else 1)
+
+ # Determine data type
+ if array_data.dtype in [np.float32, np.float64]:
+ data_type = DataType.FLOAT
+ elif array_data.dtype in [np.int32, np.int64]:
+ data_type = DataType.INT
+ else:
+ data_type = DataType.FLOAT
+
+ generic_arrays.append(
+ GenericArray(
+ name=array_name,
+ data=array_data,
+ num_components=num_components,
+ data_type=data_type,
+ interpolation="vertex",
+ )
)
- tet_converter.set_colormap(
- self.color_by_array, self.colormap, self.intensity_range
+
+ return MeshData(
+ points=points,
+ face_vertex_counts=face_counts,
+ face_vertex_indices=face_indices,
+ normals=normals,
+ colors=colors,
+ generic_arrays=generic_arrays,
+ )
+
+ def _split_by_labels(
+ self, vtk_mesh: pv.DataSet | vtk.vtkDataSet, time_idx: int
+ ) -> dict[str, MeshData]:
+ """Split mesh by anatomical labels."""
+ mask_ids = self.mask_ids
+ assert mask_ids is not None
+ # Wrap VTK objects
+ if isinstance(vtk_mesh, (vtk.vtkPolyData, vtk.vtkUnstructuredGrid)):
+ vtk_mesh = pv.wrap(vtk_mesh)
+
+ # Extract surface if needed
+ if isinstance(vtk_mesh, pv.UnstructuredGrid) and self.convert_to_surface:
+ vtk_mesh = vtk_mesh.extract_surface()
+
+ # Get boundary labels
+ if "boundary_labels" not in vtk_mesh.cell_data:
+ self.logger.warning("No 'boundary_labels' array found - using unified mesh")
+ return {"default": self._vtk_to_mesh_data(vtk_mesh, time_idx)}
+
+ label_array = vtk_mesh.cell_data["boundary_labels"]
+
+ # Create submeshes for each label
+ labeled_meshes = {}
+ for label_id, label_name in mask_ids.items():
+ # Extract cells with this label
+ mask = label_array == label_id
+ if not np.any(mask):
+ continue
+
+ # Create submesh
+ cell_ids = np.where(mask)[0].astype(int).tolist()
+ submesh = vtk_mesh.extract_cells(cell_ids)
+
+ # Convert to MeshData
+ labeled_meshes[label_name] = self._vtk_to_mesh_data(submesh, time_idx)
+
+ return labeled_meshes
+
+ def _apply_colormap(self, scalar_data: np.ndarray) -> np.ndarray:
+ """Apply colormap to scalar data."""
+ from matplotlib import colormaps
+
+ # Map common/intuitive names to actual matplotlib colormap names
+ colormap_aliases = {
+ "heat": "hot",
+ "grayscale": "gray",
+ "greyscale": "grey",
+ "jet": "jet",
+ "random": "tab20", # Good for categorical data
+ }
+
+ # Flatten to 1D if needed
+ if scalar_data.ndim > 1:
+            scalar_data = np.linalg.norm(scalar_data, axis=1)  # vector arrays: color by per-point magnitude
+
+ # Normalize
+ if self.intensity_range:
+ vmin, vmax = self.intensity_range
+ else:
+ vmin, vmax = np.min(scalar_data), np.max(scalar_data)
+
+ if vmax > vmin:
+ normalized = (scalar_data - vmin) / (vmax - vmin)
+ normalized = np.clip(normalized, 0.0, 1.0)
+ else:
+ normalized = np.ones_like(scalar_data) * 0.5
+
+ # Get colormap name (use alias if available)
+ cmap_name = colormap_aliases.get(self.colormap, self.colormap)
+
+ # Apply colormap with fallback
+ try:
+ cmap = colormaps[cmap_name]
+ except KeyError:
+ self.logger.warning(
+ f"Colormap '{self.colormap}' not found, falling back to 'viridis'"
)
- return tet_converter.convert(output_usd_file)
-
- # Case 3: Mixed - need custom handling
- if has_polydata and has_ugrid:
- raise NotImplementedError(
- "Mixed PolyData and UnstructuredGrid not yet supported in "
- "refactored version. Please use one of the following solutions:\n"
- "1. Use the original ConvertVTKToUSD class from "
- "convert_vtk_to_usd.py\n"
- "2. Separate your meshes by type and convert in two passes\n"
- "3. Convert all UnstructuredGrid to surface with convert_to_surface=True"
+ cmap = colormaps["viridis"]
+
+ colors_rgba = cmap(normalized)
+
+ # Return RGB (drop alpha)
+ return colors_rgba[:, :3].astype(np.float32)
+
+ def _create_material_from_colormap(self, name: str) -> MaterialData:
+ """Create material based on colormap settings."""
+ if self.color_by_array:
+ # Use vertex colors
+ return MaterialData(
+ name=name,
+ diffuse_color=(0.8, 0.8, 0.8),
+ roughness=0.5,
+ metallic=0.0,
+ use_vertex_colors=True,
+ )
+ else:
+ # Use solid color
+ return MaterialData(
+ name=name,
+ diffuse_color=(0.8, 0.8, 0.8),
+ roughness=0.5,
+ metallic=0.0,
+ use_vertex_colors=False,
)
-
- # Case 4: No valid meshes
- raise ValueError("No valid mesh data found in input_polydata")
diff --git a/src/physiomotion4d/convert_vtk_to_usd_base.py b/src/physiomotion4d/convert_vtk_to_usd_base.py
deleted file mode 100644
index 841598b..0000000
--- a/src/physiomotion4d/convert_vtk_to_usd_base.py
+++ /dev/null
@@ -1,650 +0,0 @@
-"""Abstract base class for converting VTK data to animated USD meshes."""
-
-import logging
-import os
-import time
-from abc import ABC, abstractmethod
-from dataclasses import dataclass
-from typing import Optional, TypeAlias
-from collections.abc import Mapping, Sequence
-
-from typing_extensions import Self
-
-import cupy as cp
-import numpy as np
-from numpy.typing import NDArray
-import pyvista as pv
-import vtk
-from matplotlib import cm
-from matplotlib.colors import Colormap
-from pxr import Gf, Usd, UsdGeom, Vt
-
-from physiomotion4d.physiomotion4d_base import PhysioMotion4DBase
-
-
-# VTK Cell Type Constants
-VTK_TRIANGLE = 5
-VTK_QUAD = 9
-VTK_TETRA = 10
-VTK_HEXAHEDRON = 12
-VTK_WEDGE = 13
-VTK_PYRAMID = 14
-
-
-@dataclass
-class ArrayMeta:
- n_components: int
- dtype: str
- range: tuple[float, float]
- present_in_steps: list[int]
-
-
-FloatArray: TypeAlias = NDArray[np.float32] | NDArray[np.float64]
-RgbColor: TypeAlias = tuple[float, float, float]
-MeshValue: TypeAlias = (
- str | list[Gf.Vec3f] | list[int] | list[float] | dict[int, int] | None
-)
-MeshLabelData: TypeAlias = dict[str, MeshValue]
-MeshTimeData: TypeAlias = dict[int, dict[str, MeshLabelData]]
-
-
-class ConvertVTKToUSDBase(PhysioMotion4DBase, ABC):
- """
- Abstract base class for VTK to USD conversion.
-
- Provides shared utilities for coordinate conversion, colormap handling,
- topology change detection, and normal computation. Subclasses must implement
- mesh-specific processing and USD creation methods.
- """
-
- def __init__(
- self,
- data_basename: str,
- input_polydata: Sequence[pv.DataSet | vtk.vtkDataSet],
- mask_ids: Optional[dict[int, str]] = None,
- convert_to_surface: bool = False,
- compute_normals: bool = False,
- log_level: int | str = logging.INFO,
- ) -> None:
- """
- Initialize VTK to USD converter.
-
- Args:
- data_basename (str): Base name for the USD data
- input_polydata (Sequence): Sequence of PyVista PolyData or UnstructuredGrid meshes,
- one per time step.
- mask_ids (dict or None): Optional mapping of label IDs to label names for
- organizing meshes by anatomical regions.
- Default: None
- convert_to_surface (bool): If True, convert UnstructuredGrid meshes to surface
- PolyData before processing. Only applicable for PolyMesh
- converter. Default: False
- log_level: Logging level (default: logging.INFO)
- """
- super().__init__(class_name=self.__class__.__name__, log_level=log_level)
-
- self.data_basename: str = data_basename
- self.input_polydata: list[pv.DataSet | vtk.vtkDataSet] = list(input_polydata)
- self.mask_ids: Optional[dict[int, str]] = mask_ids
-
- self.convert_to_surface: bool = convert_to_surface
-
- self.compute_normals: bool = compute_normals
-
- # Colormap settings (set via set_colormap())
- self.color_by_array: Optional[str] = None
- self.colormap: str = "plasma"
- self.intensity_range: Optional[tuple[float, float]] = None
-
- self.times: list[int] = list(range(len(input_polydata)))
- self.stage: Optional[Usd.Stage] = None
-
- # Define a set of distinct colors for different objects
- self.colors = [
- (0.7, 0.3, 0.3), # Red
- (0.3, 0.7, 0.3), # Green
- (0.3, 0.3, 0.7), # Blue
- (0.7, 0.7, 0.3), # Yellow
- (0.7, 0.3, 0.7), # Magenta
- (0.3, 0.7, 0.7), # Cyan
- (0.7, 0.5, 0.3), # Orange
- (0.5, 0.3, 0.7), # Purple
- (0.3, 0.5, 0.5), # Teal
- (0.5, 0.5, 0.3), # Olive
- ]
-
- def list_available_arrays(self) -> dict[str, ArrayMeta]:
- """
- List all point data arrays available for coloring across all time steps.
-
- Returns:
- dict: Dictionary with array names as keys and dict of metadata as values.
- Metadata includes: 'n_components', 'dtype', 'range', 'present_in_steps'
- """
- available_arrays: dict[str, ArrayMeta] = {}
-
- for time_idx, mesh in enumerate(self.input_polydata):
- # Convert to PyVista if needed
- if isinstance(mesh, (vtk.vtkPolyData, vtk.vtkUnstructuredGrid)):
- mesh = pv.wrap(mesh)
- assert hasattr(mesh, "point_data")
-
- # Get point data array names
- for array_name in mesh.point_data.keys():
- if array_name not in available_arrays:
- array_data = mesh.point_data[array_name]
- available_arrays[array_name] = ArrayMeta(
- n_components=(
- array_data.shape[1] if len(array_data.shape) > 1 else 1
- ),
- dtype=str(array_data.dtype),
- range=(float(np.min(array_data)), float(np.max(array_data))),
- present_in_steps=[time_idx],
- )
- else:
- # Update range and track presence
- array_data = mesh.point_data[array_name]
- current_min, current_max = available_arrays[array_name].range
- available_arrays[array_name].range = (
- min(current_min, float(np.min(array_data))),
- max(current_max, float(np.max(array_data))),
- )
- available_arrays[array_name].present_in_steps.append(time_idx)
-
- return available_arrays
-
- def set_colormap(
- self,
- color_by_array: Optional[str] = None,
- colormap: str = "plasma",
- intensity_range: Optional[tuple[float, float]] = None,
- ) -> Self:
- """
- Configure colormap settings for vertex coloring.
-
- Args:
- color_by_array (str or None): Name of point data array to use for
- vertex colors. If None, uses fixed label
- colors. Use list_available_arrays() to see
- available options.
- colormap (str): Colormap to use for color_by_array visualization.
- Available options: 'plasma', 'viridis', 'rainbow', 'heat',
- 'coolwarm', 'grayscale', 'random'
- intensity_range (tuple or None): Manual intensity range (vmin, vmax) for
- colormap. If None, uses automatic range
- from data.
-
- Returns:
- self: Returns self for method chaining
- """
- self.color_by_array = color_by_array
- self.colormap = colormap
- self.intensity_range = intensity_range
-
- """Validate that the chosen colormap is supported"""
- supported_colormaps = [
- "plasma",
- "viridis",
- "rainbow",
- "heat",
- "coolwarm",
- "grayscale",
- "random",
- ]
- if self.colormap not in supported_colormaps:
- raise ValueError(
- f"Unsupported colormap '{self.colormap}'. "
- f"Choose from: {', '.join(supported_colormaps)}"
- )
-
- # Initialize random seed for reproducible random colormap
- if self.colormap == "random":
- np.random.seed(42)
-
- return self
-
- def _ras_to_usd(self, point: Sequence[float]) -> Gf.Vec3f:
- """Convert RAS coordinates to USD's right-handed Y-up system"""
- return Gf.Vec3f(float(point[0]), float(point[2]), float(-point[1]))
-
- def _get_matplotlib_colormap(self, colormap_name: str) -> Optional[Colormap]:
- """
- Get matplotlib colormap object, with custom implementations for special cases.
-
- Args:
- colormap_name (str): Name of the colormap
-
- Returns:
- matplotlib colormap object
- """
- colormap_mapping = {
- "plasma": "plasma",
- "viridis": "viridis",
- "rainbow": "rainbow",
- "heat": "hot",
- "coolwarm": "coolwarm",
- "grayscale": "gray",
- }
-
- if colormap_name == "random":
- # Random colormap will be handled separately
- return None
-
- mpl_name = colormap_mapping.get(colormap_name, colormap_name)
- return cm.get_cmap(mpl_name)
-
- def _map_scalar_to_color(
- self,
- scalar_value: float
- | int
- | Sequence[float]
- | FloatArray
- | Mapping[int, float]
- | Mapping[str, float],
- vmin: float,
- vmax: float,
- colormap: str = "plasma",
- ) -> Gf.Vec3f:
- """
- Map a scalar value to RGB color using a colormap.
-
- Args:
- scalar_value: The scalar value to map
- vmin: Minimum value for normalization
- vmax: Maximum value for normalization
- colormap: Colormap name (default: 'plasma')
-
- Returns:
- Gf.Vec3f: RGB color value
- """
- # Coerce all supported inputs to a single float scalar.
- if isinstance(scalar_value, Mapping):
- scalar = float(
- np.linalg.norm(np.asarray(list(scalar_value.values()), dtype=float))
- )
- elif isinstance(scalar_value, (Sequence, np.ndarray)):
- scalar = float(np.linalg.norm(np.asarray(scalar_value, dtype=float)))
- else:
- scalar = float(scalar_value)
-
- # Handle random colormap specially
- if colormap == "random":
- # Use hash of value to get consistent random color
- hash_val = hash(scalar)
- np.random.seed(hash_val % (2**32))
- rgb = np.random.rand(3)
- return Gf.Vec3f(float(rgb[0]), float(rgb[1]), float(rgb[2]))
-
- # Normalize value to [0, 1]
- if vmax > vmin:
- normalized = (scalar - vmin) / (vmax - vmin)
- normalized = np.clip(normalized, 0.0, 1.0)
- else:
- normalized = 0.5
-
- # Get colormap
- cmap = self._get_matplotlib_colormap(colormap)
- assert cmap is not None
- rgba = cmap(normalized)
-
- return Gf.Vec3f(float(rgba[0]), float(rgba[1]), float(rgba[2]))
-
- def _compute_intensity_range(
- self, mesh_time_data: MeshTimeData, label: str
- ) -> tuple[float, float]:
- """
- Compute the intensity range for colormap mapping.
-
- Args:
- mesh_time_data (dict): Time-series mesh data
- label (str): Label identifier for the mesh
-
- Returns:
- tuple: (vmin, vmax) intensity range
- """
- if self.intensity_range is not None:
- # Use user-specified range
- return self.intensity_range
-
- # Compute automatic range from data
- all_values: list[float] = []
- for time_idx in range(len(self.times)):
- time_data = mesh_time_data[time_idx][label]
- if time_data.get("color_array") is not None:
- color_values = time_data["color_array"]
- if isinstance(color_values, list):
- all_values.extend(color_values)
-
- if len(all_values) > 0:
- return (float(np.min(all_values)), float(np.max(all_values)))
- # Fallback
- return (0.0, 1.0)
-
- def _extract_color_array(
- self, mesh: pv.DataSet | vtk.vtkDataSet
- ) -> Optional[FloatArray]:
- """
- Extract color array data from mesh point data.
-
- Args:
- mesh: PyVista mesh (PolyData or UnstructuredGrid)
-
- Returns:
- numpy.ndarray or None: Array of scalar values for coloring
- """
- if self.color_by_array is None:
- return None
-
- # Convert VTK to PyVista if needed
- if isinstance(mesh, (vtk.vtkPolyData, vtk.vtkUnstructuredGrid)):
- mesh = pv.wrap(mesh)
-
- # Check if array exists in point data
- if self.color_by_array in mesh.point_data:
- color_values = []
- for scalar_value in mesh.point_data[self.color_by_array]:
- if isinstance(scalar_value, list):
- scalar_value = np.linalg.norm(np.array(scalar_value))
- elif isinstance(scalar_value, np.ndarray):
- scalar_value = np.linalg.norm(scalar_value)
- elif isinstance(scalar_value, tuple):
- scalar_value = np.linalg.norm(np.array(scalar_value))
- elif isinstance(scalar_value, dict):
- scalar_value = np.linalg.norm(np.array(scalar_value.values()))
- elif isinstance(scalar_value, str):
- scalar_value = np.linalg.norm(np.array(scalar_value))
- else:
- scalar_value = float(scalar_value)
- color_values.append(scalar_value)
- return np.asarray(color_values, dtype=float)
- self.log_warning("Array '%s' not found in point data", self.color_by_array)
- return None
-
- def _check_topology_changes(self, mesh_time_data: MeshTimeData) -> dict[str, bool]:
- """
- Check if mesh topology changes across time steps.
-
- Args:
- mesh_time_data (dict): Dictionary mapping time_idx -> label -> mesh_data
-
- Returns:
- dict: Dictionary mapping label -> bool (True if topology changes)
- """
- topology_changes = {}
-
- # Get all labels from first time step
- first_time = mesh_time_data[0]
- labels = list(first_time.keys())
-
- for label in labels:
- has_change = False
- first_data = mesh_time_data[0][label]
- mesh_type = first_data.get("mesh_type", "polymesh")
-
- # Get reference topology from first timestep
- if mesh_type == "polymesh":
- ref_points = first_data["points"]
- ref_face_counts = first_data["face_vertex_counts"]
- assert isinstance(ref_points, list)
- assert isinstance(ref_face_counts, list)
- ref_num_points = len(ref_points)
- ref_num_faces = len(ref_face_counts)
- elif mesh_type == "tetmesh":
- ref_points = first_data["points"]
- ref_tets = first_data["tet_indices"]
- assert isinstance(ref_points, list)
- assert isinstance(ref_tets, list)
- ref_num_points = len(ref_points)
- ref_num_tets = len(ref_tets)
- else:
- # Unknown mesh type, assume no change
- topology_changes[label] = False
- continue
-
- # Check all subsequent time steps
- for time_idx in range(1, len(mesh_time_data)):
- if label not in mesh_time_data[time_idx]:
- # Label doesn't exist in this timestep - topology change
- has_change = True
- break
-
- curr_data = mesh_time_data[time_idx][label]
-
- if mesh_type == "polymesh":
- curr_points = curr_data["points"]
- curr_face_counts = curr_data["face_vertex_counts"]
- assert isinstance(curr_points, list)
- assert isinstance(curr_face_counts, list)
- curr_num_points = len(curr_points)
- curr_num_faces = len(curr_face_counts)
- if (
- curr_num_points != ref_num_points
- or curr_num_faces != ref_num_faces
- ):
- has_change = True
- break
- elif mesh_type == "tetmesh":
- curr_points = curr_data["points"]
- curr_tets = curr_data["tet_indices"]
- assert isinstance(curr_points, list)
- assert isinstance(curr_tets, list)
- curr_num_points = len(curr_points)
- curr_num_tets = len(curr_tets)
- if (
- curr_num_points != ref_num_points
- or curr_num_tets != ref_num_tets
- ):
- has_change = True
- break
-
- topology_changes[label] = has_change
-
- if has_change:
- self.log_info(
- "Detected topology changes for label '%s' - will use time-varying mesh approach",
- label,
- )
-
- return topology_changes
-
- def _compute_facevarying_normals_tri(
- self,
- points_vt: Vt.Vec3fArray,
- faceCounts_vt: Vt.IntArray,
- faceIndices_vt: Vt.IntArray,
- ) -> Vt.Vec3fArray:
- """
- Vectorized face-varying normals for a triangulated mesh.
-
- points_vt: Vt.Vec3fArray
- faceCounts_vt: Vt.IntArray (all must be 3)
- faceIndices_vt: Vt.IntArray (len == 3 * numFaces)
-
- Returns: Vt.Vec3fArray of length len(faceIndices_vt), one normal per corner.
- """
-
- # Convert Vt arrays to NumPy
- points = np.array(points_vt).astype(np.float32) # (N, 3)
- counts = np.array(faceCounts_vt).astype(np.int32) # (F,)
- indices = np.array(faceIndices_vt).astype(np.int32) # (3F,)
-
- # Sanity: assume triangulated mesh
- if not np.all(counts == 3):
- raise ValueError(
- "Mesh must be fully triangulated (all faceVertexCounts == 3)"
- )
-
- # Reshape indices into (F, 3)
- faces = indices.reshape(-1, 3) # (F, 3)
-
- # Gather per-face vertex positions (F, 3, 3)
- tris = points[faces] # (F, 3, 3)
-
- # Compute normals via vectorized cross product
- v1 = tris[:, 1] - tris[:, 0] # (F, 3)
- v2 = tris[:, 2] - tris[:, 0] # (F, 3)
- v1 = cp.array(v1)
- v2 = cp.array(v2)
- n = cp.cross(v1, v2) # (F, 3)
-
- # Normalize
- lengths = cp.linalg.norm(n, axis=1, keepdims=True) # (F, 1)
- mask = lengths[:, 0] > 0
- n[mask] /= lengths[mask]
-
- # Broadcast each face normal to 3 corners -> (F, 3, 3), then flatten
- n_fv = cp.repeat(n[:, cp.newaxis, :], 3, axis=1).reshape(-1, 3) # (3F, 3)
-
- # Convert back to Vt.Vec3fArray
- fv_vt = Vt.Vec3fArray.FromNumpy(n_fv.get())
-
- return fv_vt
-
- # Abstract methods that subclasses must implement
-
- @abstractmethod
- def supports_mesh_type(self, mesh: pv.DataSet | vtk.vtkDataSet) -> bool:
- """
- Check if this converter supports the given mesh type.
-
- Args:
- mesh: PyVista or VTK mesh object
-
- Returns:
- bool: True if this converter can process the mesh
- """
-
- @abstractmethod
- def _process_mesh_data(
- self, mesh: pv.DataSet | vtk.vtkDataSet
- ) -> dict[str, MeshLabelData]:
- """
- Process mesh and extract geometry data.
-
- Args:
- mesh: PyVista or VTK mesh object
-
- Returns:
- dict: Processed mesh data organized by labels or mesh type
- """
-
- @abstractmethod
- def _create_usd_mesh(
- self,
- transform_path: str,
- label: str,
- mesh_time_data: MeshTimeData,
- label_colors: dict[str, RgbColor],
- has_topology_change: bool,
- ) -> None:
- """
- Create USD mesh prim(s) for this label.
-
- Args:
- transform_path: USD path for the transform
- label: Label identifier
- mesh_time_data: Time-series mesh data
- label_colors: Color assignments for labels
- has_topology_change: Whether topology varies over time
- """
-
- def convert(
- self,
- output_usd_file: str,
- convert_to_surface: Optional[bool] = None,
- compute_normals: Optional[bool] = None,
- ) -> Usd.Stage:
- """
- Convert VTK meshes to USD format.
-
- Args:
- output_usd_file (str): Path to output USD file
- convert_to_surface (bool or None): If True, convert UnstructuredGrid to surface
- PolyData before processing. If None, uses the value
- set in __init__. Default: None
-
- Returns:
- Usd.Stage: The created USD stage
- """
- # Only override if explicitly provided
- if convert_to_surface is not None:
- self.convert_to_surface = convert_to_surface
-
- if compute_normals is not None:
- self.compute_normals = compute_normals
-
- # Remove existing file if it exists to avoid USD layer conflicts
- if os.path.exists(output_usd_file):
- os.remove(output_usd_file)
-
- # Create USD stage
- self.stage = Usd.Stage.CreateNew(output_usd_file)
- # Set the stage's linear scale to meters
- UsdGeom.SetStageMetersPerUnit(self.stage, 1.0)
- UsdGeom.SetStageUpAxis(self.stage, UsdGeom.Tokens.y)
-
- # Create a parent scope for all meshes
- root_path = f"/World/{self.data_basename}"
- UsdGeom.Xform.Define(self.stage, root_path)
-
- basename = os.path.basename(output_usd_file).split(".")[0]
- self.log_info("Converting %s", basename)
- root_path = f"{root_path}/Transform_{basename}"
- UsdGeom.Xform.Define(self.stage, root_path)
-
- root_scope = UsdGeom.Scope.Define(self.stage, "/World")
- self.stage.SetDefaultPrim(root_scope.GetPrim())
-
- # Collect the label data from each time point
- polydata_time_data: MeshTimeData = {}
- for fnum, mesh_data in enumerate(self.input_polydata):
- self.log_progress(
- fnum + 1, len(self.input_polydata), prefix="Processing time point"
- )
- polydata_time_data[fnum] = self._process_mesh_data(mesh_data)
-
- # Check for topology changes across time steps
- topology_changes = self._check_topology_changes(polydata_time_data)
-
- # Assign a unique color to each label
- label_colors: dict[str, RgbColor] = {}
- for fnum in range(len(polydata_time_data)):
- for label, _ in polydata_time_data[fnum].items():
- if label not in label_colors:
- label_colors[label] = self.colors[
- len(label_colors) % len(self.colors)
- ]
-
- # Process first polydata to get label groups
- first_data = polydata_time_data[0]
-
- # Create a mesh prim for each label group
- for idx, (label, data) in enumerate(first_data.items()):
- # Create a transform for each mesh
- transform_path = f"{root_path}/Transform_{label}"
- UsdGeom.Xform.Define(self.stage, transform_path)
-
- # Determine if topology changes for this label
- has_topology_change = topology_changes.get(label, False)
-
- # Call subclass-specific USD mesh creation
- start_time = time.time()
- self._create_usd_mesh(
- transform_path,
- label,
- polydata_time_data,
- label_colors,
- has_topology_change,
- )
- end_time = time.time()
- self.log_info(
- "Time taken to create USD mesh: %s seconds", end_time - start_time
- )
-
- # Set time range for the stage
- self.stage.SetStartTimeCode(self.times[0])
- self.stage.SetEndTimeCode(self.times[-1])
- self.stage.SetTimeCodesPerSecond(1.0)
-
- self.stage.Save()
- return self.stage
diff --git a/src/physiomotion4d/convert_vtk_to_usd_polymesh.py b/src/physiomotion4d/convert_vtk_to_usd_polymesh.py
deleted file mode 100644
index df3aa9c..0000000
--- a/src/physiomotion4d/convert_vtk_to_usd_polymesh.py
+++ /dev/null
@@ -1,641 +0,0 @@
-"""Converter for VTK PolyData to USD Mesh with surface meshes."""
-
-import itertools
-import time
-from typing import cast
-
-import pyvista as pv
-import vtk
-from pxr import Gf, Sdf, UsdGeom
-
-from .convert_vtk_to_usd_base import (
- ConvertVTKToUSDBase,
- MeshLabelData,
- MeshTimeData,
- RgbColor,
-)
-
-
-class ConvertVTKToUSDPolyMesh(ConvertVTKToUSDBase):
- """
- Converter for VTK PolyData to USD Mesh.
-
- Handles:
- - Surface meshes (PolyData)
- - UnstructuredGrid converted to surface
- - Time-varying topology via visibility control
- - Per-vertex colormap visualization
-
- Example Usage:
- >>> converter = ConvertVTKToUSDPolyMesh(
- ... data_basename='SurfaceModel', input_polydata=meshes, mask_ids=None
- ... )
- >>> converter.set_colormap(color_by_array='pressure', colormap='rainbow')
- >>> stage = converter.convert('output.usd')
- """
-
- def supports_mesh_type(self, mesh: pv.DataSet | vtk.vtkDataSet) -> bool:
- """
- Check if this converter supports the given mesh type.
-
- Supports:
- - PolyData meshes
- - UnstructuredGrid when convert_to_surface=True
-
- Args:
- mesh: PyVista or VTK mesh object
-
- Returns:
- bool: True if mesh is PolyData or can be converted to surface
- """
- if isinstance(mesh, (pv.PolyData, vtk.vtkPolyData)):
- return True
- if (
- isinstance(mesh, (pv.UnstructuredGrid, vtk.vtkUnstructuredGrid))
- and self.convert_to_surface
- ):
- return True
- return False
-
- def _process_mesh_data(
- self, mesh: pv.DataSet | vtk.vtkDataSet
- ) -> dict[str, MeshLabelData]:
- """
- Process mesh and extract geometry data.
-
- Args:
- mesh: PyVista PolyData or UnstructuredGrid
-
- Returns:
- dict: Processed mesh data organized by labels or 'default'
- """
- if isinstance(mesh, (pv.UnstructuredGrid, vtk.vtkUnstructuredGrid)):
- if self.convert_to_surface:
- # Convert UnstructuredGrid to surface PolyData first
- surface_mesh = self._convert_ugrid_to_surface(mesh)
- return self._process_polydata(surface_mesh)
- raise TypeError(
- "UnstructuredGrid not supported by PolyMesh converter. "
- "Use convert_to_surface=True or TetMesh converter."
- )
- if isinstance(mesh, (pv.PolyData, vtk.vtkPolyData)):
- return self._process_polydata(mesh)
- raise TypeError(f"Unsupported mesh type: {type(mesh)}")
-
- def _create_usd_mesh(
- self,
- transform_path: str,
- label: str,
- mesh_time_data: MeshTimeData,
- label_colors: dict[str, RgbColor],
- has_topology_change: bool,
- ) -> None:
- """
- Create USD mesh prim(s) for this label.
-
- Routes to appropriate method based on topology:
- - Constant topology: Single UsdGeomMesh with time-sampled points
- - Varying topology: Multiple UsdGeomMesh prims with visibility control
-
- Args:
- transform_path: USD path for the transform
- label: Label identifier
- mesh_time_data: Time-series mesh data
- label_colors: Color assignments for labels
- has_topology_change: Whether topology varies over time
- """
- if has_topology_change:
- self.log_info(
- "Creating time-varying UsdGeomMesh for label: %s (topology changes detected)",
- label,
- )
- self._create_usd_polymesh_varying(
- transform_path, label, mesh_time_data, label_colors
- )
- else:
- self.log_info("Creating UsdGeomMesh for label: %s", label)
- self._create_usd_polymesh(
- transform_path, label, mesh_time_data, label_colors
- )
-
- def _convert_ugrid_to_surface(
- self, ugrid: pv.UnstructuredGrid | vtk.vtkUnstructuredGrid
- ) -> pv.PolyData:
- """
- Extract surface from UnstructuredGrid and convert to PolyData.
-
- Args:
- ugrid: PyVista UnstructuredGrid or VTK vtkUnstructuredGrid
-
- Returns:
- pv.PolyData: Surface mesh extracted from the UnstructuredGrid
- """
- # Convert VTK to PyVista if needed
- if isinstance(ugrid, vtk.vtkUnstructuredGrid):
- ugrid = pv.wrap(ugrid)
-
- # Extract surface using PyVista's built-in method
- surface = cast(pv.PolyData, ugrid.extract_surface())
-
- # Preserve point and cell data arrays
- # Point data is automatically preserved by extract_surface
-
- return surface
-
- def _process_polydata(
- self, polydata: pv.PolyData | vtk.vtkPolyData
- ) -> dict[str, MeshLabelData]:
- """
- Process PolyData and extract geometry, labels, and attributes.
-
- Args:
- polydata: VTK or PyVista PolyData mesh
-
- Returns:
- dict: Processed mesh data with structure:
- {label: {'mesh_type', 'points', 'face_vertex_counts',
- 'face_vertex_indices', 'deformation_magnitude',
- 'color_array', 'point_mapping'}}
- """
- # Get points
- points = polydata.GetPoints()
- num_points = points.GetNumberOfPoints()
-
- # Get boundary labels if they exist
- boundary_labels = None
- if self.mask_ids is not None and polydata.GetCellData().HasArray(
- "boundary_labels"
- ):
- label_array = polydata.GetCellData().GetArray("boundary_labels")
- # Get all unique labels from both components
- tuple_values = [
- label_array.GetTuple(i) for i in range(label_array.GetNumberOfTuples())
- ]
- tuple_values_flattened = list(itertools.chain.from_iterable(tuple_values))
- boundary_labels = set(tuple_values_flattened)
- boundary_labels.discard(0)
-
- # Get deformation magnitude if it exists
- def_mag = None
- if polydata.GetPointData().HasArray("DeformationMagnitude"):
- intensity_array = polydata.GetPointData().GetArray("DeformationMagnitude")
- def_mag = [float(intensity_array.GetValue(i)) for i in range(num_points)]
-
- # Get color array if specified
- color_array = self._extract_color_array(polydata)
-
- # Get faces
- faces = polydata.GetPolys()
- connectivity = faces.GetConnectivityArray()
- offsets = faces.GetOffsetsArray()
-
- # Process face data
- num_faces = offsets.GetNumberOfValues() - 1
- start_idx = [offsets.GetValue(i) for i in range(num_faces)]
- end_idx = [offsets.GetValue(i + 1) for i in range(num_faces)]
- face_vertex_counts = [end_idx[i] - start_idx[i] for i in range(num_faces)]
- face_vertex_indices = []
- for i in range(num_faces):
- face_vertex_indices.extend(
- [connectivity.GetValue(j) for j in range(start_idx[i], end_idx[i])]
- )
-
- # Create objects for each cell based on its labels
- if boundary_labels:
- assert self.mask_ids is not None
- # Create a dictionary to store objects for each label
- label_objects: dict[str, MeshLabelData] = {}
-
- # Initialize objects for each unique label
- for label_id in boundary_labels:
- if int(label_id) != 0:
- label = self.mask_ids[int(label_id)]
- label_objects[label] = {
- "mesh_type": "polymesh",
- "points": [],
- "face_vertex_counts": [],
- "face_vertex_indices": [],
- "deformation_magnitude": [] if def_mag else None,
- "color_array": [] if color_array is not None else None,
- "point_mapping": {},
- }
-
- label_array = None
- if polydata.GetCellData().HasArray("boundary_labels"):
- label_array = polydata.GetCellData().GetArray("boundary_labels")
-
- # Process each cell
- start_time = time.time()
- for cell_id in range(polydata.GetNumberOfCells()):
- if cell_id % 1000000 == 0 or cell_id == polydata.GetNumberOfCells() - 1:
- self.log_progress(
- cell_id + 1,
- polydata.GetNumberOfCells(),
- prefix="Processing cells",
- )
-
- cell = polydata.GetCell(cell_id)
-
- # Get all labels for this cell
- if label_array:
- tuple_values = label_array.GetTuple(cell_id)
- cell_labels = [
- self.mask_ids[int(label_id)]
- for label_id in tuple_values
- if int(label_id) != 0
- ]
-
- # Get the points of this cell
- n_points = cell.GetNumberOfPoints()
- point_ids = [int(cell.GetPointId(i)) for i in range(n_points)]
- usd_points = [
- self._ras_to_usd(points.GetPoint(pnt_id))
- for pnt_id in point_ids
- ]
-
- # For each label of this cell, create a copy of the cell
- for label_str in cell_labels:
- obj = label_objects[label_str]
- point_mapping = cast(dict[int, int], obj["point_mapping"])
- obj_points = cast(list[Gf.Vec3f], obj["points"])
- obj_face_counts = cast(list[int], obj["face_vertex_counts"])
- obj_face_indices = cast(list[int], obj["face_vertex_indices"])
- cell_point_indices = []
- for pnt_num, pnt_id in enumerate(point_ids):
- indx = point_mapping.get(pnt_id, None)
- if indx is None:
- indx = len(obj_points)
- obj_points.append(usd_points[pnt_num])
- point_mapping[pnt_id] = indx
- if def_mag is not None:
- obj_def_mag = cast(
- list[float], obj["deformation_magnitude"]
- )
- obj_def_mag.append(def_mag[pnt_id])
- if color_array is not None:
- obj_color_array = cast(
- list[float], obj["color_array"]
- )
- obj_color_array.append(float(color_array[pnt_id]))
- cell_point_indices.append(indx)
-
- obj_face_counts.append(len(cell_point_indices))
- obj_face_indices.extend(cell_point_indices)
-
- end_time = time.time()
- self.log_info(
- "Time taken to process cells %d: %s seconds",
- polydata.GetNumberOfCells(),
- end_time - start_time,
- )
-
- # Convert color_array lists to numpy arrays
- # for label, obj in label_objects.items():
- # if obj['color_array'] is not None:
- # obj['color_array'] = np.array(obj['color_array'])
-
- return label_objects
- # If no boundary labels, return single group with all points and faces
- points_data = [self._ras_to_usd(points.GetPoint(i)) for i in range(num_points)]
- return {
- "default": {
- "mesh_type": "polymesh",
- "points": points_data,
- "face_vertex_counts": face_vertex_counts,
- "face_vertex_indices": face_vertex_indices,
- "deformation_magnitude": def_mag,
- "color_array": (
- color_array.tolist() if color_array is not None else None
- ),
- }
- }
-
- def _create_usd_polymesh(
- self,
- transform_path: str,
- label: str,
- mesh_time_data: MeshTimeData,
- label_colors: dict[str, RgbColor],
- ) -> None:
- """
- Create UsdGeomMesh for polygon surface data with constant topology.
-
- Uses time-sampled attributes for points, normals, and colors.
-
- Args:
- transform_path: USD path for the transform
- label: Label identifier
- mesh_time_data: Time-series mesh data
- label_colors: Color assignments for labels
- """
- assert self.stage is not None
- data = mesh_time_data[0][label]
-
- # Create mesh prim under the transform
- mesh_path = f"{transform_path}/{label}"
- mesh = UsdGeom.Mesh.Define(self.stage, mesh_path)
-
- # Set topology (assuming consistent topology across timesteps)
- mesh.CreateFaceVertexCountsAttr(data["face_vertex_counts"])
- mesh.CreatePointsAttr()
- mesh.CreateFaceVertexIndicesAttr(data["face_vertex_indices"])
-
- # Set mesh attributes for Index renderer compatibility
- mesh.CreateSubdivisionSchemeAttr("none") # Prevent unwanted subdivision
- mesh.CreateDoubleSidedAttr(True) # Ensure visibility from both sides
-
- # Create normals attribute
- # Normals will be computed per timestep since mesh deforms
- if self.compute_normals:
- normals_attr = mesh.CreateNormalsAttr()
- normals_attr.SetMetadata("interpolation", UsdGeom.Tokens.vertex)
-
- # Set display color - either per-vertex from color array or fixed label color
- use_color_array = self.color_by_array is not None and any(
- mesh_time_data[t][label].get("color_array") is not None
- for t in range(len(self.times))
- )
-
- if not use_color_array:
- # Use fixed label color with proper primvar
- display_color = label_colors[label]
- display_color_primvar = mesh.CreateDisplayColorPrimvar(
- UsdGeom.Tokens.constant
- )
- display_color_primvar.Set([display_color])
-
- # Create points attribute with time samples
- points_attr = mesh.CreatePointsAttr()
- extent_attr = mesh.CreateExtentAttr()
- time_samples = {}
-
- # Create display color primvar if using color array
- scalar_primvar = None
- display_color_primvar = None
- display_opacity_primvar = None
- if use_color_array:
- display_color_primvar = mesh.CreateDisplayColorPrimvar(
- UsdGeom.Tokens.vertex
- )
- display_opacity_primvar = mesh.CreateDisplayOpacityPrimvar(
- UsdGeom.Tokens.vertex
- )
-
- # Create custom primvar for raw scalar values (for colormap control)
- scalar_primvar = UsdGeom.PrimvarsAPI(mesh).CreatePrimvar(
- self.color_by_array,
- Sdf.ValueTypeNames.FloatArray,
- UsdGeom.Tokens.vertex,
- )
-
- # **USE NEW COLORMAP SYSTEM: Compute intensity range once**
- if use_color_array:
- vmin, vmax = self._compute_intensity_range(mesh_time_data, label)
- global_vmin = vmin
- global_vmax = vmax
- else:
- global_vmin = float("inf")
- global_vmax = float("-inf")
-
- num_times = len(self.times)
- for time_idx, time_code in enumerate(self.times):
- if time_idx % 10 == 0 or time_idx == num_times - 1:
- self.log_progress(
- time_idx + 1,
- num_times,
- prefix=f"Processing time samples for {label}",
- )
- time_data = mesh_time_data[time_idx][label]
-
- if self.compute_normals:
- vertex_normals = self._compute_facevarying_normals_tri(
- time_data["points"],
- time_data["face_vertex_counts"],
- time_data["face_vertex_indices"],
- )
-
- # Set points first
- time_samples[time_code] = {
- "points": time_data["points"],
- "extent": UsdGeom.Mesh.ComputeExtent(time_data["points"]),
- }
- if self.compute_normals:
- time_samples[time_code]["normals"] = vertex_normals
-
- # Compute per-vertex colors if using color array
- if use_color_array and time_data.get("color_array") is not None:
- color_values = cast(list[float], time_data["color_array"])
-
- # **USE CONFIGURED COLORMAP with consistent intensity range**
- vertex_colors = [
- self._map_scalar_to_color(float(v), vmin, vmax, self.colormap)
- for v in color_values
- ]
- time_samples[time_code]["vertex_colors"] = vertex_colors
- time_samples[time_code]["scalar_values"] = color_values
- time_samples[time_code]["vmin"] = vmin
- time_samples[time_code]["vmax"] = vmax
-
- # Set points, extents, and normals with explicit time codes
- for t_code, time_data_dict in time_samples.items():
- points_attr.Set(time_data_dict["points"], t_code)
- extent_attr.Set(time_data_dict["extent"], t_code)
- if self.compute_normals:
- normals_attr.Set(time_data_dict["normals"], t_code)
- if use_color_array and "vertex_colors" in time_data_dict:
- assert display_color_primvar is not None
- assert scalar_primvar is not None
- assert display_opacity_primvar is not None
- display_color_primvar.Set(time_data_dict["vertex_colors"], t_code)
- # Set raw scalar values for colormap control
- scalar_values = cast(list[float], time_data_dict["scalar_values"])
- scalar_primvar.Set(scalar_values, t_code)
- # Set opacity (full opacity by default)
- num_vertices = len(scalar_values)
- opacity_values = [1.0] * num_vertices
- display_opacity_primvar.Set(opacity_values, t_code)
-
- # Set initial values (non-timewarped)
- points_attr.Set(time_samples[self.times[0]]["points"])
- extent_attr.Set(time_samples[self.times[0]]["extent"])
- if self.compute_normals:
- normals_attr.Set(time_samples[self.times[0]]["normals"])
- if use_color_array and "vertex_colors" in time_samples[self.times[0]]:
- assert display_color_primvar is not None
- assert scalar_primvar is not None
- assert display_opacity_primvar is not None
- display_color_primvar.Set(time_samples[self.times[0]]["vertex_colors"])
- scalar_values_0 = cast(
- list[float], time_samples[self.times[0]]["scalar_values"]
- )
- scalar_primvar.Set(scalar_values_0)
- num_vertices = len(scalar_values_0)
- display_opacity_primvar.Set([1.0] * num_vertices)
-
- # Add metadata for colormap range and visualization controls (for colormap meshes only)
- if use_color_array:
- prim = mesh.GetPrim()
- prim.SetCustomDataByKey(f"{self.color_by_array}_min", global_vmin)
- prim.SetCustomDataByKey(f"{self.color_by_array}_max", global_vmax)
- prim.SetCustomDataByKey(f"{self.color_by_array}_colormap", self.colormap)
- prim.SetCustomDataByKey("visualizationDataArray", self.color_by_array)
-
- # Set deformation magnitude if it exists
- if any(
- mesh_time_data[time_idx][label]["deformation_magnitude"] is not None
- for time_idx in range(len(self.times))
- ):
- def_mag_attr = mesh.GetPrim().CreateAttribute(
- "deformationMagnitude", Sdf.ValueTypeNames.FloatArray
- )
-
- for time_idx, t_code in enumerate(self.times):
- def_mag = mesh_time_data[time_idx][label]["deformation_magnitude"]
- if def_mag is not None:
- def_mag_attr.Set(def_mag, t_code)
-
- def _create_usd_polymesh_varying(
- self,
- transform_path: str,
- label: str,
- mesh_time_data: MeshTimeData,
- label_colors: dict[str, RgbColor],
- ) -> None:
- """
- Create separate UsdGeomMesh prims for each timestep with visibility control.
-
- Used when topology changes over time (varying number of points/faces).
-
- Args:
- transform_path: USD path for the transform
- label: Label identifier
- mesh_time_data: Time-series mesh data
- label_colors: Color assignments for labels
- """
- assert self.stage is not None
- # Determine if using color array
- use_color_array = self.color_by_array is not None and any(
- mesh_time_data[t][label].get("color_array") is not None
- for t in range(len(self.times))
- )
-
- # Compute intensity range if using color array
- if use_color_array:
- vmin, vmax = self._compute_intensity_range(mesh_time_data, label)
-
- # Create a parent Xform for all time-varying meshes
- parent_path = f"{transform_path}/{label}"
- UsdGeom.Xform.Define(self.stage, parent_path)
-
- # Create separate mesh for each time step
- num_times = len(self.times)
- for time_idx, time_code in enumerate(self.times):
- if time_idx % 10 == 0 or time_idx == num_times - 1:
- self.log_progress(
- time_idx + 1, num_times, prefix=f"Creating meshes for {label}"
- )
- # Skip if label doesn't exist at this timestep
- if label not in mesh_time_data[time_idx]:
- continue
-
- time_data = mesh_time_data[time_idx][label]
-
- # Create mesh prim for this time step
- mesh_path = f"{parent_path}/mesh_t{time_code}"
- mesh = UsdGeom.Mesh.Define(self.stage, mesh_path)
-
- # Set topology (unique for this timestep)
- mesh.CreateFaceVertexCountsAttr(time_data["face_vertex_counts"])
- mesh.CreateFaceVertexIndicesAttr(time_data["face_vertex_indices"])
- mesh.CreatePointsAttr(time_data["points"])
-
- # Set mesh attributes for Index renderer compatibility
- mesh.CreateSubdivisionSchemeAttr("none")
- mesh.CreateDoubleSidedAttr(True)
-
- # Compute and set normals
- if self.compute_normals:
- vertex_normals = self._compute_facevarying_normals_tri(
- time_data["points"],
- time_data["face_vertex_counts"],
- time_data["face_vertex_indices"],
- )
- normals_attr = mesh.CreateNormalsAttr()
- normals_attr.SetMetadata("interpolation", UsdGeom.Tokens.vertex)
- normals_attr.Set(vertex_normals)
-
- # Set extent
- extent_attr = mesh.CreateExtentAttr()
- extent_attr.Set(UsdGeom.Mesh.ComputeExtent(time_data["points"]))
-
- # Set display color
- if use_color_array and time_data.get("color_array") is not None:
- color_values = cast(list[float], time_data["color_array"])
-
- # Map scalars to colors
- vertex_colors = [
- self._map_scalar_to_color(float(v), vmin, vmax, self.colormap)
- for v in color_values
- ]
- display_color_primvar = mesh.CreateDisplayColorPrimvar(
- UsdGeom.Tokens.vertex
- )
- display_color_primvar.Set(vertex_colors)
-
- # Set opacity
- display_opacity_primvar = mesh.CreateDisplayOpacityPrimvar(
- UsdGeom.Tokens.vertex
- )
- display_opacity_primvar.Set([1.0] * len(vertex_colors))
-
- # Store scalar values as primvar
- scalar_primvar = UsdGeom.PrimvarsAPI(mesh).CreatePrimvar(
- self.color_by_array,
- Sdf.ValueTypeNames.FloatArray,
- UsdGeom.Tokens.vertex,
- )
- scalar_list = (
- color_values.tolist()
- if hasattr(color_values, "tolist")
- else list(color_values)
- )
- scalar_primvar.Set(scalar_list)
-
- # Add colormap metadata
- prim = mesh.GetPrim()
- prim.SetCustomDataByKey(f"{self.color_by_array}_min", vmin)
- prim.SetCustomDataByKey(f"{self.color_by_array}_max", vmax)
- prim.SetCustomDataByKey(
- f"{self.color_by_array}_colormap", self.colormap
- )
- prim.SetCustomDataByKey("visualizationDataArray", self.color_by_array)
- else:
- # Use fixed label color
- display_color = label_colors[label]
- display_color_primvar = mesh.CreateDisplayColorPrimvar(
- UsdGeom.Tokens.constant
- )
- display_color_primvar.Set([display_color])
-
- # Set deformation magnitude if exists
- if time_data.get("deformation_magnitude") is not None:
- def_mag_attr = mesh.GetPrim().CreateAttribute(
- "deformationMagnitude", Sdf.ValueTypeNames.FloatArray
- )
- def_mag_attr.Set(time_data["deformation_magnitude"])
-
- # Set visibility based on time code
- # Mesh is visible only at its specific time code
- visibility_attr = mesh.CreateVisibilityAttr()
- for t_code in self.times:
- if t_code == time_code:
- visibility_attr.Set(UsdGeom.Tokens.inherited, t_code)
- else:
- visibility_attr.Set(UsdGeom.Tokens.invisible, t_code)
-
- # Set default visibility
- visibility_attr.Set(
- UsdGeom.Tokens.inherited
- if time_code == self.times[0]
- else UsdGeom.Tokens.invisible
- )
diff --git a/src/physiomotion4d/convert_vtk_to_usd_tetmesh.py b/src/physiomotion4d/convert_vtk_to_usd_tetmesh.py
deleted file mode 100644
index 3b54017..0000000
--- a/src/physiomotion4d/convert_vtk_to_usd_tetmesh.py
+++ /dev/null
@@ -1,524 +0,0 @@
-"""Converter for VTK UnstructuredGrid to USD TetMesh with volumetric meshes."""
-
-from typing import Optional, cast
-
-import numpy as np
-import pyvista as pv
-import vtk
-from pxr import Gf, Sdf, UsdGeom, Vt
-
-from .convert_vtk_to_usd_base import (
- VTK_QUAD,
- VTK_TETRA,
- VTK_TRIANGLE,
- ConvertVTKToUSDBase,
- MeshLabelData,
- MeshTimeData,
- RgbColor,
-)
-
-
-class ConvertVTKToUSDTetMesh(ConvertVTKToUSDBase):
- """
- Converter for VTK UnstructuredGrid to USD TetMesh.
-
- Handles:
- - Volumetric tetrahedral meshes
- - Surface cells (triangles/quads) from UnstructuredGrid
- - Time-varying topology via visibility control
-
- Requires OpenUSD v24.03+ for UsdGeomTetMesh support.
-
- Example Usage:
- >>> converter = ConvertVTKToUSDTetMesh(
- ... data_basename='VolumetricModel', input_polydata=ugrid_meshes, mask_ids=None
- ... )
- >>> stage = converter.convert('output.usd')
- """
-
- def supports_mesh_type(self, mesh: pv.DataSet | vtk.vtkDataSet) -> bool:
- """
- Check if this converter supports the given mesh type.
-
- Supports UnstructuredGrid when NOT converting to surface.
-
- Args:
- mesh: PyVista or VTK mesh object
-
- Returns:
- bool: True if mesh is UnstructuredGrid and not surface mode
- """
- return (
- isinstance(mesh, (pv.UnstructuredGrid, vtk.vtkUnstructuredGrid))
- and not self.convert_to_surface
- )
-
- def _process_mesh_data(
- self, mesh: pv.DataSet | vtk.vtkDataSet
- ) -> dict[str, MeshLabelData]:
- """
- Process mesh and extract geometry data.
-
- Args:
- mesh: PyVista UnstructuredGrid
-
- Returns:
- dict: Processed mesh data with tetrahedral or surface cell information
- """
- if not isinstance(mesh, (pv.UnstructuredGrid, vtk.vtkUnstructuredGrid)):
- raise TypeError(
- f"TetMesh converter only supports UnstructuredGrid. Got: {type(mesh)}"
- )
- return self._process_unstructured_grid(mesh)
-
- def _create_usd_mesh(
- self,
- transform_path: str,
- label: str,
- mesh_time_data: MeshTimeData,
- label_colors: dict[str, RgbColor],
- has_topology_change: bool,
- ) -> None:
- """
- Create USD mesh prim(s) for this label.
-
- Routes to appropriate method based on topology and mesh type:
- - TetMesh with constant topology: Single UsdGeomTetMesh
- - TetMesh with varying topology: Multiple UsdGeomTetMesh prims with visibility
- - PolyMesh (surface cells): Delegates to PolyMesh creation
-
- Args:
- transform_path: USD path for the transform
- label: Label identifier
- mesh_time_data: Time-series mesh data
- label_colors: Color assignments for labels
- has_topology_change: Whether topology varies over time
- """
- # Check mesh type from first timestep data
- mesh_type = mesh_time_data[0][label].get("mesh_type", "tetmesh")
-
- if mesh_type == "tetmesh":
- if has_topology_change:
- self.log_info(
- "Creating time-varying UsdGeomTetMesh for label: %s (topology changes detected)",
- label,
- )
- self._create_usd_tetmesh_varying(
- transform_path, label, mesh_time_data, label_colors
- )
- else:
- self.log_info("Creating UsdGeomTetMesh for label: %s", label)
- self._create_usd_tetmesh(
- transform_path, label, mesh_time_data, label_colors
- )
- else:
- # Surface cells from UnstructuredGrid - treat as polymesh
- # Note: This is a fallback for UnstructuredGrid with only surface cells
- raise ValueError(
- "UnstructuredGrid contains surface cells, not tetrahedra. "
- "Use convert_to_surface=True with PolyMesh converter."
- )
-
- def _process_unstructured_grid(
- self, ugrid: pv.UnstructuredGrid | vtk.vtkUnstructuredGrid
- ) -> dict[str, MeshLabelData]:
- """
- Process UnstructuredGrid and extract tetrahedral and surface geometry.
-
- Args:
- ugrid: PyVista UnstructuredGrid or VTK vtkUnstructuredGrid
-
- Returns:
- dict: Processed mesh data with 'mesh_type' key indicating 'tetmesh' or
- 'polymesh'
- """
- # Convert VTK to PyVista if needed
- if isinstance(ugrid, vtk.vtkUnstructuredGrid):
- ugrid = pv.wrap(ugrid)
-
- # Get points
- points = ugrid.points
-
- # Get deformation magnitude if it exists
- def_mag = None
- if "DeformationMagnitude" in ugrid.point_data:
- def_mag = ugrid.point_data["DeformationMagnitude"]
-
- # Get boundary labels if they exist
- boundary_labels = None
- if self.mask_ids is not None and "boundary_labels" in ugrid.cell_data:
- label_array = ugrid.cell_data["boundary_labels"]
- boundary_labels = set()
- if label_array.ndim > 1:
- # Multi-component array
- for row in label_array:
- for value in row:
- if int(value) != 0:
- boundary_labels.add(value)
- else:
- # Single component
- for value in label_array:
- if int(value) != 0:
- boundary_labels.add(value)
-
- # Parse cells and cell types
- cells = ugrid.cells
- celltypes = ugrid.celltypes
-
- # Separate tetrahedral cells from surface cells
- tet_cells = []
- surface_cells = []
- cell_labels = []
-
- idx = 0
- cell_id = 0
- while idx < len(cells):
- n_points = cells[idx]
- cell_type = celltypes[cell_id]
- cell_connectivity = cells[idx + 1 : idx + 1 + n_points]
-
- # Get cell label if available
- cell_label = None
- if boundary_labels is not None and "boundary_labels" in ugrid.cell_data:
- label_val = ugrid.cell_data["boundary_labels"][cell_id]
- if isinstance(label_val, (list, np.ndarray)):
- # Multi-component, take first non-zero
- for v in label_val:
- if int(v) != 0:
- cell_label = int(v)
- break
- elif int(label_val) != 0:
- cell_label = int(label_val)
-
- if cell_type == VTK_TETRA:
- tet_cells.append(cell_connectivity)
- cell_labels.append(cell_label)
- elif cell_type in [VTK_TRIANGLE, VTK_QUAD]:
- surface_cells.append((cell_connectivity, n_points))
-
- idx += n_points + 1
- cell_id += 1
-
- # Determine mesh type and process accordingly
- if len(tet_cells) > 0:
- # Process as tetrahedral mesh
- return self._process_tetrahedral_mesh(
- points, tet_cells, cell_labels, def_mag, boundary_labels
- )
- if len(surface_cells) > 0:
- # Process as surface mesh
- return self._process_surface_cells(
- points, surface_cells, def_mag, boundary_labels
- )
- raise ValueError(
- "UnstructuredGrid contains no supported cell types (tetrahedra or surface cells)"
- )
-
- def _process_tetrahedral_mesh(
- self,
- points: np.ndarray,
- tet_cells: list,
- cell_labels: list,
- def_mag: Optional[np.ndarray],
- boundary_labels: Optional[set],
- ) -> dict[str, MeshLabelData]:
- """
- Process tetrahedral cells for UsdGeomTetMesh export.
-
- Args:
- points: Array of point coordinates
- tet_cells: List of tetrahedral connectivity arrays
- cell_labels: List of cell labels (or None)
- def_mag: Deformation magnitude array (or None)
- boundary_labels: Set of boundary label IDs (or None)
-
- Returns:
- dict: Processed tetmesh data with structure:
- {'default': {'mesh_type', 'points', 'tet_indices',
- 'surface_face_indices', 'deformation_magnitude'}}
- """
- # Convert points to USD coordinates
- points_usd = [self._ras_to_usd(p) for p in points]
-
- # Convert tetrahedral indices to Vec4i format (required by UsdGeomTetMesh)
- tet_indices_vec4 = [
- Gf.Vec4i(int(tet[0]), int(tet[1]), int(tet[2]), int(tet[3]))
- for tet in tet_cells
- ]
-
- # Compute surface faces from tetrahedra for rendering
- # Each tetrahedron has 4 triangular faces
- surface_faces = []
- for tet in tet_cells:
- # The 4 faces of a tetrahedron with vertices [a, b, c, d]:
- # Face 0: [b, c, d], Face 1: [a, c, d], Face 2: [a, b, d], Face 3: [a, b, c]
- a, b, c, d = tet
- surface_faces.extend(
- [
- [int(b), int(c), int(d)],
- [int(a), int(c), int(d)],
- [int(a), int(b), int(d)],
- [int(a), int(b), int(c)],
- ]
- )
-
- # Flatten surface face indices to Vec3i format
- surface_face_indices = [Gf.Vec3i(f[0], f[1], f[2]) for f in surface_faces]
-
- result = {
- "default": {
- "mesh_type": "tetmesh",
- "points": points_usd,
- "tet_indices": tet_indices_vec4,
- "surface_face_indices": surface_face_indices,
- "deformation_magnitude": (
- def_mag.tolist() if def_mag is not None else None
- ),
- }
- }
-
- return result
-
- def _process_surface_cells(
- self,
- points: np.ndarray,
- surface_cells: list,
- def_mag: Optional[np.ndarray],
- boundary_labels: Optional[set],
- ) -> dict[str, MeshLabelData]:
- """
- Process surface cells (triangles/quads) for UsdGeomMesh export.
-
- Args:
- points: Array of point coordinates
- surface_cells: List of (connectivity, n_points) tuples
- def_mag: Deformation magnitude array (or None)
- boundary_labels: Set of boundary label IDs (or None)
-
- Returns:
- dict: Processed polymesh data with structure:
- {'default': {'mesh_type', 'points', 'face_vertex_counts',
- 'face_vertex_indices', 'deformation_magnitude'}}
- """
- points_usd = [self._ras_to_usd(p) for p in points]
-
- face_vertex_counts = []
- face_vertex_indices = []
-
- for cell_connectivity, n_points in surface_cells:
- face_vertex_counts.append(n_points)
- face_vertex_indices.extend(cell_connectivity.tolist())
-
- return {
- "default": {
- "mesh_type": "polymesh",
- "points": points_usd,
- "face_vertex_counts": face_vertex_counts,
- "face_vertex_indices": face_vertex_indices,
- "deformation_magnitude": (
- def_mag.tolist() if def_mag is not None else None
- ),
- }
- }
-
- def _create_usd_tetmesh(
- self,
- transform_path: str,
- label: str,
- mesh_time_data: MeshTimeData,
- label_colors: dict[str, RgbColor],
- ) -> None:
- """
- Create UsdGeomTetMesh for tetrahedral volume data with constant topology.
-
- Uses time-sampled attributes for points and normals.
-
- Args:
- transform_path: USD path for the transform
- label: Label identifier
- mesh_time_data: Time-series mesh data
- label_colors: Color assignments for labels
- """
- data = mesh_time_data[0][label]
-
- # Create tetrahedral mesh prim under the transform
- mesh_path = f"{transform_path}/{label}"
- tetmesh = UsdGeom.TetMesh.Define(self.stage, mesh_path)
-
- # Set tetrahedral topology (assuming consistent topology across timesteps)
- tetmesh.CreateTetVertexIndicesAttr(data["tet_indices"])
- tetmesh.CreateSurfaceFaceVertexIndicesAttr(data["surface_face_indices"])
-
- # Set mesh attributes for Index renderer compatibility
- tetmesh.CreateDoubleSidedAttr(True) # Ensure visibility from both sides
-
- # Create normals attribute
- # For tetrahedral meshes, we need normals for the surface vertices
- if self.compute_normals:
- normals_attr = tetmesh.CreateNormalsAttr()
- normals_attr.SetMetadata("interpolation", UsdGeom.Tokens.vertex)
-
- # Assign a unique color to the mesh with proper primvar
- display_color = label_colors[label]
- display_color_primvar = tetmesh.CreateDisplayColorPrimvar(
- UsdGeom.Tokens.constant
- )
- display_color_primvar.Set([display_color])
-
- # Create points attribute with time samples
- points_attr = tetmesh.CreatePointsAttr()
- extent_attr = tetmesh.CreateExtentAttr()
- time_samples = {}
-
- num_times = len(self.times)
- for time_idx, time_code in enumerate(self.times):
- if time_idx % 10 == 0 or time_idx == num_times - 1:
- self.log_progress(
- time_idx + 1, num_times, prefix=f"Processing time steps for {label}"
- )
- time_data = mesh_time_data[time_idx][label]
-
- # Compute per-vertex normals for surface faces
- # For tetrahedral meshes, compute normals based on surface triangulation
- # Surface faces are triangular, so each face has 3 vertices
- surface_indices_val = cast(list[int], time_data["surface_face_indices"])
- face_vertex_counts = [3] * (len(surface_indices_val) // 3)
- if self.compute_normals:
- points_val = cast(list[Gf.Vec3f], time_data["points"])
- vertex_normals = self._compute_facevarying_normals_tri(
- Vt.Vec3fArray(points_val),
- Vt.IntArray(face_vertex_counts),
- Vt.IntArray(surface_indices_val),
- )
-
- # Set points first
- time_samples[time_code] = {
- "points": time_data["points"],
- "extent": UsdGeom.TetMesh.ComputeExtent(time_data["points"]),
- }
- if self.compute_normals:
- time_samples[time_code]["normals"] = vertex_normals
-
- # Set points, extents, and normals with explicit time codes
- for t_code, time_data_dict in time_samples.items():
- points_attr.Set(time_data_dict["points"], t_code)
- extent_attr.Set(time_data_dict["extent"], t_code)
- if self.compute_normals:
- normals_attr.Set(time_data_dict["normals"], t_code)
-
- # Set initial values (non-timewarped)
- points_attr.Set(time_samples[self.times[0]]["points"])
- extent_attr.Set(time_samples[self.times[0]]["extent"])
- if self.compute_normals:
- normals_attr.Set(time_samples[self.times[0]]["normals"])
-
- # Set deformation magnitude if it exists
- if any(
- mesh_time_data[time_idx][label]["deformation_magnitude"] is not None
- for time_idx in range(len(self.times))
- ):
- def_mag_attr = tetmesh.GetPrim().CreateAttribute(
- "deformationMagnitude", Sdf.ValueTypeNames.FloatArray
- )
-
- for time_idx, t_code in enumerate(self.times):
- def_mag = mesh_time_data[time_idx][label]["deformation_magnitude"]
- if def_mag is not None:
- def_mag_attr.Set(def_mag, t_code)
-
- def _create_usd_tetmesh_varying(
- self,
- transform_path: str,
- label: str,
- mesh_time_data: MeshTimeData,
- label_colors: dict[str, RgbColor],
- ) -> None:
- """
- Create separate UsdGeomTetMesh prims for each timestep with visibility control.
-
- Used when topology changes over time (varying number of points/tetrahedra).
-
- Args:
- transform_path: USD path for the transform
- label: Label identifier
- mesh_time_data: Time-series mesh data
- label_colors: Color assignments for labels
- """
- # Create a parent Xform for all time-varying tetmeshes
- parent_path = f"{transform_path}/{label}"
- UsdGeom.Xform.Define(self.stage, parent_path)
-
- # Create separate tetmesh for each time step
- num_times = len(self.times)
- for time_idx, time_code in enumerate(self.times):
- if time_idx % 10 == 0 or time_idx == num_times - 1:
- self.log_progress(
- time_idx + 1, num_times, prefix=f"Creating tetmeshes for {label}"
- )
- # Skip if label doesn't exist at this timestep
- if label not in mesh_time_data[time_idx]:
- continue
-
- time_data = mesh_time_data[time_idx][label]
-
- # Create tetmesh prim for this time step
- mesh_path = f"{parent_path}/tetmesh_t{time_code}"
- tetmesh = UsdGeom.TetMesh.Define(self.stage, mesh_path)
-
- # Set topology (unique for this timestep)
- tetmesh.CreateTetVertexIndicesAttr(time_data["tet_indices"])
- tetmesh.CreateSurfaceFaceVertexIndicesAttr(
- time_data["surface_face_indices"]
- )
- tetmesh.CreatePointsAttr(time_data["points"])
-
- # Set mesh attributes for Index renderer compatibility
- tetmesh.CreateDoubleSidedAttr(True)
-
- # Compute and set normals
- surface_indices_val = cast(list[int], time_data["surface_face_indices"])
- face_vertex_counts = [3] * (len(surface_indices_val) // 3)
- if self.compute_normals:
- points_val = cast(list[Gf.Vec3f], time_data["points"])
- vertex_normals = self._compute_facevarying_normals_tri(
- Vt.Vec3fArray(points_val),
- Vt.IntArray(face_vertex_counts),
- Vt.IntArray(surface_indices_val),
- )
- normals_attr = tetmesh.CreateNormalsAttr()
- normals_attr.SetMetadata("interpolation", UsdGeom.Tokens.vertex)
- normals_attr.Set(vertex_normals)
-
- # Set extent
- extent_attr = tetmesh.CreateExtentAttr()
- extent_attr.Set(UsdGeom.TetMesh.ComputeExtent(time_data["points"]))
-
- # Set display color
- display_color = label_colors[label]
- display_color_primvar = tetmesh.CreateDisplayColorPrimvar(
- UsdGeom.Tokens.constant
- )
- display_color_primvar.Set([display_color])
-
- # Set deformation magnitude if exists
- if time_data.get("deformation_magnitude") is not None:
- def_mag_attr = tetmesh.GetPrim().CreateAttribute(
- "deformationMagnitude", Sdf.ValueTypeNames.FloatArray
- )
- def_mag_attr.Set(time_data["deformation_magnitude"])
-
- # Set visibility based on time code
- # Mesh is visible only at its specific time code
- visibility_attr = tetmesh.CreateVisibilityAttr()
- for t_code in self.times:
- if t_code == time_code:
- visibility_attr.Set(UsdGeom.Tokens.inherited, t_code)
- else:
- visibility_attr.Set(UsdGeom.Tokens.invisible, t_code)
-
- # Set default visibility
- visibility_attr.Set(
- UsdGeom.Tokens.inherited
- if time_code == self.times[0]
- else UsdGeom.Tokens.invisible
- )
diff --git a/src/physiomotion4d/usd_tools.py b/src/physiomotion4d/usd_tools.py
index f06d865..6e10cc0 100644
--- a/src/physiomotion4d/usd_tools.py
+++ b/src/physiomotion4d/usd_tools.py
@@ -11,6 +11,7 @@
"""
import logging
+from collections.abc import Sequence
from typing import Any
import numpy as np
@@ -98,6 +99,10 @@ def traverse_prim(current_prim: Any) -> None:
bbox = UsdGeom.Boundable.ComputeExtentFromPlugins(
UsdGeom.Boundable(current_prim), Usd.TimeCode.Default()
)
+ # Skip prims whose extent computation failed (None) or returned a malformed bbox
+ if bbox is None or len(bbox) != 2:
+ return
+
if first_bbox:
bbox_min = bbox[0]
bbox_max = bbox[1]
@@ -112,6 +117,11 @@ def traverse_prim(current_prim: Any) -> None:
traverse_prim(prim)
+ # If no valid bounding boxes were found, return default values
+ if first_bbox:
+ self.log_warning(f"No valid bounding box found for prim: {prim.GetPath()}")
+ return np.array([0, 0, 0]), np.array([0, 0, 0])
+
return bbox_min, bbox_max
def save_usd_file_arrangement(
@@ -203,32 +213,36 @@ def save_usd_file_arrangement(
)
xform_op.Set(translate, Usd.TimeCode.Default())
- for prim in source_stage.Traverse():
- if prim.IsA(UsdGeom.Mesh):
- bindingAPI = UsdShade.MaterialBindingAPI(prim)
- mesh_material = bindingAPI.ComputeBoundMaterial()
- if bool(mesh_material):
- material_path = (
- str(mesh_material[0].GetPath())
- if isinstance(mesh_material, tuple)
- and len(mesh_material) > 0
- else str(mesh_material.GetPath())
- )
- self.log_debug(
- " Mesh %s has material %s",
- prim.GetPrimPath(),
- material_path,
- )
- new_prim = new_stage.GetPrimAtPath(prim.GetPrimPath())
- material = UsdShade.Material.Get(new_stage, material_path)
- if new_prim is not None and new_prim.IsValid():
- binding_api = UsdShade.MaterialBindingAPI.Apply(new_prim)
- binding_api.Bind(material)
- else:
- self.log_warning(
- " Cannot bind. No new prim found for %s",
- prim.GetPrimPath(),
- )
+ # Note: Material bindings are preserved through references/payloads,
+ # so we don't need to explicitly rebind them. The code below is
+ # commented out to avoid cross-layer material binding issues.
+ #
+ # for prim in source_stage.Traverse():
+ # if prim.IsA(UsdGeom.Mesh):
+ # bindingAPI = UsdShade.MaterialBindingAPI(prim)
+ # mesh_material = bindingAPI.ComputeBoundMaterial()
+ # if bool(mesh_material):
+ # material_path = (
+ # str(mesh_material[0].GetPath())
+ # if isinstance(mesh_material, tuple)
+ # and len(mesh_material) > 0
+ # else str(mesh_material.GetPath())
+ # )
+ # self.log_debug(
+ # " Mesh %s has material %s",
+ # prim.GetPrimPath(),
+ # material_path,
+ # )
+ # new_prim = new_stage.GetPrimAtPath(prim.GetPrimPath())
+ # material = UsdShade.Material.Get(new_stage, material_path)
+ # if new_prim is not None and new_prim.IsValid() and material:
+ # binding_api = UsdShade.MaterialBindingAPI.Apply(new_prim)
+ # binding_api.Bind(material)
+ # else:
+ # self.log_warning(
+ # " Cannot bind. No new prim found for %s",
+ # prim.GetPrimPath(),
+ # )
self.log_info("Exporting stage...")
new_stage.Export(new_stage_name)
@@ -320,7 +334,15 @@ def _copy_prim(src_prim: Any, target_path: str) -> None:
# Copy default value if it exists
if attr.HasValue():
- new_attr.Set(attr.Get())
+ value = attr.Get()
+ # Skip unset (None) values; failures during Set() are caught and logged below
+ if value is not None:
+ try:
+ new_attr.Set(value)
+ except Exception as e:
+ self.log_warning(
+ f"Failed to copy attribute {attr.GetName()}: {e}"
+ )
# Copy all time samples for time-varying attributes
time_samples = attr.GetTimeSamples()
@@ -338,7 +360,9 @@ def _copy_prim(src_prim: Any, target_path: str) -> None:
new_rel = new_prim.CreateRelationship(
rel.GetName(), custom=rel.IsCustom()
)
- new_rel.SetTargets(rel.GetTargets())
+ targets = rel.GetTargets()
+ if targets:
+ new_rel.SetTargets(targets)
# Copy transforms if applicable
if src_prim.IsA(UsdGeom.Xformable):
@@ -538,3 +562,637 @@ def merge_usd_files_flattened(
# Export the flattened layer with corrected metadata
self.log_info("Exporting to %s", output_filename)
output_stage.Export(output_filename)
+
+ def list_mesh_primvars(
+ self,
+ stage_or_path: Usd.Stage | str,
+ mesh_path: str,
+ time_code: float | None = None,
+ ) -> list[dict]:
+ """
+ List all primvars on a USD mesh with metadata.
+
+ Inspects a mesh and returns information about each primvar including
+ name, type, interpolation, time samples, and value range when feasible.
+ This is useful for understanding what simulation data is available on
+ the mesh for visualization.
+
+ Args:
+ stage_or_path: USD Stage or path to USD file
+ mesh_path: Path to mesh prim (e.g., "/World/Meshes/MyMesh")
+ time_code: Optional time code to sample values. If None, uses default.
+
+ Returns:
+ list[dict]: List of primvar metadata dictionaries containing:
+ - name: Primvar name
+ - type_name: USD type name (e.g., "float[]", "color3f[]")
+ - interpolation: Interpolation mode ("vertex", "uniform", "constant")
+ - num_time_samples: Number of time samples (0 if static)
+ - elements: Number of elements in the array
+ - range: Tuple (min, max) for numeric arrays, None otherwise
+
+ Example:
+ >>> usd_tools = USDTools()
+ >>> primvars = usd_tools.list_mesh_primvars("valve.usd", "/World/Meshes/Valve")
+ >>> for pv in primvars:
+ ... print(f"{pv['name']}: {pv['interpolation']}, {pv['elements']} elements")
+ """
+ # Open stage if needed
+ if isinstance(stage_or_path, str):
+ stage = Usd.Stage.Open(stage_or_path)
+ else:
+ stage = stage_or_path
+
+ # Get mesh prim
+ mesh_prim = stage.GetPrimAtPath(mesh_path)
+ if not mesh_prim.IsValid():
+ raise ValueError(f"Invalid mesh prim at path: {mesh_path}")
+
+ if not mesh_prim.IsA(UsdGeom.Mesh):
+ raise ValueError(f"Prim at {mesh_path} is not a Mesh")
+
+ mesh = UsdGeom.Mesh(mesh_prim)
+ primvars_api = UsdGeom.PrimvarsAPI(mesh)
+ primvars = primvars_api.GetPrimvars()
+
+ # Use provided time code or default
+ tc = (
+ Usd.TimeCode(time_code) if time_code is not None else Usd.TimeCode.Default()
+ )
+
+ result = []
+ for primvar in primvars:
+ pv_info = {
+ "name": primvar.GetPrimvarName(),
+ "type_name": str(primvar.GetTypeName()),
+ "interpolation": primvar.GetInterpolation(),
+ "num_time_samples": primvar.GetAttr().GetNumTimeSamples(),
+ "elements": 0,
+ "range": None,
+ }
+
+ # Get value at time code
+ try:
+ value = primvar.Get(tc)
+ if value is not None:
+ pv_info["elements"] = len(value) if hasattr(value, "__len__") else 1
+
+ # Compute range for numeric types
+ if hasattr(value, "__iter__") and len(value) > 0:
+ try:
+ # Convert to numpy for easy min/max
+ arr = np.asarray(value)
+ if np.issubdtype(arr.dtype, np.number):
+ pv_info["range"] = (
+ float(np.min(arr)),
+ float(np.max(arr)),
+ )
+ except (TypeError, ValueError):
+ pass # Skip range for non-numeric data
+ except Exception as e:
+ self.log_debug(
+ f"Could not get value for primvar {pv_info['name']}: {e}"
+ )
+
+ result.append(pv_info)
+
+ return result
+
+ def pick_color_primvar(
+ self,
+ primvar_infos: list[dict[str, Any]],
+ keywords: tuple[str, ...] = ("strain", "stress"),
+ ) -> str | None:
+ """
+ Select a primvar for coloring based on keywords and preferences.
+
+ Examines a list of primvar metadata and picks the best candidate for
+ default coloring visualization. Prefers primvars containing keywords
+ like "strain" or "stress" that are commonly used in biomechanical
+ simulations.
+
+ Selection priority:
+ 1. Name contains first keyword ("strain") over later keywords ("stress")
+ 2. Vertex interpolation preferred over uniform (face) interpolation
+ 3. Alphabetically first if multiple candidates tie
+
+ Args:
+ primvar_infos: List of primvar metadata dicts (from list_mesh_primvars)
+ keywords: Tuple of keywords to search for in primvar names (case-insensitive)
+
+ Returns:
+ str | None: Name of selected primvar, or None if no candidates found
+
+ Example:
+ >>> primvars = usd_tools.list_mesh_primvars("valve.usd", "/World/Meshes/Valve")
+ >>> color_primvar = usd_tools.pick_color_primvar(primvars)
+ >>> print(f"Selected for coloring: {color_primvar}")
+ """
+ candidates: list[tuple[dict[str, Any], int]] = []
+
+ for pv in primvar_infos:
+ name_lower = pv["name"].lower()
+ for keyword_idx, keyword in enumerate(keywords):
+ if keyword in name_lower:
+ candidates.append((pv, keyword_idx))
+ break
+
+ if not candidates:
+ return None
+
+ # Sort by: keyword index, interpolation (vertex=0, else=1), name
+ def sort_key(item: tuple[dict[str, Any], int]) -> tuple[int, int, str]:
+ pv, kw_idx = item
+ interp_priority = 0 if str(pv.get("interpolation")) == "vertex" else 1
+ return (int(kw_idx), int(interp_priority), str(pv.get("name")))
+
+ candidates.sort(key=sort_key)
+ name_obj = candidates[0][0].get("name")
+ if name_obj is None:
+ return None
+ return str(name_obj)
+
+ def apply_colormap_from_primvar(
+ self,
+ stage_or_path: Usd.Stage | str,
+ mesh_path: str,
+ source_primvar: str,
+ *,
+ cmap: str = "viridis",
+ time_codes: list[float] | None = None,
+ write_default_at_t0: bool = True,
+ bind_vertex_color_material: bool = True,
+ ) -> None:
+ """
+ Apply colormap visualization by converting a primvar to displayColor.
+
+ Reads numeric data from a source primvar (like vtk_cell_stress or
+ vtk_point_displacement) and generates RGB vertex colors using a matplotlib
+ colormap. Writes these colors to the mesh's displayColor primvar and
+ optionally binds a material that uses vertex colors for rendering.
+
+ This is especially useful for post-processing USD files to add default
+ visualization colors based on simulation data like stress or strain fields.
+
+ Key features:
+ - Handles multi-component data (vectors/tensors) by computing magnitude
+ - Converts uniform (per-face) data to vertex data by averaging
+ - Computes global value range across all time samples for consistent coloring
+ - Writes both default and time-sampled displayColor for Omniverse compatibility
+
+ Args:
+ stage_or_path: USD Stage or path to USD file
+ mesh_path: Path to mesh prim (e.g., "/World/Meshes/MyMesh")
+ source_primvar: Name of primvar to visualize (e.g., "vtk_cell_stress")
+ cmap: Matplotlib colormap name (default: "viridis")
+ time_codes: List of time codes to process. If None, uses stage time range.
+ write_default_at_t0: If True, also write default value at t=0
+ bind_vertex_color_material: If True, create/bind material using displayColor
+
+ Raises:
+ ValueError: If mesh or primvar not found
+ ImportError: If matplotlib is not available
+
+ Example:
+ >>> usd_tools = USDTools()
+ >>> usd_tools.apply_colormap_from_primvar(
+ ... "valve.usd",
+ ... "/World/Meshes/Valve",
+ ... "vtk_cell_stress",
+ ... cmap="plasma"
+ ... )
+ """
+ # Check matplotlib availability
+ try:
+ from matplotlib import colormaps as mpl_colormaps
+ except ImportError:
+ raise ImportError(
+ "matplotlib is required for colormap coloring. "
+ "Install with: pip install matplotlib"
+ )
+
+ # Open stage if needed
+ if isinstance(stage_or_path, str):
+ stage = Usd.Stage.Open(stage_or_path)
+ stage_path = stage_or_path
+ else:
+ stage = stage_or_path
+ stage_path = None
+
+ # Get mesh prim
+ mesh_prim = stage.GetPrimAtPath(mesh_path)
+ if not mesh_prim.IsValid():
+ raise ValueError(f"Invalid mesh prim at path: {mesh_path}")
+
+ if not mesh_prim.IsA(UsdGeom.Mesh):
+ raise ValueError(f"Prim at {mesh_path} is not a Mesh")
+
+ mesh = UsdGeom.Mesh(mesh_prim)
+
+ # Get source primvar
+ primvars_api = UsdGeom.PrimvarsAPI(mesh)
+ source_pv = primvars_api.GetPrimvar(source_primvar)
+ if not source_pv:
+ raise ValueError(
+ f"Primvar '{source_primvar}' not found on mesh {mesh_path}"
+ )
+
+ # Determine time codes to process
+ if time_codes is None:
+ # Prefer the source primvar's authored samples (avoid inventing in-between frames).
+ pv_samples = list(source_pv.GetAttr().GetTimeSamples())
+ if pv_samples:
+ time_codes = pv_samples
+ else:
+ # Fallback to points samples; last resort is default time.
+ pts_samples = list(mesh.GetPointsAttr().GetTimeSamples())
+ if pts_samples:
+ time_codes = pts_samples
+ elif stage.HasAuthoredTimeCodeRange():
+ time_codes = [float(stage.GetStartTimeCode())]
+ else:
+ time_codes = [Usd.TimeCode.Default().GetValue()]
+
+ # Get mesh topology (needed for uniform->vertex conversion)
+ # For time-varying meshes, get topology at the first time code
+ first_time = (
+ Usd.TimeCode(time_codes[0]) if time_codes else Usd.TimeCode.Default()
+ )
+ face_vertex_counts = mesh.GetFaceVertexCountsAttr().Get(first_time)
+ face_vertex_indices = mesh.GetFaceVertexIndicesAttr().Get(first_time)
+ points_attr = mesh.GetPointsAttr()
+ points_data = points_attr.Get(first_time)
+ if points_data is None:
+ self.log_error(f"Cannot get points data for mesh at {mesh_path}")
+ return
+ n_points = len(points_data)
+
+ source_interp = source_pv.GetInterpolation()
+ element_size = int(source_pv.GetElementSize() or 1)
+
+ # Process all time samples to compute global range
+ self.log_info(
+ f"Processing {len(time_codes)} time samples for primvar '{source_primvar}'"
+ )
+ scalar_samples: list[tuple[float, np.ndarray]] = []
+ n_faces = len(face_vertex_counts) if face_vertex_counts is not None else 0
+
+ for tc in time_codes:
+ time_code = Usd.TimeCode(tc)
+ values = source_pv.Get(time_code)
+
+ if values is None:
+ self.log_warning(
+ f"No values for primvar '{source_primvar}' at time {tc}"
+ )
+ continue
+
+ # Convert to numpy array
+ arr = np.asarray(values)
+
+ # If the primvar is stored as a flattened array with an elementSize, reshape it
+ # back to (N, elementSize) so multi-component reduction works.
+ if arr.ndim == 1:
+ inferred = None
+ if element_size > 1 and len(arr) % element_size == 0:
+ inferred = element_size
+ else:
+ # Try to infer element size from expected element count.
+ expected = n_points if source_interp == "vertex" else n_faces
+ if expected and len(arr) % expected == 0 and len(arr) != expected:
+ inferred = len(arr) // expected
+ if inferred and inferred > 1 and len(arr) % inferred == 0:
+ arr = arr.reshape(-1, int(inferred))
+
+ # Reduce multi-component to scalar magnitude
+ if arr.ndim == 2 and arr.shape[1] > 1:
+ scalar = np.linalg.norm(arr, axis=1)
+ elif arr.ndim == 1:
+ scalar = arr
+ else:
+ scalar = arr.flatten()
+
+ # Convert uniform (per-face) to vertex (per-point)
+ if source_interp == "uniform":
+ if len(scalar) != n_faces:
+ self.log_warning(
+ f"Skipping time {tc} for primvar '{source_primvar}': "
+ f"size mismatch (got {len(scalar)}, expected {n_faces} faces)"
+ )
+ continue
+ vertex_scalar = self._uniform_to_vertex_scalar(
+ scalar, face_vertex_counts, face_vertex_indices, n_points
+ )
+ elif source_interp == "vertex":
+ if len(scalar) != n_points:
+ self.log_warning(
+ f"Skipping time {tc} for primvar '{source_primvar}': "
+ f"size mismatch (got {len(scalar)}, expected {n_points} points)"
+ )
+ continue
+ vertex_scalar = scalar
+ else:
+ raise ValueError(
+ f"Unsupported interpolation '{source_interp}' for primvar '{source_primvar}'"
+ )
+
+ scalar_samples.append(
+ (float(tc), np.asarray(vertex_scalar, dtype=np.float32))
+ )
+
+ if not scalar_samples:
+ raise ValueError(f"No valid data found for primvar '{source_primvar}'")
+
+ # Compute global value range
+ all_values = np.concatenate([s for _, s in scalar_samples])
+ vmin = float(np.min(all_values))
+ vmax = float(np.max(all_values))
+ self.log_info(f"Value range: {vmin:.6g} to {vmax:.6g}")
+
+ # Apply colormap to each time sample
+ try:
+ cmap_obj = mpl_colormaps[cmap]
+ except KeyError:
+ raise ValueError(
+ f"Colormap '{cmap}' not found. "
+ f"Available: {', '.join(list(mpl_colormaps.keys())[:10])}..."
+ )
+
+ # Create or get displayColor primvar
+ from pxr import Gf, Sdf, Vt
+
+ display_color_pv = primvars_api.CreatePrimvar(
+ "displayColor", Sdf.ValueTypeNames.Color3fArray, UsdGeom.Tokens.vertex
+ )
+ # If we're rewriting displayColor, clear any previously-authored time samples first.
+ # This prevents leaving behind stale/corrupt samples at times we no longer author.
+ try:
+ dc_attr = display_color_pv.GetAttr()
+ for t in list(dc_attr.GetTimeSamples()):
+ dc_attr.ClearAtTime(t)
+ except Exception:
+ pass
+
+ for idx, (tc, scalar) in enumerate(scalar_samples):
+ # Normalize to [0, 1]
+ if vmax > vmin:
+ normalized = (scalar - vmin) / (vmax - vmin)
+ else:
+ normalized = np.full_like(scalar, 0.5)
+ normalized = np.clip(normalized, 0.0, 1.0)
+
+ # Apply colormap
+ rgba = cmap_obj(normalized)
+ rgb = rgba[:, :3].astype(np.float32)
+ if len(rgb) != n_points:
+ self.log_warning(
+ f"Skipping displayColor write at time {tc}: "
+ f"color length {len(rgb)} != n_points {n_points}"
+ )
+ continue
+
+ # Convert to USD Vec3f array
+ color_array = Vt.Vec3fArray(
+ [Gf.Vec3f(float(c[0]), float(c[1]), float(c[2])) for c in rgb]
+ )
+
+ time_code = Usd.TimeCode(tc)
+
+ # Write default at t=0 for Omniverse compatibility
+ if write_default_at_t0 and idx == 0:
+ display_color_pv.Set(color_array)
+
+ # Write time sample
+ display_color_pv.Set(color_array, time_code)
+
+ self.log_info(f"Wrote displayColor primvar with {len(time_codes)} time samples")
+
+ # Bind vertex color material if requested
+ if bind_vertex_color_material:
+ self._ensure_vertex_color_material(stage, mesh_prim)
+
+ # Save stage if we opened it from a path
+ if stage_path:
+ stage.Save()
+ self.log_info(f"Saved USD file: {stage_path}")
+
def repair_mesh_primvar_element_sizes(
    self,
    stage_or_path: Usd.Stage | str,
    mesh_path: str,
    *,
    time_code: float | None = None,
    save: bool = True,
) -> dict:
    """Infer and author missing primvar ``elementSize`` metadata on a mesh.

    Multi-component primvars (e.g. 9-component stress tensors) are sometimes
    authored as a flat float array without ``elementSize``; viewers then read
    them as extra points rather than tuples-per-point, which can crash
    Omniverse/Hydra during animation evaluation.

    The inference is purely arithmetic:
      - vertex primvars:  elementSize = raw_len / n_points (when divisible)
      - uniform primvars: elementSize = raw_len / n_faces  (when divisible)
      - nothing is changed unless the inferred elementSize is > 1

    Args:
        stage_or_path: Open ``Usd.Stage`` or path to a USD file.
        mesh_path: Prim path of the mesh to repair.
        time_code: Optional time at which to sample; defaults to the
            default time code.
        save: Save the stage afterwards (only when opened from a path).

    Returns:
        dict with keys: updated (list), skipped (list)
    """
    if isinstance(stage_or_path, str):
        stage = Usd.Stage.Open(stage_or_path)
        stage_path = stage_or_path
    else:
        stage = stage_or_path
        stage_path = None

    mesh_prim = stage.GetPrimAtPath(mesh_path)
    if not mesh_prim.IsValid() or not mesh_prim.IsA(UsdGeom.Mesh):
        raise ValueError(f"Invalid mesh prim at path: {mesh_path}")

    mesh = UsdGeom.Mesh(mesh_prim)
    sample_time = (
        Usd.TimeCode(time_code) if time_code is not None else Usd.TimeCode.Default()
    )

    # Determine point count, falling back to the first authored time sample
    # when nothing is authored at the requested time.
    points = mesh.GetPointsAttr().Get(sample_time)
    if points is None:
        point_samples = mesh.GetPointsAttr().GetTimeSamples()
        if point_samples:
            points = mesh.GetPointsAttr().Get(Usd.TimeCode(point_samples[0]))
    n_points = 0 if points is None else len(points)

    counts = mesh.GetFaceVertexCountsAttr().Get()
    n_faces = 0 if counts is None else len(counts)

    updated: list[dict] = []
    skipped: list[dict] = []

    for primvar in UsdGeom.PrimvarsAPI(mesh).GetPrimvars():
        name = primvar.GetName()
        interp = primvar.GetInterpolation()
        if interp not in ("vertex", "uniform"):
            skipped.append({"name": name, "reason": f"interp={interp}"})
            continue

        expected = n_points if interp == "vertex" else n_faces
        if expected <= 0:
            skipped.append({"name": name, "reason": "no topology"})
            continue

        # Sample the primvar at its first authored time if any, otherwise
        # at the requested time.
        authored = primvar.GetAttr().GetTimeSamples()
        value = primvar.Get(Usd.TimeCode(authored[0]) if authored else sample_time)
        if value is None:
            skipped.append({"name": name, "reason": "no value"})
            continue

        raw_len = len(value)
        current_elem = int(primvar.GetElementSize() or 1)
        eff_len = raw_len // current_elem if current_elem else raw_len

        if eff_len == expected:
            skipped.append({"name": name, "reason": "already consistent"})
            continue

        if raw_len % expected != 0:
            skipped.append(
                {
                    "name": name,
                    "reason": f"not divisible (raw={raw_len}, exp={expected})",
                }
            )
            continue

        inferred = raw_len // expected
        if inferred <= 1:
            skipped.append({"name": name, "reason": "inferred<=1"})
            continue

        try:
            primvar.SetElementSize(int(inferred))
            updated.append(
                {
                    "name": name,
                    "interp": interp,
                    "raw_len": raw_len,
                    "exp": expected,
                    "old_elementSize": current_elem,
                    "new_elementSize": int(inferred),
                }
            )
        except Exception as e:
            skipped.append(
                {"name": name, "reason": f"SetElementSize failed: {e}"}
            )

    if stage_path and save:
        stage.Save()
        self.log_info(f"Saved USD file: {stage_path}")

    return {"updated": updated, "skipped": skipped}
+
+ def _uniform_to_vertex_scalar(
+ self,
+ face_scalar: np.ndarray,
+ face_vertex_counts: Sequence[int] | np.ndarray,
+ face_vertex_indices: Sequence[int] | np.ndarray,
+ n_points: int,
+ ) -> np.ndarray:
+ """
+ Convert per-face scalar data to per-vertex by averaging incident faces.
+
+ Args:
+ face_scalar: Scalar value per face
+ face_vertex_counts: Number of vertices per face
+ face_vertex_indices: Flattened vertex indices for all faces
+ n_points: Total number of vertices in mesh
+
+ Returns:
+ np.ndarray: Scalar value per vertex
+ """
+ from typing import cast
+
+ # (mypy) numpy stubs often treat np.asarray(...) as Any, so cast explicitly.
+ counts_arr = cast(np.ndarray, np.asarray(face_vertex_counts, dtype=np.int32))
+ indices_arr = cast(np.ndarray, np.asarray(face_vertex_indices, dtype=np.int32))
+
+ # Create face ID for each vertex reference
+ face_ids = np.repeat(np.arange(len(counts_arr)), counts_arr)
+
+ # Accumulate values at each vertex
+ acc = np.zeros(n_points, dtype=np.float64)
+ cnt = np.zeros(n_points, dtype=np.int32)
+
+ np.add.at(acc, indices_arr, face_scalar[face_ids])
+ np.add.at(cnt, indices_arr, 1)
+
+ # Average
+ vertex_scalar = acc / np.maximum(cnt, 1)
+ return cast(np.ndarray, vertex_scalar.astype(np.float32))
+
def _ensure_vertex_color_material(
    self, stage: Usd.Stage, mesh_prim: Usd.Prim
) -> None:
    """Bind a shared vertex-color preview material to the mesh.

    Reuses ``/World/Looks/VertexColorMaterial`` when it is already on the
    stage; otherwise authors a UsdPreviewSurface whose diffuseColor is
    driven by a UsdPrimvarReader_float3 reading the ``displayColor``
    primvar, following Omniverse best practices.

    Args:
        stage: USD Stage
        mesh_prim: Mesh prim to bind material to
    """
    from pxr import Sdf

    material_name = "VertexColorMaterial"
    material_path = f"/World/Looks/{material_name}"

    existing = stage.GetPrimAtPath(material_path)
    if existing.IsValid() and existing.IsA(UsdShade.Material):
        # Authored on an earlier call — just rebind below.
        material = UsdShade.Material(existing)
        self.log_debug(f"Reusing existing material: {material_path}")
    else:
        # Make sure the enclosing Looks scope exists.
        if not stage.GetPrimAtPath("/World/Looks").IsValid():
            stage.DefinePrim("/World/Looks", "Scope")

        material = UsdShade.Material.Define(stage, material_path)

        # Preview surface shader.
        surface = UsdShade.Shader.Define(stage, f"{material_path}/PreviewSurface")
        surface.CreateIdAttr("UsdPreviewSurface")

        # Primvar reader that feeds displayColor into the shader.
        reader = UsdShade.Shader.Define(
            stage, f"{material_path}/PrimvarReader_displayColor"
        )
        reader.CreateIdAttr("UsdPrimvarReader_float3")
        reader.CreateInput("varname", Sdf.ValueTypeNames.Token).Set("displayColor")

        # reader.result -> surface.diffuseColor
        surface.CreateInput(
            "diffuseColor", Sdf.ValueTypeNames.Color3f
        ).ConnectToSource(
            reader.CreateOutput("result", Sdf.ValueTypeNames.Color3f)
        )

        surface.CreateInput("roughness", Sdf.ValueTypeNames.Float).Set(0.5)
        surface.CreateInput("metallic", Sdf.ValueTypeNames.Float).Set(0.0)

        # surface.surface -> material surface output.
        material.CreateSurfaceOutput().ConnectToSource(
            surface.CreateOutput("surface", Sdf.ValueTypeNames.Token)
        )

        self.log_info(f"Created vertex color material: {material_path}")

    UsdShade.MaterialBindingAPI.Apply(mesh_prim).Bind(material)
    self.log_debug(f"Bound material to mesh: {mesh_prim.GetPath()}")
diff --git a/src/physiomotion4d/vtk_to_usd/README.md b/src/physiomotion4d/vtk_to_usd/README.md
new file mode 100644
index 0000000..5bf2679
--- /dev/null
+++ b/src/physiomotion4d/vtk_to_usd/README.md
@@ -0,0 +1,314 @@
+# VTK to USD Converter Library
+
+A comprehensive Python library for converting VTK files (VTK, VTP, VTU) to USD (Universal Scene Description) format. Based on the architecture of NVIDIA's ParaViewConnector for Omniverse, but simplified for file-based conversion without ParaView or Qt dependencies.
+
+## Features
+
+### File Format Support
+- **Legacy VTK** (`.vtk`): Binary and ASCII formats
+- **XML PolyData** (`.vtp`): Surface meshes
+- **XML UnstructuredGrid** (`.vtu`): Volumetric meshes (with surface extraction)
+
+### Data Preservation
+- **Geometry**: Points, faces, topology
+- **Normals**: Automatic computation or preservation from source
+- **Colors**: Vertex colors (RGB/RGBA)
+- **Data Arrays**: All VTK point and cell data arrays converted to USD primvars
+- **Time-Series**: Support for animated/time-varying data
+
+### USD Features
+- **Materials**: UsdPreviewSurface with customizable properties
+- **Primvars**: VTK data arrays → USD primvars with appropriate interpolation
+- **Coordinate Systems**: Automatic conversion from RAS (medical imaging) to USD Y-up
+- **Time Sampling**: Efficient time-varying attribute encoding
+
+### Architecture
+
+The library is organized into modular components inspired by ParaViewConnector:
+
+```
+vtk_to_usd/
+├── data_structures.py # Data containers (MeshData, MaterialData, etc.)
+├── vtk_reader.py # VTK file readers (VTK, VTP, VTU)
+├── usd_utils.py # USD utility functions (coordinate conversion, primvars)
+├── material_manager.py # Material creation and binding
+├── usd_mesh_converter.py # Mesh conversion to USD
+├── converter.py # High-level API
+└── __init__.py # Public exports
+```
+
+## Installation
+
+The library is part of the PhysioMotion4D package. Ensure you have the required dependencies:
+
+```bash
+pip install vtk usd-core numpy  # the `usd-core` package provides the `pxr` module
+```
+
+## Quick Start
+
+### Simple Conversion
+
+```python
+from physiomotion4d.vtk_to_usd import convert_vtk_file
+
+# Convert a single file
+stage = convert_vtk_file('mesh.vtp', 'output.usd')
+```
+
+### With Custom Settings
+
+```python
+from physiomotion4d.vtk_to_usd import VTKToUSDConverter, ConversionSettings, MaterialData
+
+# Configure conversion
+settings = ConversionSettings(
+ triangulate_meshes=True,
+ compute_normals=True,
+ preserve_point_arrays=True,
+ meters_per_unit=0.001, # mm to meters
+)
+
+# Define material
+material = MaterialData(
+ name="my_material",
+ diffuse_color=(0.8, 0.3, 0.3),
+ roughness=0.4,
+)
+
+# Convert
+converter = VTKToUSDConverter(settings)
+stage = converter.convert_file(
+ vtk_file='mesh.vtp',
+ output_usd='output.usd',
+ material=material,
+)
+```
+
+### Time-Series Data
+
+```python
+from physiomotion4d.vtk_to_usd import VTKToUSDConverter
+
+converter = VTKToUSDConverter()
+
+# Convert sequence of VTK files
+files = ['frame_0.vtp', 'frame_1.vtp', 'frame_2.vtp']
+time_codes = [0.0, 0.1, 0.2] # seconds
+
+stage = converter.convert_sequence(
+ vtk_files=files,
+ output_usd='animated.usd',
+ time_codes=time_codes,
+)
+```
+
+### Direct MeshData Conversion
+
+```python
+from physiomotion4d.vtk_to_usd import read_vtk_file, VTKToUSDConverter
+
+# Read VTK file
+mesh_data = read_vtk_file('mesh.vtp')
+
+# Inspect data
+print(f"Points: {len(mesh_data.points)}")
+print(f"Faces: {len(mesh_data.face_vertex_counts)}")
+print(f"Data arrays: {len(mesh_data.generic_arrays)}")
+
+for array in mesh_data.generic_arrays:
+ print(f" - {array.name}: {array.num_components} components, {array.data_type}")
+
+# Convert to USD
+converter = VTKToUSDConverter()
+stage = converter.convert_mesh_data(mesh_data, 'output.usd')
+```
+
+## API Reference
+
+### ConversionSettings
+
+Configuration for conversion process:
+
+```python
+@dataclass
+class ConversionSettings:
+ # Output settings
+ output_binary: bool = False # Binary or ASCII USD
+ meters_per_unit: float = 1.0 # Unit scale
+ up_axis: str = "Y" # "Y" or "Z"
+
+ # Mesh processing
+ triangulate_meshes: bool = True # Convert all faces to triangles
+ compute_normals: bool = True # Compute normals if missing
+ preserve_point_arrays: bool = True # Keep point data arrays
+ preserve_cell_arrays: bool = True # Keep cell data arrays
+
+ # Material settings
+ use_preview_surface: bool = True # Use UsdPreviewSurface
+ default_color: tuple = (0.8, 0.8, 0.8)
+
+ # Time settings
+ times_per_second: float = 24.0 # FPS for animation
+ use_time_samples: bool = True # Use time sampling
+
+ # Array prefixes
+ point_array_prefix: str = "vtk_point_"
+ cell_array_prefix: str = "vtk_cell_"
+```
+
+### MaterialData
+
+Material properties:
+
+```python
+@dataclass
+class MaterialData:
+ name: str = "default_material"
+ diffuse_color: tuple[float, float, float] = (0.8, 0.8, 0.8)
+ specular_color: tuple[float, float, float] = (0.0, 0.0, 0.0)
+ emissive_color: tuple[float, float, float] = (0.0, 0.0, 0.0)
+ opacity: float = 1.0
+ roughness: float = 0.5
+ metallic: float = 0.0
+ ior: float = 1.5
+ use_vertex_colors: bool = False
+```
+
+### MeshData
+
+Mesh geometry and data:
+
+```python
+@dataclass
+class MeshData:
+ points: NDArray # (N, 3) array
+ face_vertex_counts: NDArray # (F,) array
+ face_vertex_indices: NDArray # Flat array of indices
+ normals: Optional[NDArray] = None # (N, 3) or facevarying
+ uvs: Optional[NDArray] = None # (N, 2) texture coordinates
+ colors: Optional[NDArray] = None # (N, 3) or (N, 4) vertex colors
+ generic_arrays: list[GenericArray] = field(default_factory=list) # Data arrays
+ material_id: str = "default_material"
+```
+
+### VTKToUSDConverter
+
+Main converter class:
+
+- `convert_file(vtk_file, output_usd, **kwargs)`: Convert single file
+- `convert_sequence(vtk_files, output_usd, time_codes, **kwargs)`: Convert time series
+- `convert_mesh_data(mesh_data, output_usd, **kwargs)`: Convert MeshData
+- `convert_mesh_data_sequence(mesh_data_list, output_usd, **kwargs)`: Convert MeshData sequence
+
+## Data Array Handling
+
+VTK data arrays are automatically converted to USD primvars with appropriate types and interpolation:
+
+### Point Data → Vertex Primvars
+- Interpolation: `vertex`
+- Naming: `vtk_point_`
+- Example: `vtk_point_pressure`, `vtk_point_temperature`
+
+### Cell Data → Uniform Primvars
+- Interpolation: `uniform` (per-face)
+- Naming: `vtk_cell_`
+- Example: `vtk_cell_region_id`
+
+### Type Mapping
+
+| VTK Type | USD Type | Components |
+| ------------ | ----------- | ---------- |
+| Float/Double | FloatArray | 1 |
+| Float/Double | Float2Array | 2 |
+| Float/Double | Float3Array | 3 |
+| Float/Double | Float4Array | 4 |
+| Int/Long | IntArray | 1-4 |
+| UInt | UIntArray | 1+ |
+| UChar/Char | UCharArray | 1+ |
+
+## Coordinate System Conversion
+
+The library automatically converts from RAS (Right-Anterior-Superior) coordinate system used in medical imaging to USD's Y-up coordinate system:
+
+**RAS (Medical Imaging):**
+- X: Patient's right
+- Y: Patient's anterior (front)
+- Z: Patient's superior (head)
+
+**USD Y-up:**
+- X: Right
+- Y: Up
+- Z: Back (toward camera)
+
+**Conversion:** `USD(x, y, z) = RAS(x, z, -y)`
+
+## Design Principles
+
+Based on ParaViewConnector but adapted for file-based conversion:
+
+1. **No Omniverse Dependencies**: Pure file-based USD output
+2. **No ParaView/Qt**: Direct VTK API usage
+3. **Modular Architecture**: Separate concerns (reading, conversion, materials)
+4. **Data Preservation**: All VTK arrays preserved as primvars
+5. **Standards Compliant**: Uses UsdPreviewSurface and standard USD schemas
+
+## Comparison with ParaViewConnector
+
+| Feature | ParaViewConnector | vtk_to_usd |
+| ---------------- | ----------------------- | ----------------------- |
+| **Input** | ParaView proxies | VTK files |
+| **Output** | Omniverse/Files | Files only |
+| **Dependencies** | ParaView, Qt, Omniverse | VTK, USD |
+| **Use Case** | Interactive pipeline | Batch conversion |
+| **Materials** | MDL + PreviewSurface | PreviewSurface |
+| **Time Series** | Full clip system | Time-sampled attributes |
+| **Volumes** | OpenVDB support | Surface extraction |
+
+## Examples
+
+See `experiments/convert_vtk_to_usd_lib/test_vtk_to_usd_converter.ipynb` for comprehensive examples including:
+
+1. Basic file conversion
+2. Data array inspection
+3. Custom materials and settings
+4. Time-series animation
+5. USD file verification
+
+## Testing
+
+Run the test notebook to verify the installation:
+
+```bash
+cd experiments/convert_vtk_to_usd_lib
+jupyter notebook test_vtk_to_usd_converter.ipynb
+```
+
+## Known Limitations
+
+1. **Volumetric Meshes**: VTU files are converted to surface meshes (via extract_surface)
+2. **Complex Materials**: Only UsdPreviewSurface supported (no MDL)
+3. **Topology Changes**: Time-varying topology requires separate prims per frame
+4. **Large Datasets**: Memory-limited (entire mesh loaded at once)
+
+## Future Enhancements
+
+Potential improvements based on ParaViewConnector:
+
+- [ ] OpenVDB volume support
+- [ ] Clip-based time management for varying topology
+- [ ] MDL material support
+- [ ] Texture coordinate generation
+- [ ] Point cloud support (UsdGeomPoints)
+- [ ] Curve/line support (UsdGeomBasisCurves)
+- [ ] Streaming for large datasets
+
+## License
+
+Part of the PhysioMotion4D project.
+
+## References
+
+- ParaViewConnector: https://github.com/NVIDIA-Omniverse/ParaViewConnector
+- USD Documentation: https://openusd.org/
+- VTK Documentation: https://vtk.org/
diff --git a/src/physiomotion4d/vtk_to_usd/__init__.py b/src/physiomotion4d/vtk_to_usd/__init__.py
new file mode 100644
index 0000000..e39f439
--- /dev/null
+++ b/src/physiomotion4d/vtk_to_usd/__init__.py
@@ -0,0 +1,88 @@
+"""VTK to USD conversion library.
+
+A comprehensive library for converting VTK files (VTK, VTP, VTU) to USD format.
+Based on the ParaViewConnector architecture but simplified for file-based conversion.
+
+Features:
+- Supports VTK legacy format (.vtk), XML PolyData (.vtp), and UnstructuredGrid (.vtu)
+- Preserves geometry, topology, normals, and colors
+- Converts VTK data arrays to USD primvars
+- Supports time-series/animated data
+- Material system with UsdPreviewSurface
+- Coordinate conversion from RAS to USD Y-up
+
+Example Usage:
+ >>> from physiomotion4d.vtk_to_usd import convert_vtk_file
+ >>> stage = convert_vtk_file('mesh.vtp', 'output.usd')
+
+ >>> # Advanced usage with custom settings
+ >>> from physiomotion4d.vtk_to_usd import VTKToUSDConverter, ConversionSettings
+ >>> settings = ConversionSettings(triangulate_meshes=True, compute_normals=True)
+ >>> converter = VTKToUSDConverter(settings)
+ >>> converter.convert_sequence(['mesh_0.vtp', 'mesh_1.vtp'], 'output.usd')
+"""
+
+from .converter import VTKToUSDConverter, convert_vtk_file, convert_vtk_sequence
+from .data_structures import (
+ ConversionSettings,
+ DataType,
+ GenericArray,
+ MaterialData,
+ MeshData,
+ TimeStepData,
+ VolumeData,
+)
+from .material_manager import MaterialManager
+from .usd_mesh_converter import UsdMeshConverter
+from .usd_utils import (
+ compute_mesh_extent,
+ create_primvar,
+ ras_normals_to_usd,
+ ras_points_to_usd,
+ ras_to_usd,
+ sanitize_primvar_name,
+ triangulate_face,
+)
+from .vtk_reader import (
+ LegacyVTKReader,
+ PolyDataReader,
+ UnstructuredGridReader,
+ VTKReader,
+ read_vtk_file,
+ validate_time_series_topology,
+)
+
# Public API of the vtk_to_usd package; names listed here are what
# `from physiomotion4d.vtk_to_usd import *` exposes.
__all__ = [
    # Main converter
    "VTKToUSDConverter",
    "convert_vtk_file",
    "convert_vtk_sequence",
    # Data structures
    "ConversionSettings",
    "DataType",
    "GenericArray",
    "MaterialData",
    "MeshData",
    "TimeStepData",
    "VolumeData",
    # Managers
    "MaterialManager",
    "UsdMeshConverter",
    # Utilities
    "ras_to_usd",
    "ras_points_to_usd",
    "ras_normals_to_usd",
    "create_primvar",
    "sanitize_primvar_name",
    "triangulate_face",
    "compute_mesh_extent",
    # Readers
    "VTKReader",
    "PolyDataReader",
    "LegacyVTKReader",
    "UnstructuredGridReader",
    "read_vtk_file",
    "validate_time_series_topology",
]

# Library version (semantic versioning).
__version__ = "0.1.0"
diff --git a/src/physiomotion4d/vtk_to_usd/converter.py b/src/physiomotion4d/vtk_to_usd/converter.py
new file mode 100644
index 0000000..a63b186
--- /dev/null
+++ b/src/physiomotion4d/vtk_to_usd/converter.py
@@ -0,0 +1,393 @@
+"""Main VTK to USD converter interface.
+
+Provides high-level API for converting VTK files to USD format.
+"""
+
+import logging
+from pathlib import Path
+from typing import Any, Optional
+
+from pxr import Usd, UsdGeom
+
+from .data_structures import ConversionSettings, MaterialData, MeshData
+from .material_manager import MaterialManager
+from .usd_mesh_converter import UsdMeshConverter
+from .vtk_reader import read_vtk_file
+
+logger = logging.getLogger(__name__)
+
+
class VTKToUSDConverter:
    """High-level converter for VTK files to USD.

    Provides a simple API for converting single or multiple VTK files (or
    pre-read MeshData) to USD format. Handles material creation, primvar
    mapping, and time-series data.

    All four public ``convert_*`` methods share one pipeline —
    prepare mesh data -> create stage -> author material -> author mesh
    prim(s) -> save. The file-based entry points read the files and then
    delegate to the MeshData-based ones, so the pipeline is implemented
    exactly once.

    Example:
        >>> converter = VTKToUSDConverter()
        >>> converter.convert_file('mesh.vtp', 'output.usd')

        >>> # Time-series conversion
        >>> converter = VTKToUSDConverter()
        >>> files = ['mesh_0.vtp', 'mesh_1.vtp', 'mesh_2.vtp']
        >>> converter.convert_sequence(files, 'output.usd')
    """

    def __init__(self, settings: Optional[ConversionSettings] = None) -> None:
        """Initialize converter.

        Args:
            settings: Optional conversion settings. If None, uses defaults.
        """
        self.settings = settings or ConversionSettings()
        # Populated by _create_stage() at the start of each conversion.
        self.stage: Optional[Usd.Stage] = None
        self.material_mgr: Optional[MaterialManager] = None
        self.mesh_converter: Optional[UsdMeshConverter] = None

    def convert_file(
        self,
        vtk_file: str | Path,
        output_usd: str | Path,
        mesh_name: str = "Mesh",
        material: Optional[MaterialData] = None,
        extract_surface: bool = True,
    ) -> Usd.Stage:
        """Convert a single VTK file to USD.

        Args:
            vtk_file: Path to VTK file (.vtk, .vtp, or .vtu)
            output_usd: Path to output USD file
            mesh_name: Name for the mesh in USD
            material: Optional material data. If None, uses default.
            extract_surface: For .vtu files, whether to extract surface

        Returns:
            Usd.Stage: Created USD stage
        """
        logger.info(f"Converting {vtk_file} to {output_usd}")
        mesh_data = read_vtk_file(vtk_file, extract_surface=extract_surface)
        # Delegate to the MeshData path; it applies the material, authors
        # the mesh, and saves the stage.
        return self.convert_mesh_data(
            mesh_data, output_usd, mesh_name=mesh_name, material=material
        )

    def convert_sequence(
        self,
        vtk_files: list[str | Path],
        output_usd: str | Path,
        mesh_name: str = "Mesh",
        time_codes: Optional[list[float]] = None,
        material: Optional[MaterialData] = None,
        extract_surface: bool = True,
    ) -> Usd.Stage:
        """Convert a sequence of VTK files to time-varying USD.

        Args:
            vtk_files: List of VTK file paths (one per time step)
            output_usd: Path to output USD file
            mesh_name: Name for the mesh in USD
            time_codes: Optional list of time codes. If None, uses sequential integers.
            material: Optional material data. If None, uses default.
            extract_surface: For .vtu files, whether to extract surface

        Returns:
            Usd.Stage: Created USD stage

        Raises:
            ValueError: If ``vtk_files`` is empty, or ``time_codes`` is
                given with a different length than ``vtk_files``.
        """
        if len(vtk_files) == 0:
            raise ValueError("Empty file list")

        # Validate before doing any (potentially slow) file reading.
        if time_codes is not None and len(time_codes) != len(vtk_files):
            raise ValueError(
                f"Number of time codes ({len(time_codes)}) must match "
                f"number of files ({len(vtk_files)})"
            )

        logger.info(f"Converting sequence of {len(vtk_files)} files to {output_usd}")
        mesh_data_sequence = [
            read_vtk_file(vtk_file, extract_surface=extract_surface)
            for vtk_file in vtk_files
        ]
        return self.convert_mesh_data_sequence(
            mesh_data_sequence,
            output_usd,
            mesh_name=mesh_name,
            time_codes=time_codes,
            material=material,
        )

    def convert_mesh_data(
        self,
        mesh_data: MeshData,
        output_usd: str | Path,
        mesh_name: str = "Mesh",
        material: Optional[MaterialData] = None,
    ) -> Usd.Stage:
        """Convert MeshData directly to USD.

        Useful when you already have MeshData from other sources.

        Args:
            mesh_data: Mesh data to convert
            output_usd: Path to output USD file
            mesh_name: Name for the mesh in USD
            material: Optional material data

        Returns:
            Usd.Stage: Created USD stage
        """
        logger.info(f"Converting MeshData to {output_usd}")

        if material is not None:
            mesh_data.material_id = material.name

        stage, mesh_converter = self._begin_stage(output_usd, material)

        mesh_path = self._prepare_mesh_path(mesh_name)
        mesh_converter.create_mesh(mesh_data, mesh_path, bind_material=True)

        return self._save_stage(stage, output_usd)

    def convert_mesh_data_sequence(
        self,
        mesh_data_sequence: list[MeshData],
        output_usd: str | Path,
        mesh_name: str = "Mesh",
        time_codes: Optional[list[float]] = None,
        material: Optional[MaterialData] = None,
    ) -> Usd.Stage:
        """Convert sequence of MeshData to time-varying USD.

        Args:
            mesh_data_sequence: List of MeshData (one per time step)
            output_usd: Path to output USD file
            mesh_name: Name for the mesh in USD
            time_codes: Optional list of time codes
            material: Optional material data

        Returns:
            Usd.Stage: Created USD stage

        Raises:
            ValueError: If the sequence is empty, or ``time_codes`` is
                given with a different length than the sequence.
        """
        if len(mesh_data_sequence) == 0:
            raise ValueError("Empty mesh data sequence")

        logger.info(
            f"Converting sequence of {len(mesh_data_sequence)} MeshData to {output_usd}"
        )

        # Generate sequential integer time codes if not provided.
        if time_codes is None:
            time_codes = [float(i) for i in range(len(mesh_data_sequence))]
        elif len(time_codes) != len(mesh_data_sequence):
            raise ValueError(
                f"Number of time codes ({len(time_codes)}) must match "
                f"number of mesh data ({len(mesh_data_sequence)})"
            )

        if material is not None:
            for mesh_data in mesh_data_sequence:
                mesh_data.material_id = material.name

        stage, mesh_converter = self._begin_stage(output_usd, material)

        # Author the animation range on the stage.
        stage.SetStartTimeCode(time_codes[0])
        stage.SetEndTimeCode(time_codes[-1])
        stage.SetTimeCodesPerSecond(self.settings.times_per_second)

        mesh_path = self._prepare_mesh_path(mesh_name)
        mesh_converter.create_time_varying_mesh(
            mesh_data_sequence, mesh_path, time_codes, bind_material=True
        )

        return self._save_stage(stage, output_usd)

    def _begin_stage(
        self, output_usd: str | Path, material: Optional[MaterialData]
    ) -> tuple[Usd.Stage, UsdMeshConverter]:
        """Create a fresh stage, author the material on it if given.

        Returns:
            (stage, mesh_converter) narrowed to non-None.
        """
        self._create_stage(output_usd)
        stage = self.stage
        mesh_converter = self.mesh_converter
        material_mgr = self.material_mgr
        # _create_stage() always assigns these; the asserts narrow Optional
        # for the type checker.
        assert stage is not None
        assert mesh_converter is not None
        assert material_mgr is not None

        if material is not None:
            material_mgr.get_or_create_material(material)

        return stage, mesh_converter

    def _prepare_mesh_path(self, mesh_name: str) -> str:
        """Return the prim path for the named mesh, creating ancestor Xforms."""
        mesh_path = f"/World/Meshes/{mesh_name}"
        self._ensure_parent_path(mesh_path)
        return mesh_path

    def _save_stage(self, stage: Usd.Stage, output_usd: str | Path) -> Usd.Stage:
        """Save the stage to disk and return it."""
        stage.Save()
        logger.info(f"Saved USD file: {output_usd}")
        return stage

    def _create_stage(self, output_path: str | Path) -> None:
        """Create a new USD stage and initialize the per-stage managers.

        Args:
            output_path: Path for the USD file
        """
        output_path = Path(output_path)

        # Usd.Stage.CreateNew fails on existing files; remove first.
        if output_path.exists():
            output_path.unlink()
            logger.debug(f"Removed existing file: {output_path}")

        stage = Usd.Stage.CreateNew(str(output_path))
        assert stage is not None
        self.stage = stage

        # Stage metadata from settings.
        UsdGeom.SetStageMetersPerUnit(stage, self.settings.meters_per_unit)
        if self.settings.up_axis.upper() == "Y":
            UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.y)
        else:
            UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.z)

        # Root Xform, also the default prim for referencing.
        root_prim = stage.DefinePrim("/World", "Xform")
        stage.SetDefaultPrim(root_prim)

        # Per-stage managers must be rebuilt for each new stage.
        self.material_mgr = MaterialManager(stage)
        self.mesh_converter = UsdMeshConverter(stage, self.settings, self.material_mgr)

        logger.debug(f"Created USD stage: {output_path}")

    def _ensure_parent_path(self, path: str) -> None:
        """Ensure all parent prims in path exist (authored as Xforms).

        Args:
            path: USD path (e.g., "/World/Meshes/MyMesh")
        """
        parts = path.strip("/").split("/")
        current_path = ""
        stage = self.stage
        assert stage is not None
        for part in parts[:-1]:  # Skip the last part (the actual mesh)
            current_path += f"/{part}"
            if not stage.GetPrimAtPath(current_path):
                stage.DefinePrim(current_path, "Xform")
+
+
+# Convenience functions
+
+
def convert_vtk_file(
    vtk_file: str | Path,
    output_usd: str | Path,
    settings: Optional[ConversionSettings] = None,
    **kwargs: Any,
) -> Usd.Stage:
    """Convert a single VTK file to USD in one call.

    Thin wrapper that builds a throwaway VTKToUSDConverter and forwards to
    its convert_file() method.

    Args:
        vtk_file: Path to VTK file
        output_usd: Path to output USD file
        settings: Optional conversion settings
        **kwargs: Additional arguments passed to convert_file()

    Returns:
        Usd.Stage: Created USD stage
    """
    return VTKToUSDConverter(settings).convert_file(vtk_file, output_usd, **kwargs)
+
+
def convert_vtk_sequence(
    vtk_files: list[str | Path],
    output_usd: str | Path,
    settings: Optional[ConversionSettings] = None,
    **kwargs: Any,
) -> Usd.Stage:
    """Convert a sequence of VTK files to USD in one call.

    Thin wrapper that builds a throwaway VTKToUSDConverter and forwards to
    its convert_sequence() method.

    Args:
        vtk_files: List of VTK file paths
        output_usd: Path to output USD file
        settings: Optional conversion settings
        **kwargs: Additional arguments passed to convert_sequence()

    Returns:
        Usd.Stage: Created USD stage
    """
    return VTKToUSDConverter(settings).convert_sequence(vtk_files, output_usd, **kwargs)
diff --git a/src/physiomotion4d/vtk_to_usd/data_structures.py b/src/physiomotion4d/vtk_to_usd/data_structures.py
new file mode 100644
index 0000000..0fd81dd
--- /dev/null
+++ b/src/physiomotion4d/vtk_to_usd/data_structures.py
@@ -0,0 +1,146 @@
+"""Data structures for VTK to USD conversion.
+
+Based on OmniConnectData from ParaViewConnector but simplified for file-based conversion.
+"""
+
+from dataclasses import dataclass, field
+from enum import Enum
+from typing import Optional
+
+from numpy.typing import NDArray
+
+
class DataType(Enum):
    """Data type enumeration for generic arrays.

    String values mirror VTK scalar type names. VTKReader maps VTK type
    constants onto these members, and usd_utils maps them onto the matching
    Vt array and Sdf value types when authoring primvars.
    """

    UCHAR = "uchar"  # unsigned 8-bit
    CHAR = "char"  # signed 8-bit
    USHORT = "ushort"  # unsigned 16-bit
    SHORT = "short"  # signed 16-bit
    UINT = "uint"  # unsigned 32-bit
    INT = "int"  # signed 32-bit
    ULONG = "ulong"  # unsigned 64-bit
    LONG = "long"  # signed 64-bit
    FLOAT = "float"  # 32-bit float
    DOUBLE = "double"  # 64-bit float
+
+
@dataclass
class GenericArray:
    """Generic data array that can be converted to a USD primvar.

    A named block of data with an element type and a per-element component
    count. Mimics OmniConnectGenericArray from ParaViewConnector.
    """

    name: str
    data: NDArray
    num_components: int
    data_type: DataType
    interpolation: str = "vertex"  # 'vertex', 'uniform' (per-face), 'constant'

    def __post_init__(self) -> None:
        """Reject data whose shape disagrees with num_components."""
        # Two layouts are accepted: a flat 1-D array of scalars, or a 2-D
        # array whose second axis matches the declared component count.
        is_scalar = self.data.ndim == 1 and self.num_components == 1
        is_vector = self.data.ndim == 2 and self.data.shape[1] == self.num_components
        if not (is_scalar or is_vector):
            raise ValueError(
                f"Data shape {self.data.shape} incompatible with "
                f"num_components={self.num_components}"
            )
+
+
@dataclass
class MaterialData:
    """Material properties for USD conversion.

    Mimics OmniConnectMaterialData from ParaViewConnector. Consumed by
    MaterialManager.create_material(), which maps these fields onto
    UsdPreviewSurface shader inputs.
    """

    name: str = "default_material"  # also serves as the material cache key
    diffuse_color: tuple[float, float, float] = (0.8, 0.8, 0.8)  # RGB in [0, 1]
    specular_color: tuple[float, float, float] = (0.0, 0.0, 0.0)  # authored only if non-zero
    emissive_color: tuple[float, float, float] = (0.0, 0.0, 0.0)  # authored only if non-zero
    opacity: float = 1.0  # 1.0 = fully opaque
    roughness: float = 0.5
    metallic: float = 0.0
    ior: float = 1.5  # index of refraction
    use_vertex_colors: bool = False  # wire displayColor primvar into diffuse instead of diffuse_color
+
+
@dataclass
class MeshData:
    """Mesh geometry data for USD conversion.

    Mimics OmniConnectMeshData from ParaViewConnector.
    """

    points: NDArray  # Shape: (N, 3)
    face_vertex_counts: NDArray  # Shape: (F,)
    face_vertex_indices: NDArray  # Shape: (sum(face_vertex_counts),)
    normals: Optional[NDArray] = None  # Shape: (N, 3) or (sum(face_vertex_counts), 3)
    uvs: Optional[NDArray] = None  # Shape: (N, 2) or (sum(face_vertex_counts), 2)
    colors: Optional[NDArray] = None  # Shape: (N, 3) or (N, 4)
    generic_arrays: list[GenericArray] = field(default_factory=list)
    material_id: str = "default_material"  # key into MaterialManager's cache

    def __post_init__(self) -> None:
        """Validate that points form an (N, 3) array.

        Raises:
            ValueError: If points is not a 2-D array with exactly 3 columns.
        """
        # Check ndim before indexing shape[1]: on a 0-D/1-D array the old
        # check raised IndexError instead of the intended ValueError.
        if self.points.ndim != 2 or self.points.shape[1] != 3:
            raise ValueError(f"Points must have shape (N, 3), got {self.points.shape}")
+
+
@dataclass
class VolumeData:
    """Volume data for USD conversion.

    Mimics OmniConnectVolumeData from ParaViewConnector.
    """

    image_data: NDArray  # 3D scalar array
    spacing: tuple[float, float, float] = (1.0, 1.0, 1.0)  # voxel spacing per axis
    origin: tuple[float, float, float] = (0.0, 0.0, 0.0)  # world position of the first voxel
    scalar_range: Optional[tuple[float, float]] = None  # (min, max); None = not precomputed
    generic_arrays: list[GenericArray] = field(default_factory=list)  # extra per-voxel arrays
+
+
@dataclass
class TimeStepData:
    """All mesh, volume, and material payloads for a single time step."""

    time_code: float  # USD time code this step is authored at
    meshes: dict[str, MeshData] = field(default_factory=dict)
    volumes: dict[str, VolumeData] = field(default_factory=dict)
    materials: dict[str, MaterialData] = field(default_factory=dict)
+
+
@dataclass
class ConversionSettings:
    """Settings for VTK to USD conversion.

    A single bag of options shared by the converter, the mesh converter,
    and the primvar-authoring helpers.
    """

    # Output settings
    output_binary: bool = False  # write .usd (binary) instead of .usda (ASCII)
    meters_per_unit: float = 1.0  # stage metersPerUnit metadata
    up_axis: str = "Y"  # 'Y' or 'Z'

    # Mesh processing
    triangulate_meshes: bool = True  # fan-triangulate quads/polygons
    compute_normals: bool = True  # leave normals to the renderer when none are provided
    preserve_point_arrays: bool = True  # author VTK point-data arrays as vertex primvars
    preserve_cell_arrays: bool = True  # author VTK cell-data arrays as uniform primvars

    # Material settings
    use_preview_surface: bool = True  # author UsdPreviewSurface materials
    default_color: tuple[float, float, float] = (0.8, 0.8, 0.8)

    # Time settings
    times_per_second: float = 24.0  # stage timeCodesPerSecond
    use_time_samples: bool = True  # author time samples vs. a single default value

    # Array prefixes (prepended to sanitized primvar names)
    point_array_prefix: str = "vtk_point_"
    cell_array_prefix: str = "vtk_cell_"
diff --git a/src/physiomotion4d/vtk_to_usd/material_manager.py b/src/physiomotion4d/vtk_to_usd/material_manager.py
new file mode 100644
index 0000000..abdc459
--- /dev/null
+++ b/src/physiomotion4d/vtk_to_usd/material_manager.py
@@ -0,0 +1,180 @@
+"""Material management for USD export.
+
+Creates and manages UsdPreviewSurface materials for mesh rendering.
+"""
+
+import logging
+from typing import Optional
+
+from pxr import Gf, Sdf, Usd, UsdGeom, UsdShade
+
+from .data_structures import MaterialData
+
+logger = logging.getLogger(__name__)
+
+
class MaterialManager:
    """Manages creation and binding of USD materials.

    Creates UsdPreviewSurface materials based on MaterialData specifications.
    Handles material caching to avoid duplicate creation: materials are keyed
    by MaterialData.name, so a second MaterialData with the same name but
    different properties returns the first (cached) material unchanged.
    """

    def __init__(self, stage: Usd.Stage, materials_scope_path: str = "/World/Looks"):
        """Initialize material manager.

        Args:
            stage: USD stage to author materials into
            materials_scope_path: Scope prim path under which materials are created
        """
        self.stage = stage
        self.materials_scope_path = materials_scope_path
        # Cache keyed by MaterialData.name (see class docstring).
        self.material_cache: dict[str, UsdShade.Material] = {}

        # Create materials scope
        UsdGeom.Scope.Define(stage, materials_scope_path)

    def create_material(
        self, mat_data: MaterialData, time_code: Optional[float] = None
    ) -> UsdShade.Material:
        """Create a UsdPreviewSurface material.

        Args:
            mat_data: Material data specification
            time_code: Optional time code; only applied to diffuseColor —
                all other shader inputs are authored as default values

        Returns:
            UsdShade.Material: Created material (or cached one for a repeated name)
        """
        # Check cache
        if mat_data.name in self.material_cache:
            logger.debug(f"Returning cached material: {mat_data.name}")
            return self.material_cache[mat_data.name]

        logger.info(f"Creating material: {mat_data.name}")

        # Create material path
        mat_path = f"{self.materials_scope_path}/{mat_data.name}"

        # Create material
        material = UsdShade.Material.Define(self.stage, mat_path)

        # Create shader
        shader_path = f"{mat_path}/PreviewSurface"
        shader = UsdShade.Shader.Define(self.stage, shader_path)
        shader.CreateIdAttr("UsdPreviewSurface")

        # Set shader inputs
        # NOTE (Omniverse/ParaViewConnector compatibility):
        # When a material is bound, many viewers (including Omniverse Kit) will NOT
        # automatically use the mesh's `displayColor` primvar. ParaViewConnector
        # explicitly wires `UsdPrimvarReader_float3(varname=displayColor)` into
        # `UsdPreviewSurface.inputs:diffuseColor`. We mirror that behavior here.
        diffuse_input = shader.CreateInput("diffuseColor", Sdf.ValueTypeNames.Color3f)
        if mat_data.use_vertex_colors:
            vc_reader_path = f"{mat_path}/PrimvarReader_displayColor"
            vc_reader = UsdShade.Shader.Define(self.stage, vc_reader_path)
            vc_reader.CreateIdAttr("UsdPrimvarReader_float3")
            vc_reader.CreateInput("varname", Sdf.ValueTypeNames.Token).Set(
                "displayColor"
            )
            vc_out = vc_reader.CreateOutput("result", Sdf.ValueTypeNames.Color3f)
            diffuse_input.ConnectToSource(vc_out)
        else:
            diffuse_color = Gf.Vec3f(*mat_data.diffuse_color)
            if time_code is not None:
                diffuse_input.Set(diffuse_color, time_code)
            else:
                diffuse_input.Set(diffuse_color)

        # Specular color (only authored when it differs from black)
        if mat_data.specular_color != (0.0, 0.0, 0.0):
            specular_input = shader.CreateInput(
                "specularColor", Sdf.ValueTypeNames.Color3f
            )
            specular_color = Gf.Vec3f(*mat_data.specular_color)
            specular_input.Set(specular_color)

        # Emissive color (only authored when it differs from black)
        if mat_data.emissive_color != (0.0, 0.0, 0.0):
            emissive_input = shader.CreateInput(
                "emissiveColor", Sdf.ValueTypeNames.Color3f
            )
            emissive_color = Gf.Vec3f(*mat_data.emissive_color)
            emissive_input.Set(emissive_color)

        # Opacity
        opacity_input = shader.CreateInput("opacity", Sdf.ValueTypeNames.Float)
        opacity_input.Set(mat_data.opacity)

        # Roughness
        roughness_input = shader.CreateInput("roughness", Sdf.ValueTypeNames.Float)
        roughness_input.Set(mat_data.roughness)

        # Metallic
        metallic_input = shader.CreateInput("metallic", Sdf.ValueTypeNames.Float)
        metallic_input.Set(mat_data.metallic)

        # IOR
        ior_input = shader.CreateInput("ior", Sdf.ValueTypeNames.Float)
        ior_input.Set(mat_data.ior)

        # Connect shader to material surface output
        surface_output = shader.CreateOutput("surface", Sdf.ValueTypeNames.Token)
        material.CreateSurfaceOutput().ConnectToSource(surface_output)

        # Cache material
        self.material_cache[mat_data.name] = material

        logger.debug(f"Created material '{mat_data.name}' at {mat_path}")

        return material

    def bind_material(
        self, geom_prim: UsdGeom.Gprim, material: UsdShade.Material
    ) -> None:
        """Bind a material to a geometry prim.

        Args:
            geom_prim: Geometry prim (Mesh, Points, etc.)
            material: Material to bind
        """
        binding_api = UsdShade.MaterialBindingAPI(geom_prim)
        binding_api.Bind(material)

        logger.debug(
            f"Bound material '{material.GetPath()}' to '{geom_prim.GetPath()}'"
        )

    def get_or_create_material(
        self, mat_data: MaterialData, time_code: Optional[float] = None
    ) -> UsdShade.Material:
        """Get existing material from cache or create new one.

        Note: create_material() already performs this cache check, so this
        method is a convenience alias with the same semantics.

        Args:
            mat_data: Material data specification
            time_code: Optional time code for time-varying materials

        Returns:
            UsdShade.Material: Material (cached or newly created)
        """
        if mat_data.name in self.material_cache:
            return self.material_cache[mat_data.name]
        return self.create_material(mat_data, time_code)

    def create_default_material(
        self, name: str = "default", color: tuple[float, float, float] = (0.8, 0.8, 0.8)
    ) -> UsdShade.Material:
        """Create a simple default material.

        Args:
            name: Material name
            color: RGB diffuse color

        Returns:
            UsdShade.Material: Created default material
        """
        mat_data = MaterialData(
            name=name, diffuse_color=color, roughness=0.5, metallic=0.0
        )
        return self.create_material(mat_data)
diff --git a/src/physiomotion4d/vtk_to_usd/usd_mesh_converter.py b/src/physiomotion4d/vtk_to_usd/usd_mesh_converter.py
new file mode 100644
index 0000000..b95439a
--- /dev/null
+++ b/src/physiomotion4d/vtk_to_usd/usd_mesh_converter.py
@@ -0,0 +1,365 @@
+"""USD Mesh converter for creating UsdGeomMesh from MeshData.
+
+Handles geometry, normals, colors, primvars, and time-varying attributes.
+"""
+
+import logging
+from typing import Optional
+
+import numpy as np
+from pxr import Gf, Usd, UsdGeom, Vt
+
+from .data_structures import ConversionSettings, GenericArray, MeshData
+from .material_manager import MaterialManager
+from .usd_utils import (
+ compute_mesh_extent,
+ create_primvar,
+ ras_normals_to_usd,
+ ras_points_to_usd,
+ triangulate_face,
+)
+
+logger = logging.getLogger(__name__)
+
+
class UsdMeshConverter:
    """Converts MeshData to UsdGeomMesh with full feature support.

    Handles:
    - Geometry (points, faces, normals)
    - Vertex colors and display color primvars
    - Generic data arrays as primvars
    - Time-varying attributes
    - Material binding
    """

    def __init__(
        self,
        stage: Usd.Stage,
        settings: ConversionSettings,
        material_mgr: MaterialManager,
    ):
        """Initialize mesh converter.

        Args:
            stage: USD stage to author into
            settings: Conversion settings
            material_mgr: Material manager used for material binding
        """
        self.stage = stage
        self.settings = settings
        self.material_mgr = material_mgr

    def create_mesh(
        self,
        mesh_data: MeshData,
        mesh_path: str,
        time_code: Optional[float] = None,
        bind_material: bool = True,
    ) -> UsdGeom.Mesh:
        """Create a UsdGeomMesh from MeshData.

        Args:
            mesh_data: Mesh data to convert
            mesh_path: USD path for the mesh
            time_code: Optional time code for time-varying data
            bind_material: Whether to bind the mesh's material; the material
                must already exist in the MaterialManager cache (none is
                created here)

        Returns:
            UsdGeom.Mesh: Created USD mesh
        """
        logger.info(f"Creating USD mesh at: {mesh_path}")

        # Create mesh prim
        mesh = UsdGeom.Mesh.Define(self.stage, mesh_path)

        # Convert points from RAS to USD (Y-up) coordinates
        usd_points = ras_points_to_usd(mesh_data.points)

        # Handle triangulation if requested
        face_counts = mesh_data.face_vertex_counts
        face_indices = mesh_data.face_vertex_indices

        if self.settings.triangulate_meshes:
            # Only triangulate when at least one face is not already a triangle
            if not all(count == 3 for count in face_counts):
                logger.debug("Triangulating mesh faces")
                face_counts, face_indices = triangulate_face(face_counts, face_indices)

        # Convert to Vt arrays
        face_counts_vt = Vt.IntArray(face_counts.tolist())
        face_indices_vt = Vt.IntArray(face_indices.tolist())

        # Set topology as default (non-time-sampled) values: topology is
        # assumed static across time steps (see create_time_varying_mesh).
        mesh.CreateFaceVertexCountsAttr(face_counts_vt)
        mesh.CreateFaceVertexIndicesAttr(face_indices_vt)

        # Set points (time-varying if time_code provided)
        points_attr = mesh.CreatePointsAttr()
        if time_code is not None:
            points_attr.Set(usd_points, time_code)
        else:
            points_attr.Set(usd_points)

        # Set extent (bounding box), sampled the same way as the points
        extent = compute_mesh_extent(usd_points)
        extent_attr = mesh.CreateExtentAttr()
        if time_code is not None:
            extent_attr.Set(extent, time_code)
        else:
            extent_attr.Set(extent)

        # Set mesh attributes
        mesh.CreateSubdivisionSchemeAttr("none")  # No subdivision
        mesh.CreateDoubleSidedAttr(True)  # Visible from both sides

        # Handle normals
        if mesh_data.normals is not None:
            logger.debug("Adding normals to mesh")
            usd_normals = ras_normals_to_usd(mesh_data.normals)
            normals_attr = mesh.CreateNormalsAttr()
            normals_attr.SetMetadata("interpolation", UsdGeom.Tokens.vertex)
            if time_code is not None:
                normals_attr.Set(usd_normals, time_code)
            else:
                normals_attr.Set(usd_normals)
        elif self.settings.compute_normals:
            # No normals are authored here; the renderer computes them on demand.
            logger.debug("Computing normals for mesh")

        # Handle vertex colors
        if mesh_data.colors is not None:
            logger.debug("Adding vertex colors to mesh")
            self._add_vertex_colors(mesh, mesh_data.colors, time_code)

        # Handle generic arrays (primvars)
        if self.settings.preserve_point_arrays or self.settings.preserve_cell_arrays:
            self._add_generic_arrays(mesh, mesh_data, time_code)

        # Bind material (if material_id is provided and material exists in cache)
        if bind_material and mesh_data.material_id:
            if mesh_data.material_id in self.material_mgr.material_cache:
                material = self.material_mgr.material_cache[mesh_data.material_id]
                self.material_mgr.bind_material(mesh, material)

        logger.info(
            f"Created mesh with {len(mesh_data.points)} points, "
            f"{len(face_counts)} faces"
        )

        return mesh

    def _add_vertex_colors(
        self, mesh: UsdGeom.Mesh, colors: np.ndarray, time_code: Optional[float]
    ) -> None:
        """Add vertex colors to mesh as displayColor primvar.

        Args:
            mesh: USD mesh
            colors: Numpy color array of shape (N, 3) RGB or (N, 4) RGBA.
                (The previous ``Vt.Vec3fArray`` annotation was wrong: this
                method reads ``colors.shape[1]`` and slices columns.)
            time_code: Optional time code
        """
        # RGBA -> RGB; the alpha channel is authored separately as displayOpacity
        if colors.shape[1] == 4:
            colors_rgb = colors[:, :3]
        else:
            colors_rgb = colors

        # Create displayColor primvar
        display_color_primvar = mesh.CreateDisplayColorPrimvar(UsdGeom.Tokens.vertex)

        # Convert to Vt.Vec3fArray (convert numpy scalars to Python floats)
        color_array = Vt.Vec3fArray(
            [Gf.Vec3f(float(c[0]), float(c[1]), float(c[2])) for c in colors_rgb]
        )

        if time_code is not None:
            # Author a default value for viewers that don't evaluate time samples
            # unless an explicit time is set (common in some Omniverse/Kit workflows).
            if float(time_code) == 0.0:
                display_color_primvar.Set(color_array)
            display_color_primvar.Set(color_array, time_code)
        else:
            display_color_primvar.Set(color_array)

        # Handle opacity if RGBA
        if colors.shape[1] == 4:
            display_opacity_primvar = mesh.CreateDisplayOpacityPrimvar(
                UsdGeom.Tokens.vertex
            )
            opacity_array = Vt.FloatArray(colors[:, 3].tolist())
            if time_code is not None:
                if float(time_code) == 0.0:
                    display_opacity_primvar.Set(opacity_array)
                display_opacity_primvar.Set(opacity_array, time_code)
            else:
                display_opacity_primvar.Set(opacity_array)

    def _add_generic_arrays(
        self, mesh: UsdGeom.Mesh, mesh_data: MeshData, time_code: Optional[float]
    ) -> None:
        """Add generic data arrays as primvars.

        Args:
            mesh: USD mesh
            mesh_data: Mesh data containing arrays
            time_code: Optional time code
        """
        for array in mesh_data.generic_arrays:
            # Skip arrays whose category is not being preserved. (Previously
            # these checks ran only on the non-split path, so >4-component
            # arrays were authored even when their preserve flag was off.)
            if (
                array.interpolation == "vertex"
                and not self.settings.preserve_point_arrays
            ):
                continue
            if (
                array.interpolation == "uniform"
                and not self.settings.preserve_cell_arrays
            ):
                continue

            # Determine prefix based on interpolation
            if array.interpolation == "vertex":
                prefix = self.settings.point_array_prefix
            elif array.interpolation == "uniform":
                prefix = self.settings.cell_array_prefix
            else:
                prefix = ""

            # Avoid authoring large multi-component tensors as flat float[] vertex
            # primvars. Omniverse/Hydra can be unstable when such primvars have
            # elementSize > 1, so split into primvars of <= 3 components each.
            if array.num_components > 4:
                self._author_split_primvars(mesh, array, prefix, time_code)
                # BUG FIX: the original fell through here after a *successful*
                # split (only the except path continued), so the oversized flat
                # primvar was authored as well — exactly what the split exists
                # to prevent.
                continue

            try:
                create_primvar(mesh, array, prefix, time_code)
            except Exception as e:
                logger.warning(f"Failed to create primvar for {array.name}: {e}")

    def _author_split_primvars(
        self,
        mesh: UsdGeom.Mesh,
        array: GenericArray,
        prefix: str,
        time_code: Optional[float],
    ) -> None:
        """Author a >4-component array as several primvars of <= 3 components.

        Chunks are named "<name>_c<i>". Failures are logged, never raised.
        """
        try:
            data = np.asarray(array.data)
            if data.ndim == 1:
                # A flat array must divide evenly into num_components rows.
                if (
                    array.num_components <= 0
                    or (len(data) % array.num_components) != 0
                ):
                    logger.warning(
                        "Skipping primvar %s: cannot reshape flat data len=%d into (%s, %d)",
                        array.name,
                        len(data),
                        "?",
                        array.num_components,
                    )
                    return
                data = data.reshape(-1, array.num_components)

            if data.ndim != 2 or data.shape[1] != array.num_components:
                logger.warning(
                    "Skipping primvar %s: unexpected shape %s for num_components=%d",
                    array.name,
                    getattr(data, "shape", None),
                    array.num_components,
                )
                return

            # Split into chunks of 3 components (last chunk may be 1 or 2)
            for chunk_idx, start in enumerate(range(0, array.num_components, 3)):
                chunk = data[:, start : start + 3]
                if chunk.size == 0:
                    continue
                chunk_arr = GenericArray(
                    name=f"{array.name}_c{chunk_idx}",
                    data=chunk,
                    num_components=int(chunk.shape[1]),
                    data_type=array.data_type,
                    interpolation=array.interpolation,
                )
                create_primvar(mesh, chunk_arr, prefix, time_code)

        except Exception as e:
            logger.warning("Failed to split primvar %s: %s", array.name, e)

    def create_time_varying_mesh(
        self,
        mesh_data_sequence: list[MeshData],
        mesh_path: str,
        time_codes: list[float],
        bind_material: bool = True,
    ) -> UsdGeom.Mesh:
        """Create a mesh with time-varying attributes.

        Assumes constant topology (same number of points/faces) — only points,
        extent, normals, colors, and generic arrays get per-step samples.

        Args:
            mesh_data_sequence: List of MeshData for each time step
            mesh_path: USD path for the mesh
            time_codes: List of time codes (must match sequence length)
            bind_material: Whether to bind the first step's material

        Returns:
            UsdGeom.Mesh: Created USD mesh with time samples

        Raises:
            ValueError: If sequence and time_codes lengths differ, or the
                sequence is empty.
        """
        if len(mesh_data_sequence) != len(time_codes):
            raise ValueError(
                f"Number of mesh data ({len(mesh_data_sequence)}) must match "
                f"number of time codes ({len(time_codes)})"
            )

        if len(mesh_data_sequence) == 0:
            raise ValueError("Empty mesh data sequence")

        logger.info(
            f"Creating time-varying mesh at: {mesh_path} "
            f"with {len(time_codes)} time steps"
        )

        # Create mesh (topology + first sample) from the first time step
        first_mesh_data = mesh_data_sequence[0]
        mesh = self.create_mesh(
            first_mesh_data, mesh_path, time_codes[0], bind_material=bind_material
        )

        # Add time samples for subsequent steps
        for mesh_data, time_code in zip(
            mesh_data_sequence[1:], time_codes[1:], strict=False
        ):
            # Update points
            usd_points = ras_points_to_usd(mesh_data.points)
            mesh.GetPointsAttr().Set(usd_points, time_code)

            # Update extent
            extent = compute_mesh_extent(usd_points)
            mesh.GetExtentAttr().Set(extent, time_code)

            # Update normals if present
            if mesh_data.normals is not None:
                usd_normals = ras_normals_to_usd(mesh_data.normals)
                mesh.GetNormalsAttr().Set(usd_normals, time_code)

            # Update colors if present
            if mesh_data.colors is not None:
                self._add_vertex_colors(mesh, mesh_data.colors, time_code)

            # Update generic arrays
            if (
                self.settings.preserve_point_arrays
                or self.settings.preserve_cell_arrays
            ):
                self._add_generic_arrays(mesh, mesh_data, time_code)

        logger.info(f"Created time-varying mesh with {len(time_codes)} time samples")

        return mesh
diff --git a/src/physiomotion4d/vtk_to_usd/usd_utils.py b/src/physiomotion4d/vtk_to_usd/usd_utils.py
new file mode 100644
index 0000000..462ce61
--- /dev/null
+++ b/src/physiomotion4d/vtk_to_usd/usd_utils.py
@@ -0,0 +1,398 @@
+"""USD utility functions for VTK to USD conversion.
+
+Provides helper functions for coordinate conversion, primvar creation, and USD type mapping.
+"""
+
+import logging
+from typing import Any
+
+import numpy as np
+from numpy.typing import NDArray
+from pxr import Gf, Sdf, Usd, UsdGeom, Vt
+
+from .data_structures import DataType, GenericArray
+
+logger = logging.getLogger(__name__)
+
+
def ras_to_usd(point: NDArray | tuple | list) -> Gf.Vec3f:
    """Convert RAS (Right-Anterior-Superior) coordinates to USD's right-handed Y-up system.

    VTK/Medical imaging typically uses RAS coordinate system:
    - R (Right): X-axis points to patient's right
    - A (Anterior): Y-axis points to patient's front
    - S (Superior): Z-axis points to patient's head

    USD uses right-handed Y-up:
    - X: right
    - Y: up
    - Z: back (toward camera)

    Conversion: USD(x, y, z) = RAS(x, z, -y)

    Args:
        point: Point in RAS coordinates [x, y, z]

    Returns:
        Gf.Vec3f: Point in USD coordinates
    """
    # Tuples, lists, and numpy arrays all index identically here, so no
    # type-based branching is needed (the original if/else arms were
    # byte-for-byte identical).
    return Gf.Vec3f(float(point[0]), float(point[2]), float(-point[1]))
+
+
def ras_points_to_usd(points: NDArray) -> Vt.Vec3fArray:
    """Convert an (N, 3) array of RAS points to USD Y-up coordinates.

    Applies USD(x, y, z) = RAS(x, z, -y) to every row.

    Args:
        points: Array of points with shape (N, 3)

    Returns:
        Vt.Vec3fArray: Points in USD coordinates (float32)

    Raises:
        ValueError: If points does not have exactly 3 columns.
    """
    if points.shape[1] != 3:
        raise ValueError(f"Points must have shape (N, 3), got {points.shape}")

    # Column permutation with sign flip: (x, y, z) -> (x, z, -y)
    swapped = np.column_stack((points[:, 0], points[:, 2], -points[:, 1]))

    return Vt.Vec3fArray.FromNumpy(swapped.astype(np.float32))
+
+
def ras_normals_to_usd(normals: NDArray) -> Vt.Vec3fArray:
    """Convert array of RAS normals to USD coordinates.

    Same transformation as points since normals are vectors. No
    renormalization is needed: the conversion is an axis permutation plus a
    sign flip, which preserves vector length.

    Args:
        normals: Array of normals with shape (N, 3)

    Returns:
        Vt.Vec3fArray: Normals in USD coordinates
    """
    return ras_points_to_usd(normals)
+
+
def numpy_to_vt_array(array: NDArray, data_type: DataType) -> Any:
    """Convert numpy array to appropriate VtArray type.

    Dispatch rules (lossy conversions noted):
    - FLOAT/DOUBLE -> float32 (doubles are downcast); 1-4 components map to
      Float/Vec2f/Vec3f/Vec4f arrays, anything else is flattened to FloatArray.
    - INT/LONG -> int32 (longs are narrowed); same 1-4 component mapping.
    - UINT/ULONG -> uint32 UIntArray (no Vec types exist for uint).
    - UCHAR/CHAR -> uint8 UCharArray, always flattened.
    - SHORT/USHORT -> widened to int32 IntArray, always flattened.
    - Unknown types fall back to a flattened float32 FloatArray.

    Args:
        array: Numpy array to convert (1-D scalar or 2-D multi-component)
        data_type: Target data type

    Returns:
        Appropriate VtArray based on data_type and array shape

    Raises:
        ValueError: If the array has more than 2 dimensions.
    """
    # Ensure contiguous array for efficient conversion
    array = np.ascontiguousarray(array)

    # Determine number of components
    if array.ndim == 1:
        num_components = 1
    elif array.ndim == 2:
        num_components = array.shape[1]
    else:
        raise ValueError(f"Unsupported array shape: {array.shape}")

    # Convert based on type and components
    if data_type in [DataType.FLOAT, DataType.DOUBLE]:
        array_f = array.astype(np.float32)
        if num_components == 1:
            return Vt.FloatArray.FromNumpy(array_f)
        elif num_components == 2:
            return Vt.Vec2fArray.FromNumpy(array_f)
        elif num_components == 3:
            return Vt.Vec3fArray.FromNumpy(array_f)
        elif num_components == 4:
            return Vt.Vec4fArray.FromNumpy(array_f)
        else:
            # Fallback: flatten to float array (component grouping is preserved
            # by the caller via primvar elementSize — see create_primvar)
            return Vt.FloatArray.FromNumpy(array_f.flatten())

    elif data_type in [DataType.INT, DataType.LONG]:
        array_i = array.astype(np.int32)
        if num_components == 1:
            return Vt.IntArray.FromNumpy(array_i)
        elif num_components == 2:
            return Vt.Vec2iArray.FromNumpy(array_i)
        elif num_components == 3:
            return Vt.Vec3iArray.FromNumpy(array_i)
        elif num_components == 4:
            return Vt.Vec4iArray.FromNumpy(array_i)
        else:
            return Vt.IntArray.FromNumpy(array_i.flatten())

    elif data_type in [DataType.UINT, DataType.ULONG]:
        array_ui = array.astype(np.uint32)
        if num_components == 1:
            return Vt.UIntArray.FromNumpy(array_ui)
        else:
            # No Vec types for uint, flatten
            return Vt.UIntArray.FromNumpy(array_ui.flatten())

    elif data_type in [DataType.UCHAR, DataType.CHAR]:
        array_uc = array.astype(np.uint8)
        return Vt.UCharArray.FromNumpy(array_uc.flatten())

    elif data_type in [DataType.SHORT, DataType.USHORT]:
        # Convert to int (no 16-bit Vt array type is used here)
        array_i = array.astype(np.int32)
        return Vt.IntArray.FromNumpy(array_i.flatten())

    else:
        # Fallback to float
        array_f = array.astype(np.float32)
        return Vt.FloatArray.FromNumpy(array_f.flatten())
+
+
def get_sdf_value_type(data_type: DataType, num_components: int) -> Sdf.ValueTypeName:
    """Get appropriate SDF value type for primvar creation.

    Mirrors the dispatch in numpy_to_vt_array: float/int types with 1-4
    components get the matching vector array type; everything else (including
    unsigned, char, and short types) collapses to a scalar array type.

    Args:
        data_type: Data type
        num_components: Number of components (1, 2, 3, or 4)

    Returns:
        Sdf.ValueTypeName: Appropriate USD type
    """
    names = Sdf.ValueTypeNames

    if data_type in (DataType.FLOAT, DataType.DOUBLE):
        float_by_components = {
            1: names.FloatArray,
            2: names.Float2Array,
            3: names.Float3Array,
            4: names.Float4Array,
        }
        # >4 components fall back to a flat float array
        return float_by_components.get(num_components, names.FloatArray)

    if data_type in (DataType.INT, DataType.LONG):
        int_by_components = {
            1: names.IntArray,
            2: names.Int2Array,
            3: names.Int3Array,
            4: names.Int4Array,
        }
        return int_by_components.get(num_components, names.IntArray)

    if data_type in (DataType.UINT, DataType.ULONG):
        return names.UIntArray

    if data_type in (DataType.UCHAR, DataType.CHAR):
        return names.UCharArray

    if data_type in (DataType.SHORT, DataType.USHORT):
        # 16-bit types are widened to int
        return names.IntArray

    # Unknown types default to float
    return names.FloatArray
+
+
def sanitize_primvar_name(name: str) -> str:
    """Sanitize a name to be USD-compliant.

    USD attribute names must:
    - Start with a letter or underscore
    - Contain only letters, numbers, and underscores
    - Not contain dots, spaces, or special characters

    Args:
        name: Original name

    Returns:
        str: Sanitized name safe for USD. Never empty: names that sanitize
        away entirely (e.g. "...", "", "   ") return "_" so the result is
        always a valid USD identifier fragment.
    """
    import re

    # Replace every disallowed character (dots, spaces, punctuation, ...)
    # with an underscore in one pass.
    sanitized = re.sub(r"[^a-zA-Z0-9_]", "_", name)

    # Ensure it starts with a letter or underscore
    if sanitized and sanitized[0].isdigit():
        sanitized = "_" + sanitized

    # Collapse consecutive underscores and drop trailing ones
    sanitized = re.sub(r"_+", "_", sanitized).rstrip("_")

    # BUG FIX: inputs made entirely of separators previously produced "",
    # which is an invalid attribute name when used without a prefix.
    return sanitized or "_"
+
+
def create_primvar(
    geom: UsdGeom.Gprim,
    array: GenericArray,
    array_name_prefix: str = "",
    time_code: float | None = None,
) -> UsdGeom.Primvar | None:
    """Create a USD primvar from a GenericArray.

    Validates element counts against the mesh (for vertex/uniform
    interpolation), sanitizes the name, picks Sdf/Vt types matching the
    array's DataType, and authors the value (time-sampled when time_code is
    given).

    Args:
        geom: USD geometry prim (Mesh, Points, etc.)
        array: GenericArray containing data
        array_name_prefix: Prefix for primvar name (e.g. "vtk_point_")
        time_code: Optional time code for time-varying data

    Returns:
        UsdGeom.Primvar: Created primvar, or None if validation failed
        (size mismatch or empty data)
    """
    # Sanitize the array name to be USD-compliant
    sanitized_name = sanitize_primvar_name(array.name)
    primvar_name = f"{array_name_prefix}{sanitized_name}"

    # Log if name was changed
    if sanitized_name != array.name:
        logger.debug(f"Sanitized primvar name: '{array.name}' → '{sanitized_name}'")

    # Validate array size for meshes (skipped for non-mesh gprims)
    if isinstance(geom, UsdGeom.Mesh):
        mesh = UsdGeom.Mesh(geom)

        # Check size matches expected count based on interpolation
        if array.interpolation == "vertex":
            # Vertex-interpolated primvars need one element per point
            points_attr = mesh.GetPointsAttr()
            if points_attr:
                points = points_attr.Get(
                    time_code if time_code is not None else Usd.TimeCode.Default()
                )
                # NOTE: validation is best-effort — if no points are authored
                # at this time, the size check is silently skipped.
                if points and len(array.data) != len(points):
                    logger.warning(
                        f"Skipping primvar '{primvar_name}': size mismatch "
                        f"(got {len(array.data)}, expected {len(points)} vertices)"
                    )
                    return None

        elif array.interpolation == "uniform":
            # Uniform-interpolated primvars need one element per face
            face_counts_attr = mesh.GetFaceVertexCountsAttr()
            if face_counts_attr:
                face_counts = face_counts_attr.Get(
                    time_code if time_code is not None else Usd.TimeCode.Default()
                )
                if face_counts and len(array.data) != len(face_counts):
                    logger.warning(
                        f"Skipping primvar '{primvar_name}': size mismatch "
                        f"(got {len(array.data)}, expected {len(face_counts)} faces)"
                    )
                    return None

    # Skip if array has no data
    if len(array.data) == 0:
        logger.debug(f"Skipping empty primvar '{primvar_name}'")
        return None

    # Get primvars API
    primvars_api = UsdGeom.PrimvarsAPI(geom)

    # Get appropriate USD type
    sdf_type = get_sdf_value_type(array.data_type, array.num_components)

    # Create primvar
    primvar = primvars_api.CreatePrimvar(primvar_name, sdf_type)

    # Set interpolation (unrecognized values default to vertex)
    if array.interpolation == "vertex":
        primvar.SetInterpolation(UsdGeom.Tokens.vertex)
    elif array.interpolation == "uniform":
        primvar.SetInterpolation(UsdGeom.Tokens.uniform)
    elif array.interpolation == "constant":
        primvar.SetInterpolation(UsdGeom.Tokens.constant)
    else:
        primvar.SetInterpolation(UsdGeom.Tokens.vertex)

    # If this is a multi-component array that we're storing in a scalar array type
    # (e.g. FloatArray for >4 components), preserve the component grouping via elementSize.
    # This makes downstream tools (and USDTools.apply_colormap_from_primvar) able to reshape.
    if array.num_components > 1 and sdf_type in (
        Sdf.ValueTypeNames.FloatArray,
        Sdf.ValueTypeNames.IntArray,
        Sdf.ValueTypeNames.UIntArray,
        Sdf.ValueTypeNames.UCharArray,
    ):
        try:
            primvar.SetElementSize(int(array.num_components))
        except Exception:
            # Not fatal; continue without elementSize.
            pass

    # Convert data to VtArray
    vt_array = numpy_to_vt_array(array.data, array.data_type)

    # Set value (with or without time code)
    if time_code is not None:
        primvar.Set(vt_array, time_code)
    else:
        primvar.Set(vt_array)

    logger.debug(
        f"Created primvar '{primvar_name}' with {len(array.data)} elements, "
        f"{array.num_components} components, type {array.data_type.value}"
    )

    return primvar
+
+
def triangulate_face(face_counts: NDArray, face_indices: NDArray) -> tuple:
    """Triangulate polygonal faces.

    Fan-triangulates every face around its first vertex. Triangles pass
    through unchanged, a quad becomes two triangles, and an n-gon becomes
    n-2 triangles. Degenerate faces (fewer than 3 vertices) are dropped.

    Args:
        face_counts: Array of vertex counts per face
        face_indices: Array of vertex indices

    Returns:
        tuple: (triangulated_counts, triangulated_indices) as int32 arrays
    """
    out_counts: list[int] = []
    out_indices: list[int] = []

    offset = 0
    for n_verts in face_counts:
        # Fan around the first vertex: (v0, v_j, v_{j+1}) for each interior
        # edge. For n_verts == 3 this emits the triangle unchanged, and for
        # n_verts == 4 it emits (v0,v1,v2),(v0,v2,v3) — the quad split.
        anchor = face_indices[offset]
        for j in range(offset + 1, offset + n_verts - 1):
            out_counts.append(3)
            out_indices.extend((anchor, face_indices[j], face_indices[j + 1]))
        offset += n_verts

    return np.array(out_counts, dtype=np.int32), np.array(out_indices, dtype=np.int32)
+
+
def compute_mesh_extent(points: Vt.Vec3fArray) -> Vt.Vec3fArray:
    """Compute bounding box extent for a mesh.

    Thin wrapper over UsdGeom.Mesh.ComputeExtent, which returns the
    axis-aligned bounds of the given points.

    Args:
        points: Array of points (already in USD coordinates)

    Returns:
        Vt.Vec3fArray: Extent as [min_point, max_point]
    """
    return UsdGeom.Mesh.ComputeExtent(points)
diff --git a/src/physiomotion4d/vtk_to_usd/vtk_reader.py b/src/physiomotion4d/vtk_to_usd/vtk_reader.py
new file mode 100644
index 0000000..ead97db
--- /dev/null
+++ b/src/physiomotion4d/vtk_to_usd/vtk_reader.py
@@ -0,0 +1,688 @@
+"""VTK file readers for various VTK formats (VTK, VTP, VTU).
+
+Reads VTK files and extracts geometry, topology, and data arrays.
+"""
+
+import logging
+from pathlib import Path
+from typing import Optional
+
+import numpy as np
+import vtk
+from numpy.typing import NDArray
+from vtk.util import numpy_support
+
+from .data_structures import DataType, GenericArray, MeshData
+
+logger = logging.getLogger(__name__)
+
+
class VTKReader:
    """Base class for VTK file readers.

    Provides shared static helpers for extracting geometry, normals, colors,
    and named data arrays from ``vtkPolyData`` objects.
    """

    @staticmethod
    def _vtk_to_numpy_type(vtk_type: int) -> DataType:
        """Map a VTK scalar type constant to our DataType enum.

        Unknown/unsupported VTK types fall back to ``DataType.FLOAT``.
        """
        type_map = {
            vtk.VTK_UNSIGNED_CHAR: DataType.UCHAR,
            vtk.VTK_CHAR: DataType.CHAR,
            vtk.VTK_UNSIGNED_SHORT: DataType.USHORT,
            vtk.VTK_SHORT: DataType.SHORT,
            vtk.VTK_UNSIGNED_INT: DataType.UINT,
            vtk.VTK_INT: DataType.INT,
            vtk.VTK_UNSIGNED_LONG: DataType.ULONG,
            vtk.VTK_LONG: DataType.LONG,
            vtk.VTK_FLOAT: DataType.FLOAT,
            vtk.VTK_DOUBLE: DataType.DOUBLE,
        }
        return type_map.get(vtk_type, DataType.FLOAT)

    @staticmethod
    def _extract_data_arrays(
        attribute_data, expected_size: int, interpolation: str, kind: str, unit: str
    ) -> list[GenericArray]:
        """Shared extraction logic for point and cell attribute arrays.

        Args:
            attribute_data: vtkPointData or vtkCellData container.
            expected_size: Number of points/cells each array must match.
            interpolation: USD interpolation to record ("vertex" or "uniform").
            kind: Capitalized label for warnings ("Point" or "Cell").
            unit: Unit label for warnings ("points" or "cells").

        Returns:
            list[GenericArray]: Valid arrays; size-mismatched ones are skipped.
        """
        arrays: list[GenericArray] = []

        for i in range(attribute_data.GetNumberOfArrays()):
            vtk_array = attribute_data.GetArray(i)
            name = vtk_array.GetName()
            if name is None or name == "":
                continue

            # Convert to numpy
            numpy_array = numpy_support.vtk_to_numpy(vtk_array)
            num_components = vtk_array.GetNumberOfComponents()

            # Determine data type
            data_type = VTKReader._vtk_to_numpy_type(vtk_array.GetDataType())

            # Multi-component arrays may come back flat; restore (N, C) shape.
            if num_components > 1 and numpy_array.ndim == 1:
                numpy_array = numpy_array.reshape(-1, num_components)

            # Skip arrays whose length does not match the mesh; writing them
            # as primvars would corrupt the USD output.
            array_size = len(numpy_array)
            if array_size != expected_size:
                logger.warning(
                    f"{kind} data array '{name}' size mismatch: "
                    f"got {array_size} values, expected {expected_size} {unit}. "
                    f"This array will be skipped to avoid USD corruption."
                )
                continue

            arrays.append(
                GenericArray(
                    name=name,
                    data=numpy_array,
                    num_components=num_components,
                    data_type=data_type,
                    interpolation=interpolation,
                )
            )

        return arrays

    @staticmethod
    def _extract_point_data_arrays(polydata: vtk.vtkPolyData) -> list[GenericArray]:
        """Extract all point data arrays as GenericArray objects.

        Point data maps to "vertex" interpolation; arrays whose size does not
        match the point count are skipped with a warning.
        """
        return VTKReader._extract_data_arrays(
            polydata.GetPointData(),
            polydata.GetNumberOfPoints(),
            "vertex",
            "Point",
            "points",
        )

    @staticmethod
    def _extract_cell_data_arrays(polydata: vtk.vtkPolyData) -> list[GenericArray]:
        """Extract all cell data arrays as GenericArray objects.

        Cell data maps to "uniform" interpolation; arrays whose size does not
        match the cell count are skipped with a warning.
        """
        return VTKReader._extract_data_arrays(
            polydata.GetCellData(),
            polydata.GetNumberOfCells(),
            "uniform",
            "Cell",
            "cells",
        )

    @staticmethod
    def _extract_geometry_from_polydata(polydata: vtk.vtkPolyData) -> tuple:
        """Extract points, face counts, and face indices from vtkPolyData.

        Returns:
            tuple: (points float64 (N, 3), face_vertex_counts int32,
                face_vertex_indices int32)
        """
        # Bulk-convert the coordinate array instead of a per-point Python
        # loop (the previous GetPoint() loop was O(n) Python calls).
        # GetPoint() always yielded doubles, so cast to float64 for parity.
        vtk_points = polydata.GetPoints()
        points = (
            numpy_support.vtk_to_numpy(vtk_points.GetData())
            .astype(np.float64)
            .reshape(-1, 3)
        )

        # Walk the polygon cells to flatten counts and indices.
        polys = polydata.GetPolys()
        num_polys = polys.GetNumberOfCells()

        face_vertex_counts = []
        face_vertex_indices = []

        polys.InitTraversal()
        id_list = vtk.vtkIdList()
        for _ in range(num_polys):
            polys.GetNextCell(id_list)
            num_pts = id_list.GetNumberOfIds()
            face_vertex_counts.append(num_pts)
            face_vertex_indices.extend([id_list.GetId(j) for j in range(num_pts)])

        return (
            points,
            np.array(face_vertex_counts, dtype=np.int32),
            np.array(face_vertex_indices, dtype=np.int32),
        )

    @staticmethod
    def _extract_normals(polydata: vtk.vtkPolyData) -> Optional[NDArray]:
        """Return per-point normals, computing them if the data has none.

        Computed normals use consistent, auto-oriented winding without vertex
        splitting so the point count stays unchanged.
        """
        from typing import cast

        # Check if normals exist in point data
        point_data = polydata.GetPointData()
        normals_array = point_data.GetNormals()

        if normals_array is not None:
            return cast(NDArray, numpy_support.vtk_to_numpy(normals_array))

        # Compute normals if they don't exist
        normal_generator = vtk.vtkPolyDataNormals()
        normal_generator.SetInputData(polydata)
        normal_generator.ComputePointNormalsOn()
        normal_generator.ComputeCellNormalsOff()
        normal_generator.SplittingOff()  # keep vertex count stable
        normal_generator.ConsistencyOn()
        normal_generator.AutoOrientNormalsOn()
        normal_generator.Update()

        output = normal_generator.GetOutput()
        normals_array = output.GetPointData().GetNormals()

        if normals_array is not None:
            return cast(NDArray, numpy_support.vtk_to_numpy(normals_array))

        return None

    @staticmethod
    def _extract_colors(polydata: vtk.vtkPolyData) -> Optional[NDArray]:
        """Extract vertex colors (RGB or RGBA) if present.

        Single-component scalars are not colors (they may be mapped through a
        colormap later), so they yield None.
        """
        from typing import cast

        point_data = polydata.GetPointData()
        scalars = point_data.GetScalars()

        if scalars is not None:
            colors = cast(NDArray, numpy_support.vtk_to_numpy(scalars))
            num_components = scalars.GetNumberOfComponents()

            # Handle different color formats
            if num_components == 3:  # RGB
                return cast(NDArray, colors.reshape(-1, 3))
            elif num_components == 4:  # RGBA
                return cast(NDArray, colors.reshape(-1, 4))
            elif num_components == 1:  # Scalar - could be mapped to color later
                return None

        return None
+
+
class PolyDataReader(VTKReader):
    """Reader for VTK PolyData files (.vtp)."""

    @staticmethod
    def read(filename: str | Path) -> MeshData:
        """Read a VTP file and return MeshData.

        Args:
            filename: Path to .vtp file

        Returns:
            MeshData: Extracted mesh data

        Raises:
            FileNotFoundError: If *filename* does not exist.
        """
        filename = Path(filename)
        if not filename.exists():
            # Bug fix: the message previously contained a dead placeholder
            # instead of the actual path.
            raise FileNotFoundError(f"File not found: {filename}")

        logger.info(f"Reading PolyData file: {filename}")

        # Read VTP file
        reader = vtk.vtkXMLPolyDataReader()
        reader.SetFileName(str(filename))
        reader.Update()
        polydata = reader.GetOutput()

        # Extract geometry
        points, face_counts, face_indices = VTKReader._extract_geometry_from_polydata(
            polydata
        )

        # Extract normals and colors
        normals = VTKReader._extract_normals(polydata)
        colors = VTKReader._extract_colors(polydata)

        # Extract point and cell data arrays and combine them
        point_arrays = VTKReader._extract_point_data_arrays(polydata)
        cell_arrays = VTKReader._extract_cell_data_arrays(polydata)
        generic_arrays = point_arrays + cell_arrays

        mesh_data = MeshData(
            points=points,
            face_vertex_counts=face_counts,
            face_vertex_indices=face_indices,
            normals=normals,
            colors=colors,
            generic_arrays=generic_arrays,
        )

        logger.info(
            f"Loaded mesh: {len(points)} points, {len(face_counts)} faces, "
            f"{len(generic_arrays)} data arrays"
        )

        return mesh_data
+
+
class LegacyVTKReader(VTKReader):
    """Reader for legacy VTK files (.vtk).

    Handles all legacy VTK dataset types:
    - POLYDATA
    - UNSTRUCTURED_GRID
    - STRUCTURED_GRID
    - STRUCTURED_POINTS
    - RECTILINEAR_GRID
    """

    @staticmethod
    def _surface_from_dataset(dataset, source_name: str):
        """Extract a surface vtkPolyData from a volumetric/structured dataset.

        Cell data is first interpolated to point data so cell-based arrays
        (like stress, strain) survive surface extraction as point data. The
        cell data remaining on the surface is then cleared, because cell
        arrays from the volume are invalid for the surface topology.

        Args:
            dataset: Any vtkDataSet subclass (grid/points/rectilinear).
            source_name: Dataset type name used only for the debug log.

        Returns:
            vtkPolyData: The extracted surface.
        """
        cell_to_point = vtk.vtkCellDataToPointData()
        cell_to_point.SetInputData(dataset)
        cell_to_point.PassCellDataOn()  # Keep cell data temporarily
        cell_to_point.Update()

        surface_filter = vtk.vtkDataSetSurfaceFilter()
        surface_filter.SetInputConnection(cell_to_point.GetOutputPort())
        surface_filter.Update()
        polydata = surface_filter.GetOutput()

        # Clear cell data after surface extraction.
        polydata.GetCellData().Initialize()

        logger.debug(
            f"Extracted surface from {source_name} (with cell->point data conversion)"
        )
        return polydata

    @staticmethod
    def read(filename: str | Path, extract_surface: bool = True) -> MeshData:
        """Read a legacy VTK file and return MeshData.

        Args:
            filename: Path to .vtk file
            extract_surface: If True, extract surface from volumetric data

        Returns:
            MeshData: Extracted mesh data

        Raises:
            FileNotFoundError: If *filename* does not exist.
            ValueError: If the dataset type is unsupported, the file cannot
                be read, or it contains no geometry.
        """
        filename = Path(filename)
        if not filename.exists():
            # Bug fix: error/log messages previously contained a dead
            # placeholder instead of the actual path.
            raise FileNotFoundError(f"File not found: {filename}")

        logger.info(f"Reading legacy VTK file: {filename}")

        # Use generic reader to auto-detect dataset type
        reader = vtk.vtkGenericDataObjectReader()
        reader.SetFileName(str(filename))
        reader.Update()

        if reader.GetOutput() is None:
            raise ValueError(f"Failed to read VTK file: {filename}")

        # Dispatch on the detected dataset type. All volumetric/structured
        # types share the same surface-extraction pipeline (see helper).
        if reader.IsFilePolyData():
            logger.debug("Detected POLYDATA format")
            polydata = reader.GetPolyDataOutput()
        elif reader.IsFileUnstructuredGrid():
            logger.debug("Detected UNSTRUCTURED_GRID format")
            if not extract_surface:
                raise ValueError(
                    "UnstructuredGrid without surface extraction not supported. "
                    "Set extract_surface=True"
                )
            polydata = LegacyVTKReader._surface_from_dataset(
                reader.GetUnstructuredGridOutput(), "UnstructuredGrid"
            )
        elif reader.IsFileStructuredGrid():
            logger.debug("Detected STRUCTURED_GRID format")
            polydata = LegacyVTKReader._surface_from_dataset(
                reader.GetStructuredGridOutput(), "StructuredGrid"
            )
        elif reader.IsFileStructuredPoints():
            logger.debug("Detected STRUCTURED_POINTS format")
            polydata = LegacyVTKReader._surface_from_dataset(
                reader.GetStructuredPointsOutput(), "StructuredPoints"
            )
        elif reader.IsFileRectilinearGrid():
            logger.debug("Detected RECTILINEAR_GRID format")
            polydata = LegacyVTKReader._surface_from_dataset(
                reader.GetRectilinearGridOutput(), "RectilinearGrid"
            )
        else:
            raise ValueError(f"Unsupported VTK dataset type in file: {filename}")

        # Verify we have valid polydata
        if polydata is None or polydata.GetPoints() is None:
            raise ValueError(f"No valid geometry found in file: {filename}")

        # Extract geometry
        points, face_counts, face_indices = VTKReader._extract_geometry_from_polydata(
            polydata
        )

        # Extract normals and colors
        normals = VTKReader._extract_normals(polydata)
        colors = VTKReader._extract_colors(polydata)

        # Extract point and cell data arrays and combine them
        point_arrays = VTKReader._extract_point_data_arrays(polydata)
        cell_arrays = VTKReader._extract_cell_data_arrays(polydata)
        generic_arrays = point_arrays + cell_arrays

        mesh_data = MeshData(
            points=points,
            face_vertex_counts=face_counts,
            face_vertex_indices=face_indices,
            normals=normals,
            colors=colors,
            generic_arrays=generic_arrays,
        )

        logger.info(
            f"Loaded mesh: {len(points)} points, {len(face_counts)} faces, "
            f"{len(generic_arrays)} data arrays"
        )

        return mesh_data
+
+
class UnstructuredGridReader(VTKReader):
    """Reader for VTK UnstructuredGrid files (.vtu)."""

    @staticmethod
    def read(filename: str | Path, extract_surface: bool = True) -> MeshData:
        """Read a VTU file and return MeshData.

        Args:
            filename: Path to .vtu file
            extract_surface: If True, extract surface as PolyData

        Returns:
            MeshData: Extracted mesh data

        Raises:
            FileNotFoundError: If *filename* does not exist.
        """
        filename = Path(filename)
        if not filename.exists():
            # Bug fix: error/log messages previously contained a dead
            # placeholder instead of the actual path.
            raise FileNotFoundError(f"File not found: {filename}")

        logger.info(f"Reading UnstructuredGrid file: {filename}")

        # Read VTU file
        reader = vtk.vtkXMLUnstructuredGridReader()
        reader.SetFileName(str(filename))
        reader.Update()
        ugrid = reader.GetOutput()

        if extract_surface:
            # Convert cell data to point data before surface extraction.
            # This preserves cell-based arrays (like stress, strain) as point data.
            cell_to_point = vtk.vtkCellDataToPointData()
            cell_to_point.SetInputData(ugrid)
            cell_to_point.PassCellDataOn()  # Keep cell data temporarily
            cell_to_point.Update()

            # Extract surface
            surface_filter = vtk.vtkDataSetSurfaceFilter()
            surface_filter.SetInputConnection(cell_to_point.GetOutputPort())
            surface_filter.Update()
            polydata = surface_filter.GetOutput()

            # Clear cell data after surface extraction
            # (Cell data from volume is invalid for surface topology)
            polydata.GetCellData().Initialize()

            # Bug fix: point arrays must come from the *surface*, whose point
            # data was carried through the filters above. The volume's point
            # arrays are sized for the volume point count and would not match
            # the surface topology (a source of USD primvar corruption).
            point_source = polydata
        else:
            # Convert directly to polydata (may not work for all cell types)
            logger.warning("Converting UnstructuredGrid directly to PolyData")
            polydata = vtk.vtkPolyData()
            polydata.SetPoints(ugrid.GetPoints())
            # The polydata shares the grid's points, so the grid's own point
            # arrays line up with the geometry.
            point_source = ugrid

        # Extract geometry
        points, face_counts, face_indices = VTKReader._extract_geometry_from_polydata(
            polydata
        )

        # Extract normals and colors
        normals = VTKReader._extract_normals(polydata)
        colors = VTKReader._extract_colors(polydata)

        # _extract_point_data_arrays only needs GetPointData() and
        # GetNumberOfPoints(), which both vtkPolyData and vtkUnstructuredGrid
        # provide; unlike the previous inline loop it also validates array
        # sizes and skips mismatches.
        point_arrays = VTKReader._extract_point_data_arrays(point_source)

        # Cell arrays come from the (surface) polydata; after surface
        # extraction this is intentionally empty (see Initialize above).
        cell_arrays = VTKReader._extract_cell_data_arrays(polydata)

        # Combine arrays
        generic_arrays = point_arrays + cell_arrays

        mesh_data = MeshData(
            points=points,
            face_vertex_counts=face_counts,
            face_vertex_indices=face_indices,
            normals=normals,
            colors=colors,
            generic_arrays=generic_arrays,
        )

        logger.info(
            f"Loaded mesh: {len(points)} points, {len(face_counts)} faces, "
            f"{len(generic_arrays)} data arrays"
        )

        return mesh_data
+
+
def read_vtk_file(filename: str | Path, extract_surface: bool = True) -> MeshData:
    """Auto-detect VTK file format and read appropriately.

    Args:
        filename: Path to VTK file (.vtk, .vtp, or .vtu)
        extract_surface: For .vtu and .vtk files, whether to extract surface from volumetric data

    Returns:
        MeshData: Extracted mesh data

    Raises:
        ValueError: If file format is not supported
    """
    path = Path(filename)
    suffix = path.suffix.lower()

    # Dispatch table keyed by lowercase extension; each entry is a thunk so
    # the chosen reader runs only after the format check succeeds.
    dispatch = {
        ".vtp": lambda: PolyDataReader.read(path),
        ".vtk": lambda: LegacyVTKReader.read(path, extract_surface=extract_surface),
        ".vtu": lambda: UnstructuredGridReader.read(
            path, extract_surface=extract_surface
        ),
    }

    if suffix not in dispatch:
        raise ValueError(
            f"Unsupported file format: {suffix}. Supported formats: .vtk, .vtp, .vtu"
        )

    return dispatch[suffix]()
+
+
def validate_time_series_topology(
    mesh_data_sequence: list[MeshData], filenames: list[str | Path] | None = None
) -> dict:
    """Validate topology consistency across a time series of meshes.

    Checks for:
    - Changes in number of points/cells over time
    - Inconsistent primvar sizes

    Args:
        mesh_data_sequence: List of MeshData objects
        filenames: Optional list of filenames for better error messages

    Returns:
        dict: Validation report with keys ``warnings``, ``is_consistent``,
            ``topology_changes`` and ``first_topology``. (Bug fix: the
            empty-input case previously returned only the first two keys,
            giving callers an inconsistent schema.)
    """
    if not mesh_data_sequence:
        # Same key schema as the populated case; no reference topology exists.
        return {
            "warnings": [],
            "is_consistent": True,
            "topology_changes": [],
            "first_topology": None,
        }

    warnings: list[str] = []
    first_mesh = mesh_data_sequence[0]
    first_n_points = len(first_mesh.points)
    first_n_faces = len(first_mesh.face_vertex_counts)

    # Track topology changes
    topology_changes: list[dict] = []

    for idx, mesh_data in enumerate(mesh_data_sequence):
        frame_label = f"frame {idx}"
        if filenames and idx < len(filenames):
            frame_label = f"{Path(filenames[idx]).name}"

        n_points = len(mesh_data.points)
        n_faces = len(mesh_data.face_vertex_counts)

        # Topology must stay constant relative to the first frame for
        # point-animated USD output to be valid.
        if n_points != first_n_points or n_faces != first_n_faces:
            topology_changes.append(
                {
                    "frame": idx,
                    "label": frame_label,
                    "points": n_points,
                    "faces": n_faces,
                }
            )
            warnings.append(
                f"{frame_label}: Topology change detected - "
                f"{n_points} points (expected {first_n_points}), "
                f"{n_faces} faces (expected {first_n_faces})"
            )

        # Each primvar must match the size of its interpolation target.
        for array in mesh_data.generic_arrays:
            array_size = len(array.data)

            if array.interpolation == "vertex" and array_size != n_points:
                warnings.append(
                    f"{frame_label}: Point data '{array.name}' size mismatch - "
                    f"got {array_size}, expected {n_points} points"
                )
            elif array.interpolation == "uniform" and array_size != n_faces:
                warnings.append(
                    f"{frame_label}: Cell data '{array.name}' size mismatch - "
                    f"got {array_size}, expected {n_faces} faces"
                )

    # Log a compact summary (first 5 offending frames only).
    if topology_changes:
        logger.warning(
            f"Topology changes detected in {len(topology_changes)}/{len(mesh_data_sequence)} frames"
        )
        logger.warning(f"First frame: {first_n_points} points, {first_n_faces} faces")
        for change in topology_changes[:5]:  # Show first 5
            logger.warning(
                f"  {change['label']}: {change['points']} points, {change['faces']} faces"
            )
        if len(topology_changes) > 5:
            logger.warning(f"  ... and {len(topology_changes) - 5} more frames")

    if warnings and not topology_changes:
        logger.warning(
            f"Found {len(warnings)} primvar size mismatches across time series"
        )

    return {
        "warnings": warnings,
        "is_consistent": len(warnings) == 0,
        "topology_changes": topology_changes,
        "first_topology": {"points": first_n_points, "faces": first_n_faces},
    }
diff --git a/src/physiomotion4d/workflow_convert_heart_gated_ct_to_usd.py b/src/physiomotion4d/workflow_convert_heart_gated_ct_to_usd.py
index 5e89384..1610c2d 100644
--- a/src/physiomotion4d/workflow_convert_heart_gated_ct_to_usd.py
+++ b/src/physiomotion4d/workflow_convert_heart_gated_ct_to_usd.py
@@ -13,9 +13,9 @@
import numpy as np
import pyvista as pv
+from physiomotion4d import ConvertVTKToUSD
from physiomotion4d.contour_tools import ContourTools
from physiomotion4d.convert_nrrd_4d_to_3d import ConvertNRRD4DTo3D
-from physiomotion4d.convert_vtk_to_usd import ConvertVTKToUSD
from physiomotion4d.physiomotion4d_base import PhysioMotion4DBase
from physiomotion4d.register_images_ants import RegisterImagesANTs
from physiomotion4d.register_images_base import RegisterImagesBase
diff --git a/tests/test_convert_vtk_to_usd_polymesh.py b/tests/test_convert_vtk_to_usd.py
similarity index 86%
rename from tests/test_convert_vtk_to_usd_polymesh.py
rename to tests/test_convert_vtk_to_usd.py
index 9408d44..cb93ba4 100644
--- a/tests/test_convert_vtk_to_usd_polymesh.py
+++ b/tests/test_convert_vtk_to_usd.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
"""
-Test for VTK to USD PolyMesh conversion.
+Test for VTK to USD conversion.
This test depends on test_contour_tools and uses the extracted contours
to test USD conversion functionality.
@@ -11,12 +11,12 @@
import pyvista as pv
from pxr import UsdGeom
-from physiomotion4d.convert_vtk_to_usd_polymesh import ConvertVTKToUSDPolyMesh
+from physiomotion4d import ConvertVTKToUSD
@pytest.mark.requires_data
@pytest.mark.slow
-class TestConvertVTKToUSDPolyMesh:
+class TestConvertVTKToUSD:
"""Test suite for VTK to USD PolyMesh conversion."""
@pytest.fixture(scope="class")
@@ -54,8 +54,8 @@ def contour_meshes(self, contour_tools, segmentation_results, test_directories):
return meshes
def test_converter_initialization(self):
- """Test that ConvertVTKToUSDPolyMesh initializes correctly."""
- converter = ConvertVTKToUSDPolyMesh(
+ """Test that ConvertVTKToUSD initializes correctly."""
+ converter = ConvertVTKToUSD(
data_basename="TestModel", input_polydata=[], mask_ids=None
)
@@ -68,7 +68,7 @@ def test_supports_mesh_type(self, contour_meshes):
"""Test that converter correctly identifies supported mesh types."""
mesh = contour_meshes[0]
- converter = ConvertVTKToUSDPolyMesh(
+ converter = ConvertVTKToUSD(
data_basename="TestModel", input_polydata=[mesh], mask_ids=None
)
@@ -89,7 +89,7 @@ def test_convert_single_time_point(self, contour_meshes, test_directories):
print("\nConverting single time point to USD...")
print(f" Mesh: {mesh.n_points} points, {mesh.n_cells} cells")
- converter = ConvertVTKToUSDPolyMesh(
+ converter = ConvertVTKToUSD(
data_basename="HeartSingle", input_polydata=[mesh], mask_ids=None
)
@@ -117,7 +117,7 @@ def test_convert_multiple_time_points(self, contour_meshes, test_directories):
print("\nConverting multiple time points to USD...")
print(f" Time points: {len(contour_meshes)}")
- converter = ConvertVTKToUSDPolyMesh(
+ converter = ConvertVTKToUSD(
data_basename="HeartMulti", input_polydata=contour_meshes, mask_ids=None
)
@@ -132,10 +132,10 @@ def test_convert_multiple_time_points(self, contour_meshes, test_directories):
prim = stage.GetPrimAtPath("/World/HeartMulti")
assert prim.IsValid(), "Root prim not found at /World/HeartMulti"
- # Check that mesh exists (checking the Transform group)
- transform_path = "/World/HeartMulti/Transform_heart_multi_time"
- transform_prim = stage.GetPrimAtPath(transform_path)
- assert transform_prim.IsValid(), f"Transform not found at {transform_path}"
+ # Check that mesh exists
+ mesh_path = "/World/HeartMulti/Mesh"
+ mesh_prim = stage.GetPrimAtPath(mesh_path)
+ assert mesh_prim.IsValid(), f"Mesh not found at {mesh_path}"
print("Multiple time points converted to USD")
print(f" Output: {output_file}")
@@ -161,7 +161,7 @@ def test_convert_with_deformation(
print("\nConverting mesh with deformation magnitude...")
- converter = ConvertVTKToUSDPolyMesh(
+ converter = ConvertVTKToUSD(
data_basename="HeartDeformation", input_polydata=[contours], mask_ids=None
)
@@ -171,10 +171,10 @@ def test_convert_with_deformation(
assert stage is not None, "USD stage not created"
assert output_file.exists(), "USD file not created"
- # Check that transform was created (actual path includes /World prefix)
- transform_path = "/World/HeartDeformation/Transform_heart_with_deformation"
- transform_prim = stage.GetPrimAtPath(transform_path)
- assert transform_prim.IsValid(), f"Transform not found at {transform_path}"
+ # Check that mesh was created (actual path includes /World prefix)
+ mesh_path = "/World/HeartDeformation/Mesh"
+ mesh_prim = stage.GetPrimAtPath(mesh_path)
+ assert mesh_prim.IsValid(), f"Mesh not found at {mesh_path}"
print("Mesh with deformation converted to USD")
print(f" Output: {output_file}")
@@ -194,7 +194,7 @@ def test_convert_with_colormap(self, contour_meshes, test_directories):
print("\nConverting mesh with colormap...")
- converter = ConvertVTKToUSDPolyMesh(
+ converter = ConvertVTKToUSD(
data_basename="HeartColormap", input_polydata=[mesh], mask_ids=None
)
@@ -207,10 +207,10 @@ def test_convert_with_colormap(self, contour_meshes, test_directories):
assert stage is not None, "USD stage not created"
assert output_file.exists(), "USD file not created"
- # Verify transform was created (actual path includes /World prefix)
- transform_path = "/World/HeartColormap/Transform_heart_with_colormap"
- transform_prim = stage.GetPrimAtPath(transform_path)
- assert transform_prim.IsValid(), f"Transform not found at {transform_path}"
+ # Verify mesh was created (actual path includes /World prefix)
+ mesh_path = "/World/HeartColormap/Mesh"
+ mesh_prim = stage.GetPrimAtPath(mesh_path)
+ assert mesh_prim.IsValid(), f"Mesh not found at {mesh_path}"
print("Mesh with colormap converted to USD")
print(" Colormap: plasma")
@@ -246,7 +246,7 @@ def test_convert_unstructured_grid_to_surface(self, test_directories):
print("\nConverting UnstructuredGrid to USD...")
print(f" Grid: {ugrid.n_points} points, {ugrid.n_cells} cells")
- converter = ConvertVTKToUSDPolyMesh(
+ converter = ConvertVTKToUSD(
data_basename="CubeSurface",
input_polydata=[ugrid],
mask_ids=None,
@@ -268,7 +268,7 @@ def test_usd_file_structure(self, contour_meshes, test_directories):
usd_output_dir = output_dir / "usd_polymesh"
usd_output_dir.mkdir(exist_ok=True)
- converter = ConvertVTKToUSDPolyMesh(
+ converter = ConvertVTKToUSD(
data_basename="HeartStructure",
input_polydata=[contour_meshes[0]],
mask_ids=None,
@@ -284,17 +284,13 @@ def test_usd_file_structure(self, contour_meshes, test_directories):
assert root_prim.IsValid(), "Root prim not found at /World/HeartStructure"
assert UsdGeom.Xform(root_prim), "Root should be an Xform"
- # Check transform/mesh structure
- transform_prim = stage.GetPrimAtPath(
- "/World/HeartStructure/Transform_heart_structure_test"
- )
- assert transform_prim.IsValid(), (
- "Transform prim not found at /World/HeartStructure/Transform_heart_structure_test"
- )
+ # Check mesh structure
+ mesh_prim = stage.GetPrimAtPath("/World/HeartStructure/Mesh")
+ assert mesh_prim.IsValid(), "Mesh prim not found at /World/HeartStructure/Mesh"
print("USD file structure verified")
print(f" Root: {root_prim.GetPath()}")
- print(f" Transform: {transform_prim.GetPath()}")
+ print(f" Mesh: {mesh_prim.GetPath()}")
def test_time_varying_topology(self, contour_meshes, test_directories):
"""Test handling of time-varying topology."""
@@ -313,7 +309,7 @@ def test_time_varying_topology(self, contour_meshes, test_directories):
print(f" Mesh 1: {mesh1.n_points} points, {mesh1.n_cells} cells")
print(f" Mesh 2: {mesh2.n_points} points, {mesh2.n_cells} cells")
- converter = ConvertVTKToUSDPolyMesh(
+ converter = ConvertVTKToUSD(
data_basename="HeartVarying", input_polydata=[mesh1, mesh2], mask_ids=None
)
@@ -362,7 +358,7 @@ def test_batch_conversion(
# Convert each anatomy separately
for anatomy, mesh in meshes_dict.items():
- converter = ConvertVTKToUSDPolyMesh(
+ converter = ConvertVTKToUSD(
data_basename=f"{anatomy.capitalize()}",
input_polydata=[mesh],
mask_ids=None,
diff --git a/tests/test_vtk_to_usd_library.py b/tests/test_vtk_to_usd_library.py
new file mode 100644
index 0000000..2cb4539
--- /dev/null
+++ b/tests/test_vtk_to_usd_library.py
@@ -0,0 +1,430 @@
+#!/usr/bin/env python
+"""
+Tests for the vtk_to_usd library module.
+
+This test suite validates the new modular vtk_to_usd library including:
+- VTK file reading (VTP, VTK, VTU formats)
+- Data structure conversions
+- USD conversion
+- Material handling
+- Time-series support
+
+Note: These tests require manually downloaded data:
+- KCL-Heart-Model: Must be manually downloaded and placed in data/KCL-Heart-Model/
+- CHOP-Valve4D: Must be manually downloaded and placed in data/CHOP-Valve4D/
+"""
+
+from pathlib import Path
+
+import pytest
+from pxr import UsdGeom, UsdShade
+
+from physiomotion4d.vtk_to_usd import (
+ ConversionSettings,
+ MaterialData,
+ VTKToUSDConverter,
+ read_vtk_file,
+)
+
+
# Helper to locate test data
def get_data_dir():
    """Return the project-level ``data`` directory as a :class:`Path`.

    Resolved relative to this test module: the project root is the parent
    of the ``tests`` directory containing this file.
    """
    return Path(__file__).parent.parent / "data"
+
+
def check_kcl_heart_data():
    """Return True when both KCL Heart Model input files are present.

    Requires ``average_surface.vtp`` and ``average_mesh.vtk`` under
    ``<project_root>/data/KCL-Heart-Model`` (manually downloaded data).
    """
    model_dir = get_data_dir() / "KCL-Heart-Model"
    required_files = ("average_surface.vtp", "average_mesh.vtk")
    return all((model_dir / filename).exists() for filename in required_files)
+
+
def check_valve4d_data():
    """Return True when the CHOP Valve4D ``Alterra`` series is present.

    The ``Alterra`` directory must exist under
    ``<project_root>/data/CHOP-Valve4D`` and contain at least one ``*.vtk``
    frame (manually downloaded data).
    """
    alterra_dir = get_data_dir() / "CHOP-Valve4D" / "Alterra"
    if not alterra_dir.exists():
        return False
    return any(alterra_dir.glob("*.vtk"))
+
+
@pytest.mark.requires_data
class TestVTKReader:
    """Test VTK file reading capabilities.

    Every test depends on the manually downloaded KCL Heart Model data and
    is skipped when that data set is absent.
    """

    @staticmethod
    def _kcl_data_dir():
        """Skip the running test unless KCL Heart Model data is available;
        otherwise return its directory."""
        if not check_kcl_heart_data():
            pytest.skip(
                "KCL-Heart-Model data not available (must be manually downloaded)"
            )
        return get_data_dir() / "KCL-Heart-Model"

    @staticmethod
    def _assert_mesh_geometry(mesh_data):
        """Assert the invariants shared by every successfully read mesh:
        non-empty points plus populated face connectivity arrays."""
        assert mesh_data is not None
        assert mesh_data.points is not None
        assert len(mesh_data.points) > 0
        assert mesh_data.face_vertex_counts is not None
        assert mesh_data.face_vertex_indices is not None

    @staticmethod
    def _print_mesh_summary(mesh_data):
        """Print point/face/array counts in the shared report format."""
        print(f" Points: {len(mesh_data.points):,}")
        print(f" Faces: {len(mesh_data.face_vertex_counts):,}")
        print(f" Data arrays: {len(mesh_data.generic_arrays)}")

    def test_read_vtp_file(self):
        """Test reading VTP (PolyData) files."""
        vtp_file = self._kcl_data_dir() / "average_surface.vtp"
        assert vtp_file.exists(), f"VTP file not found: {vtp_file}"

        # Read the file
        mesh_data = read_vtk_file(vtp_file)
        self._assert_mesh_geometry(mesh_data)

        print(f"\n✓ Read VTP file: {vtp_file.name}")
        self._print_mesh_summary(mesh_data)

    def test_read_legacy_vtk_file(self):
        """Test reading legacy VTK files."""
        vtk_file = self._kcl_data_dir() / "average_mesh.vtk"
        assert vtk_file.exists(), f"VTK file not found: {vtk_file}"

        # Read the file with surface extraction (the legacy file needs it to
        # yield face connectivity).
        mesh_data = read_vtk_file(vtk_file, extract_surface=True)
        self._assert_mesh_geometry(mesh_data)

        print(f"\n✓ Read legacy VTK file: {vtk_file.name}")
        self._print_mesh_summary(mesh_data)

    def test_generic_arrays_preserved(self):
        """Test that generic data arrays are preserved during reading."""
        vtp_file = self._kcl_data_dir() / "average_surface.vtp"

        mesh_data = read_vtk_file(vtp_file)

        # The source surface is expected to carry at least one data array.
        assert len(mesh_data.generic_arrays) > 0, "No data arrays found"

        # Each array must be fully described and use one of the supported
        # interpolation modes.
        for array in mesh_data.generic_arrays:
            assert array.name is not None
            assert array.data is not None
            assert array.num_components > 0
            assert array.interpolation in ["vertex", "uniform", "constant"]

        print("\n✓ Generic arrays preserved:")
        for array in mesh_data.generic_arrays:
            print(
                f" - {array.name}: {array.num_components} components, {len(array.data):,} values"
            )
+
+
@pytest.mark.requires_data
class TestVTKToUSDConversion:
    """Test VTK to USD conversion capabilities.

    Every test converts the KCL Heart Model surface and is skipped when
    that manually downloaded data set is absent.
    """

    @staticmethod
    def _setup(test_directories):
        """Skip unless KCL data is present; return ``(output_dir, vtp_file)``.

        Creates the shared output directory used by this test module. The
        four tests previously duplicated this block verbatim.
        """
        if not check_kcl_heart_data():
            pytest.skip(
                "KCL-Heart-Model data not available (must be manually downloaded)"
            )
        output_dir = test_directories["output"] / "vtk_to_usd_library"
        output_dir.mkdir(parents=True, exist_ok=True)
        vtp_file = get_data_dir() / "KCL-Heart-Model" / "average_surface.vtp"
        return output_dir, vtp_file

    def test_single_file_conversion(self, test_directories):
        """Test converting a single VTK file to USD."""
        output_dir, vtp_file = self._setup(test_directories)

        # Convert to USD
        output_usd = output_dir / "heart_surface.usd"
        converter = VTKToUSDConverter()
        stage = converter.convert_file(
            vtp_file,
            output_usd,
            mesh_name="HeartSurface",
        )

        # Verify USD file
        assert output_usd.exists()
        assert stage is not None

        # Check mesh exists in stage
        mesh_prim = stage.GetPrimAtPath("/World/Meshes/HeartSurface")
        assert mesh_prim.IsValid()
        assert mesh_prim.IsA(UsdGeom.Mesh)

        # Check mesh has geometry
        mesh = UsdGeom.Mesh(mesh_prim)
        points = mesh.GetPointsAttr().Get()
        assert len(points) > 0

        print("\n✓ Converted single file to USD")
        print(f" Input: {vtp_file.name}")
        print(f" Output: {output_usd}")
        print(f" Points: {len(points):,}")

    def test_conversion_with_material(self, test_directories):
        """Test conversion with custom material."""
        output_dir, vtp_file = self._setup(test_directories)

        # Create custom material
        material = MaterialData(
            name="heart_tissue",
            diffuse_color=(0.9, 0.3, 0.3),
            roughness=0.4,
            metallic=0.0,
        )

        # Convert with material
        output_usd = output_dir / "heart_with_material.usd"
        converter = VTKToUSDConverter()
        stage = converter.convert_file(
            vtp_file,
            output_usd,
            mesh_name="HeartSurface",
            material=material,
        )

        # Verify the material prim was authored under /World/Looks
        material_path = f"/World/Looks/{material.name}"
        material_prim = stage.GetPrimAtPath(material_path)
        assert material_prim.IsValid()
        assert material_prim.IsA(UsdShade.Material)

        # Verify material is bound to mesh
        mesh_prim = stage.GetPrimAtPath("/World/Meshes/HeartSurface")
        binding_api = UsdShade.MaterialBindingAPI(mesh_prim)
        bound_material = binding_api.ComputeBoundMaterial()[0]
        assert bound_material.GetPrim().IsValid()

        print("\n✓ Converted with custom material")
        print(f" Material: {material.name}")
        print(f" Color: {material.diffuse_color}")

    def test_conversion_settings(self, test_directories):
        """Test conversion with custom settings."""
        output_dir, vtp_file = self._setup(test_directories)

        # Create custom settings
        settings = ConversionSettings(
            triangulate_meshes=True,
            compute_normals=True,
            preserve_point_arrays=True,
            preserve_cell_arrays=True,
            meters_per_unit=0.001,  # mm to meters
            up_axis="Y",
        )

        # Convert with settings
        output_usd = output_dir / "heart_custom_settings.usd"
        converter = VTKToUSDConverter(settings)
        stage = converter.convert_file(vtp_file, output_usd)

        # Verify the settings were applied as stage metadata
        assert UsdGeom.GetStageMetersPerUnit(stage) == 0.001
        assert UsdGeom.GetStageUpAxis(stage) == UsdGeom.Tokens.y

        print("\n✓ Converted with custom settings")
        print(f" Meters per unit: {settings.meters_per_unit}")
        print(f" Up axis: {settings.up_axis}")
        print(f" Compute normals: {settings.compute_normals}")

    def test_primvar_preservation(self, test_directories):
        """Test that VTK data arrays are preserved as USD primvars."""
        output_dir, vtp_file = self._setup(test_directories)

        # Read first to record which arrays the source carries
        mesh_data = read_vtk_file(vtp_file)
        array_names = [arr.name for arr in mesh_data.generic_arrays]

        # Convert without an explicit mesh_name; the prim path below relies
        # on the converter's default name "Mesh"
        output_usd = output_dir / "heart_with_primvars.usd"
        converter = VTKToUSDConverter()
        stage = converter.convert_file(vtp_file, output_usd)

        # Check primvars exist
        mesh_prim = stage.GetPrimAtPath("/World/Meshes/Mesh")
        primvars_api = UsdGeom.PrimvarsAPI(mesh_prim)
        primvars = primvars_api.GetPrimvars()

        primvar_names = [pv.GetPrimvarName() for pv in primvars]

        # Verify at least some arrays were converted to primvars
        assert len(primvar_names) > 0

        print("\n✓ Primvars preserved:")
        print(f" Source arrays: {len(array_names)}")
        print(f" USD primvars: {len(primvar_names)}")
        for name in primvar_names[:5]:  # Show first 5
            print(f" - {name}")
+
+
@pytest.mark.requires_data
class TestTimeSeriesConversion:
    """Tests for converting a sequence of VTK files into an animated stage."""

    def test_time_series_conversion(self, test_directories):
        """Convert the same surface three times as a synthetic time series."""
        if not check_kcl_heart_data():
            pytest.skip(
                "KCL-Heart-Model data not available (must be manually downloaded)"
            )

        out_root = test_directories["output"] / "vtk_to_usd_library"
        out_root.mkdir(parents=True, exist_ok=True)

        source_vtp = get_data_dir() / "KCL-Heart-Model" / "average_surface.vtp"

        # Repeating a single file stands in for a real multi-frame sequence.
        frame_times = [0.0, 1.0, 2.0]
        frame_files = [source_vtp for _ in frame_times]

        # Convert the sequence into one animated USD stage.
        sequence_usd = out_root / "heart_time_series.usd"
        usd_stage = VTKToUSDConverter().convert_sequence(
            vtk_files=frame_files,
            output_usd=sequence_usd,
            time_codes=frame_times,
        )

        # The stage time range must span the supplied time codes.
        assert usd_stage.GetStartTimeCode() == 0.0
        assert usd_stage.GetEndTimeCode() == 2.0

        # The animated points attribute must carry one sample per frame.
        points_attr = UsdGeom.Mesh(
            usd_stage.GetPrimAtPath("/World/Meshes/Mesh")
        ).GetPointsAttr()
        sample_times = points_attr.GetTimeSamples()
        assert len(sample_times) == 3
        assert sample_times == frame_times

        print("\n✓ Converted time series")
        print(f" Frames: {len(frame_files)}")
        print(f" Time codes: {frame_times}")
        print(
            f" Stage time range: {usd_stage.GetStartTimeCode()} - {usd_stage.GetEndTimeCode()}"
        )
+
+
@pytest.mark.slow
class TestIntegration:
    """Integration tests that exercise several library features at once."""

    def test_end_to_end_conversion(self, test_directories):
        """Run the full pipeline: custom settings, material, and primvars."""
        if not check_kcl_heart_data():
            pytest.skip(
                "KCL-Heart-Model data not available (must be manually downloaded)"
            )

        out_root = test_directories["output"] / "vtk_to_usd_library"
        out_root.mkdir(parents=True, exist_ok=True)

        source_vtp = get_data_dir() / "KCL-Heart-Model" / "average_surface.vtp"

        # Full configuration: geometry processing plus stage metadata.
        conversion_settings = ConversionSettings(
            triangulate_meshes=True,
            compute_normals=True,
            preserve_point_arrays=True,
            meters_per_unit=0.001,
            times_per_second=24.0,
        )

        tissue_material = MaterialData(
            name="cardiac_muscle",
            diffuse_color=(0.85, 0.2, 0.2),
            roughness=0.5,
            metallic=0.0,
        )

        # Run the conversion with everything enabled.
        result_path = out_root / "heart_complete.usd"
        usd_stage = VTKToUSDConverter(conversion_settings).convert_file(
            source_vtp,
            result_path,
            mesh_name="CardiacModel",
            material=tissue_material,
        )

        # The conversion must write a file and return a live stage.
        assert result_path.exists()
        assert usd_stage is not None

        # Prim structure: the named mesh lives under /World/Meshes.
        model_prim = usd_stage.GetPrimAtPath("/World/Meshes/CardiacModel")
        assert model_prim.IsValid()

        # Geometry made it across.
        model_points = UsdGeom.Mesh(model_prim).GetPointsAttr().Get()
        assert len(model_points) > 0

        # The material was authored under /World/Looks.
        look_prim = usd_stage.GetPrimAtPath(f"/World/Looks/{tissue_material.name}")
        assert look_prim.IsValid()

        # At least one VTK data array survived as a primvar.
        preserved_primvars = UsdGeom.PrimvarsAPI(model_prim).GetPrimvars()
        assert len(preserved_primvars) > 0

        print("\n✓ End-to-end conversion complete")
        print(f" Output: {result_path}")
        print(f" Size: {result_path.stat().st_size / 1024:.1f} KB")
        print(f" Points: {len(model_points):,}")
        print(f" Primvars: {len(preserved_primvars)}")