`.
-
-
-Final script
-============
-
-The completed final script can be found below:
-
-.. code-block:: python
-
- import os
- from compas.datastructures import Mesh
- import logging
- import compas_slicer.utilities as utils
- from compas_slicer.slicers import InterpolationSlicer
- from compas_slicer.post_processing import simplify_paths_rdp
- from compas_slicer.pre_processing import InterpolationSlicingPreprocessor
- from compas_slicer.print_organization import set_extruder_toggle, set_linear_velocity_by_range
- from compas_slicer.print_organization import add_safety_printpoints
- from compas_slicer.pre_processing import create_mesh_boundary_attributes
- from compas_slicer.print_organization import InterpolationPrintOrganizer
- from compas_slicer.post_processing import seams_smooth
- from compas_slicer.print_organization import smooth_printpoints_up_vectors, smooth_printpoints_layer_heights
- import time
-
- logger = logging.getLogger('logger')
- logging.basicConfig(format='%(levelname)s - %(message)s', level=logging.INFO)
-
- DATA_PATH = os.path.join(os.path.dirname(__file__), 'data_Y_shape')
- OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
- OBJ_INPUT_NAME = os.path.join(DATA_PATH, 'mesh.obj')
-
-
- def main():
- start_time = time.time()
-
- # --- Load initial_mesh
- mesh = Mesh.from_obj(os.path.join(DATA_PATH, OBJ_INPUT_NAME))
-
- # --- Load targets (boundaries)
- low_boundary_vs = utils.load_from_json(DATA_PATH, 'boundaryLOW.json')
- high_boundary_vs = utils.load_from_json(DATA_PATH, 'boundaryHIGH.json')
- create_mesh_boundary_attributes(mesh, low_boundary_vs, high_boundary_vs)
-
- avg_layer_height = 2.0
-
- parameters = {
- 'avg_layer_height': avg_layer_height, # controls number of curves that will be generated
- }
-
- preprocessor = InterpolationSlicingPreprocessor(mesh, parameters, DATA_PATH)
- preprocessor.create_compound_targets()
- g_eval = preprocessor.create_gradient_evaluation(norm_filename='gradient_norm.json', g_filename='gradient.json',
- target_1=preprocessor.target_LOW,
- target_2=preprocessor.target_HIGH)
- preprocessor.find_critical_points(g_eval, output_filenames=['minima.json', 'maxima.json', 'saddles.json'])
-
- # --- slicing
- slicer = InterpolationSlicer(mesh, preprocessor, parameters)
- slicer.slice_model() # compute_norm_of_gradient contours
-
- simplify_paths_rdp(slicer, threshold=0.25)
- seams_smooth(slicer, smooth_distance=3)
- slicer.printout_info()
- utils.save_to_json(slicer.to_data(), OUTPUT_PATH, 'curved_slicer.json')
-
- # --- Print organizer
- print_organizer = InterpolationPrintOrganizer(slicer, parameters, DATA_PATH)
- print_organizer.create_printpoints()
-
- smooth_printpoints_up_vectors(print_organizer, strength=0.5, iterations=10)
- smooth_printpoints_layer_heights(print_organizer, strength=0.5, iterations=5)
-
- set_linear_velocity_by_range(print_organizer, param_func=lambda ppt: ppt.layer_height,
- parameter_range=[avg_layer_height*0.5, avg_layer_height*2.0],
- velocity_range=[150, 70], bound_remapping=False)
- set_extruder_toggle(print_organizer, slicer)
- add_safety_printpoints(print_organizer, z_hop=10.0)
-
- # --- Save printpoints dictionary to json file
- printpoints_data = print_organizer.output_printpoints_dict()
- utils.save_to_json(printpoints_data, OUTPUT_PATH, 'out_printpoints.json')
-
- end_time = time.time()
- print("Total elapsed time", round(end_time - start_time, 2), "seconds")
-
-
- if __name__ == "__main__":
- main()
diff --git a/docs/examples/03_planar_slicing_vertical_sorting.rst b/docs/examples/03_planar_slicing_vertical_sorting.rst
deleted file mode 100644
index 0e6909e7..00000000
--- a/docs/examples/03_planar_slicing_vertical_sorting.rst
+++ /dev/null
@@ -1,96 +0,0 @@
-.. _compas_slicer_example_3:
-
-************************************
-Planar slicing with vertical sorting
-************************************
-
-During the print of branching shell shapes, the layers can be sorted;
-- following a horizontal logic, with all paths that are on the same height being adjacent.
-- following a vertical logic, with all paths that are on the same branch being adjacent.
-The vertical sorting can enable significant reduction of the interruptions of the print and the traveling of the tool
-from one path to the next, as it is shown in the illustration below.
-
-.. figure:: figures/03_print_organization_planar_slicing_vertical_sorting.png
- :figclass: figure
- :class: figure-img img-fluid
-
- *Fabrication path using horizontal sorting (left), and vertical sorting (right). The traveling paths are shown with orange lines.*
-
-In planar slicing, horizontal ordering of paths is the default method, while in non-planar slicing vertical ordering of paths is
-the default method. The example below demonstrates how planar paths can be sorted in a vertical logic. Its files can be found in the folder
-`/examples/3_planar_vertical_sorting/`. Once you have
-run the python file to generate the results, you can visualize them by opening the grasshopper file.
-
-
-.. code-block:: python
-
- import os
- import logging
-
- import compas_slicer.utilities as utils
- from compas_slicer.pre_processing import move_mesh_to_point
- from compas_slicer.slicers import PlanarSlicer
- from compas_slicer.post_processing import generate_brim
- from compas_slicer.post_processing import simplify_paths_rdp_igl
- from compas_slicer.post_processing import sort_into_vertical_layers
- from compas_slicer.post_processing import reorder_vertical_layers
- from compas_slicer.post_processing import seams_smooth
- from compas_slicer.print_organization import PlanarPrintOrganizer
- from compas_slicer.print_organization import set_extruder_toggle
- from compas_slicer.print_organization import add_safety_printpoints
- from compas_slicer.print_organization import set_linear_velocity_constant
- from compas_slicer.print_organization import set_blend_radius
- from compas_slicer.utilities import save_to_json
- from compas.datastructures import Mesh
- from compas.geometry import Point
-
- # ==============================================================================
- # Logging
- # ==============================================================================
- logger = logging.getLogger('logger')
- logging.basicConfig(format='%(levelname)s-%(message)s', level=logging.INFO)
-
- # ==============================================================================
- # Select location of data folder and specify model to slice
- # ==============================================================================
- DATA = os.path.join(os.path.dirname(__file__), 'data')
- OUTPUT_DIR = utils.get_output_directory(DATA) # creates 'output' folder if it doesn't already exist
- MODEL = 'distorted_v_closed_mid_res.obj'
-
-
- def main():
- compas_mesh = Mesh.from_obj(os.path.join(DATA, MODEL))
- move_mesh_to_point(compas_mesh, Point(0, 0, 0))
-
- # Slicing
- slicer = PlanarSlicer(compas_mesh, slicer_type="cgal", layer_height=5.0)
- slicer.slice_model()
-
- # Sorting into vertical layers and reordering
- sort_into_vertical_layers(slicer, max_paths_per_layer=25)
- reorder_vertical_layers(slicer, align_with="x_axis")
-
- # Post-processing
- generate_brim(slicer, layer_width=3.0, number_of_brim_offsets=5)
- simplify_paths_rdp_igl(slicer, threshold=0.7)
- seams_smooth(slicer, smooth_distance=10)
- slicer.printout_info()
- save_to_json(slicer.to_data(), OUTPUT_DIR, 'slicer_data.json')
-
- # PlanarPrintOrganization
- print_organizer = PlanarPrintOrganizer(slicer)
- print_organizer.create_printpoints()
-
- set_extruder_toggle(print_organizer, slicer)
- add_safety_printpoints(print_organizer, z_hop=10.0)
- set_linear_velocity_constant(print_organizer, v=25.0)
- set_blend_radius(print_organizer, d_fillet=10.0)
-
- print_organizer.printout_info()
-
- printpoints_data = print_organizer.output_printpoints_dict()
- utils.save_to_json(printpoints_data, OUTPUT_DIR, 'out_printpoints.json')
-
-
- if __name__ == "__main__":
- main()
diff --git a/docs/examples/03_vertical_sorting.md b/docs/examples/03_vertical_sorting.md
new file mode 100644
index 00000000..afd1f280
--- /dev/null
+++ b/docs/examples/03_vertical_sorting.md
@@ -0,0 +1,159 @@
+# Vertical Layer Sorting
+
+This example demonstrates how to handle models with multiple disconnected regions at the same height - like V-shapes, trees, or branching structures.
+
+
+
+## What You'll Learn
+
+- Sorting paths into vertical layers (disconnected regions)
+- Reordering vertical layers for optimal printing
+- Setting blend radius for smooth robotic motion
+
+## The Problem
+
+When slicing a V-shaped or branching model, a single horizontal slice may intersect multiple disconnected regions:
+
+```
+ Layer 5: ● ● (two separate contours)
+ Layer 4: ● ●
+ Layer 3: ● ●
+ Layer 2: ● ●
+ Layer 1: ● ●
+ Layer 0: ●●●● (one contour at base)
+```
+
+Without vertical sorting, the printer jumps between left and right branches randomly. With vertical sorting, each branch is printed completely before moving to the next.
+
+## The Solution
+
+```mermaid
+flowchart LR
+ A[Planar Slicing] --> B[Sort into Vertical Layers]
+ B --> C[Reorder Vertical Layers]
+ C --> D[Print Organization]
+```
+
+## Step-by-Step Walkthrough
+
+### 1. Standard Planar Slicing
+
+```python
+from compas.datastructures import Mesh
+from compas.geometry import Point
+from compas_slicer.slicers import PlanarSlicer
+from compas_slicer.pre_processing import move_mesh_to_point
+
+mesh = Mesh.from_obj(DATA_PATH / 'distorted_v_closed_mid_res.obj')
+move_mesh_to_point(mesh, Point(0, 0, 0))
+
+slicer = PlanarSlicer(mesh, layer_height=5.0)
+slicer.slice_model()
+```
+
+### 2. Sort into Vertical Layers
+
+```python
+from compas_slicer.post_processing import sort_into_vertical_layers
+
+sort_into_vertical_layers(
+ slicer,
+ dist_threshold=25.0, # Max distance to group paths together
+ max_paths_per_layer=25 # Max paths per vertical layer
+)
+```
+
+This groups paths into "vertical layers" - continuous vertical columns that can be printed without jumping.
+
+**Parameters:**
+
+| Parameter | Description |
+|-----------|-------------|
+| `dist_threshold` | Maximum XY distance between paths to be in same vertical layer |
+| `max_paths_per_layer` | Limit on paths per vertical layer (prevents huge groups) |
+
+### 3. Reorder Vertical Layers
+
+```python
+from compas_slicer.post_processing import reorder_vertical_layers
+
+reorder_vertical_layers(slicer, align_with="x_axis")
+```
+
+Orders the vertical layers for optimal printing sequence. Options:
+
+| Mode | Description |
+|------|-------------|
+| `"x_axis"` | Left to right |
+| `"y_axis"` | Front to back |
+| `"centroids"` | By centroid position |
+
+### 4. Standard Post-Processing
+
+```python
+from compas_slicer.post_processing import generate_brim, simplify_paths_rdp, seams_smooth
+
+generate_brim(slicer, layer_width=3.0, number_of_brim_offsets=5)
+simplify_paths_rdp(slicer, threshold=0.7)
+seams_smooth(slicer, smooth_distance=10)
+```
+
+### 5. Print Organization with Blend Radius
+
+```python
+from compas_slicer.print_organization import (
+ PlanarPrintOrganizer,
+ set_extruder_toggle,
+ add_safety_printpoints,
+ set_linear_velocity_constant,
+ set_blend_radius,
+)
+
+print_organizer = PlanarPrintOrganizer(slicer)
+print_organizer.create_printpoints()
+
+set_extruder_toggle(print_organizer, slicer)
+add_safety_printpoints(print_organizer, z_hop=10.0)
+set_linear_velocity_constant(print_organizer, v=25.0)
+
+# Blend radius for smooth robot motion
+set_blend_radius(print_organizer, d_fillet=10.0)
+```
+
+!!! info "What is blend radius?"
+ Blend radius (or fillet) smooths sharp corners in the robot path. Instead of stopping at each point, the robot blends through with a curved motion.
+
+ - `d_fillet=0`: Stop at each point (precise but slow)
+ - `d_fillet=10`: 10mm blend radius (smooth and fast)
+
+## Complete Code
+
+```python
+--8<-- "examples/3_planar_slicing_vertical_sorting/example_3_planar_vertical_sorting.py"
+```
+
+## Running the Example
+
+```bash
+cd examples/3_planar_slicing_vertical_sorting
+python example_3_planar_vertical_sorting.py --visualize
+```
+
+## When to Use Vertical Sorting
+
+Use vertical sorting when your model has:
+
+- **Branching geometry**: Y-shapes, trees, coral-like structures
+- **Multiple parts**: Several objects on one build plate
+- **Islands**: Disconnected regions at the same height
+
+## Key Takeaways
+
+1. **Vertical sorting groups continuous regions**: Print one branch completely before moving to the next
+2. **Reordering optimizes travel**: Choose sensible order to minimize jumps
+3. **Blend radius matters for robots**: Smooth motion reduces wear and improves quality
+
+## Next Steps
+
+- [G-code Generation](04_gcode.md) - Export for desktop printers
+- [Curved Slicing](02_curved_slicing.md) - Non-planar approach for branching
diff --git a/docs/examples/04_gcode.md b/docs/examples/04_gcode.md
new file mode 100644
index 00000000..c022921c
--- /dev/null
+++ b/docs/examples/04_gcode.md
@@ -0,0 +1,280 @@
+# G-code Generation
+
+This example demonstrates exporting toolpaths to G-code for desktop FDM 3D printers like Prusa, Ender, or Ultimaker.
+
+## What You'll Learn
+
+- Configuring printer parameters with `GcodeConfig`
+- Positioning mesh for delta vs cartesian printers
+- Generating G-code from printpoints
+- Understanding the G-code structure (header, purge, toolpath, footer)
+
+## G-code Basics
+
+G-code is the standard language for CNC machines and 3D printers. Each line is a command:
+
+```gcode
+G1 X100 Y50 Z0.2 E1.5 F3600 ; Move to X=100, Y=50, Z=0.2 while extruding 1.5mm at 3600mm/min
+```
+
+Common commands:
+
+| Command | Description |
+|---------|-------------|
+| `G0` / `G1` | Linear move (G0 = rapid, G1 = controlled) |
+| `G28` | Home axes |
+| `G90` / `G91` | Absolute / relative positioning |
+| `M104` / `M109` | Set extruder temp (no wait / wait) |
+| `M140` / `M190` | Set bed temp (no wait / wait) |
+| `M106` / `M107` | Fan on / off |
+
+## The Pipeline
+
+```mermaid
+flowchart LR
+ A[Slice Mesh] --> B[Create PrintPoints]
+ B --> C[Configure GcodeConfig]
+ C --> D[Generate G-code]
+ D --> E[Save .gcode File]
+```
+
+## Step-by-Step Walkthrough
+
+### 1. Setup and Configuration
+
+```python
+from pathlib import Path
+from compas.datastructures import Mesh
+from compas.geometry import Point
+from compas_slicer.config import GcodeConfig
+from compas_slicer.pre_processing import move_mesh_to_point
+
+mesh = Mesh.from_obj(DATA_PATH / 'simple_vase_open_low_res.obj')
+
+# Create G-code configuration
+gcode_config = GcodeConfig()
+```
+
+### 2. Position Mesh for Printer Type
+
+```python
+if gcode_config.delta:
+ # Delta printers: origin at center
+ move_mesh_to_point(mesh, Point(0, 0, 0))
+else:
+ # Cartesian printers: center in build volume
+ move_mesh_to_point(mesh, Point(
+ gcode_config.print_volume_x / 2,
+ gcode_config.print_volume_y / 2,
+ 0
+ ))
+```
+
+!!! info "Delta vs Cartesian"
+ - **Delta printers**: Circular build plate, origin at center (0, 0, 0)
+ - **Cartesian printers**: Rectangular build plate, origin at corner (0, 0, 0)
+
+### 3. Slice and Process
+
+```python
+from compas_slicer.slicers import PlanarSlicer
+from compas_slicer.post_processing import generate_brim, simplify_paths_rdp, seams_smooth
+
+slicer = PlanarSlicer(mesh, layer_height=4.5)
+slicer.slice_model()
+
+generate_brim(slicer, layer_width=3.0, number_of_brim_offsets=4)
+simplify_paths_rdp(slicer, threshold=0.6)
+seams_smooth(slicer, smooth_distance=10)
+```
+
+### 4. Create PrintPoints
+
+```python
+from compas_slicer.print_organization import PlanarPrintOrganizer, set_extruder_toggle
+
+print_organizer = PlanarPrintOrganizer(slicer)
+print_organizer.create_printpoints()
+set_extruder_toggle(print_organizer, slicer)
+```
+
+### 5. Generate and Save G-code
+
+```python
+from compas_slicer.utilities import save_to_text_file
+
+gcode_text = print_organizer.output_gcode(gcode_config)
+save_to_text_file(gcode_text, OUTPUT_PATH, 'my_gcode.gcode')
+```
+
+## GcodeConfig Parameters
+
+The `GcodeConfig` dataclass controls all printer parameters:
+
+### Hardware Settings
+
+| Parameter | Default | Description |
+|-----------|---------|-------------|
+| `nozzle_diameter` | 0.4 mm | Nozzle diameter |
+| `filament_diameter` | 1.75 mm | Filament diameter (1.75 or 2.85) |
+| `delta` | False | Delta printer flag |
+| `print_volume` | (300, 300, 600) | Build volume (x, y, z) in mm |
+
+### Temperature & Fan
+
+| Parameter | Default | Description |
+|-----------|---------|-------------|
+| `extruder_temperature` | 200°C | Hotend temperature |
+| `bed_temperature` | 60°C | Heated bed temperature |
+| `fan_speed` | 255 | Part cooling fan (0-255) |
+| `fan_start_z` | 0.0 mm | Height to enable fan |
+
+### Extrusion
+
+| Parameter | Default | Description |
+|-----------|---------|-------------|
+| `layer_width` | 0.6 mm | Extrusion width |
+| `flowrate` | 1.0 | Flow multiplier |
+| `flow_over` | 1.0 | Overextrusion factor near bed |
+| `min_over_z` | 0.0 mm | Height for overextrusion |
+
+### Motion
+
+| Parameter | Default | Description |
+|-----------|---------|-------------|
+| `feedrate` | 3600 mm/min | Print speed (60 mm/s) |
+| `feedrate_travel` | 4800 mm/min | Travel speed (80 mm/s) |
+| `feedrate_low` | 1800 mm/min | First layer speed (30 mm/s) |
+| `feedrate_retraction` | 2400 mm/min | Retraction speed |
+| `acceleration` | 0 | Acceleration (0 = default) |
+| `jerk` | 0 | Jerk (0 = default) |
+
+### Retraction
+
+| Parameter | Default | Description |
+|-----------|---------|-------------|
+| `z_hop` | 0.5 mm | Z lift during travel |
+| `retraction_length` | 1.0 mm | Filament retraction distance |
+| `retraction_min_travel` | 6.0 mm | Minimum travel to trigger retraction |
+
+## Custom Configuration
+
+Override defaults when creating the config:
+
+```python
+gcode_config = GcodeConfig(
+ extruder_temperature=210,
+ bed_temperature=65,
+ feedrate=2400, # 40 mm/s
+ layer_width=0.45,
+ retraction_length=0.8,
+)
+```
+
+Or load from a TOML file using `PrintConfig`:
+
+```python
+from compas_slicer.config import PrintConfig
+
+config = PrintConfig.from_toml("my_printer.toml")
+gcode_config = config.gcode
+```
+
+## G-code Structure
+
+The generated G-code has four sections:
+
+### 1. Header
+```gcode
+;G-code generated by compas_slicer
+T0 ;select tool 0
+G21 ;metric units
+G90 ;absolute positioning
+M140 S60 ;set bed temp (no wait)
+M104 S200 ;set extruder temp (no wait)
+M109 S200 ;wait for extruder temp
+M190 S60 ;wait for bed temp
+G28 X0 Y0 ;home X and Y
+G28 Z0 ;home Z
+```
+
+### 2. Purge Line
+```gcode
+;Purge line
+G1 Z0.2 ;move to purge height
+G1 X5 Y5 ;move to purge start
+G1 Y150 E3.5 ;purge line 1
+G1 X5.6 ;step over
+G1 Y5 E3.5 ;purge line 2
+G92 E0 ;reset extruder position
+```
+
+### 3. Toolpath
+```gcode
+;Begin toolpath
+G1 F1800 ;slow feedrate for adhesion
+G1 X50.000 Y30.000 Z0.200
+G1 X51.234 Y30.567 E0.125
+...
+```
+
+### 4. Footer
+```gcode
+;End of print
+G1 E-1.0 ;final retract
+G1 Z10.000 ;lift nozzle
+G1 X0 Y0 ;move to home
+M104 S0 ;extruder heater off
+M140 S0 ;bed heater off
+M84 ;motors off
+```
+
+## Volumetric Extrusion
+
+The G-code generator calculates extrusion using volumetric math:
+
+$$E = \frac{d \cdot h \cdot w}{\pi (D/2)^2} \cdot f$$
+
+Where:
+
+- $E$ = extrusion length (mm)
+- $d$ = travel distance (mm)
+- $h$ = layer height (mm)
+- $w$ = path width (mm)
+- $D$ = filament diameter (mm)
+- $f$ = flow rate multiplier
+
+This ensures correct material deposition regardless of layer height or path width.
+
+## Complete Code
+
+```python
+--8<-- "examples/4_gcode_generation/example_4_gcode.py"
+```
+
+## Running the Example
+
+```bash
+cd examples/4_gcode_generation
+python example_4_gcode.py
+```
+
+With visualization:
+
+```bash
+python example_4_gcode.py --visualize
+```
+
+Output: `examples/4_gcode_generation/data/output/my_gcode.gcode`
+
+## Key Takeaways
+
+1. **Configure for your printer**: Set temperatures, speeds, and retraction for your specific machine
+2. **Position mesh correctly**: Delta vs cartesian printers have different origins
+3. **Volumetric extrusion**: Automatically calculates correct E values
+4. **Modular structure**: Header/purge/toolpath/footer makes debugging easier
+
+## Next Steps
+
+- [Scalar Field Slicing](05_scalar_field.md) - Custom slicing patterns
+- [Print Organization](../concepts/print-organization.md) - More fabrication parameters
diff --git a/docs/examples/04_gcode_generation.rst b/docs/examples/04_gcode_generation.rst
deleted file mode 100644
index 76f0f169..00000000
--- a/docs/examples/04_gcode_generation.rst
+++ /dev/null
@@ -1,72 +0,0 @@
-.. _compas_slicer_example_4:
-
-************************************
-Gcode generation
-************************************
-
-While compas slicer has been mostly developed for robotic printing, we can also export the gcode of the generated paths
-to materialize them in a typical desktop 3D printer. The gcode generation is still at a basic level and is a work in progress.
-The following file can be found in `/examples/4_gcode_generation/`. The gcode file is placed in the `/output/` folder.
-
-
-.. code-block:: python
-
- import os
- import logging
- import compas_slicer.utilities as utils
- from compas_slicer.pre_processing import move_mesh_to_point
- from compas_slicer.slicers import PlanarSlicer
- from compas_slicer.post_processing import generate_brim
- from compas_slicer.post_processing import simplify_paths_rdp_igl
- from compas_slicer.post_processing import seams_smooth
- from compas_slicer.print_organization import PlanarPrintOrganizer
- from compas_slicer.print_organization import set_extruder_toggle
- from compas_slicer.utilities import save_to_json
- from compas_slicer.parameters import get_param
-
- from compas.datastructures import Mesh
- from compas.geometry import Point
-
- logger = logging.getLogger('logger')
- logging.basicConfig(format='%(levelname)s-%(message)s', level=logging.INFO)
-
- DATA = os.path.join(os.path.dirname(__file__), 'data')
- OUTPUT_DIR = utils.get_output_directory(DATA) # creates 'output' folder if it doesn't already exist
- MODEL = 'simple_vase_open_low_res.obj'
-
-
- def main():
-
- compas_mesh = Mesh.from_obj(os.path.join(DATA, MODEL))
- delta = get_param({}, key='delta', defaults_type='gcode') # boolean for delta printers
- print_volume_x = get_param({}, key='print_volume_x', defaults_type='gcode') # in mm
- print_volume_y = get_param({}, key='print_volume_y', defaults_type='gcode') # in mm
- if delta:
- move_mesh_to_point(compas_mesh, Point(0, 0, 0))
- else:
- move_mesh_to_point(compas_mesh, Point(print_volume_x/2, print_volume_y/2, 0))
-
- # ----- slicing
- slicer = PlanarSlicer(compas_mesh, slicer_type="cgal", layer_height=4.5)
- slicer.slice_model()
- generate_brim(slicer, layer_width=3.0, number_of_brim_offsets=4)
- simplify_paths_rdp_igl(slicer, threshold=0.6)
- seams_smooth(slicer, smooth_distance=10)
- slicer.printout_info()
- save_to_json(slicer.to_data(), OUTPUT_DIR, 'slicer_data.json')
-
- # ----- print organization
- print_organizer = PlanarPrintOrganizer(slicer)
- print_organizer.create_printpoints()
- # Set fabrication-related parameters
- set_extruder_toggle(print_organizer, slicer)
- print_organizer.printout_info()
-
- # create and output gcode
- gcode_parameters = {} # leave all to default
- gcode_text = print_organizer.output_gcode(gcode_parameters)
- utils.save_to_text_file(gcode_text, OUTPUT_DIR, 'my_gcode.gcode')
-
-
- if __name__ == "__main__":
- main()
diff --git a/docs/examples/05_non_planar_slicing_on_custom_base.rst b/docs/examples/05_non_planar_slicing_on_custom_base.rst
deleted file mode 100644
index 36983f70..00000000
--- a/docs/examples/05_non_planar_slicing_on_custom_base.rst
+++ /dev/null
@@ -1,73 +0,0 @@
-.. _compas_slicer_example_5:
-
-************************************
-Non-planar slicing on custom base
-************************************
-
-In this example we describe the process of non-planar slicing of a mesh, generating paths that are an offset to its
-custom base. We are using the ``ScalarFieldSlicer`` cleass, which generates paths as contours of a scalar field defined on every
-vertex of the mesh. In this case we create a scalar field with the distance of each vertex from the custom base.
-
-.. figure:: figures/05_scalar_field_slicing.PNG
- :figclass: figure
- :class: figure-img img-fluid
-
- *Result of scalar field slicing considering the distance of each vertex from the custom base.*
-
-The files for this example can be found on the folder `/examples/5_non_planar_slicing_on_custom_base/`. Once you have
-run the python file to generate the results, you can visualize them by opening the grasshopper file
-`visualization_scalar_field_slicing.gh`.
-
-.. code-block:: python
-
- import logging
- from compas.geometry import distance_point_point
- from compas.datastructures import Mesh
- import os
- import compas_slicer.utilities as slicer_utils
- from compas_slicer.post_processing import simplify_paths_rdp_igl
- from compas_slicer.slicers import ScalarFieldSlicer
- import compas_slicer.utilities as utils
- from compas_slicer.print_organization import ScalarFieldPrintOrganizer
-
- logger = logging.getLogger('logger')
- logging.basicConfig(format='%(levelname)s-%(message)s', level=logging.INFO)
-
- DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')
- OUTPUT_PATH = slicer_utils.get_output_directory(DATA_PATH)
- MODEL = 'geom_to_slice.obj'
- BASE = 'custom_base.obj'
-
- if __name__ == '__main__':
-
- # --- load meshes
- mesh = Mesh.from_obj(os.path.join(DATA_PATH, MODEL))
- base = Mesh.from_obj(os.path.join(DATA_PATH, BASE))
-
- # --- Create per-vertex scalar field (distance of every vertex from the custom base)
- pts = [mesh.vertex_coordinates(v_key, axes='xyz') for v_key in
- mesh.vertices()] # list of the vertex coordinates of the mesh as compas.geometry.Point instances
- _, projected_pts = utils.pull_pts_to_mesh_faces(base, pts) # list with projections of all mesh vertices on the mesh
- u = [distance_point_point(pt, proj_pt) for pt, proj_pt in
- zip(pts, projected_pts)] # list with distance between initial+projected pts (one per vertex)
- utils.save_to_json(u, OUTPUT_PATH, 'distance_field.json') # save distance field to json for visualization
-
- # --- assign the scalar field to the mesh's attributes, under the name 'scalar_field'
- mesh.update_default_vertex_attributes({'scalar_field': 0.0})
- for i, (v_key, data) in enumerate(mesh.vertices(data=True)):
- data['scalar_field'] = u[i]
-
- # --- Slice model by generating contours of scalar field
- slicer = ScalarFieldSlicer(mesh, u, no_of_isocurves=50)
- slicer.slice_model()
- # simplify_paths_rdp(slicer, threshold=0.3)
- slicer_utils.save_to_json(slicer.to_data(), OUTPUT_PATH, 'isocontours.json') # save results to json
-
- # --- Print organization calculations (i.e. generation of printpoints with fabrication-related information)
- simplify_paths_rdp_igl(slicer, threshold=0.3)
- print_organizer = ScalarFieldPrintOrganizer(slicer, parameters={}, DATA_PATH=DATA_PATH)
- print_organizer.create_printpoints()
-
- print_organizer.printout_info()
- printpoints_data = print_organizer.output_printpoints_dict()
- utils.save_to_json(printpoints_data, OUTPUT_PATH, 'out_printpoints.json') # save results to json
diff --git a/docs/examples/05_scalar_field.md b/docs/examples/05_scalar_field.md
new file mode 100644
index 00000000..3036b4ab
--- /dev/null
+++ b/docs/examples/05_scalar_field.md
@@ -0,0 +1,269 @@
+# Scalar Field Slicing
+
+This example demonstrates non-planar slicing using a custom scalar field - enabling you to print along arbitrary surfaces rather than horizontal planes.
+
+## What You'll Learn
+
+- Creating a scalar field from geometric distance
+- Assigning scalar values to mesh vertices
+- Slicing with `ScalarFieldSlicer`
+- Printing on top of existing geometry
+
+## Why Scalar Field Slicing?
+
+Traditional planar slicing creates horizontal layers. But what if you want to:
+
+- **Print on a curved base**: Add features on top of an existing surface
+- **Follow terrain**: Print conformal layers over a landscape
+- **Custom layer patterns**: Define exactly where each layer goes
+
+Scalar field slicing generates isocurves (contours) from any per-vertex scalar field, not just Z-height.
+
+```
+Planar slicing: Scalar field slicing:
+ _____ _____
+ |_____| Z=3 / \ u=3
+ |_____| Z=2 / \ u=2
+ |_____| Z=1 ( ) u=1
+ |_____| Z=0 \_______/ u=0 (base surface)
+```
+
+## The Pipeline
+
+```mermaid
+flowchart TD
+ A[Load Mesh to Slice] --> B[Load Base Surface]
+ B --> C[Compute Distance Field]
+ C --> D[Assign to Vertices]
+ D --> E[ScalarFieldSlicer]
+ E --> F[Extract Isocurves]
+ F --> G[Create PrintPoints]
+```
+
+## Step-by-Step Walkthrough
+
+### 1. Load Meshes
+
+We need two meshes:
+
+- **Mesh to slice**: The geometry you want to print
+- **Base surface**: The surface to print on top of
+
+```python
+from pathlib import Path
+from compas.datastructures import Mesh
+
+mesh = Mesh.from_obj(DATA_PATH / 'geom_to_slice.obj') # Geometry to slice
+base = Mesh.from_obj(DATA_PATH / 'custom_base.obj') # Base surface
+```
+
+### 2. Compute Distance Field
+
+For each vertex of the mesh, compute its distance to the base surface:
+
+```python
+from compas.geometry import distance_point_point
+import compas_slicer.utilities as utils
+
+# Get all vertex coordinates
+pts = [mesh.vertex_coordinates(v_key, axes='xyz') for v_key in mesh.vertices()]
+
+# Project points onto base surface
+_, projected_pts = utils.pull_pts_to_mesh_faces(base, pts)
+
+# Compute distance from each point to its projection
+u = [distance_point_point(pt, proj_pt) for pt, proj_pt in zip(pts, projected_pts)]
+```
+
+This creates a scalar field `u` where:
+
+- `u = 0` at points on the base surface
+- `u > 0` at points above the base
+- The value increases with distance from the base
+
+### 3. Assign Scalar Field to Mesh
+
+Store the scalar values as vertex attributes:
+
+```python
+mesh.update_default_vertex_attributes({'scalar_field': 0.0})
+
+for i, (v_key, data) in enumerate(mesh.vertices(data=True)):
+ data['scalar_field'] = u[i]
+```
+
+### 4. Slice with Scalar Field
+
+```python
+from compas_slicer.slicers import ScalarFieldSlicer
+
+slicer = ScalarFieldSlicer(mesh, u, no_of_isocurves=50)
+slicer.slice_model()
+```
+
+The slicer:
+
+1. Sorts vertices by scalar value
+2. Determines the range `[u_min, u_max]`
+3. Creates `no_of_isocurves` evenly spaced threshold values
+4. Extracts the zero-crossing contour for each threshold
+
+**Parameters:**
+
+| Parameter | Description |
+|-----------|-------------|
+| `mesh` | The mesh to slice |
+| `u` | List of scalar values (one per vertex) |
+| `no_of_isocurves` | Number of contours to extract |
+
+### 5. Simplify and Create PrintPoints
+
+```python
+from compas_slicer.post_processing import simplify_paths_rdp
+from compas_slicer.print_organization import ScalarFieldPrintOrganizer
+
+simplify_paths_rdp(slicer, threshold=0.3)
+
+print_organizer = ScalarFieldPrintOrganizer(slicer, DATA_PATH=DATA_PATH)
+print_organizer.create_printpoints()
+```
+
+### 6. Export Results
+
+```python
+printpoints_data = print_organizer.output_printpoints_dict()
+utils.save_to_json(printpoints_data, OUTPUT_PATH, 'out_printpoints.json')
+```
+
+## How It Works
+
+### Isocurve Extraction
+
+An isocurve is a contour where the scalar field equals a specific value. For a threshold $t$:
+
+$$\text{isocurve}_t = \{p \in \text{mesh} : u(p) = t\}$$
+
+The algorithm finds edges where the scalar field crosses the threshold:
+
+```
+Vertex A (u=2.3)
+ ●
+ \ Threshold t=2.0
+ \ Isocurve crosses here!
+ ●
+Vertex B (u=1.7)
+```
+
+The crossing point is found by linear interpolation:
+
+$$p = A + \frac{t - u_A}{u_B - u_A}(B - A)$$
+
+### Choosing Number of Isocurves
+
+The number of isocurves determines layer height:
+
+$$\text{avg layer height} \approx \frac{u_{max} - u_{min}}{\text{no\_of\_isocurves}}$$
+
+More isocurves = thinner layers = smoother surface but longer print time.
+
+## Custom Scalar Fields
+
+You can use any scalar field, not just distance:
+
+### Height-Based Field
+
+```python
+u = [mesh.vertex_coordinates(v)[2] for v in mesh.vertices()]
+```
+
+This is equivalent to planar slicing.
+
+### Radial Field
+
+```python
+from compas.geometry import Point, distance_point_point
+
+center = Point(0, 0, 0)
+u = [distance_point_point(mesh.vertex_coordinates(v), center)
+ for v in mesh.vertices()]
+```
+
+Creates concentric circular layers (spiral vase mode).
+
+### Geodesic Field
+
+```python
+# Using igl for geodesic distance from boundary vertices
+import igl
+distances = igl.exact_geodesic(V, F, boundary_vertices)
+```
+
+Creates layers that follow surface curvature.
+
+## Complete Code
+
+```python
+--8<-- "examples/5_non_planar_slicing_on_custom_base/scalar_field_slicing.py"
+```
+
+## Running the Example
+
+```bash
+cd examples/5_non_planar_slicing_on_custom_base
+python scalar_field_slicing.py
+```
+
+With visualization:
+
+```bash
+python scalar_field_slicing.py --visualize
+```
+
+## Output Files
+
+| File | Description |
+|------|-------------|
+| `distance_field.json` | Scalar values for visualization |
+| `isocontours.json` | Slicer output data |
+| `out_printpoints.json` | Final printpoints |
+
+## Use Cases
+
+### Printing on Curved Surfaces
+
+Print text or patterns on top of a curved object:
+
+1. Load the base object
+2. Create the feature geometry offset above the surface
+3. Use distance-to-surface as scalar field
+4. Layers conform to the base curvature
+
+### Conformal Printing
+
+Print a thin shell that follows terrain:
+
+1. Load terrain mesh as base
+2. Create offset shell above terrain
+3. Scalar field = distance from terrain
+4. Each layer follows the terrain contours
+
+### Variable Layer Height
+
+Combine with other slicing methods:
+
+1. Planar slice the main body
+2. Scalar field slice the curved top
+3. Merge the toolpaths
+
+## Key Takeaways
+
+1. **Scalar field defines layers**: Any per-vertex value can drive slicing
+2. **Distance field is common**: Distance from base surface creates conformal layers
+3. **Isocurves are contours**: Zero-crossings at each threshold value
+4. **Number of curves = resolution**: More isocurves = finer layers
+
+## Next Steps
+
+- [Attribute Transfer](06_attributes.md) - Transfer mesh properties to toolpaths
+- [Slicing Algorithms](../concepts/slicing-algorithms.md) - Deep dive into theory
+- [Curved Slicing](02_curved_slicing.md) - Geodesic interpolation approach
diff --git a/docs/examples/06_attributes.md b/docs/examples/06_attributes.md
new file mode 100644
index 00000000..0ef1d2c9
--- /dev/null
+++ b/docs/examples/06_attributes.md
@@ -0,0 +1,289 @@
+# Attribute Transfer
+
+This example demonstrates how to transfer mesh attributes (overhang angles, normals, colors, custom data) to printpoints for variable printing parameters based on geometry.
+
+## What You'll Learn
+
+- Adding face and vertex attributes to meshes
+- Transferring attributes from mesh to printpoints
+- Using transferred data for variable printing
+- Understanding face vs vertex attribute interpolation
+
+## Why Attribute Transfer?
+
+Different parts of a model may need different printing parameters:
+
+- **Overhangs** need slower speeds and more cooling
+- **Visible surfaces** need finer resolution
+- **Structural areas** need higher infill
+- **Colored regions** need different materials
+
+Attribute transfer lets you encode this information on the mesh and automatically apply it to toolpaths.
+
+## The Pipeline
+
+```mermaid
+flowchart LR
+ A[Mesh] --> B[Add Attributes]
+ B --> C[Slice]
+ C --> D[Create PrintPoints]
+ D --> E[Transfer Attributes]
+ E --> F[Variable Parameters]
+```
+
+## Step-by-Step Walkthrough
+
+### 1. Load Mesh
+
+```python
+from pathlib import Path
+from compas.datastructures import Mesh
+
+mesh = Mesh.from_obj(DATA_PATH / 'distorted_v_closed_low_res.obj')
+```
+
+### 2. Add Face Attributes
+
+Face attributes are values assigned to each face of the mesh. They can be any type: float, bool, string, list, etc.
+
+#### Overhang Angle (Float)
+
+Calculate how much each face is tilted from vertical:
+
+```python
+from compas.geometry import Vector
+
+mesh.update_default_face_attributes({'overhang': 0.0})
+
+for f_key, data in mesh.faces(data=True):
+ face_normal = mesh.face_normal(f_key, unitized=True)
+ # Dot product with up vector: 1 = horizontal face, 0 = vertical face
+ data['overhang'] = Vector(0.0, 0.0, 1.0).dot(face_normal)
+```
+
+| Overhang Value | Meaning |
+|----------------|---------|
+| 1.0 | Horizontal (flat top) |
+| 0.0 | Vertical (wall) |
+| -1.0 | Horizontal facing down (overhang) |
+
+#### Boolean Attribute
+
+Check if face normal points toward positive Y:
+
+```python
+mesh.update_default_face_attributes({'positive_y_axis': False})
+
+for f_key, data in mesh.faces(data=True):
+ face_normal = mesh.face_normal(f_key, unitized=True)
+ is_positive_y = Vector(0.0, 1.0, 0.0).dot(face_normal) > 0
+ data['positive_y_axis'] = is_positive_y
+```
+
+### 3. Add Vertex Attributes
+
+Vertex attributes must be numeric types that can be interpolated (float, numpy array).
+
+#### Distance from Plane (Float)
+
+```python
+from compas.geometry import Point, Vector, distance_point_plane
+
+mesh.update_default_vertex_attributes({'dist_from_plane': 0.0})
+
+plane = (Point(0.0, 0.0, -30.0), Vector(0.0, 0.5, 0.5))
+
+for v_key, data in mesh.vertices(data=True):
+ v_coord = mesh.vertex_coordinates(v_key, axes='xyz')
+ data['dist_from_plane'] = distance_point_plane(v_coord, plane)
+```
+
+#### Direction Vector (Array)
+
+```python
+import numpy as np
+from compas.geometry import normalize_vector
+
+mesh.update_default_vertex_attributes({'direction_to_pt': 0.0})
+
+target_pt = Point(4.0, 1.0, 0.0)
+
+for v_key, data in mesh.vertices(data=True):
+ v_coord = mesh.vertex_coordinates(v_key, axes='xyz')
+ direction = Vector.from_start_end(v_coord, target_pt)
+ data['direction_to_pt'] = np.array(normalize_vector(direction))
+```
+
+### 4. Slice and Create PrintPoints
+
+```python
+from compas_slicer.slicers import PlanarSlicer
+from compas_slicer.post_processing import simplify_paths_rdp
+from compas_slicer.print_organization import PlanarPrintOrganizer
+
+slicer = PlanarSlicer(mesh, layer_height=5.0)
+slicer.slice_model()
+simplify_paths_rdp(slicer, threshold=1.0)
+
+print_organizer = PlanarPrintOrganizer(slicer)
+print_organizer.create_printpoints()
+```
+
+### 5. Transfer Attributes
+
+```python
+from compas_slicer.utilities.attributes_transfer import transfer_mesh_attributes_to_printpoints
+
+transfer_mesh_attributes_to_printpoints(mesh, print_organizer.printpoints)
+```
+
+This function:
+
+1. Finds which mesh face each printpoint lies on
+2. For **face attributes**: Directly copies the value
+3. For **vertex attributes**: Interpolates using barycentric coordinates
+
+### 6. Access Transferred Attributes
+
+```python
+# Get all values of an attribute across all printpoints
+overhangs = print_organizer.get_printpoints_attribute(attr_name='overhang')
+positive_y = print_organizer.get_printpoints_attribute(attr_name='positive_y_axis')
+distances = print_organizer.get_printpoints_attribute(attr_name='dist_from_plane')
+directions = print_organizer.get_printpoints_attribute(attr_name='direction_to_pt')
+```
+
+Or access individual printpoint attributes:
+
+```python
+for ppt in print_organizer.printpoints_iterator():
+ if ppt.attributes.get('overhang', 0) < 0:
+ # This is an overhang - adjust printing parameters
+ ppt.velocity = 20.0 # Slow down
+```
+
+## How Interpolation Works
+
+### Face Attributes
+
+Face attributes are discrete - each point on a face gets the same value:
+
+```
+Face A (overhang=0.8) Face B (overhang=0.3)
+ _____ _____
+ | ● | | ● |
+ |_____| |_____|
+
+ Point gets 0.8 Point gets 0.3
+```
+
+### Vertex Attributes
+
+Vertex attributes are interpolated using barycentric coordinates:
+
+```
+ V1 (dist=10)
+ ●
+ /|\
+ / | \
+ / ●P \ P is at barycentric coords (0.2, 0.3, 0.5)
+ / | \ dist(P) = 0.2×10 + 0.3×5 + 0.5×2 = 4.5
+ ●----+----●
+ V2 (dist=5) V3 (dist=2)
+```
+
+The interpolation formula:
+
+$$\text{attr}(P) = \lambda_1 \cdot \text{attr}(V_1) + \lambda_2 \cdot \text{attr}(V_2) + \lambda_3 \cdot \text{attr}(V_3)$$
+
+Where $\lambda_1 + \lambda_2 + \lambda_3 = 1$ are the barycentric coordinates.
+
+## Practical Applications
+
+### Variable Velocity by Overhang
+
+Slow down on overhangs for better print quality:
+
+```python
+from compas_slicer.print_organization import set_linear_velocity_by_range
+
+set_linear_velocity_by_range(
+ print_organizer,
+ param_func=lambda ppt: ppt.attributes.get('overhang', 0),
+ parameter_range=[-1.0, 1.0], # overhang range
+ velocity_range=[15, 60], # slow for overhangs, fast for flat
+)
+```
+
+### Color-Based Material Selection
+
+```python
+# Assume 'color' attribute is 0 (white) or 1 (black)
+for ppt in print_organizer.printpoints_iterator():
+ if ppt.attributes.get('color', 0) > 0.5:
+ ppt.extruder_id = 1 # Use second extruder
+ else:
+ ppt.extruder_id = 0
+```
+
+### Structural Reinforcement
+
+```python
+# Higher flow rate in structural regions
+for ppt in print_organizer.printpoints_iterator():
+ if ppt.attributes.get('is_structural', False):
+ ppt.flowrate = 1.2 # 20% more material
+```
+
+## Attribute Type Requirements
+
+| Attribute Location | Allowed Types | Interpolation |
+|--------------------|---------------|---------------|
+| Face | Any (float, bool, str, list, dict) | None (direct copy) |
+| Vertex | Numeric only (float, np.array) | Barycentric |
+
+!!! warning "Vertex Attribute Limitation"
+ Vertex attributes must be numeric types that can be meaningfully multiplied by floats. Boolean or string vertex attributes will cause errors during interpolation.
+
+## Complete Code
+
+```python
+--8<-- "examples/6_attributes_transfer/example_6_attributes_transfer.py"
+```
+
+## Running the Example
+
+```bash
+cd examples/6_attributes_transfer
+python example_6_attributes_transfer.py
+```
+
+With visualization:
+
+```bash
+python example_6_attributes_transfer.py --visualize
+```
+
+## Output Files
+
+| File | Description |
+|------|-------------|
+| `slicer_data.json` | Sliced geometry |
+| `out_printpoints.json` | PrintPoints with attributes |
+| `overhangs_list.json` | Overhang values per point |
+| `positive_y_axis_list.json` | Boolean values per point |
+| `dist_from_plane_list.json` | Distance values per point |
+| `direction_to_pt_list.json` | Direction vectors per point |
+
+## Key Takeaways
+
+1. **Face vs vertex attributes**: Face attributes are discrete, vertex attributes are interpolated
+2. **Numeric vertex attributes only**: Must be floats or arrays for barycentric interpolation
+3. **Automatic transfer**: One function call transfers all mesh attributes to printpoints
+4. **Variable parameters**: Use transferred attributes to drive printing parameters
+
+## Next Steps
+
+- [Print Organization](../concepts/print-organization.md) - More on fabrication parameters
+- [Curved Slicing](02_curved_slicing.md) - Combine with non-planar techniques
+- [API Reference](../api/utilities.md) - `transfer_mesh_attributes_to_printpoints` details
diff --git a/docs/examples/06_attributes_transfer.rst b/docs/examples/06_attributes_transfer.rst
deleted file mode 100644
index 2929fdaf..00000000
--- a/docs/examples/06_attributes_transfer.rst
+++ /dev/null
@@ -1,106 +0,0 @@
-.. _compas_slicer_example_6:
-
-**************************************
-Transferring attributes to PrintPoints
-**************************************
-
-Often in 3D printing we need to transfer information from the mesh that is being sliced to the PrintPoints that
-are used in the fabrication process. We might want, for example, to print paths that are generated from different parts of
-the geometry using different parameters. In compas_slicer this can be done using the *transfer_mesh_attributes_to_printpoints()* function, as
-shown in the example below. During the slicing process each printpoint is projected to the closest mesh face.
-It takes directly all the face attributes, and it takes the averaged vertex attributes of the face vertices using
-barycentric coordinates.
-
-.. figure:: figures/06_attributes.png
- :figclass: figure
- :class: figure-img img-fluid
-
- *PrintPoints with visualization of the attribute: overhang angle of the underlying mesh.*
-
-
-.. code-block:: python
-
- import logging
- import os
- from compas.geometry import Point, Vector, distance_point_plane, normalize_vector
- from compas.datastructures import Mesh
- import compas_slicer.utilities as slicer_utils
- from compas_slicer.post_processing import simplify_paths_rdp_igl
- from compas_slicer.slicers import PlanarSlicer
- import compas_slicer.utilities.utils as utils
- from compas_slicer.utilities.attributes_transfer import transfer_mesh_attributes_to_printpoints
- from compas_slicer.print_organization import PlanarPrintOrganizer
- import numpy as np
-
- logger = logging.getLogger('logger')
- logging.basicConfig(format='%(levelname)s-%(message)s', level=logging.INFO)
-
- DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')
- OUTPUT_PATH = slicer_utils.get_output_directory(DATA_PATH)
- MODEL = 'distorted_v_closed_low_res.obj'
-
- if __name__ == '__main__':
- # load mesh
- mesh = Mesh.from_obj(os.path.join(DATA_PATH, MODEL))
-
- # --------------- Add attributes to mesh
- # Face attributes can be anything (ex. float, bool, array, text ...)
- # Vertex attributes can only be entities that can be meaningfully multiplied with a float (ex. float, np.array ...)
-
- # overhand attribute - Scalar value (per face)
- mesh.update_default_face_attributes({'overhang': 0.0})
- for f_key, data in mesh.faces(data=True):
- face_normal = mesh.face_normal(f_key, unitized=True)
- data['overhang'] = Vector(0.0, 0.0, 1.0).dot(face_normal)
-
- # face looking towards the positive y axis - Boolean value (per face)
- mesh.update_default_face_attributes({'positive_y_axis': False})
- for f_key, data in mesh.faces(data=True):
- face_normal = mesh.face_normal(f_key, unitized=True)
- is_positive_y = Vector(0.0, 1.0, 0.0).dot(face_normal) > 0 # boolean value
- data['positive_y_axis'] = is_positive_y
-
- # distance from plane - Scalar value (per vertex)
- mesh.update_default_vertex_attributes({'dist_from_plane': 0.0})
- plane = (Point(0.0, 0.0, -30.0), Vector(0.0, 0.5, 0.5))
- for v_key, data in mesh.vertices(data=True):
- v_coord = mesh.vertex_coordinates(v_key, axes='xyz')
- data['dist_from_plane'] = distance_point_plane(v_coord, plane)
-
- # direction towards point - Vector value (per vertex)
- mesh.update_default_vertex_attributes({'direction_to_pt': 0.0})
- pt = Point(4.0, 1.0, 0.0)
- for v_key, data in mesh.vertices(data=True):
- v_coord = mesh.vertex_coordinates(v_key, axes='xyz')
- data['direction_to_pt'] = np.array(normalize_vector(Vector.from_start_end(v_coord, pt)))
-
- # --------------- Slice mesh
- slicer = PlanarSlicer(mesh, slicer_type="default", layer_height=5.0)
- slicer.slice_model()
- simplify_paths_rdp_igl(slicer, threshold=1.0)
- slicer_utils.save_to_json(slicer.to_data(), OUTPUT_PATH, 'slicer_data.json')
-
- # --------------- Create printpoints
- print_organizer = PlanarPrintOrganizer(slicer)
- print_organizer.create_printpoints()
-
- # --------------- Transfer mesh attributes to printpoints
- transfer_mesh_attributes_to_printpoints(mesh, print_organizer.printpoints_dict)
-
- # --------------- Save printpoints to json (only json-serializable attributes are saved)
- printpoints_data = print_organizer.output_printpoints_dict()
- utils.save_to_json(printpoints_data, OUTPUT_PATH, 'out_printpoints.json')
-
- # --------------- Print the info to see the attributes of the printpoints (you can also visualize them on gh)
- print_organizer.printout_info()
-
- # --------------- Save printpoints attributes for visualization
- overhangs_list = print_organizer.get_printpoints_attribute(attr_name='overhang')
- positive_y_axis_list = print_organizer.get_printpoints_attribute(attr_name='positive_y_axis')
- dist_from_plane_list = print_organizer.get_printpoints_attribute(attr_name='dist_from_plane')
- direction_to_pt_list = print_organizer.get_printpoints_attribute(attr_name='direction_to_pt')
-
- utils.save_to_json(overhangs_list, OUTPUT_PATH, 'overhangs_list.json')
- utils.save_to_json(positive_y_axis_list, OUTPUT_PATH, 'positive_y_axis_list.json')
- utils.save_to_json(dist_from_plane_list, OUTPUT_PATH, 'dist_from_plane_list.json')
- utils.save_to_json(utils.point_list_to_dict(direction_to_pt_list), OUTPUT_PATH, 'direction_to_pt_list.json')
\ No newline at end of file
diff --git a/docs/examples/07_medial_axis.md b/docs/examples/07_medial_axis.md
new file mode 100644
index 00000000..dc931931
--- /dev/null
+++ b/docs/examples/07_medial_axis.md
@@ -0,0 +1,217 @@
+# Medial Axis Infill
+
+This example demonstrates generating infill paths using the medial axis (straight skeleton) of each layer contour - a geometry-aware approach that adapts to the shape.
+
+## What You'll Learn
+
+- Generating medial axis infill using CGAL's straight skeleton
+- Controlling infill density with `min_length`
+- Understanding bisector vs inner_bisector edges
+- When to use medial axis vs traditional infill patterns
+
+## Why Medial Axis Infill?
+
+Traditional infill patterns (grid, honeycomb, gyroid) ignore the geometry - they apply the same pattern everywhere. Medial axis infill follows the natural centerlines of the shape:
+
+```
+Traditional Grid: Medial Axis:
+ ┌─────────────┐ ┌─────────────┐
+ │ │ │ │ │ │ │ │ │ │ │
+ │─┼─┼─┼─┼─┼─┼─│ │ ╲ │ ╱ │
+ │ │ │ │ │ │ │ │ │ ╲ │ ╱ │
+ │─┼─┼─┼─┼─┼─┼─│ │─────╳│╳─────│
+ │ │ │ │ │ │ │ │ │ ╱ │ ╲ │
+ └─────────────┘ └─────────────┘
+ (ignores shape) (follows geometry)
+```
+
+Benefits:
+
+- **Adaptive density**: Naturally denser in thin walls, sparser in open areas
+- **Follows geometry**: Infill aligns with the shape's structure
+- **Handles complexity**: Works well with irregular shapes, holes, and thin features
+
+## The Pipeline
+
+```mermaid
+flowchart LR
+ A[Slice Mesh] --> B[For Each Layer]
+ B --> C[Compute Straight Skeleton]
+ C --> D[Extract Skeleton Edges]
+ D --> E[Filter by Length]
+ E --> F[Add as Infill Paths]
+```
+
+## Step-by-Step Walkthrough
+
+### 1. Load and Slice
+
+```python
+from pathlib import Path
+from compas.datastructures import Mesh
+from compas_slicer.slicers import PlanarSlicer
+from compas_slicer.post_processing import simplify_paths_rdp
+
+mesh = Mesh.from_obj(DATA_PATH / 'simple_vase_open_low_res.obj')
+
+slicer = PlanarSlicer(mesh, layer_height=2.0)
+slicer.slice_model()
+
+# Simplify paths first (recommended)
+simplify_paths_rdp(slicer, threshold=0.5)
+```
+
+### 2. Generate Medial Axis Infill
+
+```python
+from compas_slicer.post_processing import generate_medial_axis_infill
+
+generate_medial_axis_infill(
+ slicer,
+ min_length=2.0, # Skip edges shorter than 2mm
+ include_bisectors=True # Include spokes to boundary
+)
+```
+
+**Parameters:**
+
+| Parameter | Description |
+|-----------|-------------|
+| `min_length` | Minimum skeleton edge length to include (mm) |
+| `include_bisectors` | Include edges connecting skeleton to boundary |
+
+### 3. Continue with Print Organization
+
+```python
+from compas_slicer.print_organization import PlanarPrintOrganizer
+
+print_organizer = PlanarPrintOrganizer(slicer)
+print_organizer.create_printpoints()
+# ... rest of print organization
+```
+
+## How It Works
+
+### The Straight Skeleton
+
+The straight skeleton is computed by "shrinking" the polygon inward at constant speed. Where the shrinking boundary meets itself, skeleton edges form:
+
+```
+Original polygon: After shrinking:
+ ┌───────┐ ┌───────┐
+ │ │ │╲ ╱│
+ │ │ → │ ╲ ╱ │
+ │ │ │ ╲ ╱ │
+ └───────┘ └───╳───┘
+ skeleton
+```
+
+CGAL's `interior_straight_skeleton()` returns a graph with:
+
+- **Boundary edges**: Original polygon edges
+- **Inner bisector edges**: Internal skeleton (medial axis)
+- **Bisector edges**: Spokes connecting skeleton to boundary vertices
+
+### Edge Types
+
+```
+ ●─────────────●
+ ╱│ boundary │╲
+ ╱ │ │ ╲
+ ╱ │ inner_ │ ╲
+ ●───● bisector ●───●
+ ╲ │ │ ╱
+ ╲ │ bisector │ ╱
+ ╲│ (spoke) │╱
+ ●─────────────●
+```
+
+| Edge Type | Description | Use Case |
+|-----------|-------------|----------|
+| `boundary` | Original polygon edges | Skipped (already in perimeter) |
+| `inner_bisector` | Internal skeleton | Always included |
+| `bisector` | Skeleton to boundary | Optional (include_bisectors) |
+
+## Tuning Parameters
+
+### min_length
+
+Controls infill density:
+
+| Value | Effect |
+|-------|--------|
+| Small (1-2mm) | Dense infill, more paths |
+| Medium (5-10mm) | Moderate infill |
+| Large (20mm+) | Sparse, only main skeleton |
+
+### include_bisectors
+
+```
+include_bisectors=True: include_bisectors=False:
+ ┌───────┐ ┌───────┐
+ │╲ ╱│ │ │
+ │ ╲ ╱ │ │ │ │
+ │ ╲ ╱ │ │ │ │
+ │───╳───│ │───┼───│
+ │ ╱ ╲ │ │ │ │
+ │ ╱ ╲ │ │ │
+ └───────┘ └───────┘
+ (more support) (cleaner look)
+```
+
+## Complete Code
+
+```python
+--8<-- "examples/7_medial_axis_infill/example_7_medial_axis_infill.py"
+```
+
+## Running the Example
+
+```bash
+cd examples/7_medial_axis_infill
+python example_7_medial_axis_infill.py
+```
+
+With visualization:
+
+```bash
+python example_7_medial_axis_infill.py --visualize
+```
+
+## When to Use Medial Axis Infill
+
+**Good for:**
+
+- Irregular shapes with varying wall thickness
+- Organic geometries (vases, sculptures)
+- Parts with thin features that need internal support
+- Single-wall prints that need occasional bridging
+
+**Not ideal for:**
+
+- Regular mechanical parts (use grid/honeycomb)
+- High infill density requirements (use traditional patterns)
+- Parts needing uniform strength in all directions
+
+## Comparison with Traditional Infill
+
+| Aspect | Medial Axis | Grid/Honeycomb |
+|--------|-------------|----------------|
+| Adapts to geometry | Yes | No |
+| Density control | Via min_length | Via infill % |
+| Thin wall support | Excellent | May miss thin areas |
+| Computation | Per-layer skeleton | Simple patterns |
+| Uniform strength | No (follows shape) | Yes |
+
+## Key Takeaways
+
+1. **Geometry-aware**: Infill follows the natural structure of the shape
+2. **Adaptive density**: Automatically denser where needed
+3. **CGAL powered**: Uses robust straight skeleton computation
+4. **Tunable**: Control density with `min_length`, coverage with `include_bisectors`
+
+## Next Steps
+
+- [Planar Slicing](01_planar_slicing.md) - Basic slicing workflow
+- [Print Organization](../concepts/print-organization.md) - Fabrication parameters
+- [API Reference](../api/post_processing.md) - Full function documentation
diff --git a/docs/examples/index.md b/docs/examples/index.md
new file mode 100644
index 00000000..7df988a0
--- /dev/null
+++ b/docs/examples/index.md
@@ -0,0 +1,73 @@
+# Examples
+
+Complete working examples demonstrating COMPAS Slicer capabilities.
+
+All examples are available in the [`examples/`](https://github.com/compas-dev/compas_slicer/tree/master/examples) folder of the repository.
+
+
+
+- :material-layers-outline:{ .lg .middle } **Planar Slicing**
+
+ ---
+
+ Basic horizontal slicing workflow with brim, raft, and seam alignment
+
+ [:octicons-arrow-right-24: Example](01_planar_slicing.md)
+
+- :material-sine-wave:{ .lg .middle } **Curved Slicing**
+
+ ---
+
+ Non-planar slicing using interpolation between boundary curves
+
+ [:octicons-arrow-right-24: Example](02_curved_slicing.md)
+
+- :material-sort-ascending:{ .lg .middle } **Vertical Sorting**
+
+ ---
+
+ Organize branching paths into vertical layers for efficient printing
+
+ [:octicons-arrow-right-24: Example](03_vertical_sorting.md)
+
+- :material-file-code-outline:{ .lg .middle } **G-code Generation**
+
+ ---
+
+ Export toolpaths to G-code for desktop 3D printers
+
+ [:octicons-arrow-right-24: Example](04_gcode.md)
+
+- :material-gradient-vertical:{ .lg .middle } **Scalar Field Slicing**
+
+ ---
+
+ Slice along custom scalar field contours
+
+ [:octicons-arrow-right-24: Example](05_scalar_field.md)
+
+- :material-transfer:{ .lg .middle } **Attribute Transfer**
+
+ ---
+
+ Transfer mesh attributes (colors, normals) to printpoints
+
+ [:octicons-arrow-right-24: Example](06_attributes.md)
+
+
+
+## Running Examples
+
+```bash
+# Clone the repository
+git clone https://github.com/compas-dev/compas_slicer.git
+cd compas_slicer
+
+# Install
+pip install -e .
+
+# Run an example
+python examples/1_planar_slicing_simple/example_1_planar_slicing_simple.py
+```
+
+Output files are saved to `examples//data/output/`.
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 00000000..049105cc
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,70 @@
+# COMPAS Slicer
+
+
+
+**COMPAS Slicer is a slicing package for FDM 3D Printing using COMPAS.**
+
+The package builds upon [COMPAS](https://compas.dev/), an open-source Python-based framework for collaboration and research in architecture, engineering and digital fabrication.
+
+!!! note
+ COMPAS Slicer is a collaborative work-in-progress project and might contain bugs. If you find bugs or would like to request additional functionality, please submit an issue using the [Issue Tracker](https://github.com/compas-dev/compas_slicer/issues).
+
+## Authors
+
+COMPAS Slicer was developed at [ETH Zurich](https://ethz.ch/) by:
+
+- **[Ioanna Mitropoulou](https://ioannamitropoulouarch.com/)** - [Digital Building Technologies](https://dbt.arch.ethz.ch/)
+
+- **[Joris Burger](https://github.com/joburger)** - [Gramazio Kohler Research](https://gramaziokohler.arch.ethz.ch/)
+
+- **[Andrei Jipa](https://github.com/stratocaster)** - [Gramazio Kohler Research](https://gramaziokohler.arch.ethz.ch/)
+
+- **[Jelle Feringa](https://github.com/jf---)** - [Gramazio Kohler Research](https://terrestrial.construction)
+
+The package emerged from research on non-planar 3D printing and robotic fabrication at the Institute of Technology in Architecture.
+
+## Features
+
+- **Planar Slicing** - Fast CGAL-based mesh slicing with configurable layer heights
+- **Curved Slicing** - Non-planar toolpaths following scalar fields or interpolation
+- **Print Organization** - Generate printpoints with fabrication parameters
+- **G-code Generation** - Export toolpaths for desktop 3D printers
+- **Grasshopper Integration** - Visualize results in Rhino/Grasshopper
+
+## Quick Links
+
+
+
+- :material-download:{ .lg .middle } **Installation**
+
+ ---
+
+ Get started with COMPAS Slicer in minutes
+
+ [:octicons-arrow-right-24: Install](installation.md)
+
+- :material-book-open-variant:{ .lg .middle } **Tutorials**
+
+ ---
+
+ Learn the fundamentals with step-by-step guides
+
+ [:octicons-arrow-right-24: Tutorials](tutorials/index.md)
+
+- :material-code-tags:{ .lg .middle } **Examples**
+
+ ---
+
+ Explore complete working examples
+
+ [:octicons-arrow-right-24: Examples](examples/index.md)
+
+- :material-api:{ .lg .middle } **API Reference**
+
+ ---
+
+ Detailed documentation of all modules
+
+ [:octicons-arrow-right-24: API](api/index.md)
+
+
diff --git a/docs/index.rst b/docs/index.rst
deleted file mode 100644
index 754b882b..00000000
--- a/docs/index.rst
+++ /dev/null
@@ -1,35 +0,0 @@
-********************************************************************************
-Overview
-********************************************************************************
-
-.. image:: _images/compas_slicer_main_image_small.jpg
- :class: img-fluid mb-3
-
-.. rst-class:: lead
-
- **COMPAS_SLICER is a slicing package for FDM 3D Printing using COMPAS.**
- The package builds upon `COMPAS `_, an open-source Python-based framework for collaboration and research in architecture, engineering and digital fabrication.
-
-.. note::
-
- COMPAS_SLICER is a collaborative work-in-progress project and might
- contain bugs. In case you find bugs or would like to request additional
- functionality, please submit an issue using the
- `Issue Tracker `_.
-
-
-Table of Contents
-=================
-
-.. toctree::
- :maxdepth: 3
- :titlesonly:
-
- Overview
- installation
- tutorials
- examples
- api
- devguide
- license
- citing
diff --git a/docs/installation.md b/docs/installation.md
new file mode 100644
index 00000000..459b7481
--- /dev/null
+++ b/docs/installation.md
@@ -0,0 +1,97 @@
+# Installation
+
+COMPAS Slicer can be installed on Windows, macOS, and Linux.
+
+## Requirements
+
+- Python 3.9 or higher
+- [COMPAS](https://compas.dev/) >= 2.15
+- [compas_cgal](https://github.com/compas-dev/compas_cgal) >= 0.9
+
+## Quick Install
+
+=== "pip"
+
+ ```bash
+ pip install compas_slicer
+ ```
+
+=== "conda"
+
+ ```bash
+ conda install -c conda-forge compas_slicer
+ ```
+
+## Development Install
+
+To install from source for development:
+
+```bash
+# Clone the repository
+git clone https://github.com/compas-dev/compas_slicer.git
+cd compas_slicer
+
+# Create a virtual environment (recommended)
+python -m venv .venv
+source .venv/bin/activate # On Windows: .venv\Scripts\activate
+
+# Install in editable mode with dev dependencies
+pip install -e ".[dev]"
+```
+
+## Verify Installation
+
+Test that the installation works:
+
+```bash
+python -c "import compas_slicer; print(compas_slicer.__version__)"
+```
+
+## Grasshopper Integration
+
+To use COMPAS Slicer in Rhino/Grasshopper:
+
+```bash
+python -m compas_rhino.install -v 8.0
+```
+
+!!! tip
+ Replace `8.0` with your Rhino version (e.g., `7.0` for Rhino 7).
+
+## Troubleshooting
+
+### Grasshopper components not working
+
+If the Grasshopper components don't load after installation, manually add the path in Rhino:
+
+1. In Rhino, type `EditPythonScript`
+2. Go to **Tools > Options > Add to search path**
+3. Add: `/compas_slicer/src/grasshopper_visualization`
+
+### Microsoft Visual C++ Build Tools (Windows)
+
+If you see:
+
+```
+error: Microsoft Visual C++ 14.0 or greater is required
+```
+
+Install the [Microsoft C++ Build Tools](https://visualstudio.microsoft.com/visual-cpp-build-tools/).
+
+### CGAL Issues
+
+COMPAS Slicer requires `compas_cgal`. If you have issues:
+
+```bash
+conda install -c conda-forge compas_cgal
+```
+
+## Bug Reports
+
+When [reporting a bug](https://github.com/compas-dev/compas_slicer/issues), please include:
+
+- Operating system and version
+- Python version
+- COMPAS Slicer version (`python -c "import compas_slicer; print(compas_slicer.__version__)"`)
+- Complete error traceback
+- Steps to reproduce
diff --git a/docs/installation.rst b/docs/installation.rst
deleted file mode 100644
index 27b9b25e..00000000
--- a/docs/installation.rst
+++ /dev/null
@@ -1,157 +0,0 @@
-.. _compas_slicer_installation:
-
-************
-Installation
-************
-
-.. rst-class:: lead
-
-COMPAS_SLICER can be easily installed on multiple platforms.
-
-Basic installation steps
-========================
-
-Install compas slicer
------------------------------
-
-
-The recommended way to install `compas_slicer` is with `conda `_.
-For example, create an environment named ``my-project`` (or replace with your own environment name) and install as follows:
-
-.. code-block:: bash
-
- conda config --add channels conda-forge
- conda create -n my-project compas_slicer
- conda activate my-project
-
-* Install COMPAS CGAL
-
-.. code-block:: bash
-
- conda install -c conda-forge compas_cgal
-
-
-* Install Grasshopper components
-
-The Grasshopper components are automatically installed with the `compas_rhino` installation, e.g.:
-
-.. code-block:: bash
-
- conda activate my-project
- python -m compas_rhino.install -v 7.0
-
-
-* Test if the library works
-
-Activate your environment and run the following command:
-
-.. code-block:: bash
-
- conda activate my-project
- python -m compas_slicer
-
-
-* Install from local folder
-
-If you would like to install the latest code from github, or to make modifications on the code and have the updated version
-run in your environment, then you can install compas_slicer from a local folder in your computer. To do that, after following
-the steps described above clone the compas_slicer repository using the command
-
-.. code-block:: bash
-
- git clone https://github.com/compas-dev/compas_slicer.git
-
-Navigate to the compas_slicer folder and after you activate the desired environment, install compas_slicer from the local copy
-with the following command:
-
-.. code-block:: bash
- conda activate my-project
- pip install -e .
-
-
-Enjoy!
-
-
-Troubleshooting
-===============
-
-If you encounter a problem that is not described here, please file an issue
-using the `Issue Tracker `_.
-
-* Grasshopper components not working
-
-If despite completing all the compas_slicer installation steps, the compas_slicer grasshopper components still do not work, then
-you can fix this by manually adding the correct folder to your paths in Rhino.
-In Rhino, type "EditPythonScript", go to Tools > Options > Add to search path and add the following folder:
-/compas_slicer/src/grasshopper_visualization'
-
-
-
-* Installing Planarity
-
-.. code-block:: bash
-
- ModuleNotFoundError: No module named 'Cython'
-
-The installation process with pip can fail while installing planarity because Cython is not installed.
-In that case, install cython using pip (or conda) and then run the installation of COMPAS_SLICER again.
-
-.. code-block:: bash
-
- pip install cython --install-option="--no-cython-compile"
-
-* Microsoft Visual C++ Build Tools
-
-.. code-block:: bash
-
- error: Microsoft Visual C++ 14.0 or greater is required. Get it with "Microsoft C++ Build Tools": https://visualstudio.microsoft.com/visual-cpp-build-tools/
-
-The installation with pip can fail because “Microsoft Visual C++ Build Tools are missing”.
-To install the Microsoft Visual C++ Build Tools choose one of the options provided here:
-https://www.scivision.dev/python-windows-visual-c-14-required/ and just follow the instructions.
-Then run the pip installation commands again.
-
-* Numpy error
-
-.. code-block:: bash
-
- RuntimeError: The current Numpy installation ('C:\\Users\\\\.conda\\envs\\compas_slicer\\lib\\site-packages\\numpy\\__init__.py') fails to pass a sanity check due to a bug in the windows runtime. See this issue for more information: https://tinyurl.com/y3dm3h86
-
-A conflict between Numpy and Python can appear, in order to fix this you need to downgrade Numpy to 1.19.3 (from 1.19.4).
-Make sure you are in the correct environment and type:
-
-.. code-block:: bash
-
- pip install numpy==1.19.3
-
-* Fractions error
-
-.. code-block:: bash
-
- ImportError: cannot import name 'gcd' from 'fractions' (C:\ProgramData\Anaconda3\envs\compas_slicer\lib\fractions.py)
-
-This issue can be solved, as explained here: https://stackoverflow.com/questions/66174862/import-error-cant-import-name-gcd-from-fractions
-by typing the following command (make sure you are in the correct environment)
-
-.. code-block:: bash
-
- conda install -c conda-forge networkx=2.5
-
-
-
-Bug reports
-===========
-
-When `reporting a bug `_, please include:
-
-- Operating system name and version.
-- Any details about your local setup that might be helpful in troubleshooting.
-- Detailed steps to reproduce the bug.
-
-Feature requests
-================
-
-When `proposing a new feature `_, please include:
-
-- Explain in detail how it would work.
-- Keep the scope as narrow as possible, to make it easier to implement.
diff --git a/docs/javascripts/mathjax.js b/docs/javascripts/mathjax.js
new file mode 100644
index 00000000..7e48906a
--- /dev/null
+++ b/docs/javascripts/mathjax.js
@@ -0,0 +1,19 @@
+window.MathJax = {
+ tex: {
+ inlineMath: [["\\(", "\\)"]],
+ displayMath: [["\\[", "\\]"]],
+ processEscapes: true,
+ processEnvironments: true
+ },
+ options: {
+ ignoreHtmlClass: ".*|",
+ processHtmlClass: "arithmatex"
+ }
+};
+
+document$.subscribe(() => {
+ MathJax.startup.output.clearCache()
+ MathJax.typesetClear()
+ MathJax.texReset()
+ MathJax.typesetPromise()
+})
diff --git a/docs/license.rst b/docs/license.rst
deleted file mode 100644
index cd570801..00000000
--- a/docs/license.rst
+++ /dev/null
@@ -1,13 +0,0 @@
-********************************************************************************
-License
-********************************************************************************
-
-.. rst-class:: lead
-
-COMPAS is an open source framework with a permissive license such that it can be used
-for research as well as for proprietary projects, in academia and in practice,
-or at the interface between both.
-
-
-.. literalinclude:: ../LICENSE
- :language: none
diff --git a/docs/quickstart.md b/docs/quickstart.md
new file mode 100644
index 00000000..35aa93ed
--- /dev/null
+++ b/docs/quickstart.md
@@ -0,0 +1,102 @@
+# Quick Start
+
+Get started with COMPAS Slicer in under 5 minutes.
+
+## Basic Workflow
+
+COMPAS Slicer follows a simple pipeline:
+
+```mermaid
+graph LR
+ A[Load Mesh] --> B[Slice]
+ B --> C[Post-process]
+ C --> D[Print Organization]
+ D --> E[Export]
+```
+
+## Minimal Example
+
+```python
+from pathlib import Path
+from compas.datastructures import Mesh
+from compas_slicer.slicers import PlanarSlicer
+from compas_slicer.print_organization import PlanarPrintOrganizer
+
+# Load a mesh
+mesh = Mesh.from_obj("model.obj")
+
+# Slice with 2mm layer height
+slicer = PlanarSlicer(mesh, layer_height=2.0)
+slicer.slice_model()
+
+# Create printpoints
+organizer = PlanarPrintOrganizer(slicer)
+organizer.create_printpoints()
+
+# Export
+organizer.printout_info()
+```
+
+## Complete Example with G-code
+
+```python
+from pathlib import Path
+from compas.datastructures import Mesh
+from compas.geometry import Point
+
+from compas_slicer.slicers import PlanarSlicer
+from compas_slicer.pre_processing import move_mesh_to_point
+from compas_slicer.post_processing import generate_brim, simplify_paths_rdp
+from compas_slicer.print_organization import PlanarPrintOrganizer, set_extruder_toggle
+from compas_slicer.print_organization import create_gcode_text
+from compas_slicer.config import GcodeConfig
+
+# Load and position mesh
+mesh = Mesh.from_obj("model.obj")
+move_mesh_to_point(mesh, Point(100, 100, 0))
+
+# Slice
+slicer = PlanarSlicer(mesh, layer_height=0.2)
+slicer.slice_model()
+
+# Post-processing
+generate_brim(slicer, layer_width=0.4, number_of_brim_offsets=3)
+simplify_paths_rdp(slicer, threshold=0.1)
+
+# Print organization
+organizer = PlanarPrintOrganizer(slicer)
+organizer.create_printpoints()
+set_extruder_toggle(organizer, slicer)
+
+# Generate G-code
+gcode = create_gcode_text(organizer, GcodeConfig())
+Path("output.gcode").write_text(gcode)
+```
+
+## Key Concepts
+
+### Slicers
+
+| Slicer | Use Case |
+|--------|----------|
+| `PlanarSlicer` | Standard horizontal slicing |
+| `InterpolationSlicer` | Curved/non-planar slicing |
+| `ScalarFieldSlicer` | Slicing along scalar field contours |
+
+### Post-processing
+
+- `simplify_paths_rdp()` - Reduce point count using RDP algorithm
+- `generate_brim()` - Add adhesion brim
+- `generate_raft()` - Add raft layers
+- `seams_align()` - Align layer start points
+
+### Print Organization
+
+- `PlanarPrintOrganizer` - For planar slicing
+- `InterpolationPrintOrganizer` - For curved slicing
+
+## Next Steps
+
+- [:material-book-open-variant: Tutorials](tutorials/index.md) - Learn the fundamentals
+- [:material-code-tags: Examples](examples/index.md) - See complete workflows
+- [:material-api: API Reference](api/index.md) - Detailed documentation
diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css
new file mode 100644
index 00000000..1d33f2fc
--- /dev/null
+++ b/docs/stylesheets/extra.css
@@ -0,0 +1,38 @@
+/* Custom styles for COMPAS Slicer docs */
+
+:root {
+ --md-primary-fg-color: #673ab7;
+ --md-primary-fg-color--light: #9575cd;
+ --md-primary-fg-color--dark: #512da8;
+}
+
+/* Code block styling */
+.md-typeset code {
+ font-size: 0.85em;
+}
+
+/* Preserve whitespace in code blocks for ASCII art */
+.md-typeset pre code {
+ white-space: pre;
+ font-family: 'JetBrains Mono', 'Courier New', monospace;
+}
+
+/* API docs styling */
+.doc-heading {
+ font-weight: 600;
+}
+
+/* Better image styling */
+.md-typeset img {
+ border-radius: 8px;
+ box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+}
+
+/* Admonition icons */
+.md-typeset .admonition.tip {
+ border-color: #00bfa5;
+}
+
+.md-typeset .admonition.warning {
+ border-color: #ff9100;
+}
diff --git a/docs/tutorials.rst b/docs/tutorials.rst
deleted file mode 100644
index 2f5dfb88..00000000
--- a/docs/tutorials.rst
+++ /dev/null
@@ -1,13 +0,0 @@
-********************************************************************************
-Tutorials
-********************************************************************************
-
-Here you can find some compas_slicer tutorials.
-
-.. toctree::
- :numbered:
- :maxdepth: 1
- :titlesonly:
-
- tutorials/01_introduction
- tutorials/02_grasshopper_visualization
diff --git a/docs/tutorials/01_introduction.rst b/docs/tutorials/01_introduction.rst
deleted file mode 100644
index 547d011c..00000000
--- a/docs/tutorials/01_introduction.rst
+++ /dev/null
@@ -1,71 +0,0 @@
-.. _compas_slicer_tutorial_1_introduction:
-
-****************************
-Introduction
-****************************
-
-Overview
-========
-
-The COMPAS SLICER package broadly contains four blocks of code:
-
-* **Geometry:** contains all geometrical entities
-* **Slicers:** provides the functionality that generates the sliced model
-* **Print organizers:** provides the functionality for embedding fabrication related information to the sliced model
-* **Visualization:** visualizes the results in the Rhino-Grasshopper environment
-
-Geometry
---------
-
-The geometry part of COMPAS SLICER contains all of the geometrical entities that are specific to COMPAS SLICER:
-The ``Layer``, ``VerticalLayer``, ``Path``, and ``PrintPoint``. A ``Layer`` is essentially a single slice of the model.
-In case of planar slicing, all points on one ``Layer`` are at the same height, however, this is not the case for non-planar slicing.
-A ``VerticalLayer`` is a special method for organizing layers, in this case several ``Layers`` are grouped together to form a
-group of layers. The ``VerticalLayer`` is only relevant in case a model consists out of multiple ``Paths``.
-A ``Layer`` contains a list of one or mulitple ``Paths``. Multiple ``Paths`` occur when a model has multiple branches (see image).
-Each ``Path`` is a closed or open contour and contains a list of ``compas.geometry.Point`` objects.
-Lastly, the ``PrintPoint`` is essentially the same as the Points in the ``Path``, however, it contains additional fabrication parameters.
-
-.. figure:: intro_figures/01_layer.png
- :figclass: figure
- :class: figure-img img-fluid
-
-.. figure:: intro_figures/02_vert_layer.png
- :figclass: figure
- :class: figure-img img-fluid
-
-Slicers
---------
-
-Different slicers are integrated into COMPAS SLICER.
-
-* **Planar slicer:** Most basic slicer, slices the model through an intersection of a plane parallel to the XY plane.
-* **Interpolation slicer:** Creates non-planar slices by interpolating between boundary curves.
-* **Scalar field slicer:** Uses a scalar field to create non-planar slices.
-* **UV slicer:** TBA.
-
-Print organizers
-----------------
-
-After the model is sliced using one of the available slicers, the toolpath can be generated using the PrintOrganizers.
-The ``PrintOrganizer`` creates ``PrintPoint`` objects out of the ``compas.geometry.Point`` objects that are used throughout the slicing process.
-Using different functions, fabrication parameters can be added or modified to the ``PrintPoints``. Examples of this include:
-setting the velocity, setting the blend radius, or adding safety printpoints (z-hop).
-
-Visualization
--------------
-
-Both the Slicer output and the PrintOrganizer output can be visualized in Grasshopper. More details on this can be found in Tutorial 2.
-
-Folder structure
-================
-
-The functions of COMPAS SLICER are grouped into different folders:
-
-* **geometry:** Contains the main geometric classes of compas_slicer: ``Layer``, ``Path``, and ``PrintPoint``.
-* **parameters:** Contains parameter presets.
-* **pre_processing:** Contains various functions that are used *before slicing a model*, such as repositioning, or various operations for curved slicing.
-* **slicers:** Contains the slicer classes.
-* **print_organization:** Contains the print organization classes. These take a sliced model and embed on it all necessary fabrication-related information.
-* **post_processing:** Contains various functions that are used *after* a model is sliced, such as different methods of sorting, or adding a brim/raft.
-* **utilities:** Contains miscellaneous useful functions.
\ No newline at end of file
diff --git a/docs/tutorials/02_grasshopper_visualization.rst b/docs/tutorials/02_grasshopper_visualization.rst
deleted file mode 100644
index 82c22d93..00000000
--- a/docs/tutorials/02_grasshopper_visualization.rst
+++ /dev/null
@@ -1,81 +0,0 @@
-.. _compas_slicer_tutorial_2:
-
-****************************
-Visualization in grasshopper
-****************************
-
-This tutorial gives an overview of how the results of the slicing process can be visualized in Rhino grasshopper.
-It is important to note that the grasshopper components do not run any slicing code, they merely visualize
-existing results, that have been generated using python.
-
-Installation
-===============
-To install the grasshopper components see the :ref:`installation page `.
-
-
-Overview
-=========
-
-When compas_slicer code is run for a project `my_project`, then the folder `my_project/data/output/` is created, where
-all the intermediary and final results of the slicing process are saved in the form of json files. The grasshopper
-components look into that folder, load the according json file, and turn it into geometry that can be visualized.
-To find the correct folder, the grasshopper file that contains the components needs to be positioned in the `my_project`
-folder. You can look at the `compas_slicer/examples` to see the way the grasshopper components are used.
-
-
-Workflow
-==========
-
-When the compas_slicer python code is run, then all the json files that are recalculated are overwritten in the output
-folder. To update the visualization in grasshopper we click on the button `recompute` that is on the top of each component.
-This loads the latest version of the according json file from the output folder.
-
-
-Components
-============
-The components that exist correspond to the main classes of data types from compas_slicer that someone might want to
-visualize. The components that are currently available include the following:
-
-* csLoadSlicer
-
-Loads the `compas_slicer.slicers.Base_slicer` class and all classes that inherit from it, with all the attributes stored
-in the class.
-
-.. figure:: grasshopper_vis_figures/slicer.PNG
- :figclass: figure
- :class: figure-img img-fluid
-
- *Loading the slicer results.*
-
-* csLoadPrintpoints
-
-Loads the Printpoints dictionary with all the data stored in the `compas_slicer.geometry.PrintPoint` class.
-
-.. figure:: grasshopper_vis_figures/ppts.PNG
- :figclass: figure
- :class: figure-img img-fluid
-
- *Loading the outputted printpoints information.*
-
-* csLightPathViz
-
-Creates a lightweight visualization of the paths. The print paths are displayed with white tubes, and the traveling paths
-are displayed with orange tubes.
-
-.. figure:: grasshopper_vis_figures/lightweight_viz.PNG
- :figclass: figure
- :class: figure-img img-fluid
-
- *Lightweight paths visualization.*
-
-* csRenderPathViz
-
-If the user provides a cross-section of the paths, then a more expensive visualization can be created, where each path
-is displayed with its cross-sectional geometry.
-
-.. figure:: grasshopper_vis_figures/render_viz.PNG
- :figclass: figure
- :class: figure-img img-fluid
-
- *Render paths visualization.*
-
diff --git a/docs/tutorials/grasshopper.md b/docs/tutorials/grasshopper.md
new file mode 100644
index 00000000..a5735f40
--- /dev/null
+++ b/docs/tutorials/grasshopper.md
@@ -0,0 +1,113 @@
+# Grasshopper Visualization
+
+This tutorial explains how to visualize COMPAS Slicer results in Rhino/Grasshopper.
+
+!!! info "Important"
+ The Grasshopper components **only visualize** existing results. All slicing computation happens in Python - the components load and display the JSON output files.
+
+## Setup
+
+1. Install COMPAS Slicer (see [Installation](../installation.md))
+2. Install Grasshopper components:
+
+ ```bash
+ python -m compas_rhino.install -v 8.0
+ ```
+
+## Workflow
+
+```mermaid
+graph LR
+ A[Python Script] -->|generates| B[JSON Files]
+ B -->|loaded by| C[GH Components]
+ C -->|displays| D[Rhino Geometry]
+```
+
+1. Run your Python slicing script
+2. Output files are saved to `project/data/output/`
+3. Open the Grasshopper file in `project/`
+4. Click **Recompute** on components to load latest results
+
+## Project Structure
+
+```
+my_project/
+├── data/
+│ ├── model.obj # Input mesh
+│ └── output/
+│ ├── slicer_data.json
+│ └── out_printpoints.json
+├── my_script.py # Python slicing code
+└── visualization.gh # Grasshopper file
+```
+
+!!! tip
+ Place your `.gh` file in the project root folder so components can find the `data/output/` folder.
+
+## Components
+
+### csLoadSlicer
+
+Loads slicer results from JSON, including all layer and path data.
+
+
+
+**Outputs:**
+
+- Layers (list of paths per layer)
+- Path geometry (polylines)
+- Layer heights
+
+---
+
+### csLoadPrintpoints
+
+Loads the printpoints dictionary with all fabrication parameters.
+
+
+
+**Outputs:**
+
+- Point locations
+- Velocities
+- Extruder states
+- Up vectors
+
+---
+
+### csLightPathViz
+
+Creates a lightweight visualization of toolpaths.
+
+
+
+- **White tubes**: Print paths
+- **Orange tubes**: Travel moves
+
+---
+
+### csRenderPathViz
+
+Creates a detailed visualization with actual path cross-sections.
+
+
+
+Requires a cross-section curve as input for accurate representation.
+
+## Example Workflow
+
+1. **Run Python script:**
+
+ ```python
+ from compas_slicer.slicers import PlanarSlicer
+ from compas_slicer.utilities import save_to_json
+
+ slicer = PlanarSlicer(mesh, layer_height=2.0)
+ slicer.slice_model()
+ save_to_json(slicer.to_data(), OUTPUT_PATH, 'slicer_data.json')
+ ```
+
+2. **In Grasshopper:**
+ - Add `csLoadSlicer` component
+ - Connect to visualization components
+ - Click Recompute after each Python run
diff --git a/docs/tutorials/index.md b/docs/tutorials/index.md
new file mode 100644
index 00000000..5fbeeb34
--- /dev/null
+++ b/docs/tutorials/index.md
@@ -0,0 +1,23 @@
+# Tutorials
+
+Step-by-step guides to learn COMPAS Slicer fundamentals.
+
+
+
+- :material-book-open-variant:{ .lg .middle } **Introduction**
+
+ ---
+
+ Understand the core concepts and architecture of COMPAS Slicer
+
+ [:octicons-arrow-right-24: Read](introduction.md)
+
+- :material-grass:{ .lg .middle } **Grasshopper Visualization**
+
+ ---
+
+ Learn how to visualize slicing results in Rhino/Grasshopper
+
+ [:octicons-arrow-right-24: Read](grasshopper.md)
+
+
diff --git a/docs/tutorials/introduction.md b/docs/tutorials/introduction.md
new file mode 100644
index 00000000..9a2f0c00
--- /dev/null
+++ b/docs/tutorials/introduction.md
@@ -0,0 +1,108 @@
+# Introduction
+
+This tutorial introduces the core concepts and architecture of COMPAS Slicer.
+
+## Overview
+
+COMPAS Slicer is organized into four main components:
+
+```mermaid
+graph TB
+ subgraph "1. Geometry"
+ Layer --> Path
+ Path --> PrintPoint
+ end
+
+ subgraph "2. Slicers"
+ PlanarSlicer
+ InterpolationSlicer
+ ScalarFieldSlicer
+ end
+
+ subgraph "3. Print Organization"
+ PrintOrganizer --> PrintPoint
+ end
+
+ subgraph "4. Output"
+ GCode
+ JSON
+ Grasshopper
+ end
+```
+
+| Component | Purpose |
+|-----------|---------|
+| **Geometry** | Core geometric entities: `Layer`, `Path`, `PrintPoint` |
+| **Slicers** | Generate sliced paths from meshes |
+| **Print Organizers** | Embed fabrication parameters into paths |
+| **Visualization** | Display results in Rhino/Grasshopper |
+
+## Geometry
+
+### Layer
+
+A `Layer` represents a single slice of the model. In planar slicing, all points in a layer are at the same Z height. In non-planar slicing, points can vary in height.
+
+
+
+### VerticalLayer
+
+A `VerticalLayer` groups multiple layers together - useful for models with branching geometry.
+
+
+
+### Path
+
+A `Path` is a closed or open contour containing a list of points. A layer can contain multiple paths (e.g., for models with holes or branches).
+
+### PrintPoint
+
+A `PrintPoint` extends a geometric point with fabrication parameters:
+
+- Velocity
+- Extrusion rate
+- Layer height
+- Up vector (tool orientation)
+- Extruder toggle (on/off)
+
+## Slicers
+
+| Slicer | Description | Use Case |
+|--------|-------------|----------|
+| `PlanarSlicer` | Intersects mesh with horizontal planes | Standard FDM printing |
+| `InterpolationSlicer` | Interpolates between boundary curves | Non-planar surfaces, shells |
+| `ScalarFieldSlicer` | Follows scalar field contours | Custom layer patterns |
+
+## Print Organizers
+
+After slicing, print organizers convert geometric points to `PrintPoints` with fabrication data:
+
+```python
+from compas_slicer.print_organization import PlanarPrintOrganizer
+
+organizer = PlanarPrintOrganizer(slicer)
+organizer.create_printpoints()
+
+# Add fabrication parameters
+set_linear_velocity_constant(organizer, velocity=25)
+set_extruder_toggle(organizer, slicer)
+add_safety_printpoints(organizer, z_hop=5.0)
+```
+
+## Package Structure
+
+```
+compas_slicer/
+├── geometry/ # Layer, Path, PrintPoint
+├── slicers/ # PlanarSlicer, InterpolationSlicer, etc.
+├── print_organization/# PrintOrganizers and utilities
+├── pre_processing/ # Mesh preparation (positioning, boundaries)
+├── post_processing/ # Path modifications (brim, raft, sorting)
+├── utilities/ # Helper functions
+└── visualization/ # Viewer utilities
+```
+
+## Next Steps
+
+- [Grasshopper Visualization](grasshopper.md) - Visualize results in Rhino
+- [Examples](../examples/index.md) - Complete working examples
diff --git a/environment.yml b/environment.yml
deleted file mode 100644
index a716052e..00000000
--- a/environment.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-name: compas_slicer
-channels:
- - conda-forge
-dependencies:
- - python
- - pip
- - compas=1.16.0
- - networkx
- - numpy
- - progressbar2=3.53
- - pyclipper=1.2.0
- - rdp=0.8
diff --git a/examples/1_planar_slicing_simple/example_1_planar_slicing_simple.py b/examples/1_planar_slicing_simple/example_1_planar_slicing_simple.py
index 45fea673..fe717e15 100644
--- a/examples/1_planar_slicing_simple/example_1_planar_slicing_simple.py
+++ b/examples/1_planar_slicing_simple/example_1_planar_slicing_simple.py
@@ -1,67 +1,44 @@
import time
-import os
-import logging
+from pathlib import Path
+
+from compas.datastructures import Mesh
+from compas.geometry import Point
import compas_slicer.utilities as utils
+from compas_slicer.post_processing import generate_brim, generate_raft, seams_align, seams_smooth, simplify_paths_rdp
from compas_slicer.pre_processing import move_mesh_to_point
+from compas_slicer.print_organization import (
+ PlanarPrintOrganizer,
+ add_safety_printpoints,
+ set_extruder_toggle,
+ set_linear_velocity_constant,
+)
from compas_slicer.slicers import PlanarSlicer
-from compas_slicer.post_processing import generate_brim
-from compas_slicer.post_processing import generate_raft
-from compas_slicer.post_processing import simplify_paths_rdp_igl
-from compas_slicer.post_processing import seams_smooth
-from compas_slicer.post_processing import seams_align
-from compas_slicer.print_organization import PlanarPrintOrganizer
-from compas_slicer.print_organization import set_extruder_toggle
-from compas_slicer.print_organization import add_safety_printpoints
-from compas_slicer.print_organization import set_linear_velocity_constant
-from compas_slicer.print_organization import set_blend_radius
from compas_slicer.utilities import save_to_json
+from compas_slicer.visualization import should_visualize, visualize_slicer
-from compas.datastructures import Mesh
-from compas.geometry import Point
-
-# ==============================================================================
-# Logging
-# ==============================================================================
-logger = logging.getLogger('logger')
-logging.basicConfig(format='%(levelname)s-%(message)s', level=logging.INFO)
-
-# ==============================================================================
-# Select location of data folder and specify model to slice
-# ==============================================================================
-DATA = os.path.join(os.path.dirname(__file__), 'data')
-OUTPUT_DIR = utils.get_output_directory(DATA) # creates 'output' folder if it doesn't already exist
+DATA_PATH = Path(__file__).parent / 'data'
+OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
MODEL = 'simple_vase_open_low_res.obj'
-def main():
+def main(visualize: bool = False):
start_time = time.time()
- # ==========================================================================
# Load mesh
- # ==========================================================================
- compas_mesh = Mesh.from_obj(os.path.join(DATA, MODEL))
+ compas_mesh = Mesh.from_obj(DATA_PATH / MODEL)
- # ==========================================================================
# Move to origin
- # ==========================================================================
move_mesh_to_point(compas_mesh, Point(0, 0, 0))
- # ==========================================================================
# Slicing
- # options: 'default': Both for open and closed paths. But slow
- # 'cgal': Very fast. Only for closed paths.
- # Requires additional installation (compas_cgal).
- # ==========================================================================
- slicer = PlanarSlicer(compas_mesh, slicer_type="cgal", layer_height=1.5)
+ slicer = PlanarSlicer(compas_mesh, layer_height=1.5)
slicer.slice_model()
seams_align(slicer, "next_path")
- # ==========================================================================
# Generate brim / raft
- # ==========================================================================
- # NOTE: Typically you would want to use either a brim OR a raft,
+ # NOTE: Typically you would want to use either a brim OR a raft,
# however, in this example both are used to explain the functionality
generate_brim(slicer, layer_width=3.0, number_of_brim_offsets=4)
generate_raft(slicer,
@@ -70,58 +47,38 @@ def main():
direction="xy_diagonal",
raft_layers=1)
- # ==========================================================================
# Simplify the paths by removing points with a certain threshold
- # change the threshold value to remove more or less points
- # ==========================================================================
- simplify_paths_rdp_igl(slicer, threshold=0.6)
+ simplify_paths_rdp(slicer, threshold=0.6)
- # ==========================================================================
# Smooth the seams between layers
- # change the smooth_distance value to achieve smoother, or more abrupt seams
- # ==========================================================================
seams_smooth(slicer, smooth_distance=10)
- # ==========================================================================
- # Prints out the info of the slicer
- # ==========================================================================
slicer.printout_info()
+ save_to_json(slicer.to_data(), OUTPUT_PATH, 'slicer_data.json')
- # ==========================================================================
- # Save slicer data to JSON
- # ==========================================================================
- save_to_json(slicer.to_data(), OUTPUT_DIR, 'slicer_data.json')
-
- # ==========================================================================
- # Initializes the PlanarPrintOrganizer and creates PrintPoints
- # ==========================================================================
+ # Print organization
print_organizer = PlanarPrintOrganizer(slicer)
print_organizer.create_printpoints(generate_mesh_normals=False)
- # ==========================================================================
# Set fabrication-related parameters
- # ==========================================================================
set_extruder_toggle(print_organizer, slicer)
add_safety_printpoints(print_organizer, z_hop=10.0)
set_linear_velocity_constant(print_organizer, v=25.0)
- # ==========================================================================
- # Prints out the info of the PrintOrganizer
- # ==========================================================================
print_organizer.printout_info()
- # ==========================================================================
- # Converts the PrintPoints to data and saves to JSON
- # =========================================================================
printpoints_data = print_organizer.output_printpoints_dict()
- utils.save_to_json(printpoints_data, OUTPUT_DIR, 'out_printpoints.json')
+ utils.save_to_json(printpoints_data, OUTPUT_PATH, 'out_printpoints.json')
printpoints_data = print_organizer.output_nested_printpoints_dict()
- utils.save_to_json(printpoints_data, OUTPUT_DIR, 'out_printpoints_nested.json')
+ utils.save_to_json(printpoints_data, OUTPUT_PATH, 'out_printpoints_nested.json')
end_time = time.time()
print("Total elapsed time", round(end_time - start_time, 2), "seconds")
+ if visualize:
+ visualize_slicer(slicer, compas_mesh)
+
if __name__ == "__main__":
- main()
\ No newline at end of file
+ main(visualize=should_visualize())
diff --git a/examples/2_curved_slicing/ex2_curved_slicing.py b/examples/2_curved_slicing/ex2_curved_slicing.py
index 6ae88e35..30f83fef 100644
--- a/examples/2_curved_slicing/ex2_curved_slicing.py
+++ b/examples/2_curved_slicing/ex2_curved_slicing.py
@@ -1,61 +1,60 @@
-import os
+import time
+from pathlib import Path
+
from compas.datastructures import Mesh
-import logging
+
import compas_slicer.utilities as utils
+from compas_slicer.config import InterpolationConfig
+from compas_slicer.post_processing import seams_smooth, simplify_paths_rdp
+from compas_slicer.pre_processing import InterpolationSlicingPreprocessor, create_mesh_boundary_attributes
+from compas_slicer.print_organization import (
+ InterpolationPrintOrganizer,
+ add_safety_printpoints,
+ set_extruder_toggle,
+ set_linear_velocity_by_range,
+ smooth_printpoints_layer_heights,
+ smooth_printpoints_up_vectors,
+)
from compas_slicer.slicers import InterpolationSlicer
-from compas_slicer.post_processing import simplify_paths_rdp
-from compas_slicer.pre_processing import InterpolationSlicingPreprocessor
-from compas_slicer.print_organization import set_extruder_toggle, set_linear_velocity_by_range
-from compas_slicer.print_organization import add_safety_printpoints
-from compas_slicer.pre_processing import create_mesh_boundary_attributes
-from compas_slicer.print_organization import InterpolationPrintOrganizer
-from compas_slicer.post_processing import seams_smooth
-from compas_slicer.print_organization import smooth_printpoints_up_vectors, smooth_printpoints_layer_heights
-import time
-
-logger = logging.getLogger('logger')
-logging.basicConfig(format='%(levelname)s - %(message)s', level=logging.INFO)
+from compas_slicer.visualization import should_visualize, visualize_slicer
-DATA_PATH = os.path.join(os.path.dirname(__file__), 'data_Y_shape')
+DATA_PATH = Path(__file__).parent / 'data_Y_shape'
OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
-OBJ_INPUT_NAME = os.path.join(DATA_PATH, 'mesh.obj')
-def main():
+def main(visualize: bool = False):
start_time = time.time()
- # --- Load initial_mesh
- mesh = Mesh.from_obj(os.path.join(DATA_PATH, OBJ_INPUT_NAME))
+ # Load initial_mesh
+ mesh = Mesh.from_obj(DATA_PATH / 'mesh.obj')
- # --- Load targets (boundaries)
+ # Load targets (boundaries)
low_boundary_vs = utils.load_from_json(DATA_PATH, 'boundaryLOW.json')
high_boundary_vs = utils.load_from_json(DATA_PATH, 'boundaryHIGH.json')
create_mesh_boundary_attributes(mesh, low_boundary_vs, high_boundary_vs)
avg_layer_height = 2.0
- parameters = {
- 'avg_layer_height': avg_layer_height, # controls number of curves that will be generated
- }
+ config = InterpolationConfig(avg_layer_height=avg_layer_height)
- preprocessor = InterpolationSlicingPreprocessor(mesh, parameters, DATA_PATH)
+ preprocessor = InterpolationSlicingPreprocessor(mesh, config, DATA_PATH)
preprocessor.create_compound_targets()
g_eval = preprocessor.create_gradient_evaluation(norm_filename='gradient_norm.json', g_filename='gradient.json',
target_1=preprocessor.target_LOW,
target_2=preprocessor.target_HIGH)
preprocessor.find_critical_points(g_eval, output_filenames=['minima.json', 'maxima.json', 'saddles.json'])
- # --- slicing
- slicer = InterpolationSlicer(mesh, preprocessor, parameters)
- slicer.slice_model() # compute_norm_of_gradient contours
+ # Slicing
+ slicer = InterpolationSlicer(mesh, preprocessor, config)
+ slicer.slice_model()
simplify_paths_rdp(slicer, threshold=0.25)
seams_smooth(slicer, smooth_distance=3)
slicer.printout_info()
utils.save_to_json(slicer.to_data(), OUTPUT_PATH, 'curved_slicer.json')
- # --- Print organizer
- print_organizer = InterpolationPrintOrganizer(slicer, parameters, DATA_PATH)
+ # Print organizer
+ print_organizer = InterpolationPrintOrganizer(slicer, config, DATA_PATH)
print_organizer.create_printpoints()
smooth_printpoints_up_vectors(print_organizer, strength=0.5, iterations=10)
@@ -67,13 +66,16 @@ def main():
set_extruder_toggle(print_organizer, slicer)
add_safety_printpoints(print_organizer, z_hop=10.0)
- # --- Save printpoints dictionary to json file
+ # Save printpoints dictionary to json file
printpoints_data = print_organizer.output_printpoints_dict()
utils.save_to_json(printpoints_data, OUTPUT_PATH, 'out_printpoints.json')
end_time = time.time()
print("Total elapsed time", round(end_time - start_time, 2), "seconds")
+ if visualize:
+ visualize_slicer(slicer, mesh)
+
if __name__ == "__main__":
- main()
+ main(visualize=should_visualize())
diff --git a/examples/3_planar_slicing_vertical_sorting/example_3_planar_vertical_sorting.py b/examples/3_planar_slicing_vertical_sorting/example_3_planar_vertical_sorting.py
index 454599aa..24d7afb0 100644
--- a/examples/3_planar_slicing_vertical_sorting/example_3_planar_vertical_sorting.py
+++ b/examples/3_planar_slicing_vertical_sorting/example_3_planar_vertical_sorting.py
@@ -1,43 +1,39 @@
-import os
-import logging
+from pathlib import Path
+
+from compas.datastructures import Mesh
+from compas.geometry import Point
import compas_slicer.utilities as utils
+from compas_slicer.post_processing import (
+ generate_brim,
+ reorder_vertical_layers,
+ seams_smooth,
+ simplify_paths_rdp,
+ sort_into_vertical_layers,
+)
from compas_slicer.pre_processing import move_mesh_to_point
+from compas_slicer.print_organization import (
+ PlanarPrintOrganizer,
+ add_safety_printpoints,
+ set_blend_radius,
+ set_extruder_toggle,
+ set_linear_velocity_constant,
+)
from compas_slicer.slicers import PlanarSlicer
-from compas_slicer.post_processing import generate_brim
-from compas_slicer.post_processing import simplify_paths_rdp_igl
-from compas_slicer.post_processing import sort_into_vertical_layers
-from compas_slicer.post_processing import reorder_vertical_layers
-from compas_slicer.post_processing import seams_smooth
-from compas_slicer.print_organization import PlanarPrintOrganizer
-from compas_slicer.print_organization import set_extruder_toggle
-from compas_slicer.print_organization import add_safety_printpoints
-from compas_slicer.print_organization import set_linear_velocity_constant
-from compas_slicer.print_organization import set_blend_radius
from compas_slicer.utilities import save_to_json
-from compas.datastructures import Mesh
-from compas.geometry import Point
-
-# ==============================================================================
-# Logging
-# ==============================================================================
-logger = logging.getLogger('logger')
-logging.basicConfig(format='%(levelname)s-%(message)s', level=logging.INFO)
+from compas_slicer.visualization import should_visualize, visualize_slicer
-# ==============================================================================
-# Select location of data folder and specify model to slice
-# ==============================================================================
-DATA = os.path.join(os.path.dirname(__file__), 'data')
-OUTPUT_DIR = utils.get_output_directory(DATA) # creates 'output' folder if it doesn't already exist
+DATA_PATH = Path(__file__).parent / 'data'
+OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
MODEL = 'distorted_v_closed_mid_res.obj'
-def main():
- compas_mesh = Mesh.from_obj(os.path.join(DATA, MODEL))
+def main(visualize: bool = False):
+ compas_mesh = Mesh.from_obj(DATA_PATH / MODEL)
move_mesh_to_point(compas_mesh, Point(0, 0, 0))
# Slicing
- slicer = PlanarSlicer(compas_mesh, slicer_type="cgal", layer_height=5.0)
+ slicer = PlanarSlicer(compas_mesh, layer_height=5.0)
slicer.slice_model()
# Sorting into vertical layers and reordering
@@ -46,10 +42,10 @@ def main():
# Post-processing
generate_brim(slicer, layer_width=3.0, number_of_brim_offsets=5)
- simplify_paths_rdp_igl(slicer, threshold=0.7)
+ simplify_paths_rdp(slicer, threshold=0.7)
seams_smooth(slicer, smooth_distance=10)
slicer.printout_info()
- save_to_json(slicer.to_data(), OUTPUT_DIR, 'slicer_data.json')
+ save_to_json(slicer.to_data(), OUTPUT_PATH, 'slicer_data.json')
# PlanarPrintOrganization
print_organizer = PlanarPrintOrganizer(slicer)
@@ -63,8 +59,11 @@ def main():
print_organizer.printout_info()
printpoints_data = print_organizer.output_printpoints_dict()
- utils.save_to_json(printpoints_data, OUTPUT_DIR, 'out_printpoints.json')
+ utils.save_to_json(printpoints_data, OUTPUT_PATH, 'out_printpoints.json')
+
+ if visualize:
+ visualize_slicer(slicer, compas_mesh)
if __name__ == "__main__":
- main()
+ main(visualize=should_visualize())
diff --git a/examples/4_gcode_generation/example_4_gcode.py b/examples/4_gcode_generation/example_4_gcode.py
index 9583e236..1aac77a9 100644
--- a/examples/4_gcode_generation/example_4_gcode.py
+++ b/examples/4_gcode_generation/example_4_gcode.py
@@ -1,46 +1,38 @@
-import os
-import logging
-import compas_slicer.utilities as utils
-from compas_slicer.pre_processing import move_mesh_to_point
-from compas_slicer.slicers import PlanarSlicer
-from compas_slicer.post_processing import generate_brim
-from compas_slicer.post_processing import simplify_paths_rdp_igl
-from compas_slicer.post_processing import seams_smooth
-from compas_slicer.print_organization import PlanarPrintOrganizer
-from compas_slicer.print_organization import set_extruder_toggle
-from compas_slicer.utilities import save_to_json
-from compas_slicer.parameters import get_param
+from pathlib import Path
from compas.datastructures import Mesh
from compas.geometry import Point
-logger = logging.getLogger('logger')
-logging.basicConfig(format='%(levelname)s-%(message)s', level=logging.INFO)
+import compas_slicer.utilities as utils
+from compas_slicer.config import GcodeConfig
+from compas_slicer.post_processing import generate_brim, seams_smooth, simplify_paths_rdp
+from compas_slicer.pre_processing import move_mesh_to_point
+from compas_slicer.print_organization import PlanarPrintOrganizer, set_extruder_toggle
+from compas_slicer.slicers import PlanarSlicer
+from compas_slicer.utilities import save_to_json
+from compas_slicer.visualization import should_visualize, visualize_slicer
-DATA = os.path.join(os.path.dirname(__file__), 'data')
-OUTPUT_DIR = utils.get_output_directory(DATA) # creates 'output' folder if it doesn't already exist
+DATA_PATH = Path(__file__).parent / 'data'
+OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
MODEL = 'simple_vase_open_low_res.obj'
-def main():
-
- compas_mesh = Mesh.from_obj(os.path.join(DATA, MODEL))
- delta = get_param({}, key='delta', defaults_type='gcode') # boolean for delta printers
- print_volume_x = get_param({}, key='print_volume_x', defaults_type='gcode') # in mm
- print_volume_y = get_param({}, key='print_volume_y', defaults_type='gcode') # in mm
- if delta:
+def main(visualize: bool = False):
+ compas_mesh = Mesh.from_obj(DATA_PATH / MODEL)
+ gcode_config = GcodeConfig()
+ if gcode_config.delta:
move_mesh_to_point(compas_mesh, Point(0, 0, 0))
else:
- move_mesh_to_point(compas_mesh, Point(print_volume_x/2, print_volume_y/2, 0))
+ move_mesh_to_point(compas_mesh, Point(gcode_config.print_volume_x/2, gcode_config.print_volume_y/2, 0))
# ----- slicing
- slicer = PlanarSlicer(compas_mesh, slicer_type="cgal", layer_height=4.5)
+ slicer = PlanarSlicer(compas_mesh, layer_height=4.5)
slicer.slice_model()
generate_brim(slicer, layer_width=3.0, number_of_brim_offsets=4)
- simplify_paths_rdp_igl(slicer, threshold=0.6)
+ simplify_paths_rdp(slicer, threshold=0.6)
seams_smooth(slicer, smooth_distance=10)
slicer.printout_info()
- save_to_json(slicer.to_data(), OUTPUT_DIR, 'slicer_data.json')
+ save_to_json(slicer.to_data(), OUTPUT_PATH, 'slicer_data.json')
# ----- print organization
print_organizer = PlanarPrintOrganizer(slicer)
@@ -50,10 +42,12 @@ def main():
print_organizer.printout_info()
# create and output gcode
- gcode_parameters = {} # leave all to default
- gcode_text = print_organizer.output_gcode(gcode_parameters)
- utils.save_to_text_file(gcode_text, OUTPUT_DIR, 'my_gcode.gcode')
+ gcode_text = print_organizer.output_gcode(gcode_config)
+ utils.save_to_text_file(gcode_text, OUTPUT_PATH, 'my_gcode.gcode')
+
+ if visualize:
+ visualize_slicer(slicer, compas_mesh)
if __name__ == "__main__":
- main()
+ main(visualize=should_visualize())
diff --git a/examples/5_non_planar_slicing_on_custom_base/scalar_field_slicing.py b/examples/5_non_planar_slicing_on_custom_base/scalar_field_slicing.py
index ffd1b45f..c7da8bed 100644
--- a/examples/5_non_planar_slicing_on_custom_base/scalar_field_slicing.py
+++ b/examples/5_non_planar_slicing_on_custom_base/scalar_field_slicing.py
@@ -1,26 +1,25 @@
-import logging
-from compas.geometry import distance_point_point
+from pathlib import Path
+
from compas.datastructures import Mesh
-import os
+from compas.geometry import distance_point_point
+
import compas_slicer.utilities as slicer_utils
-from compas_slicer.post_processing import simplify_paths_rdp_igl
-from compas_slicer.slicers import ScalarFieldSlicer
import compas_slicer.utilities as utils
+from compas_slicer.post_processing import simplify_paths_rdp
from compas_slicer.print_organization import ScalarFieldPrintOrganizer
+from compas_slicer.slicers import ScalarFieldSlicer
+from compas_slicer.visualization import should_visualize, visualize_slicer
-logger = logging.getLogger('logger')
-logging.basicConfig(format='%(levelname)s-%(message)s', level=logging.INFO)
-
-DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')
+DATA_PATH = Path(__file__).parent / 'data'
OUTPUT_PATH = slicer_utils.get_output_directory(DATA_PATH)
MODEL = 'geom_to_slice.obj'
BASE = 'custom_base.obj'
-if __name__ == '__main__':
+def main(visualize: bool = False):
# --- load meshes
- mesh = Mesh.from_obj(os.path.join(DATA_PATH, MODEL))
- base = Mesh.from_obj(os.path.join(DATA_PATH, BASE))
+ mesh = Mesh.from_obj(DATA_PATH / MODEL)
+ base = Mesh.from_obj(DATA_PATH / BASE)
# --- Create per-vertex scalar field (distance of every vertex from the custom base)
pts = [mesh.vertex_coordinates(v_key, axes='xyz') for v_key in
@@ -32,7 +31,7 @@
# --- assign the scalar field to the mesh's attributes, under the name 'scalar_field'
mesh.update_default_vertex_attributes({'scalar_field': 0.0})
- for i, (v_key, data) in enumerate(mesh.vertices(data=True)):
+ for i, (_v_key, data) in enumerate(mesh.vertices(data=True)):
data['scalar_field'] = u[i]
# --- Slice model by generating contours of scalar field
@@ -42,10 +41,17 @@
slicer_utils.save_to_json(slicer.to_data(), OUTPUT_PATH, 'isocontours.json') # save results to json
# --- Print organization calculations (i.e. generation of printpoints with fabrication-related information)
- simplify_paths_rdp_igl(slicer, threshold=0.3)
- print_organizer = ScalarFieldPrintOrganizer(slicer, parameters={}, DATA_PATH=DATA_PATH)
+ simplify_paths_rdp(slicer, threshold=0.3)
+ print_organizer = ScalarFieldPrintOrganizer(slicer, DATA_PATH=DATA_PATH)
print_organizer.create_printpoints()
print_organizer.printout_info()
printpoints_data = print_organizer.output_printpoints_dict()
utils.save_to_json(printpoints_data, OUTPUT_PATH, 'out_printpoints.json') # save results to json
+
+ if visualize:
+ visualize_slicer(slicer, mesh)
+
+
+if __name__ == '__main__':
+ main(visualize=should_visualize())
diff --git a/examples/6_attributes_transfer/example_6_attributes_transfer.py b/examples/6_attributes_transfer/example_6_attributes_transfer.py
index 10a7646d..366fc861 100644
--- a/examples/6_attributes_transfer/example_6_attributes_transfer.py
+++ b/examples/6_attributes_transfer/example_6_attributes_transfer.py
@@ -1,25 +1,25 @@
-import logging
-import os
-from compas.geometry import Point, Vector, distance_point_plane, normalize_vector
+from pathlib import Path
+
+import numpy as np
from compas.datastructures import Mesh
+from compas.geometry import Point, Vector, distance_point_plane, normalize_vector
+
import compas_slicer.utilities as slicer_utils
-from compas_slicer.post_processing import simplify_paths_rdp_igl
-from compas_slicer.slicers import PlanarSlicer
import compas_slicer.utilities.utils as utils
-from compas_slicer.utilities.attributes_transfer import transfer_mesh_attributes_to_printpoints
+from compas_slicer.post_processing import simplify_paths_rdp
from compas_slicer.print_organization import PlanarPrintOrganizer
-import numpy as np
-
-logger = logging.getLogger('logger')
-logging.basicConfig(format='%(levelname)s-%(message)s', level=logging.INFO)
+from compas_slicer.slicers import PlanarSlicer
+from compas_slicer.utilities.attributes_transfer import transfer_mesh_attributes_to_printpoints
+from compas_slicer.visualization import should_visualize, visualize_slicer
-DATA_PATH = os.path.join(os.path.dirname(__file__), 'data')
+DATA_PATH = Path(__file__).parent / 'data'
OUTPUT_PATH = slicer_utils.get_output_directory(DATA_PATH)
MODEL = 'distorted_v_closed_low_res.obj'
-if __name__ == '__main__':
+
+def main(visualize: bool = False):
# load mesh
- mesh = Mesh.from_obj(os.path.join(DATA_PATH, MODEL))
+ mesh = Mesh.from_obj(DATA_PATH / MODEL)
# --------------- Add attributes to mesh
# Face attributes can be anything (ex. float, bool, array, text ...)
@@ -53,9 +53,9 @@
data['direction_to_pt'] = np.array(normalize_vector(Vector.from_start_end(v_coord, pt)))
# --------------- Slice mesh
- slicer = PlanarSlicer(mesh, slicer_type="default", layer_height=5.0)
+ slicer = PlanarSlicer(mesh, layer_height=5.0)
slicer.slice_model()
- simplify_paths_rdp_igl(slicer, threshold=1.0)
+ simplify_paths_rdp(slicer, threshold=1.0)
slicer_utils.save_to_json(slicer.to_data(), OUTPUT_PATH, 'slicer_data.json')
# --------------- Create printpoints
@@ -63,7 +63,7 @@
print_organizer.create_printpoints()
# --------------- Transfer mesh attributes to printpoints
- transfer_mesh_attributes_to_printpoints(mesh, print_organizer.printpoints_dict)
+ transfer_mesh_attributes_to_printpoints(mesh, print_organizer.printpoints)
# --------------- Save printpoints to json (only json-serializable attributes are saved)
printpoints_data = print_organizer.output_printpoints_dict()
@@ -82,3 +82,10 @@
utils.save_to_json(positive_y_axis_list, OUTPUT_PATH, 'positive_y_axis_list.json')
utils.save_to_json(dist_from_plane_list, OUTPUT_PATH, 'dist_from_plane_list.json')
utils.save_to_json(utils.point_list_to_dict(direction_to_pt_list), OUTPUT_PATH, 'direction_to_pt_list.json')
+
+ if visualize:
+ visualize_slicer(slicer, mesh)
+
+
+if __name__ == '__main__':
+ main(visualize=should_visualize())
diff --git a/examples/7_medial_axis_infill/example_7_medial_axis_infill.py b/examples/7_medial_axis_infill/example_7_medial_axis_infill.py
new file mode 100644
index 00000000..76bb093c
--- /dev/null
+++ b/examples/7_medial_axis_infill/example_7_medial_axis_infill.py
@@ -0,0 +1,70 @@
+"""Example: Medial Axis Infill Generation
+
+This example demonstrates how to generate medial axis based infill
+paths using CGAL's straight skeleton.
+
+The medial axis naturally follows the centerlines of the geometry,
+producing adaptive infill that handles thin walls and complex shapes.
+"""
+import time
+from pathlib import Path
+
+from compas.datastructures import Mesh
+from loguru import logger
+
+from compas_slicer.post_processing import generate_medial_axis_infill, simplify_paths_rdp
+from compas_slicer.slicers import PlanarSlicer
+from compas_slicer.utilities import save_to_json
+from compas_slicer.visualization import should_visualize, visualize_slicer
+
+DATA_PATH = Path(__file__).parent / "data"
+OUTPUT_PATH = DATA_PATH / "output"
+
+
+def main(visualize: bool = False):
+ start_time = time.time()
+
+ OUTPUT_PATH.mkdir(exist_ok=True)
+
+ # Load mesh - use the vase from example 1
+ mesh_path = Path(__file__).parent.parent / "1_planar_slicing_simple" / "data" / "simple_vase_open_low_res.obj"
+ mesh = Mesh.from_obj(mesh_path)
+
+ # Slice the mesh
+ logger.info("Slicing mesh...")
+ slicer = PlanarSlicer(mesh, layer_height=2.0)
+ slicer.slice_model()
+
+ # Simplify paths first (optional but recommended)
+ simplify_paths_rdp(slicer, threshold=0.5)
+
+ # Count paths before infill
+ paths_before = sum(len(layer.paths) for layer in slicer.layers)
+ logger.info(f"Paths before infill: {paths_before}")
+
+ # Generate medial axis infill
+ logger.info("Generating medial axis infill...")
+ generate_medial_axis_infill(
+ slicer,
+ min_length=2.0,
+ include_bisectors=True,
+ )
+
+ # Count paths after infill
+ paths_after = sum(len(layer.paths) for layer in slicer.layers)
+ logger.info(f"Paths after infill: {paths_after}")
+ logger.info(f"Infill paths added: {paths_after - paths_before}")
+
+ # Save results
+ slicer.printout_info()
+ save_to_json(slicer.to_data(), OUTPUT_PATH, "medial_axis_slicer.json")
+
+ end_time = time.time()
+ logger.info(f"Total time: {end_time - start_time:.2f} seconds")
+
+ if visualize:
+ visualize_slicer(slicer, mesh)
+
+
+if __name__ == "__main__":
+ main(visualize=should_visualize())
diff --git a/mkdocs.yml b/mkdocs.yml
new file mode 100644
index 00000000..abe32014
--- /dev/null
+++ b/mkdocs.yml
@@ -0,0 +1,163 @@
+site_name: COMPAS Slicer
+site_description: Slicing package for FDM 3D Printing with COMPAS
+site_url: https://compas.dev/compas_slicer
+repo_url: https://github.com/compas-dev/compas_slicer
+repo_name: compas-dev/compas_slicer
+edit_uri: edit/master/docs/
+
+theme:
+ name: material
+ palette:
+ - media: "(prefers-color-scheme: light)"
+ scheme: default
+ primary: deep purple
+ accent: purple
+ toggle:
+ icon: material/brightness-7
+ name: Switch to dark mode
+ - media: "(prefers-color-scheme: dark)"
+ scheme: slate
+ primary: deep purple
+ accent: purple
+ toggle:
+ icon: material/brightness-4
+ name: Switch to light mode
+ font:
+ text: Inter
+ code: JetBrains Mono
+ icon:
+ repo: fontawesome/brands/github
+ logo: material/printer-3d-nozzle
+ features:
+ - navigation.instant
+ - navigation.instant.progress
+ - navigation.tracking
+ - navigation.tabs
+ - navigation.tabs.sticky
+ - navigation.sections
+ - navigation.expand
+ - navigation.path
+ - navigation.top
+ - navigation.footer
+ - search.suggest
+ - search.highlight
+ - search.share
+ - content.code.copy
+ - content.code.annotate
+ - content.tabs.link
+ - content.action.edit
+ - toc.follow
+
+plugins:
+ - search
+ - mkdocstrings:
+ default_handler: python
+ handlers:
+ python:
+ paths: [src]
+ options:
+ docstring_style: numpy
+ show_source: true
+ show_root_heading: true
+ show_root_full_path: false
+ show_symbol_type_heading: true
+ show_symbol_type_toc: true
+ members_order: source
+ merge_init_into_class: true
+ separate_signature: true
+ signature_crossrefs: true
+
+nav:
+ - Home: index.md
+ - Getting Started:
+ - Installation: installation.md
+ - Quick Start: quickstart.md
+ - Concepts:
+ - concepts/index.md
+ - Architecture: concepts/architecture.md
+ - Slicing Algorithms: concepts/slicing-algorithms.md
+ - Print Organization: concepts/print-organization.md
+ - Tutorials:
+ - tutorials/index.md
+ - Introduction: tutorials/introduction.md
+ - Grasshopper Visualization: tutorials/grasshopper.md
+ - Examples:
+ - examples/index.md
+ - Planar Slicing: examples/01_planar_slicing.md
+ - Curved Slicing: examples/02_curved_slicing.md
+ - Vertical Sorting: examples/03_vertical_sorting.md
+ - G-code Generation: examples/04_gcode.md
+ - Scalar Field Slicing: examples/05_scalar_field.md
+ - Attribute Transfer: examples/06_attributes.md
+ - Medial Axis Infill: examples/07_medial_axis.md
+ - API Reference:
+ - api/index.md
+ - Geometry: api/geometry.md
+ - Slicers: api/slicers.md
+ - Print Organization: api/print_organization.md
+ - Pre-processing: api/pre_processing.md
+ - Post-processing: api/post_processing.md
+ - Utilities: api/utilities.md
+ - Contributing: contributing.md
+ - Citing: citing.md
+
+markdown_extensions:
+ - abbr
+ - admonition
+ - attr_list
+ - def_list
+ - footnotes
+ - md_in_html
+ - tables
+ - toc:
+ permalink: true
+ toc_depth: 3
+ - pymdownx.arithmatex:
+ generic: true
+ - pymdownx.betterem
+ - pymdownx.caret
+ - pymdownx.details
+ - pymdownx.emoji:
+ emoji_index: !!python/name:material.extensions.emoji.twemoji
+ emoji_generator: !!python/name:material.extensions.emoji.to_svg
+ - pymdownx.highlight:
+ anchor_linenums: true
+ line_spans: __span
+ pygments_lang_class: true
+ - pymdownx.inlinehilite
+ - pymdownx.keys
+ - pymdownx.magiclink
+ - pymdownx.mark
+ - pymdownx.smartsymbols
+ - pymdownx.superfences:
+ custom_fences:
+ - name: mermaid
+ class: mermaid
+ format: !!python/name:pymdownx.superfences.fence_code_format
+ - pymdownx.tabbed:
+ alternate_style: true
+ - pymdownx.tasklist:
+ custom_checkbox: true
+ - pymdownx.tilde
+
+extra:
+ social:
+ - icon: fontawesome/brands/github
+ link: https://github.com/compas-dev/compas_slicer
+ - icon: fontawesome/brands/python
+ link: https://pypi.org/project/compas_slicer/
+ version:
+ provider: mike
+ analytics:
+ provider: google
+ property: G-XXXXXXXXXX
+
+extra_css:
+ - stylesheets/extra.css
+
+extra_javascript:
+ - javascripts/mathjax.js
+ - https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js
+
+watch:
+ - src/compas_slicer
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..625d692b
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,123 @@
+[build-system]
+requires = ["setuptools>=61.0", "wheel"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "compas_slicer"
+version = "0.8.0"
+description = "Slicing package for FDM 3D Printing with COMPAS"
+readme = "README.md"
+license = {text = "MIT"}
+authors = [
+ {name = "Ioanna Mitropoulou", email = "mitropoulou@arch.ethz.ch"},
+ {name = "Joris Burger"},
+]
+requires-python = ">=3.9"
+classifiers = [
+ "Development Status :: 4 - Beta",
+ "Intended Audience :: Developers",
+ "Topic :: Scientific/Engineering",
+ "License :: OSI Approved :: MIT License",
+ "Operating System :: Unix",
+ "Operating System :: POSIX",
+ "Operating System :: Microsoft :: Windows",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: Implementation :: CPython",
+]
+dependencies = [
+ "attrs>=25.0",
+ "compas>=2.15",
+ "compas_cgal>=0.9",
+ "loguru>=0.7",
+ "networkx>=3.6",
+ "numpy>=2.0",
+ "progressbar2>=4.5",
+ "pyclipper>=1.4",
+ "rdp>=0.8",
+ "scipy>=1.16",
+ "tomli>=2.0; python_version < '3.11'",
+]
+
+[project.urls]
+Homepage = "https://github.com/compas-dev/compas_slicer"
+Documentation = "https://compas.dev/compas_slicer"
+Repository = "https://github.com/compas-dev/compas_slicer"
+
+[project.optional-dependencies]
+viz = [
+ "compas_viewer>=1.2",
+]
+dev = [
+ "bump2version>=1.0",
+ "compas_invocations2>=0.7",
+ "invoke>=2.0",
+ "mypy",
+ "pytest>=7.0",
+ "pytest-benchmark",
+ "pytest-cov",
+ "ruff",
+ "twine",
+]
+docs = [
+ "mike>=2.0",
+ "mkdocs>=1.6",
+ "mkdocs-material>=9.5",
+ "mkdocstrings[python]>=0.27",
+]
+
+[tool.setuptools.packages.find]
+where = ["src"]
+
+[tool.setuptools.package-data]
+"*" = ["*.json", "*.obj", "*.toml"]
+
+[tool.ruff]
+target-version = "py39"
+line-length = 120
+src = ["src"]
+
+[tool.ruff.lint]
+select = [
+ "E", # pycodestyle errors
+ "F", # pyflakes
+ "I", # isort
+ "UP", # pyupgrade
+ "B", # flake8-bugbear
+ "C4", # flake8-comprehensions
+ "SIM", # flake8-simplify
+]
+ignore = [
+ "E501", # line too long (handled by formatter)
+ "B008", # do not perform function calls in argument defaults
+ "SIM108", # use ternary operator instead of if-else
+]
+
+[tool.ruff.lint.isort]
+known-first-party = ["compas_slicer", "compas_slicer_ghpython"]
+
+[tool.mypy]
+python_version = "3.9"
+warn_return_any = true
+warn_unused_configs = true
+ignore_missing_imports = true
+exclude = ["tests", "docs", "examples", "scripts"]
+
+[[tool.mypy.overrides]]
+module = ["compas_slicer.slicers.*", "compas_slicer.print_organization.*", "compas_slicer.pre_processing.*"]
+disable_error_code = ["no-redef", "operator"]
+
+[tool.pytest.ini_options]
+testpaths = ["tests"]
+python_files = ["test_*.py", "*_test.py"]
+addopts = "-ra --tb=short"
+filterwarnings = ["ignore::DeprecationWarning"]
+
+[tool.coverage.run]
+source = ["src/compas_slicer"]
+omit = ["*/tests/*"]
+
+[tool.coverage.report]
+fail_under = 50
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100644
index ba2d1378..00000000
--- a/pytest.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[pytest]
-testpaths = tests
-doctest_optionflags= NORMALIZE_WHITESPACE IGNORE_EXCEPTION_DETAIL ALLOW_UNICODE ALLOW_BYTES
diff --git a/requirements-dev.txt b/requirements-dev.txt
deleted file mode 100644
index 051a8d49..00000000
--- a/requirements-dev.txt
+++ /dev/null
@@ -1,18 +0,0 @@
-autopep8
-attrs >=17.4
-bump2version >=1.0
-check-manifest >=0.36
-doc8
-flake8
-invoke >=0.14
-ipykernel
-ipython >=5.8
-isort
-m2r
-nbsphinx
-pydocstyle
-pytest >=3.2
-sphinx_compas_theme >=0.12
-sphinx >=1.6
-twine
--e .
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 9d1c5d92..00000000
--- a/requirements.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-attrs>=19.2.0
-compas>=1.16.0,<2.0.0
-networkx>=2.5,<3.2
-numpy<=1.23.2
-progressbar2>=3.53,<4.4
-pyclipper>=1.2.0,<1.3.0
-rdp==0.8
-libigl>=2.4.1,<2.5.0
\ No newline at end of file
diff --git a/scripts/curved slicing advanced example/ex2_curved_slicing_advanced.py b/scripts/curved slicing advanced example/ex2_curved_slicing_advanced.py
index 6acb2f9a..3f9da75a 100644
--- a/scripts/curved slicing advanced example/ex2_curved_slicing_advanced.py
+++ b/scripts/curved slicing advanced example/ex2_curved_slicing_advanced.py
@@ -1,6 +1,8 @@
import os
+import time
+import math
from compas.datastructures import Mesh
-import logging
+from loguru import logger
import compas_slicer.utilities as utils
from compas_slicer.slicers import InterpolationSlicer
from compas_slicer.post_processing import simplify_paths_rdp_igl
@@ -11,10 +13,6 @@
from compas_slicer.print_organization import add_safety_printpoints, set_wait_time_on_sharp_corners
from compas_slicer.print_organization import smooth_printpoints_up_vectors, set_blend_radius
from compas_slicer.post_processing import generate_brim, seams_smooth
-import time, math
-
-logger = logging.getLogger('logger')
-logging.basicConfig(format='%(levelname)s - %(message)s', level=logging.INFO)
DATA_PATH = os.path.join(os.path.dirname(__file__), 'data_advanced_example')
OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
diff --git a/scripts/load_paths_from_gh.py b/scripts/load_paths_from_gh.py
index c13df127..4abc456f 100644
--- a/scripts/load_paths_from_gh.py
+++ b/scripts/load_paths_from_gh.py
@@ -1,15 +1,10 @@
import os
+from loguru import logger
from compas_slicer.utilities import load_from_json
from compas_slicer.slicers import PlanarSlicer
from compas_viewers.objectviewer import ObjectViewer
-######################## Logging
-import logging
-
-logger = logging.getLogger('logger')
-logging.basicConfig(format='%(levelname)s-%(message)s', level=logging.INFO)
-
########################
diff --git a/scripts/planar_slicing_igl.py b/scripts/planar_slicing_igl.py
index 884db952..c7175b4c 100644
--- a/scripts/planar_slicing_igl.py
+++ b/scripts/planar_slicing_igl.py
@@ -5,15 +5,13 @@
import numpy as np
import copy
import networkx as nx
-import logging
+from loguru import logger
import compas_slicer.utilities as utils
from compas_slicer.slicers.slice_utilities import sort_graph_connected_components
import progressbar
__all__ = ['create_planar_paths_igl']
-logger = logging.getLogger('logger')
-
def try_to_create_connection(G, isoV, ei, ej, i, j, side_i, side_j, connections_found, tol):
vi = isoV[ei[side_i]]
diff --git a/scripts/planar_slicing_numpy.py b/scripts/planar_slicing_numpy.py
index 9a3106e2..deb387a8 100644
--- a/scripts/planar_slicing_numpy.py
+++ b/scripts/planar_slicing_numpy.py
@@ -3,8 +3,7 @@
from compas.geometry import Point, distance_point_point
from compas_slicer.geometry import Path
from compas_slicer.geometry import Layer
-import logging
-logger = logging.getLogger('logger')
+from loguru import logger
from progress.bar import Bar
diff --git a/scripts/sort_per_shortest_path_mlrose.py b/scripts/sort_per_shortest_path_mlrose.py
index d6264c77..f46fa8e4 100644
--- a/scripts/sort_per_shortest_path_mlrose.py
+++ b/scripts/sort_per_shortest_path_mlrose.py
@@ -1,7 +1,5 @@
import mlrose
-import logging
-
-logger = logging.getLogger('logger')
+from loguru import logger
__all__ = ['sort_per_shortest_path_mlrose']
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index fac73c75..00000000
--- a/setup.cfg
+++ /dev/null
@@ -1,32 +0,0 @@
-[bdist_wheel]
-universal = 1
-
-[flake8]
-max-line-length = 180
-exclude =
- .git,
- __pycache__,
- docs,
- build,
- temp,
- dist
-
-[tool:pytest]
-testpaths = tests
-
-norecursedirs =
- migrations
-
-python_files =
- test_*.py
- *_test.py
- tests.py
-
-addopts =
- -ra
- --strict
- --doctest-modules
- --doctest-glob=\*.rst
- --tb=short
-
-
diff --git a/setup.py b/setup.py
deleted file mode 100644
index 0f90a9f0..00000000
--- a/setup.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python
-# -*- encoding: utf-8 -*-
-# flake8: noqa
-from __future__ import absolute_import
-from __future__ import print_function
-
-import io, os
-from os import path
-
-from setuptools import setup
-from setuptools.command.develop import develop
-from setuptools.command.install import install
-
-
-here = path.abspath(path.dirname(__file__))
-
-
-def read(*names, **kwargs):
- return io.open(
- path.join(here, *names),
- encoding=kwargs.get("encoding", "utf8")
- ).read()
-
-
-long_description = read("README.md")
-requirements = read("requirements.txt").split("\n")
-optional_requirements = {}
-
-setup(
- name="compas_slicer",
- version="0.7.0",
- description="Slicing package for FDM 3D Printing with COMPAS",
- long_description=long_description,
- long_description_content_type="text/markdown",
- url="https://github.com/compas-dev/compas_slicer",
- author="Ioanna Mitropoulou and Joris Burger",
- author_email="mitropoulou@arch.ethz.ch",
- license="MIT license",
- classifiers=[
- "Development Status :: 4 - Beta",
- "Intended Audience :: Developers",
- "Topic :: Scientific/Engineering",
- "License :: OSI Approved :: MIT License",
- "Operating System :: Unix",
- "Operating System :: POSIX",
- "Operating System :: Microsoft :: Windows",
- "Programming Language :: Python :: 3.8",
- "Programming Language :: Python :: 3.9",
- "Programming Language :: Python :: Implementation :: CPython",
- "Programming Language :: Python :: Implementation :: IronPython",
- ],
- keywords=[],
- project_urls={},
- packages=["compas_slicer", "compas_slicer_ghpython"],
- package_dir={"": "src"},
- package_data={},
- data_files=[],
- include_package_data=True,
- zip_safe=False,
- install_requires=[requirements],
- python_requires=">=3.8",
- extras_require=optional_requirements,
- entry_points={
- "console_scripts": [],
- },
- ext_modules=[],
-)
diff --git a/src/compas_slicer/__init__.py b/src/compas_slicer/__init__.py
index ffb78172..cda344a2 100644
--- a/src/compas_slicer/__init__.py
+++ b/src/compas_slicer/__init__.py
@@ -1,71 +1,44 @@
-"""
-********************************************************************************
-compas_slicer
-********************************************************************************
+"""COMPAS Slicer - Slicing package for FDM 3D Printing with COMPAS."""
-.. currentmodule:: compas_slicer
+from pathlib import Path
+__author__ = ["Ioanna Mitropoulou", "Joris Burger"]
+__copyright__ = "Copyright 2020 ETH Zurich"
+__license__ = "MIT License"
+__email__ = "mitropoulou@arch.ethz.ch"
+__version__ = "0.8.0"
-.. toctree::
- :maxdepth: 1
- geometry
- slicers
- print_organization
- pre_processing
- post_processing
- utilities
-
-"""
-
-from __future__ import print_function
-from __future__ import absolute_import
-from __future__ import division
-import os
-import compas
-
-
-__author__ = ['Ioanna Mitropoulou and Joris Burger']
-__copyright__ = 'Copyright 2020 ETH Zurich'
-__license__ = 'MIT License'
-__email__ = 'mitropoulou@arch.ethz.ch'
-__version__ = '0.7.0'
-
-
-HERE = os.path.dirname(__file__)
-
-HOME = os.path.abspath(os.path.join(HERE, "../../"))
-DATA = os.path.abspath(os.path.join(HOME, "data"))
-DOCS = os.path.abspath(os.path.join(HOME, "docs"))
-TEMP = os.path.abspath(os.path.join(HOME, "temp"))
+HERE = Path(__file__).parent
+HOME = HERE.parent.parent
+DATA = HOME / "data"
+DOCS = HOME / "docs"
+TEMP = HOME / "temp"
# Check if package is installed from git
# If that's the case, try to append the current head's hash to __version__
try:
- git_head_file = compas._os.absjoin(HOME, '.git', 'HEAD')
+ git_head_file = HOME / ".git" / "HEAD"
- if os.path.exists(git_head_file):
+ if git_head_file.exists():
# git head file contains one line that looks like this:
# ref: refs/heads/master
- with open(git_head_file, 'r') as git_head:
- _, ref_path = git_head.read().strip().split(' ')
- ref_path = ref_path.split('/')
+ ref_path = git_head_file.read_text().strip().split(" ")[1].split("/")
+ git_head_refs_file = HOME / ".git" / Path(*ref_path)
- git_head_refs_file = compas._os.absjoin(HOME, '.git', *ref_path)
-
- if os.path.exists(git_head_refs_file):
- with open(git_head_refs_file, 'r') as git_head_ref:
- git_commit = git_head_ref.read().strip()
- __version__ += '-' + git_commit[:8]
+ if git_head_refs_file.exists():
+ git_commit = git_head_refs_file.read_text().strip()
+ __version__ += "-" + git_commit[:8]
except Exception:
+ # Git version detection is optional, fail silently if not in git repo
pass
+from .config import * # noqa: F401 E402 F403
from .geometry import * # noqa: F401 E402 F403
-from .slicers import * # noqa: F401 E402 F403
-from .print_organization import * # noqa: F401 E402 F403
-from .utilities import * # noqa: F401 E402 F403
from .post_processing import * # noqa: F401 E402 F403
from .pre_processing import * # noqa: F401 E402 F403
-from .parameters import * # noqa: F401 E402 F403
+from .print_organization import * # noqa: F401 E402 F403
+from .slicers import * # noqa: F401 E402 F403
+from .utilities import * # noqa: F401 E402 F403
__all__ = ["HOME", "DATA", "DOCS", "TEMP"]
diff --git a/src/compas_slicer/__main__.py b/src/compas_slicer/__main__.py
index 3a0de509..2bc52cb7 100644
--- a/src/compas_slicer/__main__.py
+++ b/src/compas_slicer/__main__.py
@@ -1,7 +1,9 @@
import compas
+from loguru import logger
+
import compas_slicer
if __name__ == '__main__':
- print('COMPAS: {}'.format(compas.__version__))
- print('COMPAS Slicer: {}'.format(compas_slicer.__version__))
- print('Awesome! Your installation worked! :)')
+ logger.info(f'COMPAS: {compas.__version__}')
+ logger.info(f'COMPAS Slicer: {compas_slicer.__version__}')
+ logger.info('Awesome! Your installation worked! :)')
diff --git a/src/compas_slicer/_numpy_ops.py b/src/compas_slicer/_numpy_ops.py
new file mode 100644
index 00000000..579ef362
--- /dev/null
+++ b/src/compas_slicer/_numpy_ops.py
@@ -0,0 +1,251 @@
+"""Vectorized numpy operations for performance-critical computations."""
+
+from __future__ import annotations
+
+import numpy as np
+from numpy.typing import NDArray
+from scipy.spatial import cKDTree
+
+
+def batch_closest_points(
+ query_pts: NDArray[np.float64],
+ target_pts: NDArray[np.float64],
+) -> tuple[NDArray[np.intp], NDArray[np.float64]]:
+ """Find closest points using KDTree for efficient batch queries.
+
+ Parameters
+ ----------
+ query_pts : ndarray (N, 3)
+ Points to query.
+ target_pts : ndarray (M, 3)
+ Target point cloud.
+
+ Returns
+ -------
+ indices : ndarray (N,)
+ Index of closest target point for each query.
+ distances : ndarray (N,)
+ Distance to closest point.
+ """
+ tree = cKDTree(target_pts)
+ distances, indices = tree.query(query_pts)
+ return indices, distances
+
+
+def vertex_gradient_from_face_gradient(
+ V: NDArray[np.float64],
+ F: NDArray[np.intp],
+ face_gradient: NDArray[np.float64],
+ face_areas: NDArray[np.float64],
+) -> NDArray[np.float64]:
+ """Compute per-vertex gradient from face gradients using area weighting.
+
+ Vectorized version: accumulates face contributions to vertices using numpy.
+
+ Parameters
+ ----------
+ V : ndarray (V, 3)
+ Vertex coordinates.
+ F : ndarray (F, 3)
+ Face vertex indices.
+ face_gradient : ndarray (F, 3)
+ Gradient vector per face.
+ face_areas : ndarray (F,)
+ Area per face.
+
+ Returns
+ -------
+ ndarray (V, 3)
+ Gradient vector per vertex.
+ """
+ n_vertices = len(V)
+
+ # Weight gradients by area
+ weighted_gradients = face_gradient * face_areas[:, np.newaxis] # (F, 3)
+
+ # Accumulate to vertices using np.add.at
+ vertex_grad_sum = np.zeros((n_vertices, 3), dtype=np.float64)
+ vertex_area_sum = np.zeros(n_vertices, dtype=np.float64)
+
+ for i in range(3): # For each vertex of each face
+ np.add.at(vertex_grad_sum, F[:, i], weighted_gradients)
+ np.add.at(vertex_area_sum, F[:, i], face_areas)
+
+ # Avoid division by zero
+ vertex_area_sum = np.maximum(vertex_area_sum, 1e-10)
+
+ return vertex_grad_sum / vertex_area_sum[:, np.newaxis]
+
+
+def edge_gradient_from_vertex_gradient(
+ edges: NDArray[np.intp],
+ vertex_gradient: NDArray[np.float64],
+) -> NDArray[np.float64]:
+ """Compute edge gradient as sum of endpoint vertex gradients.
+
+ Parameters
+ ----------
+ edges : ndarray (E, 2)
+ Edge vertex indices.
+ vertex_gradient : ndarray (V, 3)
+ Gradient per vertex.
+
+ Returns
+ -------
+ ndarray (E, 3)
+ Gradient per edge.
+ """
+ return vertex_gradient[edges[:, 0]] + vertex_gradient[edges[:, 1]]
+
+
+def face_gradient_from_scalar_field(
+ V: NDArray[np.float64],
+ F: NDArray[np.intp],
+ scalar_field: NDArray[np.float64],
+ face_normals: NDArray[np.float64],
+ face_areas: NDArray[np.float64],
+) -> NDArray[np.float64]:
+ """Compute per-face gradient from vertex scalar field.
+
+ Vectorized computation using the formula:
+ grad_u = ((u1-u0) * cross(v0-v2, N) + (u2-u0) * cross(v1-v0, N)) / (2*A)
+
+ Parameters
+ ----------
+ V : ndarray (V, 3)
+ Vertex coordinates.
+ F : ndarray (F, 3)
+ Face vertex indices.
+ scalar_field : ndarray (V,)
+ Scalar value per vertex.
+ face_normals : ndarray (F, 3)
+ Normal vector per face.
+ face_areas : ndarray (F,)
+ Area per face.
+
+ Returns
+ -------
+ ndarray (F, 3)
+ Gradient vector per face.
+ """
+ # Get vertex coordinates for each face
+ v0 = V[F[:, 0]] # (F, 3)
+ v1 = V[F[:, 1]] # (F, 3)
+ v2 = V[F[:, 2]] # (F, 3)
+
+ # Get scalar values for each face vertex
+ u0 = scalar_field[F[:, 0]] # (F,)
+ u1 = scalar_field[F[:, 1]] # (F,)
+ u2 = scalar_field[F[:, 2]] # (F,)
+
+ # Compute cross products
+ cross1 = np.cross(v0 - v2, face_normals) # (F, 3)
+ cross2 = np.cross(v1 - v0, face_normals) # (F, 3)
+
+ # Compute gradient
+ grad = (
+ (u1 - u0)[:, np.newaxis] * cross1 + (u2 - u0)[:, np.newaxis] * cross2
+ ) / (2 * face_areas[:, np.newaxis])
+
+ return grad
+
+
+def per_vertex_divergence(
+ V: NDArray[np.float64],
+ F: NDArray[np.intp],
+ X: NDArray[np.float64],
+ cotans: NDArray[np.float64],
+) -> NDArray[np.float64]:
+ """Compute divergence of face gradient field at each vertex.
+
+ Parameters
+ ----------
+ V : ndarray (V, 3)
+ Vertex coordinates.
+ F : ndarray (F, 3)
+ Face vertex indices.
+ X : ndarray (F, 3)
+ Gradient vector per face.
+ cotans : ndarray (F, 3)
+ Cotangent weights per face edge.
+
+ Returns
+ -------
+ ndarray (V,)
+ Divergence value per vertex.
+ """
+ n_vertices = len(V)
+
+ # Get vertex coordinates for each face
+ v0 = V[F[:, 0]] # (F, 3)
+ v1 = V[F[:, 1]] # (F, 3)
+ v2 = V[F[:, 2]] # (F, 3)
+
+ # Edge vectors (opposite to vertex i)
+ e0 = v1 - v2 # edge opposite to v0
+ e1 = v2 - v0 # edge opposite to v1
+ e2 = v0 - v1 # edge opposite to v2
+
+ # Compute dot products with gradient
+ dot0 = np.einsum('ij,ij->i', X, e0) # (F,)
+ dot1 = np.einsum('ij,ij->i', X, e1) # (F,)
+ dot2 = np.einsum('ij,ij->i', X, e2) # (F,)
+
+ # Cotangent contributions (cotans[f, i] is cotan of angle at vertex i)
+ # For vertex i: contrib = cotan[k] * dot(X, e_i) + cotan[j] * dot(X, -e_k)
+ # where j = (i+1)%3, k = (i+2)%3
+ contrib0 = (cotans[:, 2] * dot0 + cotans[:, 1] * (-dot2)) / 2.0
+ contrib1 = (cotans[:, 0] * dot1 + cotans[:, 2] * (-dot0)) / 2.0
+ contrib2 = (cotans[:, 1] * dot2 + cotans[:, 0] * (-dot1)) / 2.0
+
+ # Accumulate to vertices
+ div_X = np.zeros(n_vertices, dtype=np.float64)
+ np.add.at(div_X, F[:, 0], contrib0)
+ np.add.at(div_X, F[:, 1], contrib1)
+ np.add.at(div_X, F[:, 2], contrib2)
+
+ return div_X
+
+
+def vectorized_distances(
+ points1: NDArray[np.float64],
+ points2: NDArray[np.float64],
+) -> NDArray[np.float64]:
+ """Compute pairwise distances between two point sets.
+
+ Parameters
+ ----------
+ points1 : ndarray (N, 3)
+ points2 : ndarray (M, 3)
+
+ Returns
+ -------
+ ndarray (N, M)
+ Distance matrix.
+ """
+ # Using broadcasting: (N, 1, 3) - (1, M, 3) = (N, M, 3)
+ diff = points1[:, np.newaxis, :] - points2[np.newaxis, :, :]
+ return np.linalg.norm(diff, axis=2)
+
+
+def min_distances_to_set(
+ query_pts: NDArray[np.float64],
+ target_pts: NDArray[np.float64],
+) -> NDArray[np.float64]:
+ """Compute minimum distance from each query point to target set.
+
+ More memory efficient than full distance matrix for large sets.
+
+ Parameters
+ ----------
+ query_pts : ndarray (N, 3)
+ target_pts : ndarray (M, 3)
+
+ Returns
+ -------
+ ndarray (N,)
+ Minimum distance for each query point.
+ """
+ tree = cKDTree(target_pts)
+ distances, _ = tree.query(query_pts)
+ return distances
diff --git a/src/compas_slicer/config.py b/src/compas_slicer/config.py
new file mode 100644
index 00000000..7123e963
--- /dev/null
+++ b/src/compas_slicer/config.py
@@ -0,0 +1,476 @@
+"""Configuration dataclasses for compas_slicer.
+
+This module provides typed configuration objects with defaults loaded from
+a TOML file. All configs are dataclasses with full type hints.
+"""
+
+from __future__ import annotations
+
+import sys
+from dataclasses import dataclass, field
+from enum import Enum
+from pathlib import Path
+from typing import Any
+
+from compas.data import Data
+
+if sys.version_info >= (3, 11):
+ import tomllib
+else:
+ import tomli as tomllib
+
+__all__ = [
+ "SlicerConfig",
+ "InterpolationConfig",
+ "GcodeConfig",
+ "PrintConfig",
+ "OutputConfig",
+ "GeodesicsMethod",
+ "UnionMethod",
+ "load_defaults",
+]
+
+# Load defaults from TOML at module import time
+_DEFAULTS_PATH = Path(__file__).parent / "data" / "defaults.toml"
+
+
+def load_defaults() -> dict[str, Any]:
+ """Load default configuration from TOML file.
+
+ Returns
+ -------
+ dict[str, Any]
+ Dictionary with sections: slicer, interpolation, gcode
+
+ """
+ with open(_DEFAULTS_PATH, "rb") as f:
+ return tomllib.load(f)
+
+
+_DEFAULTS = load_defaults()
+
+
+class GeodesicsMethod(str, Enum):
+ """Method for computing geodesic distances."""
+
+ EXACT_IGL = "exact_igl"
+ HEAT_IGL = "heat_igl"
+ HEAT_CGAL = "heat_cgal"
+ HEAT = "heat"
+
+
+class UnionMethod(str, Enum):
+ """Method for combining target boundaries."""
+
+ MIN = "min"
+ SMOOTH = "smooth"
+ CHAMFER = "chamfer"
+ STAIRS = "stairs"
+
+
+@dataclass
+class OutputConfig:
+ """Configuration for output paths.
+
+ Attributes
+ ----------
+ base_path : Path
+ Base directory for input/output.
+ output_subdir : str
+ Name of the output subdirectory (created if not exists).
+
+ """
+
+ base_path: Path = field(default_factory=Path.cwd)
+ output_subdir: str = "output"
+
+ @property
+ def output_path(self) -> Path:
+ """Get the full output path, creating directory if needed."""
+ out = self.base_path / self.output_subdir
+ out.mkdir(exist_ok=True)
+ return out
+
+ def __post_init__(self) -> None:
+ if isinstance(self.base_path, str):
+ self.base_path = Path(self.base_path)
+
+
+def _slicer_defaults() -> dict[str, Any]:
+ return _DEFAULTS.get("slicer", {})
+
+
+def _interpolation_defaults() -> dict[str, Any]:
+ return _DEFAULTS.get("interpolation", {})
+
+
+def _gcode_defaults() -> dict[str, Any]:
+ return _DEFAULTS.get("gcode", {})
+
+
+@dataclass
+class SlicerConfig(Data):
+ """Configuration for slicer operations.
+
+ Attributes
+ ----------
+ layer_height : float
+ Height between layers in mm.
+ min_path_length : int
+ Minimum number of points for a valid path.
+ close_path_tolerance : float
+ Distance threshold for considering path endpoints as coincident.
+
+ """
+
+ layer_height: float = field(default_factory=lambda: _slicer_defaults().get("layer_height", 2.0))
+ min_path_length: int = field(default_factory=lambda: _slicer_defaults().get("min_path_length", 2))
+ close_path_tolerance: float = field(default_factory=lambda: _slicer_defaults().get("close_path_tolerance", 0.00001))
+
+ def __post_init__(self) -> None:
+ super().__init__()
+
+ @property
+ def __data__(self) -> dict[str, Any]:
+ return {
+ "layer_height": self.layer_height,
+ "min_path_length": self.min_path_length,
+ "close_path_tolerance": self.close_path_tolerance,
+ }
+
+ @classmethod
+ def __from_data__(cls, data: dict[str, Any]) -> SlicerConfig:
+ d = _slicer_defaults()
+ return cls(
+ layer_height=data.get("layer_height", d.get("layer_height", 2.0)),
+ min_path_length=data.get("min_path_length", d.get("min_path_length", 2)),
+ close_path_tolerance=data.get("close_path_tolerance", d.get("close_path_tolerance", 0.00001)),
+ )
+
+
+@dataclass
+class InterpolationConfig(Data):
+ """Configuration for interpolation (curved) slicing.
+
+ Attributes
+ ----------
+ avg_layer_height : float
+ Average height between layers.
+ min_layer_height : float
+ Minimum layer height.
+ max_layer_height : float
+ Maximum layer height.
+ vertical_layers_max_centroid_dist : float
+ Maximum distance for grouping paths into vertical layers.
+ target_low_geodesics_method : GeodesicsMethod
+ Method for computing geodesics to low boundary.
+ target_high_geodesics_method : GeodesicsMethod
+ Method for computing geodesics to high boundary.
+ target_high_union_method : UnionMethod
+ Method for combining high target boundaries.
+ target_high_union_params : list[float]
+ Parameters for the union method.
+ uneven_upper_targets_offset : float
+ Offset for uneven upper targets.
+
+ """
+
+ avg_layer_height: float = field(default_factory=lambda: _interpolation_defaults().get("avg_layer_height", 5.0))
+ min_layer_height: float = field(default_factory=lambda: _interpolation_defaults().get("min_layer_height", 0.5))
+ max_layer_height: float = field(default_factory=lambda: _interpolation_defaults().get("max_layer_height", 10.0))
+ vertical_layers_max_centroid_dist: float = field(
+ default_factory=lambda: _interpolation_defaults().get("vertical_layers_max_centroid_dist", 25.0)
+ )
+ target_low_geodesics_method: GeodesicsMethod = field(
+ default_factory=lambda: GeodesicsMethod(_interpolation_defaults().get("target_low_geodesics_method", "heat_igl"))
+ )
+ target_high_geodesics_method: GeodesicsMethod = field(
+ default_factory=lambda: GeodesicsMethod(_interpolation_defaults().get("target_high_geodesics_method", "heat_igl"))
+ )
+ target_high_union_method: UnionMethod = field(
+ default_factory=lambda: UnionMethod(_interpolation_defaults().get("target_high_union_method", "min"))
+ )
+ target_high_union_params: list[float] = field(
+ default_factory=lambda: list(_interpolation_defaults().get("target_high_union_params", []))
+ )
+ uneven_upper_targets_offset: float = field(
+ default_factory=lambda: _interpolation_defaults().get("uneven_upper_targets_offset", 0.0)
+ )
+
+ def __post_init__(self) -> None:
+ super().__init__()
+ # Convert string enums if needed
+ if isinstance(self.target_low_geodesics_method, str):
+ self.target_low_geodesics_method = GeodesicsMethod(self.target_low_geodesics_method)
+ if isinstance(self.target_high_geodesics_method, str):
+ self.target_high_geodesics_method = GeodesicsMethod(self.target_high_geodesics_method)
+ if isinstance(self.target_high_union_method, str):
+ self.target_high_union_method = UnionMethod(self.target_high_union_method)
+
+ @property
+ def __data__(self) -> dict[str, Any]:
+ return {
+ "avg_layer_height": self.avg_layer_height,
+ "min_layer_height": self.min_layer_height,
+ "max_layer_height": self.max_layer_height,
+ "vertical_layers_max_centroid_dist": self.vertical_layers_max_centroid_dist,
+ "target_low_geodesics_method": self.target_low_geodesics_method.value,
+ "target_high_geodesics_method": self.target_high_geodesics_method.value,
+ "target_high_union_method": self.target_high_union_method.value,
+ "target_high_union_params": self.target_high_union_params,
+ "uneven_upper_targets_offset": self.uneven_upper_targets_offset,
+ }
+
+ @classmethod
+ def __from_data__(cls, data: dict[str, Any]) -> InterpolationConfig:
+ d = _interpolation_defaults()
+ return cls(
+ avg_layer_height=data.get("avg_layer_height", d.get("avg_layer_height", 5.0)),
+ min_layer_height=data.get("min_layer_height", d.get("min_layer_height", 0.5)),
+ max_layer_height=data.get("max_layer_height", d.get("max_layer_height", 10.0)),
+ vertical_layers_max_centroid_dist=data.get(
+ "vertical_layers_max_centroid_dist", d.get("vertical_layers_max_centroid_dist", 25.0)
+ ),
+ target_low_geodesics_method=data.get(
+ "target_low_geodesics_method", d.get("target_low_geodesics_method", "heat_igl")
+ ),
+ target_high_geodesics_method=data.get(
+ "target_high_geodesics_method", d.get("target_high_geodesics_method", "heat_igl")
+ ),
+ target_high_union_method=data.get("target_high_union_method", d.get("target_high_union_method", "min")),
+ target_high_union_params=data.get("target_high_union_params", d.get("target_high_union_params", [])),
+ uneven_upper_targets_offset=data.get(
+ "uneven_upper_targets_offset", d.get("uneven_upper_targets_offset", 0.0)
+ ),
+ )
+
+
+@dataclass
+class GcodeConfig(Data):
+ """Configuration for G-code generation.
+
+ Attributes
+ ----------
+ nozzle_diameter : float
+ Nozzle diameter in mm.
+ filament_diameter : float
+ Filament diameter in mm.
+ delta : bool
+ True for delta printers.
+ print_volume : tuple[float, float, float]
+ Print volume (x, y, z) in mm.
+ layer_width : float
+ Layer width in mm.
+ extruder_temperature : int
+ Extruder temperature in C.
+ bed_temperature : int
+ Bed temperature in C.
+ fan_speed : int
+ Fan speed (0-255).
+ fan_start_z : float
+ Height at which fan starts in mm.
+ flowrate : float
+ Global flow multiplier.
+ feedrate : float
+ Print feedrate in mm/min.
+ feedrate_travel : float
+ Travel feedrate in mm/min.
+ feedrate_low : float
+ Low feedrate in mm/min.
+ feedrate_retraction : float
+ Retraction feedrate in mm/min.
+ acceleration : float
+ Acceleration in mm/s2. 0 = driver default.
+ jerk : float
+ Jerk in mm/s. 0 = driver default.
+ z_hop : float
+ Z hop distance in mm.
+ retraction_length : float
+ Retraction length in mm.
+ retraction_min_travel : float
+ Minimum travel distance for retraction in mm.
+ flow_over : float
+ Overextrusion factor below min_over_z.
+ min_over_z : float
+ Height below which overextrusion applies.
+
+ """
+
+ nozzle_diameter: float = field(default_factory=lambda: _gcode_defaults().get("nozzle_diameter", 0.4))
+ filament_diameter: float = field(default_factory=lambda: _gcode_defaults().get("filament_diameter", 1.75))
+ delta: bool = field(default_factory=lambda: _gcode_defaults().get("delta", False))
+ print_volume: tuple[float, float, float] = field(
+ default_factory=lambda: tuple(_gcode_defaults().get("print_volume", [300.0, 300.0, 600.0]))
+ )
+ layer_width: float = field(default_factory=lambda: _gcode_defaults().get("layer_width", 0.6))
+ extruder_temperature: int = field(default_factory=lambda: _gcode_defaults().get("extruder_temperature", 200))
+ bed_temperature: int = field(default_factory=lambda: _gcode_defaults().get("bed_temperature", 60))
+ fan_speed: int = field(default_factory=lambda: _gcode_defaults().get("fan_speed", 255))
+ fan_start_z: float = field(default_factory=lambda: _gcode_defaults().get("fan_start_z", 0.0))
+ flowrate: float = field(default_factory=lambda: _gcode_defaults().get("flowrate", 1.0))
+ feedrate: float = field(default_factory=lambda: _gcode_defaults().get("feedrate", 3600.0))
+ feedrate_travel: float = field(default_factory=lambda: _gcode_defaults().get("feedrate_travel", 4800.0))
+ feedrate_low: float = field(default_factory=lambda: _gcode_defaults().get("feedrate_low", 1800.0))
+ feedrate_retraction: float = field(default_factory=lambda: _gcode_defaults().get("feedrate_retraction", 2400.0))
+ acceleration: float = field(default_factory=lambda: _gcode_defaults().get("acceleration", 0.0))
+ jerk: float = field(default_factory=lambda: _gcode_defaults().get("jerk", 0.0))
+ z_hop: float = field(default_factory=lambda: _gcode_defaults().get("z_hop", 0.5))
+ retraction_length: float = field(default_factory=lambda: _gcode_defaults().get("retraction_length", 1.0))
+ retraction_min_travel: float = field(default_factory=lambda: _gcode_defaults().get("retraction_min_travel", 6.0))
+ flow_over: float = field(default_factory=lambda: _gcode_defaults().get("flow_over", 1.0))
+ min_over_z: float = field(default_factory=lambda: _gcode_defaults().get("min_over_z", 0.0))
+
+ def __post_init__(self) -> None:
+ super().__init__()
+ # Ensure print_volume is a tuple
+ if isinstance(self.print_volume, list):
+ self.print_volume = tuple(self.print_volume)
+
+ @property
+ def print_volume_x(self) -> float:
+ return self.print_volume[0]
+
+ @property
+ def print_volume_y(self) -> float:
+ return self.print_volume[1]
+
+ @property
+ def print_volume_z(self) -> float:
+ return self.print_volume[2]
+
+ @property
+ def __data__(self) -> dict[str, Any]:
+ return {
+ "nozzle_diameter": self.nozzle_diameter,
+ "filament_diameter": self.filament_diameter,
+ "delta": self.delta,
+ "print_volume": list(self.print_volume),
+ "layer_width": self.layer_width,
+ "extruder_temperature": self.extruder_temperature,
+ "bed_temperature": self.bed_temperature,
+ "fan_speed": self.fan_speed,
+ "fan_start_z": self.fan_start_z,
+ "flowrate": self.flowrate,
+ "feedrate": self.feedrate,
+ "feedrate_travel": self.feedrate_travel,
+ "feedrate_low": self.feedrate_low,
+ "feedrate_retraction": self.feedrate_retraction,
+ "acceleration": self.acceleration,
+ "jerk": self.jerk,
+ "z_hop": self.z_hop,
+ "retraction_length": self.retraction_length,
+ "retraction_min_travel": self.retraction_min_travel,
+ "flow_over": self.flow_over,
+ "min_over_z": self.min_over_z,
+ }
+
+ @classmethod
+ def __from_data__(cls, data: dict[str, Any]) -> GcodeConfig:
+ d = _gcode_defaults()
+ # Handle both tuple and separate x/y/z keys for print_volume
+ if "print_volume" in data:
+ print_volume = tuple(data["print_volume"])
+ else:
+ default_vol = d.get("print_volume", [300.0, 300.0, 600.0])
+ print_volume = (
+ data.get("print_volume_x", default_vol[0]),
+ data.get("print_volume_y", default_vol[1]),
+ data.get("print_volume_z", default_vol[2]),
+ )
+
+ return cls(
+ nozzle_diameter=data.get("nozzle_diameter", d.get("nozzle_diameter", 0.4)),
+ filament_diameter=data.get("filament_diameter", d.get("filament_diameter", 1.75)),
+ delta=data.get("delta", d.get("delta", False)),
+ print_volume=print_volume,
+ layer_width=data.get("layer_width", d.get("layer_width", 0.6)),
+ extruder_temperature=data.get("extruder_temperature", d.get("extruder_temperature", 200)),
+ bed_temperature=data.get("bed_temperature", d.get("bed_temperature", 60)),
+ fan_speed=data.get("fan_speed", d.get("fan_speed", 255)),
+ fan_start_z=data.get("fan_start_z", d.get("fan_start_z", 0.0)),
+ flowrate=data.get("flowrate", d.get("flowrate", 1.0)),
+ feedrate=data.get("feedrate", d.get("feedrate", 3600.0)),
+ feedrate_travel=data.get("feedrate_travel", d.get("feedrate_travel", 4800.0)),
+ feedrate_low=data.get("feedrate_low", d.get("feedrate_low", 1800.0)),
+ feedrate_retraction=data.get("feedrate_retraction", d.get("feedrate_retraction", 2400.0)),
+ acceleration=data.get("acceleration", d.get("acceleration", 0.0)),
+ jerk=data.get("jerk", d.get("jerk", 0.0)),
+ z_hop=data.get("z_hop", d.get("z_hop", 0.5)),
+ retraction_length=data.get("retraction_length", d.get("retraction_length", 1.0)),
+ retraction_min_travel=data.get("retraction_min_travel", d.get("retraction_min_travel", 6.0)),
+ flow_over=data.get("flow_over", d.get("flow_over", 1.0)),
+ min_over_z=data.get("min_over_z", d.get("min_over_z", 0.0)),
+ )
+
+
+@dataclass
+class PrintConfig(Data):
+ """Unified configuration for print operations.
+
+ This combines slicer, interpolation, and gcode configs into a single
+ configuration object for convenience.
+
+ Attributes
+ ----------
+ slicer : SlicerConfig
+ Slicer configuration.
+ interpolation : InterpolationConfig
+ Interpolation slicing configuration.
+ gcode : GcodeConfig
+ G-code generation configuration.
+ output : OutputConfig
+ Output path configuration.
+
+ """
+
+ slicer: SlicerConfig = field(default_factory=SlicerConfig)
+ interpolation: InterpolationConfig = field(default_factory=InterpolationConfig)
+ gcode: GcodeConfig = field(default_factory=GcodeConfig)
+ output: OutputConfig = field(default_factory=OutputConfig)
+
+ def __post_init__(self) -> None:
+ super().__init__()
+
+ @property
+ def __data__(self) -> dict[str, Any]:
+ return {
+ "slicer": self.slicer.__data__,
+ "interpolation": self.interpolation.__data__,
+ "gcode": self.gcode.__data__,
+ "output": {
+ "base_path": str(self.output.base_path),
+ "output_subdir": self.output.output_subdir,
+ },
+ }
+
+ @classmethod
+ def __from_data__(cls, data: dict[str, Any]) -> PrintConfig:
+ output_data = data.get("output", {})
+ return cls(
+ slicer=SlicerConfig.__from_data__(data.get("slicer", {})),
+ interpolation=InterpolationConfig.__from_data__(data.get("interpolation", {})),
+ gcode=GcodeConfig.__from_data__(data.get("gcode", {})),
+ output=OutputConfig(
+ base_path=Path(output_data.get("base_path", ".")),
+ output_subdir=output_data.get("output_subdir", "output"),
+ ),
+ )
+
+ @classmethod
+ def from_toml(cls, path: str | Path) -> PrintConfig:
+ """Load configuration from a TOML file.
+
+ Parameters
+ ----------
+ path : str | Path
+ Path to TOML configuration file.
+
+ Returns
+ -------
+ PrintConfig
+ Configuration loaded from file.
+
+ """
+ with open(path, "rb") as f:
+ data = tomllib.load(f)
+ return cls.__from_data__(data)
diff --git a/src/compas_slicer/data/defaults.toml b/src/compas_slicer/data/defaults.toml
new file mode 100644
index 00000000..e7c1d443
--- /dev/null
+++ b/src/compas_slicer/data/defaults.toml
@@ -0,0 +1,52 @@
+# compas_slicer default configuration
+# This file is the single source of truth for all parameter defaults
+
+[slicer]
+layer_height = 2.0
+min_path_length = 2
+close_path_tolerance = 0.00001
+
+[interpolation]
+avg_layer_height = 5.0
+min_layer_height = 0.5
+max_layer_height = 10.0
+vertical_layers_max_centroid_dist = 25.0
+target_low_geodesics_method = "heat_cgal"
+target_high_geodesics_method = "heat_cgal"
+target_high_union_method = "min"
+target_high_union_params = []
+uneven_upper_targets_offset = 0.0
+
+[gcode]
+# Physical parameters
+nozzle_diameter = 0.4 # mm
+filament_diameter = 1.75 # mm
+delta = false
+print_volume = [300.0, 300.0, 600.0] # mm [x, y, z]
+
+# Dimensional parameters
+layer_width = 0.6 # mm
+
+# Temperature parameters
+extruder_temperature = 200 # C
+bed_temperature = 60 # C
+fan_speed = 255 # 0-255
+fan_start_z = 0.0 # mm, height at which fan starts
+
+# Movement parameters
+flowrate = 1.0 # global flow multiplier
+feedrate = 3600.0 # mm/min
+feedrate_travel = 4800.0 # mm/min
+feedrate_low = 1800.0 # mm/min
+feedrate_retraction = 2400.0 # mm/min
+acceleration = 0.0 # mm/s2, 0 = driver default
+jerk = 0.0 # mm/s, 0 = driver default
+
+# Retraction
+z_hop = 0.5 # mm
+retraction_length = 1.0 # mm
+retraction_min_travel = 6.0 # mm, below this no retraction
+
+# Adhesion parameters
+flow_over = 1.0 # overextrusion factor for z < min_over_z
+min_over_z = 0.0 # mm, height below which overextrusion applies
diff --git a/src/compas_slicer/geometry/__init__.py b/src/compas_slicer/geometry/__init__.py
index 13dd31a2..964b0153 100644
--- a/src/compas_slicer/geometry/__init__.py
+++ b/src/compas_slicer/geometry/__init__.py
@@ -1,40 +1,8 @@
-"""
-********************************************************************************
-geometry
-********************************************************************************
+"""Core geometric entities: Layer, Path, and PrintPoint."""
-.. currentmodule:: compas_slicer.geometry
-
-
-Geometry in compas_slicer consists out of a Layer, Path, or Printpoint.
-A Layer is generated when a geometry is sliced into layers and can therefore be
-seen as a 'slice' of a geometry. Layers are typically organized horizontally,
-but can also be organized vertically. A Layer consists out of one, or multiple
-Paths (depending on the geometry).
-A Path is a contour within a layer. A Path consists out of a list of
-compas.Points, plus some additional attributes.
-A PrintPoint consists out of a single compas.geometry.Point, with additional
-functionality added for the printing process.
-
-
-Classes
-=======
-
-.. autosummary::
- :toctree: generated/
- :nosignatures:
-
- Layer
- Path
- PrintPoint
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from .path import * # noqa: F401 E402 F403
from .layer import * # noqa: F401 E402 F403
+from .path import * # noqa: F401 F403
from .print_point import * # noqa: F401 E402 F403
+from .printpoints_collection import * # noqa: F401 E402 F403
__all__ = [name for name in dir() if not name.startswith('_')]
diff --git a/src/compas_slicer/geometry/layer.py b/src/compas_slicer/geometry/layer.py
index f1d932dd..b2857046 100644
--- a/src/compas_slicer/geometry/layer.py
+++ b/src/compas_slicer/geometry/layer.py
@@ -1,215 +1,267 @@
-import logging
-import compas_slicer
-import compas_slicer.utilities.utils as utils
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, Any
+
import numpy as np
-from compas_slicer.geometry import Path
+from compas.data import Data
+from loguru import logger
+
+import compas_slicer.utilities.utils as utils
+from compas_slicer.geometry.path import Path
-logger = logging.getLogger('logger')
+if TYPE_CHECKING:
+ from numpy.typing import NDArray
-__all__ = ['Layer',
- 'VerticalLayer',
- 'VerticalLayersManager']
+__all__ = ["Layer", "VerticalLayer", "VerticalLayersManager"]
+
+
+def _parse_min_max(value: Any) -> tuple[float | None, float | None]:
+ """Parse min_max_z_height from data."""
+ if value is None:
+ return (None, None)
+ if isinstance(value, (list, tuple)) and len(value) == 2:
+ return (value[0], value[1])
+ return (None, None)
-class Layer(object):
- """
- A Layer stores a group of ordered paths that are generated when a geometry is sliced.
- Layers are typically organized horizontally, but can also be organized vertically (see VerticalLayer).
- A Layer consists of one, or multiple Paths (depending on the geometry).
+
+@dataclass
+class Layer(Data):
+ """A Layer stores a group of ordered paths generated when a geometry is sliced.
+
+ Layers are typically organized horizontally, but can also be organized
+ vertically (see VerticalLayer). A Layer consists of one or multiple Paths.
Attributes
----------
- paths: list
- :class:`compas_slicer.geometry.Path`
- is_brim: bool
+ paths : list[Path]
+ List of paths in this layer.
+ is_brim : bool
True if this layer is a brim layer.
- number_of_brim_offsets: int
+ number_of_brim_offsets : int | None
The number of brim offsets this layer has (None if no brim).
- is_raft: bool
+ is_raft : bool
True if this layer is a raft layer.
- """
-
- def __init__(self, paths):
- # check input
- if paths is None:
- paths = []
- if len(paths) > 0:
- assert isinstance(paths[0], compas_slicer.geometry.Path)
- self.paths = paths
+ min_max_z_height : tuple[float | None, float | None]
+ Tuple containing the min and max z height of the layer.
- self.min_max_z_height = (None, None) # Tuple containing the min and max z height of the layer.
- if paths:
- self.calculate_z_bounds()
-
- # brim
- self.is_brim = False
- self.number_of_brim_offsets = None
-
- # raft
- self.is_raft = False
+ """
- def __repr__(self):
+ paths: list[Path] = field(default_factory=list)
+ is_brim: bool = False
+ number_of_brim_offsets: int | None = None
+ is_raft: bool = False
+ min_max_z_height: tuple[float | None, float | None] = (None, None)
+
+ def __post_init__(self) -> None:
+ super().__init__() # Initialize Data base class
+ if len(self.paths) > 0:
+ if not isinstance(self.paths[0], Path):
+ raise TypeError("paths must contain Path objects")
+ if self.min_max_z_height == (None, None):
+ self.calculate_z_bounds()
+
+ def __repr__(self) -> str:
no_of_paths = len(self.paths) if self.paths else 0
- return "" % no_of_paths
+ return f""
@property
- def total_number_of_points(self):
+ def total_number_of_points(self) -> int:
"""Returns the total number of points within the layer."""
- num = 0
- for path in self.paths:
- num += len(path.printpoints)
- return num
-
- def calculate_z_bounds(self):
- """ Fills in the attribute self.min_max_z_height. """
- assert len(self.paths) > 0, "You cannot calculate z_bounds because the list of paths is empty."
- z_min = 2 ** 32 # very big number
- z_max = -2 ** 32 # very small number
+ return sum(len(path.points) for path in self.paths)
+
+ def calculate_z_bounds(self) -> None:
+ """Fills in the attribute self.min_max_z_height."""
+ if not self.paths:
+ raise ValueError("Cannot calculate z_bounds because the list of paths is empty.")
+
+ # Vectorized z extraction
+ all_z = []
for path in self.paths:
for pt in path.points:
- z_min = min(z_min, pt[2])
- z_max = max(z_max, pt[2])
- self.min_max_z_height = (z_min, z_max)
+ all_z.append(pt[2])
+
+ self.min_max_z_height = (min(all_z), max(all_z))
+
+ @property
+ def __data__(self) -> dict[str, Any]:
+ return {
+ "paths": [path.__data__ for path in self.paths],
+ "layer_type": "horizontal_layer",
+ "is_brim": self.is_brim,
+ "number_of_brim_offsets": self.number_of_brim_offsets,
+ "min_max_z_height": list(self.min_max_z_height),
+ }
+
+ @classmethod
+ def __from_data__(cls, data: dict[str, Any]) -> Layer:
+ paths_data = data["paths"]
+ # Handle both list format and legacy dict format
+ if isinstance(paths_data, dict):
+ paths = [Path.from_data(paths_data[key]) for key in sorted(paths_data.keys(), key=lambda x: int(x))]
+ else:
+ paths = [Path.from_data(p) for p in paths_data]
+
+ return cls(
+ paths=paths,
+ is_brim=data.get("is_brim", False),
+ number_of_brim_offsets=data.get("number_of_brim_offsets"),
+ min_max_z_height=_parse_min_max(data.get("min_max_z_height")),
+ )
@classmethod
- def from_data(cls, data):
+ def from_data(cls, data: dict[str, Any]) -> Layer:
"""Construct a layer from its data representation.
Parameters
----------
- data: dict
+ data : dict
The data dictionary.
Returns
-------
- layer
+ Layer
The constructed layer.
+
"""
- paths_data = data['paths']
- paths = [Path.from_data(paths_data[key]) for key in paths_data]
- layer = cls(paths=paths)
- layer.is_brim = data['is_brim']
- layer.number_of_brim_offsets = data['number_of_brim_offsets']
- layer.min_max_z_height = data['min_max_z_height']
- return layer
+ return cls.__from_data__(data)
- def to_data(self):
- """Returns a dictionary of structured data representing the data structure.
+ def to_data(self) -> dict[str, Any]:
+ """Returns a dictionary of structured data representing the layer.
Returns
-------
dict
The layer's data.
+
"""
- data = {'paths': {i: [] for i in range(len(self.paths))},
- 'layer_type': 'horizontal_layer',
- 'is_brim': self.is_brim,
- 'number_of_brim_offsets': self.number_of_brim_offsets,
- 'min_max_z_height': self.min_max_z_height}
- for i, path in enumerate(self.paths):
- data['paths'][i] = path.to_data()
- return data
+ return self.__data__
+@dataclass
class VerticalLayer(Layer):
- """
- Vertical ordering. A VerticalLayer stores the print paths sorted in vertical groups.
+ """Vertical ordering layer that stores print paths sorted in vertical groups.
+
It is created with an empty list of paths that is filled in afterwards.
Attributes
----------
- id: int
+ id : int
Identifier of vertical layer.
+ head_centroid : NDArray | None
+ Centroid of the last path's points.
+
"""
- def __init__(self, id=0, paths=None):
- Layer.__init__(self, paths=paths)
- self.id = id
- self.head_centroid = None
+ id: int = 0
+ head_centroid: NDArray | None = field(default=None, repr=False)
- def __repr__(self):
+ def __repr__(self) -> str:
no_of_paths = len(self.paths) if self.paths else 0
- return "" % (self.id, no_of_paths)
+ return f""
- def append_(self, path):
- """ Add path to self.paths list. """
+ def append_(self, path: Path) -> None:
+ """Add path to self.paths list."""
self.paths.append(path)
self.compute_head_centroid()
self.calculate_z_bounds()
- def compute_head_centroid(self):
- """ Find the centroid of all the points of the last path in the self.paths list"""
+ def compute_head_centroid(self) -> None:
+ """Find the centroid of all the points of the last path."""
pts = np.array(self.paths[-1].points)
self.head_centroid = np.mean(pts, axis=0)
- def printout_details(self):
- """ Prints the details of the class. """
- logger.info("VerticalLayer id : %d" % self.id)
- logger.info("Total number of paths : %d" % len(self.paths))
+ def printout_details(self) -> None:
+ """Prints the details of the class."""
+ logger.info(f"VerticalLayer id: {self.id}")
+ logger.info(f"Total number of paths: {len(self.paths)}")
- def to_data(self):
- """Returns a dictionary of structured data representing the data structure.
+ @property
+ def __data__(self) -> dict[str, Any]:
+ return {
+ "paths": [path.__data__ for path in self.paths],
+ "min_max_z_height": list(self.min_max_z_height),
+ "layer_type": "vertical_layer",
+ "id": self.id,
+ }
- Returns
- -------
- dict
- The vertical layer's data.
- """
- data = {'paths': {i: [] for i in range(len(self.paths))},
- 'min_max_z_height': self.min_max_z_height,
- 'layer_type': 'vertical_layer'}
- for i, path in enumerate(self.paths):
- data['paths'][i] = path.to_data()
- return data
+ @classmethod
+ def __from_data__(cls, data: dict[str, Any]) -> VerticalLayer:
+ paths_data = data["paths"]
+ # Handle both list format and legacy dict format
+ if isinstance(paths_data, dict):
+ paths = [Path.from_data(paths_data[key]) for key in sorted(paths_data.keys(), key=lambda x: int(x))]
+ else:
+ paths = [Path.from_data(p) for p in paths_data]
+
+ layer = cls(
+ paths=paths,
+ id=data.get("id", 0),
+ min_max_z_height=_parse_min_max(data.get("min_max_z_height")),
+ )
+ return layer
@classmethod
- def from_data(cls, data):
+ def from_data(cls, data: dict[str, Any]) -> VerticalLayer:
"""Construct a vertical layer from its data representation.
Parameters
----------
- data: dict
+ data : dict
The data dictionary.
Returns
-------
- layer
+ VerticalLayer
The constructed vertical layer.
+
"""
- paths_data = data['paths']
- paths = [Path.from_data(paths_data[key]) for key in paths_data]
- layer = cls(id=None)
- layer.paths = paths
- layer.min_max_z_height = data['min_max_z_height']
- return layer
+ return cls.__from_data__(data)
+
+ def to_data(self) -> dict[str, Any]:
+ """Returns a dictionary of structured data representing the vertical layer.
+
+ Returns
+ -------
+ dict
+ The vertical layer's data.
+
+ """
+ return self.__data__
class VerticalLayersManager:
- """
- Creates empty vertical layers and assigns to the input paths to the fitting vertical layer using the add() function.
- The criterion for grouping paths to VerticalLayers is based on the proximity of the centroids of the paths.
- If the input paths don't fit in any vertical layer, then new vertical layer is created with that path.
+ """Creates and manages vertical layers, assigning paths to fitting layers.
+
+ The criterion for grouping paths to VerticalLayers is based on the
+ proximity of the centroids of the paths. If the input paths don't fit
+ in any vertical layer, then a new vertical layer is created.
Attributes
----------
- max_paths_per_layer: int
- Maximum number of layers that a vertical layer can consist of.
- If None, then the vertical layer has an unlimited number of layers.
+ layers : list[VerticalLayer]
+ List of vertical layers.
+ avg_layer_height : float
+ Average layer height for proximity calculations.
+ max_paths_per_layer : int | None
+ Maximum number of paths per vertical layer. If None, unlimited.
+
"""
- def __init__(self, avg_layer_height, max_paths_per_layer=None):
- self.layers = [VerticalLayer(id=0)] # vertical_layers_print_data that contain isocurves (compas_slicer.Path)
+ def __init__(self, avg_layer_height: float, max_paths_per_layer: int | None = None) -> None:
+ self.layers: list[VerticalLayer] = [VerticalLayer(id=0)]
self.avg_layer_height = avg_layer_height
self.max_paths_per_layer = max_paths_per_layer
- def add(self, path):
- selected_layer = None
+ def add(self, path: Path) -> None:
+ """Add a path to the appropriate vertical layer."""
+ selected_layer: VerticalLayer | None = None
- # Find an eligible layer for path (called selected_layer)
- if len(self.layers[0].paths) == 0: # first path goes to first layer
+ # Find an eligible layer for path
+ if len(self.layers[0].paths) == 0:
selected_layer = self.layers[0]
-
- else: # find the candidate segment for new isocurve
+ else:
centroid = np.mean(np.array(path.points), axis=0)
other_centroids = get_vertical_layers_centroids_list(self.layers)
candidate_layer = self.layers[utils.get_closest_pt_index(centroid, other_centroids)]
@@ -222,35 +274,43 @@ def add(self, path):
else:
selected_layer = candidate_layer
- if selected_layer: # also check that the actual distance between the layers is acceptable
+ if selected_layer:
+ # Check that actual distance between layers is acceptable
pts_selected_layer = np.array(candidate_layer.paths[-1].points)
pts = np.array(path.points)
- # find min distance between pts_selected_layer and pts
- min_dist = 1e10 # some large number
- max_dist = 0.0 # some small number
+
+ min_dist = float("inf")
+ max_dist = 0.0
for pt in pts:
pt_array = np.tile(pt, (pts_selected_layer.shape[0], 1))
dists = np.linalg.norm(pts_selected_layer - pt_array, axis=1)
min_dist = min(np.min(dists), min_dist)
max_dist = max(np.min(dists), max_dist)
+
if min_dist > 3.0 * self.avg_layer_height or max_dist > 8.0 * self.avg_layer_height:
selected_layer = None
- if not selected_layer: # then create new layer
+ if not selected_layer:
selected_layer = VerticalLayer(id=self.layers[-1].id + 1)
self.layers.append(selected_layer)
selected_layer.append_(path)
-def get_vertical_layers_centroids_list(vert_layers):
- """ Returns a list with points that are the centroids of the heads of all vertical_layers_print_data. The head
- of a vertical_layer is its last path. """
- head_centroids = []
- for vert_layer in vert_layers:
- head_centroids.append(vert_layer.head_centroid)
- return head_centroids
+def get_vertical_layers_centroids_list(vert_layers: list[VerticalLayer]) -> list[NDArray]:
+ """Returns a list of centroids of the heads of all vertical layers.
+ The head of a vertical_layer is its last path.
-if __name__ == "__main__":
- pass
+ Parameters
+ ----------
+ vert_layers : list[VerticalLayer]
+ List of vertical layers.
+
+ Returns
+ -------
+ list[NDArray]
+ List of head centroids.
+
+ """
+ return [vert_layer.head_centroid for vert_layer in vert_layers]
diff --git a/src/compas_slicer/geometry/path.py b/src/compas_slicer/geometry/path.py
index ee69f134..c665ee3a 100644
--- a/src/compas_slicer/geometry/path.py
+++ b/src/compas_slicer/geometry/path.py
@@ -1,60 +1,81 @@
-import logging
-import compas
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any
+
+from compas.data import Data
from compas.geometry import Point
-logger = logging.getLogger('logger')
+__all__ = ["Path"]
-__all__ = ['Path']
+@dataclass
+class Path(Data):
+ """A Path is a connected contour within a Layer.
-class Path(object):
- """
- A Path is a connected contour within a Layer. A Path consists of a list of
- compas.geometry.Points.
+ A Path consists of a list of compas.geometry.Points.
Attributes
----------
- points: list
- :class:`compas.geometry.Point`
- is_closed: bool
+ points : list[Point]
+ List of points defining the path.
+ is_closed : bool
True if the Path is a closed curve, False if the Path is open.
If the path is closed, the first and the last point are identical.
+
"""
- def __init__(self, points, is_closed):
- # check input
- assert isinstance(points[0], compas.geometry.Point)
+ points: list[Point] = field(default_factory=list)
+ is_closed: bool = False
- self.points = points # :class: compas.geometry.Point
- self.is_closed = is_closed # bool
+ def __post_init__(self) -> None:
+ super().__init__() # Initialize Data base class
+ if not self.points or not isinstance(self.points[0], Point):
+ raise TypeError("points must be a non-empty list of compas.geometry.Point")
- def __repr__(self):
+ def __repr__(self) -> str:
no_of_points = len(self.points) if self.points else 0
- return "" % no_of_points
+ return f""
+
+ @property
+ def __data__(self) -> dict[str, Any]:
+ return {
+ "points": [point.__data__ for point in self.points],
+ "is_closed": self.is_closed,
+ }
@classmethod
- def from_data(cls, data):
+ def __from_data__(cls, data: dict[str, Any]) -> Path:
+ points_data = data["points"]
+ # Handle both list format and legacy dict format
+ if isinstance(points_data, dict):
+ pts = [
+ Point.__from_data__(points_data[key])
+ for key in sorted(points_data.keys(), key=lambda x: int(x))
+ ]
+ else:
+ pts = [Point.__from_data__(p) for p in points_data]
+ return cls(points=pts, is_closed=data["is_closed"])
+
+ @classmethod
+ def from_data(cls, data: dict[str, Any]) -> Path:
"""Construct a path from its data representation.
Parameters
----------
- data: dict
+ data : dict
The data dictionary.
Returns
-------
- path
+ Path
The constructed path.
"""
- points_data = data['points']
- pts = [Point(points_data[key][0], points_data[key][1], points_data[key][2])
- for key in points_data]
- path = cls(points=pts, is_closed=data['is_closed'])
- return path
+ return cls.__from_data__(data)
- def to_data(self):
- """Returns a dictionary of structured data representing the data structure.
+ def to_data(self) -> dict[str, Any]:
+ """Returns a dictionary of structured data representing the path.
Returns
-------
@@ -62,10 +83,4 @@ def to_data(self):
The path's data.
"""
- data = {'points': {i: point.to_data() for i, point in enumerate(self.points)},
- 'is_closed': self.is_closed}
- return data
-
-
-if __name__ == '__main__':
- pass
+ return self.__data__
diff --git a/src/compas_slicer/geometry/print_point.py b/src/compas_slicer/geometry/print_point.py
index 913faeee..003477d7 100644
--- a/src/compas_slicer/geometry/print_point.py
+++ b/src/compas_slicer/geometry/print_point.py
@@ -1,151 +1,168 @@
-from compas.geometry import Point, Frame, Vector, cross_vectors, dot_vectors, norm_vector
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any
+
+from compas.data import Data
+from compas.geometry import Frame, Point, Vector, cross_vectors, dot_vectors, norm_vector
+
import compas_slicer.utilities.utils as utils
-import compas
-__all__ = ['PrintPoint']
+__all__ = ["PrintPoint"]
-class PrintPoint(object):
- """
- A PrintPoint consists of a compas.geometry.Point,
- and additional attributes related to the printing process.
+@dataclass
+class PrintPoint(Data):
+ """A PrintPoint consists of a compas.geometry.Point and printing attributes.
Attributes
----------
- pt: :class:`compas.geometry.Point`
- A compas Point consisting out of x, y, z coordinates.
- layer_height: float
+ pt : Point
+ A compas Point consisting of x, y, z coordinates.
+ layer_height : float
The distance between the point on this layer and the previous layer.
- For planar slicing this is the vertical distance, for curved slicing this is absolute distance.
- mesh_normal: :class:`compas.geometry.Vector`
+ mesh_normal : Vector
Normal of the mesh at this PrintPoint.
- up_vector: :class:`compas.geometry.Vector`
- Vector in up direction. For planar slicing this corresponds to the z axis, for curved slicing it varies.
- frame: :class:`compas.geometry.Frame`
+ up_vector : Vector
+ Vector in up direction.
+ frame : Frame
Frame with x-axis pointing up, y-axis pointing towards the mesh normal.
- extruder_toggle: bool
- True if extruder should be on (when printing), False if it should be off (when travelling).
- velocity: float
- Velocity to use for printing (print speed), in mm/s.
- wait_time: float
+ extruder_toggle : bool | None
+ True if extruder should be on, False if off.
+ velocity : float | None
+ Velocity for printing (print speed), in mm/s.
+ wait_time : float | None
Time in seconds to wait at this PrintPoint.
- """
-
- def __init__(self, pt, layer_height, mesh_normal):
- assert isinstance(pt, compas.geometry.Point)
- assert isinstance(mesh_normal, compas.geometry.Vector)
- assert layer_height
-
- # --- basic printpoint
- self.pt = pt
- self.layer_height = layer_height
+ blend_radius : float | None
+ Blend radius in mm.
+ closest_support_pt : Point | None
+ Closest support point.
+ distance_to_support : float | None
+ Distance to support.
+ is_feasible : bool
+ Whether this print point is feasible.
+ attributes : dict[str, Any]
+ Additional attributes transferred from the mesh.
- self.mesh_normal = mesh_normal # compas.geometry.Vector
- self.up_vector = Vector(0, 0, 1) # default value that can be updated
- self.frame = self.get_frame() # compas.geometry.Frame
-
- # --- attributes transferred from the mesh (vertex / face attributes)
- self.attributes = {} # dict. To fill this in,
-
- # --- print_organization related attributes
- self.extruder_toggle = None # bool
- self.velocity = None # float (mm/s)
- self.wait_time = None # float (sec)
- self.blend_radius = None # float (mm)
-
- # --- relation to support
- self.closest_support_pt = None #
- self.distance_to_support = None # float
-
- self.is_feasible = True # bool
+ """
- def __repr__(self):
+ pt: Point
+ layer_height: float
+ mesh_normal: Vector
+ up_vector: Vector = field(default_factory=lambda: Vector(0, 0, 1))
+ frame: Frame | None = field(default=None)
+ extruder_toggle: bool | None = None
+ velocity: float | None = None
+ wait_time: float | None = None
+ blend_radius: float | None = None
+ closest_support_pt: Point | None = None
+ distance_to_support: float | None = None
+ is_feasible: bool = True
+ attributes: dict[str, Any] = field(default_factory=dict)
+
+ def __post_init__(self) -> None:
+ super().__init__() # Initialize Data base class
+ if not isinstance(self.pt, Point):
+ raise TypeError("pt must be a compas.geometry.Point")
+ if not isinstance(self.mesh_normal, Vector):
+ raise TypeError("mesh_normal must be a compas.geometry.Vector")
+ if not self.layer_height:
+ raise ValueError("layer_height must be provided")
+ if self.frame is None:
+ self.frame = self._compute_frame()
+
+ def __repr__(self) -> str:
x, y, z = self.pt[0], self.pt[1], self.pt[2]
- return "" % (x, y, z)
+ return f""
- def get_frame(self):
- """ Returns a Frame with x-axis pointing up, y-axis pointing towards the mesh normal. """
- if abs(dot_vectors(self.up_vector, self.mesh_normal)) < 1.0: # if the normalized vectors are not co-linear
+ def _compute_frame(self) -> Frame:
+ """Compute frame with x-axis pointing up, y-axis towards mesh normal."""
+ if abs(dot_vectors(self.up_vector, self.mesh_normal)) < 1.0:
c = cross_vectors(self.up_vector, self.mesh_normal)
if norm_vector(c) == 0:
c = Vector(1, 0, 0)
- if norm_vector(self.mesh_normal) == 0:
- self.mesh_normal = Vector(0, 1, 0)
- return Frame(self.pt, c, self.mesh_normal)
- else: # in horizontal surfaces the vectors happen to be co-linear
+ mesh_normal = self.mesh_normal
+ if norm_vector(mesh_normal) == 0:
+ mesh_normal = Vector(0, 1, 0)
+ return Frame(self.pt, c, mesh_normal)
+ else:
return Frame(self.pt, Vector(1, 0, 0), Vector(0, 1, 0))
- #################################
- # --- To data , from data
- def to_data(self):
- """Returns a dictionary of structured data representing the data structure.
- TODO: The attributes of the printpoints are not saved in the dictionary because they can be non-Json
- serializable. Find a solution for this.
+ def get_frame(self) -> Frame:
+ """Returns a Frame with x-axis pointing up, y-axis towards mesh normal."""
+ return self._compute_frame()
+
+ @property
+ def __data__(self) -> dict[str, Any]:
+ return {
+ "pt": self.pt.__data__,
+ "layer_height": self.layer_height,
+ "mesh_normal": self.mesh_normal.__data__,
+ "up_vector": self.up_vector.__data__,
+ "frame": self.frame.__data__ if self.frame else None,
+ "extruder_toggle": self.extruder_toggle,
+ "velocity": self.velocity,
+ "wait_time": self.wait_time,
+ "blend_radius": self.blend_radius,
+ "closest_support_pt": self.closest_support_pt.__data__ if self.closest_support_pt else None,
+ "distance_to_support": self.distance_to_support,
+ "is_feasible": self.is_feasible,
+ "attributes": utils.get_jsonable_attributes(self.attributes),
+ }
+
+ @classmethod
+ def __from_data__(cls, data: dict[str, Any]) -> PrintPoint:
+ closest_support_pt = None
+ if data.get("closest_support_pt"):
+ closest_support_pt = Point.__from_data__(data["closest_support_pt"])
+
+ frame: Frame | None = None
+ if data.get("frame"):
+ frame = Frame.__from_data__(data["frame"]) # type: ignore[assignment]
+
+ return cls(
+ pt=Point.__from_data__(data["pt"]),
+ layer_height=data["layer_height"],
+ mesh_normal=Vector.__from_data__(data["mesh_normal"]),
+ up_vector=Vector.__from_data__(data["up_vector"]),
+ frame=frame,
+ extruder_toggle=data.get("extruder_toggle"),
+ velocity=data.get("velocity"),
+ wait_time=data.get("wait_time"),
+ blend_radius=data.get("blend_radius"),
+ closest_support_pt=closest_support_pt,
+ distance_to_support=data.get("distance_to_support"),
+ is_feasible=data.get("is_feasible", True),
+ attributes=data.get("attributes", {}),
+ )
+
+ def to_data(self) -> dict[str, Any]:
+ """Returns a dictionary of structured data representing the PrintPoint.
Returns
-------
dict
- The PrintPoints' data.
+ The PrintPoint's data.
"""
- point = {
- 'point': [self.pt[0], self.pt[1], self.pt[2]],
- 'layer_height': self.layer_height,
-
- 'mesh_normal': self.mesh_normal.to_data(),
- 'up_vector': self.up_vector.to_data(),
- 'frame': self.frame.to_data(),
-
- 'extruder_toggle': self.extruder_toggle,
- 'velocity': self.velocity,
- 'wait_time': self.wait_time,
- 'blend_radius': self.blend_radius,
-
- 'closest_support_pt': self.closest_support_pt.to_data() if self.closest_support_pt else None,
- 'distance_to_support': self.distance_to_support,
-
- 'is_feasible': self.is_feasible,
-
- 'attributes': utils.get_jsonable_attributes(self.attributes)
- }
- return point
+ return self.__data__
@classmethod
- def from_data(cls, data):
+ def from_data(cls, data: dict[str, Any]) -> PrintPoint:
"""Construct a PrintPoint from its data representation.
Parameters
----------
- data: dict
+ data : dict
The data dictionary.
Returns
-------
- layer
+ PrintPoint
The constructed PrintPoint.
"""
-
- pp = cls(pt=Point.from_data(data['point']),
- layer_height=data['layer_height'],
- mesh_normal=Vector.from_data(data['mesh_normal']))
-
- pp.up_vector = Vector.from_data(data['up_vector'])
- pp.frame = Frame.from_data(data['frame'])
-
- pp.extruder_toggle = data['extruder_toggle']
- pp.velocity = data['velocity']
- pp.wait_time = data['wait_time']
- pp.blend_radius = data['blend_radius']
-
- pp.closest_support_pt = Point.from_data(data['closest_support_pt'])
- pp.distance_to_support = data['distance_to_support']
-
- pp.is_feasible = data['is_feasible']
-
- pp.attributes = data['attributes']
- return pp
-
-
-if __name__ == "__main__":
- pass
+ # Handle legacy format with "point" key instead of "pt"
+ if "point" in data and "pt" not in data:
+ data["pt"] = data.pop("point")
+ return cls.__from_data__(data)
diff --git a/src/compas_slicer/geometry/printpoints_collection.py b/src/compas_slicer/geometry/printpoints_collection.py
new file mode 100644
index 00000000..43e11813
--- /dev/null
+++ b/src/compas_slicer/geometry/printpoints_collection.py
@@ -0,0 +1,256 @@
+from __future__ import annotations
+
+from collections.abc import Iterator
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, Any
+
+from compas.data import Data
+
+if TYPE_CHECKING:
+ from compas_slicer.geometry import PrintPoint
+
+__all__ = ["PrintPath", "PrintLayer", "PrintPointsCollection"]
+
+
+@dataclass
+class PrintPath(Data):
+ """A collection of PrintPoints forming a continuous print path.
+
+ Attributes
+ ----------
+ printpoints : list[PrintPoint]
+ List of print points in this path.
+
+ """
+
+ printpoints: list[PrintPoint] = field(default_factory=list)
+
+ def __post_init__(self) -> None:
+ super().__init__() # Initialize Data base class
+
+ def __len__(self) -> int:
+ return len(self.printpoints)
+
+ def __iter__(self) -> Iterator[PrintPoint]:
+ return iter(self.printpoints)
+
+ def __getitem__(self, index: int) -> PrintPoint:
+ return self.printpoints[index]
+
+ def __repr__(self) -> str:
+ return f""
+
+ @property
+ def __data__(self) -> dict[str, Any]:
+ return {
+ "printpoints": [pp.__data__ for pp in self.printpoints],
+ }
+
+ @classmethod
+ def __from_data__(cls, data: dict[str, Any]) -> PrintPath:
+ from compas_slicer.geometry import PrintPoint
+
+ return cls(
+ printpoints=[PrintPoint.__from_data__(pp) for pp in data["printpoints"]],
+ )
+
+
+@dataclass
+class PrintLayer(Data):
+ """A layer containing multiple print paths.
+
+ Attributes
+ ----------
+ paths : list[PrintPath]
+ List of print paths in this layer.
+
+ """
+
+ paths: list[PrintPath] = field(default_factory=list)
+
+ def __post_init__(self) -> None:
+ super().__init__() # Initialize Data base class
+
+ def __len__(self) -> int:
+ return len(self.paths)
+
+ def __iter__(self) -> Iterator[PrintPath]:
+ return iter(self.paths)
+
+ def __getitem__(self, index: int) -> PrintPath:
+ return self.paths[index]
+
+ def __repr__(self) -> str:
+ total_points = sum(len(path) for path in self.paths)
+ return f""
+
+ @property
+ def __data__(self) -> dict[str, Any]:
+ return {
+ "paths": [path.__data__ for path in self.paths],
+ }
+
+ @classmethod
+ def __from_data__(cls, data: dict[str, Any]) -> PrintLayer:
+ return cls(
+ paths=[PrintPath.__from_data__(p) for p in data["paths"]],
+ )
+
+
+@dataclass
+class PrintPointsCollection(Data):
+ """A collection of print layers, paths, and points.
+
+ Replaces the old PrintPointsDict structure (dict[str, dict[str, list[PrintPoint]]]).
+
+ Attributes
+ ----------
+ layers : list[PrintLayer]
+ List of print layers.
+
+ Example
+ -------
+ >>> collection[0].paths[1].printpoints[2] # Access by index
+ >>> for layer in collection:
+ ... for path in layer:
+ ... for pp in path:
+ ... print(pp.pt)
+
+ """
+
+ layers: list[PrintLayer] = field(default_factory=list)
+
+ def __post_init__(self) -> None:
+ super().__init__() # Initialize Data base class
+
+ def __len__(self) -> int:
+ return len(self.layers)
+
+ def __iter__(self) -> Iterator[PrintLayer]:
+ return iter(self.layers)
+
+ def __getitem__(self, index: int) -> PrintLayer:
+ return self.layers[index]
+
+ def __repr__(self) -> str:
+ total_paths = sum(len(layer) for layer in self.layers)
+ total_points = sum(len(path) for layer in self.layers for path in layer)
+ return f""
+
+ @property
+ def number_of_layers(self) -> int:
+ """Number of layers."""
+ return len(self.layers)
+
+ @property
+ def number_of_paths(self) -> int:
+ """Total number of paths across all layers."""
+ return sum(len(layer) for layer in self.layers)
+
+ @property
+ def number_of_printpoints(self) -> int:
+ """Total number of print points."""
+ return sum(len(path) for layer in self.layers for path in layer)
+
+ def iter_printpoints(self) -> Iterator[PrintPoint]:
+ """Iterate over all printpoints in the collection.
+
+ Yields
+ ------
+ PrintPoint
+ Each printpoint in the collection.
+
+ """
+ for layer in self.layers:
+ for path in layer:
+ yield from path
+
+ def iter_with_indices(self) -> Iterator[tuple[PrintPoint, int, int, int]]:
+ """Iterate over printpoints with their indices.
+
+ Yields
+ ------
+ tuple[PrintPoint, int, int, int]
+ Tuple of (printpoint, layer_index, path_index, point_index).
+
+ """
+ for i, layer in enumerate(self.layers):
+ for j, path in enumerate(layer):
+ for k, pp in enumerate(path):
+ yield pp, i, j, k
+
+ def get_printpoint(self, layer_idx: int, path_idx: int, pp_idx: int) -> PrintPoint:
+ """Get a specific printpoint by indices.
+
+ Parameters
+ ----------
+ layer_idx : int
+ Layer index.
+ path_idx : int
+ Path index within the layer.
+ pp_idx : int
+ Printpoint index within the path.
+
+ Returns
+ -------
+ PrintPoint
+ The requested printpoint.
+
+ """
+ return self.layers[layer_idx].paths[path_idx].printpoints[pp_idx]
+
+ def number_of_paths_on_layer(self, layer_idx: int) -> int:
+ """Get the number of paths in a specific layer.
+
+ Parameters
+ ----------
+ layer_idx : int
+ Layer index.
+
+ Returns
+ -------
+ int
+ Number of paths in the layer.
+
+ """
+ return len(self.layers[layer_idx])
+
+ @property
+ def __data__(self) -> dict[str, Any]:
+ return {
+ "layers": [layer.__data__ for layer in self.layers],
+ }
+
+ @classmethod
+ def __from_data__(cls, data: dict[str, Any]) -> PrintPointsCollection:
+ return cls(
+ layers=[PrintLayer.__from_data__(layer) for layer in data["layers"]],
+ )
+
+ def to_data(self) -> dict[str, Any]:
+ """Returns a dictionary of structured data.
+
+ Returns
+ -------
+ dict
+ The collection's data.
+
+ """
+ return self.__data__
+
+ @classmethod
+ def from_data(cls, data: dict[str, Any]) -> PrintPointsCollection:
+ """Construct from data representation.
+
+ Parameters
+ ----------
+ data : dict
+ The data dictionary.
+
+ Returns
+ -------
+ PrintPointsCollection
+ The constructed collection.
+
+ """
+ return cls.__from_data__(data)
diff --git a/src/compas_slicer/parameters/__init__.py b/src/compas_slicer/parameters/__init__.py
deleted file mode 100644
index 73644095..00000000
--- a/src/compas_slicer/parameters/__init__.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""
-********************************************************************************
-parameters
-********************************************************************************
-
-.. autosummary::
- :toctree: generated/
- :nosignatures:
-
- get_param
- defaults_curved_slicing
- defaults_gcode
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-# Polyline simplification
-from .get_param import * # noqa: F401 E402 F403
-from .defaults_interpolation_slicing import * # noqa: F401 E402 F403
-from .defaults_gcode import * # noqa: F401 E402 F403
-from .defaults_layers import * # noqa: F401 E402 F403
-from .defaults_print_organization import * # noqa: F401 E402 F403
-
-
-__all__ = [name for name in dir() if not name.startswith('_')]
diff --git a/src/compas_slicer/parameters/defaults_gcode.py b/src/compas_slicer/parameters/defaults_gcode.py
deleted file mode 100644
index 86107850..00000000
--- a/src/compas_slicer/parameters/defaults_gcode.py
+++ /dev/null
@@ -1,49 +0,0 @@
-__all__ = ['gcode_default_param']
-
-
-def gcode_default_param(key):
- """ Returns the default parameter with the specified key. """
- if key in default_parameters:
- return default_parameters[key]
- else:
- raise ValueError('The parameter with key : ' + str(key) +
- ' does not exist in the defaults of gcode parameters. ')
-
-
-default_parameters = \
- {
- # Physical parameters
- 'nozzle_diameter': 0.4, # in mm
- 'filament diameter': 1.75, # in mm, for calculating E
- 'delta': False, # boolean for delta printers
- 'print_volume_x': 300, # in mm
- 'print_volume_y': 300, # in mm
- 'print_volume_z': 600, # in mm
-
- # Dimensional parameters
- 'layer_width': 0.6, # in mm
-
- # Temperature parameters
- 'extruder_temperature': 200, # in °C
- 'bed_temperature': 60, # in °C
- 'fan_speed': 255, # 0-255
- 'fan_start_z': 0, # in mm; height at which the fan starts
-
- # Movement parameters
- 'flowrate': 1, # as fraction; this is a global flow multiplier
- 'feedrate': 3600, # in mm/s
- 'feedrate_travel': 4800, # in mm/s
- 'feedrate_low': 1800, # in mm/s
- 'feedrate_retraction': 2400, # in mm/s
- 'acceleration': 0, # in mm/s²; if set to 0, the default driver value will be used
- 'jerk': 0, # in mm/s; if set to 0, the default driver value will be used
-
- # Retraction
- 'z_hop': 0.5, # in mm
- 'retraction_length': 1, # in mm
- 'retraction_min_travel': 6, # in mm; below this value, retraction does not happen
-
- # Adhesion parameters
- 'flow_over': 1, # as fraction, usually > 1; overextrusion value for z < min_over_z, for better adhesion
- 'min_over_z': 0, # in mm; for z < min_over_z, the overextrusion factor applies
- }
diff --git a/src/compas_slicer/parameters/defaults_interpolation_slicing.py b/src/compas_slicer/parameters/defaults_interpolation_slicing.py
deleted file mode 100644
index c26a7681..00000000
--- a/src/compas_slicer/parameters/defaults_interpolation_slicing.py
+++ /dev/null
@@ -1,27 +0,0 @@
-__all__ = ['interpolation_slicing_default_param']
-
-
-def interpolation_slicing_default_param(key):
- """ Returns the default parameters with the specified key. """
- if key in default_parameters:
- return default_parameters[key]
- else:
- raise ValueError('The parameter with key : ' + str(key) +
- ' does not exist in the defaults of curved_slicing parameters. ')
-
-
-default_parameters = \
- {
- # geodesics method
- 'target_LOW_geodesics_method': 'exact_igl',
- 'target_HIGH_geodesics_method': 'exact_igl',
-
- # union method for HIGH target
- # if all are false, then default 'min' method is used
- 'target_HIGH_smooth_union': [False, [10.0]], # blend radius
- 'target_HIGH_chamfer_union': [False, [100.0]], # size
- 'target_HIGH_stairs_union': [False, [80.0, 3]], # size, n-1 number of peaks
-
- 'uneven_upper_targets_offset': 0,
-
- }
diff --git a/src/compas_slicer/parameters/defaults_layers.py b/src/compas_slicer/parameters/defaults_layers.py
deleted file mode 100644
index c09c6120..00000000
--- a/src/compas_slicer/parameters/defaults_layers.py
+++ /dev/null
@@ -1,17 +0,0 @@
-__all__ = ['layers_default_param']
-
-
-def layers_default_param(key):
- """ Returns the default parameters with the specified key. """
- if key in default_parameters:
- return default_parameters[key]
- else:
- raise ValueError('The parameter with key : ' + str(key) +
- ' does not exist in the defaults of curved_slicing parameters. ')
-
-
-default_parameters = \
- {
- 'avg_layer_height': 5.0,
- 'vertical_layers_max_centroid_dist': 25.0
- }
diff --git a/src/compas_slicer/parameters/defaults_print_organization.py b/src/compas_slicer/parameters/defaults_print_organization.py
deleted file mode 100644
index 6fb8afb5..00000000
--- a/src/compas_slicer/parameters/defaults_print_organization.py
+++ /dev/null
@@ -1,15 +0,0 @@
-__all__ = ['print_organization_default_param']
-
-
-def print_organization_default_param(key):
- """ Returns the default parameters with the specified key. """
- if key in default_parameters:
- return default_parameters[key]
- else:
- raise ValueError('The parameter with key : ' + str(key) +
- ' does not exist in the defaults of curved_slicing parameters. ')
-
-
-default_parameters = \
- {
- }
diff --git a/src/compas_slicer/parameters/get_param.py b/src/compas_slicer/parameters/get_param.py
deleted file mode 100644
index d7b0c39b..00000000
--- a/src/compas_slicer/parameters/get_param.py
+++ /dev/null
@@ -1,34 +0,0 @@
-import compas_slicer
-
-__all__ = ['get_param']
-
-
-def get_param(params, key, defaults_type):
- """
- Function useful for accessing the params dictionary of curved slicing.
- If the key is in the params dict, it returns its value,
- otherwise it returns the default_value.
-
- Parameters
- ----------
- params: dict
- key: str
- defaults_type: str specifying which defaults the dictionary of parameters draws for. 'curved_slicing' / 'gcode'
-
- Returns
- ----------
- params[key] if key in params, otherwise default_value
- """
- if key in params:
- return params[key]
- else:
- if defaults_type == 'interpolation_slicing':
- return compas_slicer.parameters.interpolation_slicing_default_param(key)
- elif defaults_type == 'gcode':
- return compas_slicer.parameters.gcode_default_param(key)
- elif defaults_type == 'layers':
- return compas_slicer.parameters.layers_default_param(key)
- elif defaults_type == 'print_organization':
- return compas_slicer.parameters.gcode_default_param(key)
- else:
- raise ValueError('The specified parameter type : ' + str(defaults_type) + ' does not exist.')
diff --git a/src/compas_slicer/post_processing/__init__.py b/src/compas_slicer/post_processing/__init__.py
index 8426bcec..ff6fc16a 100644
--- a/src/compas_slicer/post_processing/__init__.py
+++ b/src/compas_slicer/post_processing/__init__.py
@@ -1,65 +1,24 @@
-"""
-********************************************************************************
-post_processing
-********************************************************************************
-
-Polyline simplification
-=======================
-
-.. autosummary::
- :toctree: generated/
- :nosignatures:
-
- simplify_paths_rdp
-
-Sorting
-=======
-
-.. autosummary::
- :toctree: generated/
- :nosignatures:
-
- seams_align
- seams_smooth
- sort_into_vertical_layers
- reorder_vertical_layers
- sort_paths_minimum_travel_time
- zig_zag_open_paths
-
-Additional
-==========
-
-.. autosummary::
- :toctree: generated/
- :nosignatures:
-
- generate_brim
- generate_raft
- spiralize_contours
-
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
+"""Post-processing utilities for modifying sliced paths."""
# Polyline simplification
-from .simplify_paths_rdp import * # noqa: F401 E402 F403
+# Additional
+from .generate_brim import * # noqa: F401 E402 F403
+from .generate_raft import * # noqa: F401 E402 F403
+
+# Infill
+from .infill import * # noqa: F401 E402 F403
+from .reorder_vertical_layers import * # noqa: F401 E402 F403
# Sorting
from .seams_align import * # noqa: F401 E402 F403
from .seams_smooth import * # noqa: F401 E402 F403
+from .simplify_paths_rdp import * # noqa: F401 F403
from .sort_into_vertical_layers import * # noqa: F401 E402 F403
-from .reorder_vertical_layers import * # noqa: F401 E402 F403
from .sort_paths_minimum_travel_time import * # noqa: F401 E402 F403
+from .spiralize_contours import * # noqa: F401 E402 F403
# Orienting
from .unify_paths_orientation import * # noqa: F401 E402 F403
-
-# Additional
-from .generate_brim import * # noqa: F401 E402 F403
-from .generate_raft import * # noqa: F401 E402 F403
-from .spiralize_contours import * # noqa: F401 E402 F403
from .zig_zag_open_paths import * # noqa: F401 E402 F403
__all__ = [name for name in dir() if not name.startswith('_')]
diff --git a/src/compas_slicer/post_processing/generate_brim.py b/src/compas_slicer/post_processing/generate_brim.py
index afca8325..b3bed697 100644
--- a/src/compas_slicer/post_processing/generate_brim.py
+++ b/src/compas_slicer/post_processing/generate_brim.py
@@ -1,18 +1,197 @@
-import pyclipper
-from pyclipper import scale_from_clipper, scale_to_clipper
-from compas_slicer.geometry import Layer
-from compas_slicer.geometry import Path
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
from compas.geometry import Point
+from loguru import logger
+
import compas_slicer
-import logging
-from compas_slicer.post_processing import seams_align
+from compas_slicer.geometry import Layer, Path
+from compas_slicer.post_processing.seams_align import seams_align
+
+# Try CGAL first, fall back to pyclipper
+_USE_CGAL = False
+try:
+ from compas_cgal.straight_skeleton_2 import offset_polygon as _cgal_offset
+ from compas_cgal.straight_skeleton_2 import offset_polygon_with_holes as _cgal_offset_with_holes
+ _USE_CGAL = True
+except ImportError:
+ _cgal_offset = None
+ _cgal_offset_with_holes = None
+
+if TYPE_CHECKING:
+ from compas_slicer.slicers import BaseSlicer
+
+
+__all__ = ['generate_brim', 'offset_polygon', 'offset_polygon_with_holes']
+
+
+def _offset_polygon_cgal(points: list[Point], offset: float, z: float) -> list[Point]:
+ """Offset a polygon using CGAL straight skeleton.
+
+ Parameters
+ ----------
+ points : list[Point]
+ 2D/3D points of the polygon (z ignored for offset, restored after).
+ offset : float
+ Offset distance (positive = outward, negative = inward).
+ z : float
+ Z coordinate to assign to result points.
+
+ Returns
+ -------
+ list[Point]
+ Offset polygon points with z coordinate.
+ """
+ # CGAL expects points with z=0 and normal pointing up
+ pts_2d = [[p[0], p[1], 0] for p in points]
+
+ # CGAL offset: negative = inward, positive = outward (opposite of pyclipper)
+ # For brim we want outward offset
+ result_polys = _cgal_offset(pts_2d, -offset)
+
+ if not result_polys:
+ return []
+
+ # Take first result polygon, add z coordinate
+ result_pts = [Point(p[0], p[1], z) for p in result_polys[0].points]
+
+ # Close the polygon
+ if result_pts and result_pts[0] != result_pts[-1]:
+ result_pts.append(result_pts[0])
+
+ return result_pts
+
+
+def _offset_polygon_pyclipper(points: list[Point], offset: float, z: float) -> list[Point]:
+ """Offset a polygon using pyclipper.
+
+ Parameters
+ ----------
+ points : list[Point]
+ 2D/3D points of the polygon.
+ offset : float
+ Offset distance (positive = outward).
+ z : float
+ Z coordinate to assign to result points.
+
+ Returns
+ -------
+ list[Point]
+ Offset polygon points with z coordinate.
+ """
+ import pyclipper
+ from pyclipper import scale_from_clipper, scale_to_clipper
+
+ SCALING_FACTOR = 2 ** 32
+
+ xy_coords = [[p[0], p[1]] for p in points]
+
+ pco = pyclipper.PyclipperOffset()
+ pco.AddPath(
+ scale_to_clipper(xy_coords, SCALING_FACTOR),
+ pyclipper.JT_MITER,
+ pyclipper.ET_CLOSEDPOLYGON
+ )
+
+ result = scale_from_clipper(pco.Execute(offset * SCALING_FACTOR), SCALING_FACTOR)
+
+ if not result:
+ return []
-logger = logging.getLogger('logger')
+ result_pts = [Point(xy[0], xy[1], z) for xy in result[0]]
-__all__ = ['generate_brim']
+ # Close the polygon
+ if result_pts:
+ result_pts.append(result_pts[0])
+ return result_pts
-def generate_brim(slicer, layer_width, number_of_brim_offsets):
+
+def offset_polygon(points: list[Point], offset: float, z: float) -> list[Point]:
+ """Offset a polygon, using CGAL if available.
+
+ Parameters
+ ----------
+ points : list[Point]
+ Points of the polygon.
+ offset : float
+ Offset distance (positive = outward).
+ z : float
+ Z coordinate for result points.
+
+ Returns
+ -------
+ list[Point]
+ Offset polygon points.
+ """
+ if _USE_CGAL:
+ return _offset_polygon_cgal(points, offset, z)
+ else:
+ return _offset_polygon_pyclipper(points, offset, z)
+
+
+def offset_polygon_with_holes(
+ outer: list[Point],
+ holes: list[list[Point]],
+ offset: float,
+ z: float
+) -> list[tuple[list[Point], list[list[Point]]]]:
+ """Offset a polygon with holes using CGAL straight skeleton.
+
+ Parameters
+ ----------
+ outer : list[Point]
+ Points of the outer boundary (CCW orientation).
+ holes : list[list[Point]]
+ List of hole polygons (CW orientation).
+ offset : float
+ Offset distance (positive = outward, negative = inward).
+ z : float
+ Z coordinate for result points.
+
+ Returns
+ -------
+ list[tuple[list[Point], list[list[Point]]]]
+ List of (outer_boundary, holes) tuples for resulting polygons.
+
+ Raises
+ ------
+ ImportError
+ If CGAL is not available.
+ """
+ if not _USE_CGAL:
+ raise ImportError("offset_polygon_with_holes requires compas_cgal")
+
+ from compas.geometry import Polygon
+
+ # CGAL expects Polygon objects with z=0, normal up for outer, down for holes
+ outer_poly = Polygon([[p[0], p[1], 0] for p in outer])
+ hole_polys = [Polygon([[p[0], p[1], 0] for p in hole]) for hole in holes]
+
+ # CGAL: negative = outward, positive = inward (opposite of our convention)
+ result = _cgal_offset_with_holes(outer_poly, hole_polys, -offset)
+
+ # Convert back to Points with z coordinate
+ output = []
+ for poly, poly_holes in result:
+ outer_pts = [Point(p[0], p[1], z) for p in poly.points]
+ if outer_pts and outer_pts[0] != outer_pts[-1]:
+ outer_pts.append(outer_pts[0])
+
+ hole_pts_list = []
+ for hole in poly_holes:
+ hole_pts = [Point(p[0], p[1], z) for p in hole.points]
+ if hole_pts and hole_pts[0] != hole_pts[-1]:
+ hole_pts.append(hole_pts[0])
+ hole_pts_list.append(hole_pts)
+
+ output.append((outer_pts, hole_pts_list))
+
+ return output
+
+
+def generate_brim(slicer: BaseSlicer, layer_width: float, number_of_brim_offsets: int) -> None:
"""Creates a brim around the bottom contours of the print.
Parameters
@@ -25,14 +204,8 @@ def generate_brim(slicer, layer_width, number_of_brim_offsets):
number_of_brim_offsets: int
Number of brim paths to add.
"""
-
- logger.info(
- "Generating brim with layer width: %.2f mm, consisting of %d layers" % (layer_width, number_of_brim_offsets))
-
- # TODO: Add post_processing for merging several contours when the brims overlap.
- # uses the default scaling factor of 2**32
- # see: https://github.com/fonttools/pyclipper/wiki/Deprecating-SCALING_FACTOR
- SCALING_FACTOR = 2 ** 32
+ backend = "CGAL" if _USE_CGAL else "pyclipper"
+ logger.info(f"Generating brim with layer width: {layer_width:.2f} mm, {number_of_brim_offsets} offsets ({backend})")
if slicer.layers[0].is_raft:
raise NameError("Raft found: cannot apply brim when raft is used, choose one")
@@ -50,8 +223,8 @@ def generate_brim(slicer, layer_width, number_of_brim_offsets):
paths_to_offset = slicer.layers[0].paths
has_vertical_layers = False
- assert len(paths_to_offset) > 0, 'Attention the brim generator did not find any path on the base. Please check the \
- paths of your slicer. '
+ if len(paths_to_offset) == 0:
+ raise ValueError('Brim generator did not find any path on the base. Please check the paths of your slicer.')
# (2) --- create new empty brim_layer
brim_layer = Layer(paths=[])
@@ -60,39 +233,15 @@ def generate_brim(slicer, layer_width, number_of_brim_offsets):
# (3) --- create offsets and add them to the paths of the brim_layer
for path in paths_to_offset:
- # evaluate per path
- xy_coords_for_clipper = []
- for point in path.points:
- # gets the X and Y coordinate since Clipper only does 2D offset operations
- xy_coords = [point[0], point[1]]
- xy_coords_for_clipper.append(xy_coords)
-
- # initialise Clipper
- pco = pyclipper.PyclipperOffset()
- pco.AddPath(scale_to_clipper(xy_coords_for_clipper, SCALING_FACTOR), pyclipper.JT_MITER,
- pyclipper.ET_CLOSEDPOLYGON)
+ z = path.points[0][2]
for i in range(number_of_brim_offsets):
- # iterate through a list of brim paths
- clipper_points_per_brim_path = []
-
- # gets result
- result = scale_from_clipper(pco.Execute((i) * layer_width * SCALING_FACTOR), SCALING_FACTOR)
-
- for xy in result[0]:
- # gets the X and Y coordinate from the Clipper result
- x = xy[0]
- y = xy[1]
- z = path.points[0][2]
-
- clipper_points_per_brim_path.append(Point(x, y, z))
-
- # adds the first point as the last point to form a closed contour
- clipper_points_per_brim_path = clipper_points_per_brim_path + [clipper_points_per_brim_path[0]]
+ offset_distance = i * layer_width
+ offset_pts = offset_polygon(path.points, offset_distance, z)
- # create a path per brim contour
- new_path = Path(points=clipper_points_per_brim_path, is_closed=True)
- brim_layer.paths.append(new_path)
+ if offset_pts:
+ new_path = Path(points=offset_pts, is_closed=True)
+ brim_layer.paths.append(new_path)
brim_layer.paths.reverse() # go from outside towards the object
brim_layer.calculate_z_bounds()
diff --git a/src/compas_slicer/post_processing/generate_raft.py b/src/compas_slicer/post_processing/generate_raft.py
index 8662e5da..82f5ecd1 100644
--- a/src/compas_slicer/post_processing/generate_raft.py
+++ b/src/compas_slicer/post_processing/generate_raft.py
@@ -1,19 +1,10 @@
-import logging
import math
+from compas.geometry import Line, Point, Vector, bounding_box_xy, intersection_line_line, offset_line, offset_polygon
+from loguru import logger
+
import compas_slicer
-from compas_slicer.geometry import Layer
-from compas_slicer.geometry import Path
-
-from compas.geometry import Point
-from compas.geometry import Line
-from compas.geometry import Vector
-from compas.geometry import bounding_box_xy
-from compas.geometry import offset_polygon
-from compas.geometry import intersection_line_line
-from compas.geometry import offset_line
-
-logger = logging.getLogger('logger')
+from compas_slicer.geometry import Layer, Path
__all__ = ['generate_raft']
diff --git a/src/compas_slicer/post_processing/infill/__init__.py b/src/compas_slicer/post_processing/infill/__init__.py
new file mode 100644
index 00000000..9160ccab
--- /dev/null
+++ b/src/compas_slicer/post_processing/infill/__init__.py
@@ -0,0 +1,7 @@
+"""Infill generation for sliced paths."""
+
+from .medial_axis_infill import generate_medial_axis_infill
+
+__all__ = [
+ "generate_medial_axis_infill",
+]
diff --git a/src/compas_slicer/post_processing/infill/medial_axis_infill.py b/src/compas_slicer/post_processing/infill/medial_axis_infill.py
new file mode 100644
index 00000000..f42d72de
--- /dev/null
+++ b/src/compas_slicer/post_processing/infill/medial_axis_infill.py
@@ -0,0 +1,148 @@
+"""Medial axis based infill generation using CGAL straight skeleton."""
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from compas.geometry import Point, distance_point_point
+from loguru import logger
+
+from compas_slicer.geometry import Path
+
+if TYPE_CHECKING:
+ from compas.datastructures import Graph
+
+ from compas_slicer.slicers import BaseSlicer
+
+
+__all__ = ["generate_medial_axis_infill"]
+
+
+def generate_medial_axis_infill(
+ slicer: BaseSlicer,
+ min_length: float = 5.0,
+ include_bisectors: bool = True,
+) -> None:
+ """Generate medial axis infill paths for all layers.
+
+ Uses CGAL's straight skeleton to compute the medial axis of each
+ closed contour, then converts skeleton edges to infill paths.
+
+ Parameters
+ ----------
+ slicer : BaseSlicer
+ Slicer with layers containing boundary paths.
+ min_length : float
+ Minimum skeleton edge length to include. Shorter edges are skipped.
+ include_bisectors : bool
+ If True, include bisector edges (skeleton to boundary connections).
+ If False, only include inner_bisector edges (skeleton internal edges).
+
+ """
+ from compas_cgal.straight_skeleton_2 import interior_straight_skeleton
+
+ logger.info("Generating medial axis infill")
+
+ for layer in slicer.layers:
+ infill_paths: list[Path] = []
+
+ for path in layer.paths:
+ if not path.is_closed:
+ continue
+
+ # Convert path to 2D polygon
+ polygon_2d = _path_to_polygon_2d(path)
+ if len(polygon_2d) < 3:
+ continue
+
+ z_height = path.points[0][2]
+
+ # Compute straight skeleton
+ try:
+ graph = interior_straight_skeleton(polygon_2d)
+ except Exception as e:
+ logger.warning(f"Skeleton failed for path: {e}")
+ continue
+
+ # Extract skeleton edges as paths
+ skeleton_paths = _skeleton_to_paths(
+ graph, z_height, min_length, include_bisectors
+ )
+ infill_paths.extend(skeleton_paths)
+
+ # Add infill paths to layer
+ layer.paths.extend(infill_paths)
+ logger.info(f"Added {len(infill_paths)} infill paths to layer")
+
+
+def _path_to_polygon_2d(path: Path) -> list[list[float]]:
+ """Convert 3D Path to 2D polygon vertices.
+
+ Parameters
+ ----------
+ path : Path
+ Path with 3D points.
+
+ Returns
+ -------
+ list[list[float]]
+ 2D polygon vertices (x, y, 0).
+
+ """
+ return [[pt[0], pt[1], 0.0] for pt in path.points]
+
+
+def _skeleton_to_paths(
+ graph: Graph,
+ z_height: float,
+ min_length: float,
+ include_bisectors: bool,
+) -> list[Path]:
+ """Convert skeleton graph edges to Path objects.
+
+ Parameters
+ ----------
+ graph : Graph
+ Skeleton graph from CGAL.
+ z_height : float
+ Z height to assign to path points.
+ min_length : float
+ Minimum edge length to include.
+ include_bisectors : bool
+ If True, include bisector edges. If False, only inner_bisector edges.
+
+ Returns
+ -------
+ list[Path]
+ List of infill paths.
+
+ """
+ paths = []
+
+ for edge in graph.edges():
+ edge_attrs = graph.edge_attributes(edge)
+
+ # Skip boundary edges (polygon edges)
+ if edge_attrs.get("boundary"):
+ continue
+
+ # Check if this is a skeleton edge we want
+ is_inner = edge_attrs.get("inner_bisector", False)
+ is_bisector = edge_attrs.get("bisector", False)
+
+ if not is_inner and not (include_bisectors and is_bisector):
+ continue
+
+ u, v = edge
+ node_u = graph.node_attributes(u)
+ node_v = graph.node_attributes(v)
+
+ pt_u = Point(float(node_u["x"]), float(node_u["y"]), z_height)
+ pt_v = Point(float(node_v["x"]), float(node_v["y"]), z_height)
+
+ # Skip short edges
+ if distance_point_point(pt_u, pt_v) < min_length:
+ continue
+
+ paths.append(Path(points=[pt_u, pt_v], is_closed=False))
+
+ return paths
diff --git a/src/compas_slicer/post_processing/reorder_vertical_layers.py b/src/compas_slicer/post_processing/reorder_vertical_layers.py
index bf934baf..dee90811 100644
--- a/src/compas_slicer/post_processing/reorder_vertical_layers.py
+++ b/src/compas_slicer/post_processing/reorder_vertical_layers.py
@@ -1,14 +1,21 @@
-import logging
+from __future__ import annotations
+
import itertools
+from typing import TYPE_CHECKING, Literal
from compas.geometry import Point, distance_point_point
+from loguru import logger
+
+if TYPE_CHECKING:
+ from compas_slicer.slicers import BaseSlicer
-logger = logging.getLogger('logger')
__all__ = ['reorder_vertical_layers']
+AlignWith = Literal["x_axis", "y_axis"]
+
-def reorder_vertical_layers(slicer, align_with):
+def reorder_vertical_layers(slicer: BaseSlicer, align_with: AlignWith | Point) -> None:
"""Re-orders the vertical layers in a specific way
Parameters
@@ -30,12 +37,14 @@ def reorder_vertical_layers(slicer, align_with):
else:
raise NameError("Unknown align_with : " + str(align_with))
- logger.info("Re-ordering vertical layers to start with the vertical layer closest to: %s" % align_with)
+ logger.info(f"Re-ordering vertical layers to start with the vertical layer closest to: {align_with}")
for layer in slicer.layers:
- assert layer.min_max_z_height[0] is not None and layer.min_max_z_height[1] is not None, \
- "To use the 'reorder_vertical_layers function you need first to calculate the layers' z_bounds. To do " \
- "that use the function 'Layer.calculate_z_bounds()'"
+ if layer.min_max_z_height[0] is None or layer.min_max_z_height[1] is None:
+ raise ValueError(
+ "To use reorder_vertical_layers you need first to calculate the layers' z_bounds. "
+ "Use the function Layer.calculate_z_bounds()"
+ )
# group vertical layers based on the min_max_z_height
grouped_iter = itertools.groupby(slicer.layers, lambda x: x.min_max_z_height)
diff --git a/src/compas_slicer/post_processing/seams_align.py b/src/compas_slicer/post_processing/seams_align.py
index d5063d0a..aaba1e93 100644
--- a/src/compas_slicer/post_processing/seams_align.py
+++ b/src/compas_slicer/post_processing/seams_align.py
@@ -1,14 +1,21 @@
-import logging
+from __future__ import annotations
+from typing import TYPE_CHECKING, Literal
+
+import numpy as np
from compas.geometry import Point
-from compas.geometry import distance_point_point
+from loguru import logger
+
+if TYPE_CHECKING:
+ from compas_slicer.slicers import BaseSlicer
-logger = logging.getLogger('logger')
__all__ = ['seams_align']
+AlignWith = Literal["next_path", "origin", "x_axis", "y_axis"]
+
-def seams_align(slicer, align_with="next_path"):
+def seams_align(slicer: BaseSlicer, align_with: AlignWith | Point = "next_path") -> None:
"""Aligns the seams (start- and endpoint) of a print.
Parameters
@@ -23,12 +30,9 @@ def seams_align(slicer, align_with="next_path"):
y_axis = orients the seam to the y_axis
Point(x,y,z) = orients the seam according to the given point
- Returns
- -------
- None
"""
# TODO: Implement random seams
- logger.info("Aligning seams to: %s" % align_with)
+ logger.info(f"Aligning seams to: {align_with}")
for i, layer in enumerate(slicer.layers):
for j, path in enumerate(layer.paths):
@@ -84,10 +88,12 @@ def seams_align(slicer, align_with="next_path"):
else:
first_last_point_the_same = False
- # computes distance between pt_to_align_with and the current path points
- distance_current_pt_align_pt = [distance_point_point(pt_to_align_with, pt) for pt in path_to_change]
- # gets the index of the closest point by looking for the minimum
- new_start_index = distance_current_pt_align_pt.index(min(distance_current_pt_align_pt))
+ # computes distance between pt_to_align_with and the current path points (vectorized)
+ ref = np.asarray(pt_to_align_with, dtype=np.float64)
+ pts = np.asarray(path_to_change, dtype=np.float64)
+ distances = np.linalg.norm(pts - ref, axis=1)
+ # gets the index of the closest point
+ new_start_index = int(np.argmin(distances))
# shifts the list by the distance determined
shift_list = path_to_change[new_start_index:] + path_to_change[:new_start_index]
@@ -100,11 +106,10 @@ def seams_align(slicer, align_with="next_path"):
# OPEN PATHS
path_to_change = layer.paths[j].points
- # get the distance between the align point and the start/end point
- start = path_to_change[0]
- end = path_to_change[-1]
- d_start = distance_point_point(start, pt_to_align_with)
- d_end = distance_point_point(end, pt_to_align_with)
+ # get the distance between the align point and the start/end point (vectorized)
+ ref = np.asarray(pt_to_align_with, dtype=np.float64)
+ d_start = np.linalg.norm(np.asarray(path_to_change[0]) - ref)
+ d_end = np.linalg.norm(np.asarray(path_to_change[-1]) - ref)
# if closer to end point > reverse list
if d_start > d_end:
diff --git a/src/compas_slicer/post_processing/seams_smooth.py b/src/compas_slicer/post_processing/seams_smooth.py
index 40eae09a..a91f7589 100644
--- a/src/compas_slicer/post_processing/seams_smooth.py
+++ b/src/compas_slicer/post_processing/seams_smooth.py
@@ -1,14 +1,20 @@
-import logging
-from compas.geometry import distance_point_point
-from compas.geometry import Vector
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from compas.geometry import Vector, distance_point_point
+from loguru import logger
+
import compas_slicer
-logger = logging.getLogger('logger')
+if TYPE_CHECKING:
+ from compas_slicer.slicers import BaseSlicer
+
__all__ = ['seams_smooth']
-def seams_smooth(slicer, smooth_distance):
+def seams_smooth(slicer: BaseSlicer, smooth_distance: float) -> None:
"""Smooths the seams (transition between layers)
by removing points within a certain distance.
@@ -20,11 +26,11 @@ def seams_smooth(slicer, smooth_distance):
Distance (in mm) to perform smoothing
"""
- logger.info("Smoothing seams with a distance of %i mm" % smooth_distance)
+ logger.info(f"Smoothing seams with a distance of {smooth_distance} mm")
for i, layer in enumerate(slicer.layers):
if len(layer.paths) == 1 or isinstance(layer, compas_slicer.geometry.VerticalLayer):
- for j, path in enumerate(layer.paths):
+ for _j, path in enumerate(layer.paths):
if path.is_closed: # only for closed paths
pt0 = path.points[0]
# only points in the first half of a path should be evaluated
@@ -44,8 +50,10 @@ def seams_smooth(slicer, smooth_distance):
path.points.pop(-1) # remove last point
break
else:
- logger.warning("Smooth seams only works for layers consisting out of a single path, or for vertical layers."
- "\nPaths were not changed, seam smoothing skipped for layer %i" % i)
+ logger.warning(
+ "Smooth seams only works for layers consisting out of a single path, or for vertical layers."
+ f"\nPaths were not changed, seam smoothing skipped for layer {i}"
+ )
if __name__ == "__main__":
diff --git a/src/compas_slicer/post_processing/simplify_paths_rdp.py b/src/compas_slicer/post_processing/simplify_paths_rdp.py
index e2511ef4..4be08262 100644
--- a/src/compas_slicer/post_processing/simplify_paths_rdp.py
+++ b/src/compas_slicer/post_processing/simplify_paths_rdp.py
@@ -1,24 +1,32 @@
-import rdp as rdp
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
import numpy as np
-import logging
-import progressbar
+import rdp as rdp_py
from compas.geometry import Point
-import compas_slicer.utilities as utils
-from compas.plugins import PluginNotInstalledError
+from loguru import logger
-packages = utils.TerminalCommand('conda list').get_split_output_strings()
-if 'igl' in packages:
- import igl
+if TYPE_CHECKING:
+ from compas_slicer.slicers import BaseSlicer
-logger = logging.getLogger('logger')
-__all__ = ['simplify_paths_rdp',
- 'simplify_paths_rdp_igl']
+__all__ = ['simplify_paths_rdp']
+# Check for CGAL availability at module load
+_USE_CGAL = False
+try:
+ from compas_cgal.polylines import simplify_polylines as _cgal_simplify
+ _USE_CGAL = True
+except ImportError:
+ _cgal_simplify = None
-def simplify_paths_rdp(slicer, threshold):
- """Simplifies a path using the Ramer–Douglas–Peucker algorithm, implemented in the rdp python library.
- https://en.wikipedia.org/wiki/Ramer-Douglas-Peucker_algorithm
+
+def simplify_paths_rdp(slicer: BaseSlicer, threshold: float) -> None:
+ """Simplify paths using the Ramer-Douglas-Peucker algorithm.
+
+ Uses CGAL native implementation if available (10-20x faster),
+ otherwise falls back to Python rdp library.
Parameters
----------
@@ -27,50 +35,52 @@ def simplify_paths_rdp(slicer, threshold):
threshold: float
Controls the degree of polyline simplification.
Low threshold removes few points, high threshold removes many points.
+
+ References
+ ----------
+ https://en.wikipedia.org/wiki/Ramer-Douglas-Peucker_algorithm
"""
+ if _USE_CGAL:
+ _simplify_paths_cgal(slicer, threshold)
+ else:
+ _simplify_paths_python(slicer, threshold)
+
- logger.info("Paths simplification rdp")
+def _simplify_paths_cgal(slicer: BaseSlicer, threshold: float) -> None:
+ """Simplify paths using CGAL Polyline_simplification_2."""
+ logger.info("Paths simplification rdp (CGAL)")
remaining_pts_num = 0
- with progressbar.ProgressBar(max_value=len(slicer.layers)) as bar:
- for i, layer in enumerate(slicer.layers):
- if not layer.is_raft: # no simplification necessary for raft layer
- for path in layer.paths:
- pts_rdp = rdp.rdp(np.array(path.points), epsilon=threshold)
- path.points = [Point(pt[0], pt[1], pt[2]) for pt in pts_rdp]
- remaining_pts_num += len(path.points)
- bar.update(i)
- logger.info('%d Points remaining after rdp simplification' % remaining_pts_num)
+ for layer in slicer.layers:
+ if layer.is_raft:
+ continue
+ # Batch all paths in this layer for efficient CGAL processing
+ polylines = [[[pt[0], pt[1], pt[2]] for pt in path.points] for path in layer.paths]
+ simplified = _cgal_simplify(polylines, threshold)
-def simplify_paths_rdp_igl(slicer, threshold):
- """
- https://libigl.github.io/libigl-python-bindings/igl_docs/#ramer_douglas_peucker
- Parameters
- ----------
- slicer: :class:`compas_slicer.slicers.BaseSlicer`
- An instance of one of the compas_slicer.slicers classes.
- threshold: float
- Controls the degree of polyline simplification.
- Low threshold removes few points, high threshold removes many points.
- """
- try:
- # utils.check_package_is_installed('igl')
- logger.info("Paths simplification rdp - igl")
- remaining_pts_num = 0
-
- for i, layer in enumerate(slicer.layers):
- if not layer.is_raft: # no simplification necessary for raft layer
- for path in layer.paths:
- pts = np.array([[pt[0], pt[1], pt[2]] for pt in path.points])
- S, J, Q = igl.ramer_douglas_peucker(pts, threshold)
- path.points = [Point(pt[0], pt[1], pt[2]) for pt in S]
- remaining_pts_num += len(path.points)
- logger.info('%d Points remaining after rdp simplification' % remaining_pts_num)
-
- except PluginNotInstalledError:
- logger.info("Libigl is not installed. Falling back to python rdp function")
- simplify_paths_rdp(slicer, threshold)
+ for path, pts_simplified in zip(layer.paths, simplified):
+ path.points = [Point(pt[0], pt[1], pt[2]) for pt in pts_simplified]
+ remaining_pts_num += len(path.points)
+
+ logger.info(f'{remaining_pts_num} points remaining after simplification')
+
+
+def _simplify_paths_python(slicer: BaseSlicer, threshold: float) -> None:
+ """Simplify paths using Python rdp library."""
+ logger.info("Paths simplification rdp (Python)")
+ remaining_pts_num = 0
+
+ for layer in slicer.layers:
+ if layer.is_raft:
+ continue
+
+ for path in layer.paths:
+ pts_rdp = rdp_py.rdp(np.array(path.points), epsilon=threshold)
+ path.points = [Point(pt[0], pt[1], pt[2]) for pt in pts_rdp]
+ remaining_pts_num += len(path.points)
+
+ logger.info(f'{remaining_pts_num} points remaining after simplification')
if __name__ == "__main__":
diff --git a/src/compas_slicer/post_processing/sort_into_vertical_layers.py b/src/compas_slicer/post_processing/sort_into_vertical_layers.py
index 5ca8f4f4..d7029710 100644
--- a/src/compas_slicer/post_processing/sort_into_vertical_layers.py
+++ b/src/compas_slicer/post_processing/sort_into_vertical_layers.py
@@ -1,12 +1,21 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from loguru import logger
+
from compas_slicer.geometry import VerticalLayersManager
-import logging
-logger = logging.getLogger('logger')
+if TYPE_CHECKING:
+ from compas_slicer.slicers import BaseSlicer
+
__all__ = ['sort_into_vertical_layers']
-def sort_into_vertical_layers(slicer, dist_threshold=25.0, max_paths_per_layer=None):
+def sort_into_vertical_layers(
+ slicer: BaseSlicer, dist_threshold: float = 25.0, max_paths_per_layer: int | None = None
+) -> None:
"""Sorts the paths from horizontal layers into Vertical Layers.
Vertical Layers are layers at different heights that are grouped together by proximity
@@ -33,7 +42,7 @@ def sort_into_vertical_layers(slicer, dist_threshold=25.0, max_paths_per_layer=N
vertical_layers_manager.add(path)
slicer.layers = vertical_layers_manager.layers
- logger.info("Number of vertical_layers: %d" % len(slicer.layers))
+ logger.info(f"Number of vertical_layers: {len(slicer.layers)}")
if __name__ == "__main__":
diff --git a/src/compas_slicer/post_processing/sort_paths_minimum_travel_time.py b/src/compas_slicer/post_processing/sort_paths_minimum_travel_time.py
index 50f15a5b..4104caae 100644
--- a/src/compas_slicer/post_processing/sort_paths_minimum_travel_time.py
+++ b/src/compas_slicer/post_processing/sort_paths_minimum_travel_time.py
@@ -1,14 +1,20 @@
-# from compas_slicer.geometry import VerticalLayersManager
-import logging
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import numpy as np
from compas.geometry import Point
-from compas.geometry import distance_point_point
+from loguru import logger
+
+if TYPE_CHECKING:
+ from compas_slicer.geometry import Path as SlicerPath
+ from compas_slicer.slicers import BaseSlicer
-logger = logging.getLogger('logger')
__all__ = ['sort_paths_minimum_travel_time']
-def sort_paths_minimum_travel_time(slicer):
+def sort_paths_minimum_travel_time(slicer: BaseSlicer) -> None:
"""Sorts the paths within a horizontal layer to reduce total travel time.
Parameters
@@ -31,7 +37,7 @@ def sort_paths_minimum_travel_time(slicer):
slicer.layers[i].paths = sorted_paths
-def adjust_seam_to_closest_pos(ref_point, path):
+def adjust_seam_to_closest_pos(ref_point: Point, path: SlicerPath) -> None:
"""Aligns the seam (start- and endpoint) of a contour so that it is closest to a given point.
for open paths, check if the end point closest to the reference point is the start point
@@ -48,20 +54,24 @@ def adjust_seam_to_closest_pos(ref_point, path):
if path.is_closed: # if path is closed
# remove first point
path.points.pop(-1)
- # calculate distances from ref_point to vertices of path
- distances = [distance_point_point(ref_point, points) for points in path.points]
- # find index of closest point
- closest_point = distances.index(min(distances))
+ # calculate distances from ref_point to vertices of path (vectorized)
+ ref = np.asarray(ref_point, dtype=np.float64)
+ pts = np.asarray(path.points, dtype=np.float64)
+ distances = np.linalg.norm(pts - ref, axis=1)
+ closest_point = int(np.argmin(distances))
# adjust seam
adjusted_seam = path.points[closest_point:] + path.points[:closest_point] + [path.points[closest_point]]
path.points = adjusted_seam
else: # if path is open
- # if end point is closer than start point >> flip
- if distance_point_point(ref_point, path.points[0]) > distance_point_point(ref_point, path.points[-1]):
+ # if end point is closer than start point >> flip (vectorized)
+ ref = np.asarray(ref_point, dtype=np.float64)
+ d_start = np.linalg.norm(np.asarray(path.points[0]) - ref)
+ d_end = np.linalg.norm(np.asarray(path.points[-1]) - ref)
+ if d_start > d_end:
path.points.reverse()
-def closest_path(ref_point, somepaths):
+def closest_path(ref_point: Point, somepaths: list[SlicerPath]) -> int:
"""Finds the closest path to a reference point in a list of paths.
Parameters
@@ -69,18 +79,16 @@ def closest_path(ref_point, somepaths):
ref_point: the reference point
somepaths: list of paths to look into for finding the closest
"""
- min_dist = distance_point_point(ref_point, somepaths[0].points[0])
- closest_index = 0
+ ref = np.asarray(ref_point, dtype=np.float64)
- for i, path in enumerate(somepaths):
- # for each path, adjust the seam to be in the closest vertex to ref_point
+ # First adjust all seams
+ for path in somepaths:
adjust_seam_to_closest_pos(ref_point, path)
- # calculate the minimum distance to the nearest seam of each path
- min_dist_temp = distance_point_point(ref_point, path.points[0])
- if min_dist_temp < min_dist:
- min_dist = min_dist_temp
- closest_index = i
- return closest_index
+
+ # Then find closest path (vectorized)
+ start_pts = np.array([path.points[0] for path in somepaths], dtype=np.float64)
+ distances = np.linalg.norm(start_pts - ref, axis=1)
+ return int(np.argmin(distances))
if __name__ == "__main__":
diff --git a/src/compas_slicer/post_processing/spiralize_contours.py b/src/compas_slicer/post_processing/spiralize_contours.py
index 0d7715f9..124025ef 100644
--- a/src/compas_slicer/post_processing/spiralize_contours.py
+++ b/src/compas_slicer/post_processing/spiralize_contours.py
@@ -1,14 +1,21 @@
-import logging
-import compas_slicer
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
from compas.geometry import Point
+from loguru import logger
+
+import compas_slicer
from compas_slicer.utilities.utils import pull_pts_to_mesh_faces
-logger = logging.getLogger('logger')
+if TYPE_CHECKING:
+ from compas_slicer.slicers import PlanarSlicer
+
__all__ = ['spiralize_contours']
-def spiralize_contours(slicer):
+def spiralize_contours(slicer: PlanarSlicer) -> None:
"""Spiralizes contours. Only works for Planar Slicer.
Can only be used for geometries consisting out of a single closed contour (i.e. vases).
@@ -23,11 +30,14 @@ def spiralize_contours(slicer):
logger.warning("spiralize_contours() contours only works for PlanarSlicer. Skipping function.")
return
+ if slicer.layer_height is None:
+ raise ValueError("layer_height must be set before spiralizing contours")
+
for j, layer in enumerate(slicer.layers):
if len(layer.paths) == 1:
for path in layer.paths:
d = slicer.layer_height / (len(path.points) - 1)
- for i, point in enumerate(path.points):
+ for i, _point in enumerate(path.points):
# add the distance to move to the z value and create new points
path.points[i][2] += d * i
@@ -39,8 +49,10 @@ def spiralize_contours(slicer):
path.points.pop(len(path.points) - 1)
else:
- logger.warning("Spiralize contours only works for layers consisting out of a single path, contours were "
- "not changed, spiralize contour skipped for layer %d" % j)
+ logger.warning(
+ "Spiralize contours only works for layers consisting out of a single path, contours were "
+ f"not changed, spiralize contour skipped for layer {j}"
+ )
if __name__ == "__main__":
diff --git a/src/compas_slicer/post_processing/unify_paths_orientation.py b/src/compas_slicer/post_processing/unify_paths_orientation.py
index 371481d8..54b6d860 100644
--- a/src/compas_slicer/post_processing/unify_paths_orientation.py
+++ b/src/compas_slicer/post_processing/unify_paths_orientation.py
@@ -1,13 +1,18 @@
-import logging
-from compas.geometry import normalize_vector, subtract_vectors, dot_vectors
+from __future__ import annotations
+
from collections import deque
+from typing import TYPE_CHECKING
+
+from compas.geometry import Point, dot_vectors, normalize_vector, subtract_vectors
+
+if TYPE_CHECKING:
+ from compas_slicer.slicers import BaseSlicer
-logger = logging.getLogger('logger')
__all__ = ['unify_paths_orientation']
-def unify_paths_orientation(slicer):
+def unify_paths_orientation(slicer: BaseSlicer) -> None:
"""
Unifies the orientation of paths that are closed.
@@ -29,7 +34,9 @@ def unify_paths_orientation(slicer):
path.points = match_paths_orientations(path.points, reference_points, path.is_closed)
-def match_paths_orientations(pts, reference_points, is_closed):
+def match_paths_orientations(
+ pts: list[Point], reference_points: list[Point], is_closed: bool
+) -> list[Point]:
"""Check if new curve has same direction as prev curve, otherwise reverse.
Parameters
diff --git a/src/compas_slicer/post_processing/zig_zag_open_paths.py b/src/compas_slicer/post_processing/zig_zag_open_paths.py
index 5aa9d9a3..e8fef19b 100644
--- a/src/compas_slicer/post_processing/zig_zag_open_paths.py
+++ b/src/compas_slicer/post_processing/zig_zag_open_paths.py
@@ -1,15 +1,19 @@
-import logging
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from compas_slicer.slicers import BaseSlicer
-logger = logging.getLogger('logger')
__all__ = ['zig_zag_open_paths']
-def zig_zag_open_paths(slicer):
+def zig_zag_open_paths(slicer: BaseSlicer) -> None:
""" Reverses half of the open paths of the slicer, so that they can be printed in a zig zag motion. """
reverse = False
for layer in slicer.layers:
- for i, path in enumerate(layer.paths):
+ for _i, path in enumerate(layer.paths):
if not path.is_closed:
if not reverse:
reverse = True
diff --git a/src/compas_slicer/pre_processing/__init__.py b/src/compas_slicer/pre_processing/__init__.py
index 3f1bcb81..2bc4e5ad 100644
--- a/src/compas_slicer/pre_processing/__init__.py
+++ b/src/compas_slicer/pre_processing/__init__.py
@@ -1,27 +1,8 @@
-"""
-********************************************************************************
-pre_processing
-********************************************************************************
+"""Pre-processing utilities for mesh preparation before slicing."""
-Positioning
-=======================
-
-.. autosummary::
- :toctree: generated/
- :nosignatures:
-
- move_mesh_to_point
- get_mid_pt_base
-
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from .preprocessing_utils import * # noqa: F401 E402 F403
-from .interpolation_slicing_preprocessor import * # noqa: F401 E402 F403
from .gradient_evaluation import * # noqa: F401 E402 F403
+from .interpolation_slicing_preprocessor import * # noqa: F401 E402 F403
# Positioning
from .positioning import * # noqa: F401 E402 F403
+from .preprocessing_utils import * # noqa: F401 F403
diff --git a/src/compas_slicer/pre_processing/gradient_evaluation.py b/src/compas_slicer/pre_processing/gradient_evaluation.py
index 30f06ca0..44e231d1 100644
--- a/src/compas_slicer/pre_processing/gradient_evaluation.py
+++ b/src/compas_slicer/pre_processing/gradient_evaluation.py
@@ -1,15 +1,26 @@
+from __future__ import annotations
+
+from pathlib import Path as FilePath
+from typing import TYPE_CHECKING
+
import numpy as np
-import logging
+from loguru import logger
+from numpy.typing import NDArray
+
import compas_slicer.utilities as utils
-from compas_slicer.pre_processing.preprocessing_utils import get_face_gradient_from_scalar_field
-from compas_slicer.pre_processing.preprocessing_utils import get_vertex_gradient_from_face_gradient
+from compas_slicer.pre_processing.preprocessing_utils import (
+ get_face_gradient_from_scalar_field,
+ get_vertex_gradient_from_face_gradient,
+)
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
-logger = logging.getLogger('logger')
__all__ = ['GradientEvaluation']
-class GradientEvaluation(object):
+class GradientEvaluation:
"""
Evaluation of the gradient of the scalar function of the mesh.
The scalar function should be stored as a vertex attribute on every vertex, with key='scalar_field'
@@ -20,30 +31,32 @@ class GradientEvaluation(object):
DATA_PATH: str, path to the data folder
"""
- def __init__(self, mesh, DATA_PATH):
+ def __init__(self, mesh: Mesh, DATA_PATH: str | FilePath) -> None:
for v_key, data in mesh.vertices(data=True):
- assert 'scalar_field' in data, "Vertex %d does not have the attribute 'scalar_field'"
+ if 'scalar_field' not in data:
+ raise ValueError(f"Vertex {v_key} does not have the attribute 'scalar_field'")
- print('')
logger.info('Gradient evaluation')
self.mesh = mesh
self.DATA_PATH = DATA_PATH
self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
- self.minima, self.maxima, self.saddles = [], [], []
+ self.minima: list[int] = []
+ self.maxima: list[int] = []
+ self.saddles: list[int] = []
- self.face_gradient = [] # np.array (#F x 3) one gradient vector per face.
- self.vertex_gradient = [] # np.array (#V x 3) one gradient vector per vertex.
- self.face_gradient_norm = [] # list (#F x 1)
- self.vertex_gradient_norm = [] # list (#V x 1)
+ self.face_gradient: NDArray[np.floating] | list = [] # np.array (#F x 3) one gradient vector per face.
+ self.vertex_gradient: NDArray[np.floating] | list = [] # np.array (#V x 3) one gradient vector per vertex.
+ self.face_gradient_norm: list[float] = [] # list (#F x 1)
+ self.vertex_gradient_norm: list[float] = [] # list (#V x 1)
- def compute_gradient(self):
+ def compute_gradient(self) -> None:
""" Computes the gradient on the faces and the vertices. """
u_v = [self.mesh.vertex[vkey]['scalar_field'] for vkey in self.mesh.vertices()]
self.face_gradient = get_face_gradient_from_scalar_field(self.mesh, u_v)
self.vertex_gradient = get_vertex_gradient_from_face_gradient(self.mesh, self.face_gradient)
- def compute_gradient_norm(self):
+ def compute_gradient_norm(self) -> None:
""" Computes the norm of the gradient. """
logger.info('Computing norm of gradient')
f_g = np.array([self.face_gradient[i] for i, fkey in enumerate(self.mesh.faces())])
@@ -51,7 +64,7 @@ def compute_gradient_norm(self):
self.face_gradient_norm = list(np.linalg.norm(f_g, axis=1))
self.vertex_gradient_norm = list(np.linalg.norm(v_g, axis=1))
- def find_critical_points(self):
+ def find_critical_points(self) -> None:
""" Finds minima, maxima and saddle points of the scalar function on the mesh. """
for vkey, data in self.mesh.vertices(data=True):
current_v = data['scalar_field']
@@ -80,10 +93,10 @@ def find_critical_points(self):
# --- Helpers
-def count_sign_changes(values):
+def count_sign_changes(values: list[float]) -> int:
""" Returns the number of sign changes in a list of values. """
count = 0
- prev_v = 0
+ prev_v: float = 0.0
for i, v in enumerate(values):
if i == 0:
prev_v = v
diff --git a/src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py b/src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
index c9e92535..eb64d7ef 100644
--- a/src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
+++ b/src/compas_slicer/pre_processing/interpolation_slicing_preprocessor.py
@@ -1,43 +1,57 @@
-from compas_slicer.pre_processing import CompoundTarget
-from compas_slicer.pre_processing.gradient_evaluation import GradientEvaluation
-import logging
-import os
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TYPE_CHECKING
+
from compas.datastructures import Mesh
-from compas_slicer.pre_processing.preprocessing_utils import region_split as rs, \
- topological_sorting as topo_sort
-from compas_slicer.pre_processing import get_existing_cut_indices, get_vertices_that_belong_to_cuts, \
- replace_mesh_vertex_attribute
+from loguru import logger
+
import compas_slicer.utilities as utils
-from compas_slicer.parameters import get_param
+from compas_slicer.config import InterpolationConfig
+from compas_slicer.pre_processing.gradient_evaluation import GradientEvaluation
from compas_slicer.pre_processing.preprocessing_utils import assign_interpolation_distance_to_mesh_vertices
+from compas_slicer.pre_processing.preprocessing_utils import region_split as rs
+from compas_slicer.pre_processing.preprocessing_utils import topological_sorting as topo_sort
+from compas_slicer.pre_processing.preprocessing_utils.compound_target import CompoundTarget
+from compas_slicer.pre_processing.preprocessing_utils.mesh_attributes_handling import (
+ get_existing_cut_indices,
+ get_vertices_that_belong_to_cuts,
+ replace_mesh_vertex_attribute,
+)
+
+if TYPE_CHECKING:
+ from compas_slicer.pre_processing.preprocessing_utils.topological_sorting import MeshDirectedGraph
-logger = logging.getLogger('logger')
__all__ = ['InterpolationSlicingPreprocessor']
class InterpolationSlicingPreprocessor:
- """
- Takes care of all the pre-processing that is (or might be) needed before the interpolation slicing process.
- Not all the functionality needs to be run every time, depending on the characteristics of the inputs.
+ """Handles pre-processing for interpolation slicing.
Attributes
----------
- mesh: :class: 'compas.datastructures.Mesh'
- parameters: dict
- DATA_PATH: str, path to the data folder
+ mesh : Mesh
+ Input mesh.
+ config : InterpolationConfig
+ Interpolation configuration.
+ DATA_PATH : str | Path
+ Path to the data folder.
+
"""
- def __init__(self, mesh, parameters, DATA_PATH):
+ def __init__(
+ self, mesh: Mesh, config: InterpolationConfig | None = None, DATA_PATH: str | Path = "."
+ ) -> None:
self.mesh = mesh
- self.parameters = parameters
+ self.config = config if config else InterpolationConfig()
self.DATA_PATH = DATA_PATH
self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
- self.target_LOW = None # :class: 'compas_slicer.pre_processing.CompoundTarget'
- self.target_HIGH = None # :class: 'compas_slicer.pre_processing.CompoundTarget'
+ self.target_LOW: CompoundTarget | None = None
+ self.target_HIGH: CompoundTarget | None = None
- self.split_meshes = [] # list , :class: 'compas.datastructures.Mesh'
+ self.split_meshes: list[Mesh] = []
# The meshes that result from the region splitting process.
utils.utils.check_triangular_mesh(mesh)
@@ -45,38 +59,37 @@ def __init__(self, mesh, parameters, DATA_PATH):
###########################
# --- compound targets
- def create_compound_targets(self):
- """ Creates the target_LOW and the target_HIGH and computes the geodesic distances. """
+ def create_compound_targets(self) -> None:
+ """Create target_LOW and target_HIGH and compute geodesic distances."""
# --- low target
- geodesics_method = get_param(self.parameters, key='target_LOW_geodesics_method',
- defaults_type='interpolation_slicing')
- method, params = 'min', [] # no other union methods currently supported for lower target
+ geodesics_method = self.config.target_low_geodesics_method.value
+ method = 'min' # no other union methods currently supported for lower target
+ params: list[float] = []
self.target_LOW = CompoundTarget(self.mesh, 'boundary', 1, self.DATA_PATH,
union_method=method,
union_params=params,
geodesics_method=geodesics_method)
# --- high target
- geodesics_method = get_param(self.parameters, key='target_HIGH_geodesics_method',
- defaults_type='interpolation_slicing')
- method, params = get_union_method(self.parameters)
- logger.info("Creating target with union type : " + method + " and params : " + str(params))
+ geodesics_method = self.config.target_high_geodesics_method.value
+ method = self.config.target_high_union_method.value
+ params = self.config.target_high_union_params
+ logger.info(f"Creating target with union type: {method} and params: {params}")
self.target_HIGH = CompoundTarget(self.mesh, 'boundary', 2, self.DATA_PATH,
union_method=method,
union_params=params,
geodesics_method=geodesics_method)
# --- uneven boundaries of high target
- self.target_HIGH.offset = get_param(self.parameters, key='uneven_upper_targets_offset',
- defaults_type='interpolation_slicing')
+ self.target_HIGH.offset = self.config.uneven_upper_targets_offset
self.target_HIGH.compute_uneven_boundaries_weight_max(self.target_LOW)
# --- save intermediary get_distance outputs
self.target_LOW.save_distances("distances_LOW.json")
self.target_HIGH.save_distances("distances_HIGH.json")
- def targets_laplacian_smoothing(self, iterations, strength):
+ def targets_laplacian_smoothing(self, iterations: int, strength: float) -> None:
"""
Smooth geodesic distances of targets. Saves again the distances to json.
@@ -85,6 +98,8 @@ def targets_laplacian_smoothing(self, iterations, strength):
iterations: int
strength: float
"""
+ if self.target_LOW is None or self.target_HIGH is None:
+ raise RuntimeError("Targets not initialized. Call create_compound_targets() first.")
self.target_LOW.laplacian_smoothing(iterations=iterations, strength=strength)
self.target_HIGH.laplacian_smoothing(iterations=iterations, strength=strength)
self.target_LOW.save_distances("distances_LOW.json")
@@ -93,13 +108,22 @@ def targets_laplacian_smoothing(self, iterations, strength):
###########################
# --- scalar field evaluation
- def create_gradient_evaluation(self, target_1, target_2=None, save_output=True,
- norm_filename='gradient_norm.json', g_filename='gradient.json'):
+ def create_gradient_evaluation(
+ self,
+ target_1: CompoundTarget,
+ target_2: CompoundTarget | None = None,
+ save_output: bool = True,
+ norm_filename: str = 'gradient_norm.json',
+ g_filename: str = 'gradient.json',
+ ) -> GradientEvaluation:
"""
Creates a compas_slicer.pre_processing.GradientEvaluation that is stored in self.g_evaluation
Also, computes the gradient and gradient_norm and saves them to Json .
"""
- assert self.target_LOW.VN == target_1.VN, "Attention! Preprocessor does not match targets. "
+ if self.target_LOW is None or self.target_HIGH is None:
+ raise RuntimeError("Targets not initialized. Call create_compound_targets() first.")
+ if self.target_LOW.VN != target_1.VN:
+ raise ValueError("Preprocessor does not match targets: vertex count mismatch.")
assign_interpolation_distance_to_mesh_vertices(self.mesh, weight=0.5,
target_LOW=self.target_LOW, target_HIGH=self.target_HIGH)
g_evaluation = GradientEvaluation(self.mesh, self.DATA_PATH)
@@ -113,7 +137,9 @@ def create_gradient_evaluation(self, target_1, target_2=None, save_output=True,
return g_evaluation
- def find_critical_points(self, g_evaluation, output_filenames):
+ def find_critical_points(
+ self, g_evaluation: GradientEvaluation, output_filenames: tuple[str, str, str]
+ ) -> None:
""" Computes and saves to json the critical points of the df on the mesh (minima, maxima, saddles)"""
g_evaluation.find_critical_points()
# save results to json
@@ -124,8 +150,13 @@ def find_critical_points(self, g_evaluation, output_filenames):
###########################
# --- Region Split
- def region_split(self, cut_mesh=True, separate_neighborhoods=True, topological_sorting=True,
- save_split_meshes=True):
+ def region_split(
+ self,
+ cut_mesh: bool = True,
+ separate_neighborhoods: bool = True,
+ topological_sorting: bool = True,
+ save_split_meshes: bool = True,
+ ) -> None:
"""
Splits the mesh on the saddle points. This process can take a long time.
It consists of four parts:
@@ -139,8 +170,7 @@ def region_split(self, cut_mesh=True, separate_neighborhoods=True, topological_s
time, you can turn the respective processes to false.
"""
- print("")
- logging.info("--- Mesh region splitting")
+ logger.info("--- Mesh region splitting")
if cut_mesh: # (1)
self.mesh.update_default_vertex_attributes({'cut': 0})
@@ -149,16 +179,16 @@ def region_split(self, cut_mesh=True, separate_neighborhoods=True, topological_s
self.mesh = mesh_splitter.mesh
logger.info('Completed Region splitting')
- logger.info("Region split cut indices: " + str(mesh_splitter.cut_indices))
+ logger.info(f"Region split cut indices: {mesh_splitter.cut_indices}")
# save results to json
- self.mesh.to_obj(os.path.join(self.OUTPUT_PATH, 'mesh_with_cuts.obj'))
- self.mesh.to_json(os.path.join(self.OUTPUT_PATH, 'mesh_with_cuts.json'))
- logger.info("Saving to Obj and Json: " + os.path.join(self.OUTPUT_PATH, 'mesh_with_cuts.json'))
+ output_path = Path(self.OUTPUT_PATH)
+ self.mesh.to_obj(str(output_path / 'mesh_with_cuts.obj'))
+ self.mesh.to_json(str(output_path / 'mesh_with_cuts.json'))
+ logger.info(f"Saving to Obj and Json: {output_path / 'mesh_with_cuts.json'}")
if separate_neighborhoods: # (2)
- print("")
logger.info("--- Separating mesh disconnected components")
- self.mesh = Mesh.from_json(os.path.join(self.OUTPUT_PATH, 'mesh_with_cuts.json'))
+ self.mesh = Mesh.from_json(str(Path(self.OUTPUT_PATH) / 'mesh_with_cuts.json'))
region_split_cut_indices = get_existing_cut_indices(self.mesh)
# save results to json
@@ -168,15 +198,14 @@ def region_split(self, cut_mesh=True, separate_neighborhoods=True, topological_s
self.split_meshes = rs.separate_disconnected_components(self.mesh, attr='cut',
values=region_split_cut_indices,
OUTPUT_PATH=self.OUTPUT_PATH)
- logger.info('Created %d split meshes.' % len(self.split_meshes))
+ logger.info(f'Created {len(self.split_meshes)} split meshes.')
if topological_sorting: # (3)
- print("")
logger.info("--- Topological sort of meshes directed graph to determine print order")
graph = topo_sort.MeshDirectedGraph(self.split_meshes, self.DATA_PATH)
all_orders = graph.get_all_topological_orders()
selected_order = all_orders[0]
- logger.info('selected_order : ' + str(selected_order)) # TODO: improve the way an order is selected
+ logger.info(f'selected_order: {selected_order}') # TODO: improve the way an order is selected
self.cleanup_mesh_attributes_based_on_selected_order(selected_order, graph)
# reorder split_meshes based on selected order
@@ -184,16 +213,17 @@ def region_split(self, cut_mesh=True, separate_neighborhoods=True, topological_s
# --- save split meshes
if save_split_meshes: # (4)
- print("")
logger.info("--- Saving resulting split meshes")
+ output_path = Path(self.OUTPUT_PATH)
for i, m in enumerate(self.split_meshes):
- m.to_obj(os.path.join(self.OUTPUT_PATH, 'split_mesh_' + str(i) + '.obj'))
- m.to_json(os.path.join(self.OUTPUT_PATH, 'split_mesh_' + str(i) + '.json'))
- logger.info('Saving to Obj and Json: ' + os.path.join(self.OUTPUT_PATH, 'split_mesh_%.obj'))
- logger.info("Saved %d split_meshes" % len(self.split_meshes))
- print('')
-
- def cleanup_mesh_attributes_based_on_selected_order(self, selected_order, graph):
+ m.to_obj(str(output_path / f'split_mesh_{i}.obj'))
+ m.to_json(str(output_path / f'split_mesh_{i}.json'))
+ logger.info(f'Saving to Obj and Json: {output_path / "split_mesh_%.obj"}')
+ logger.info(f"Saved {len(self.split_meshes)} split_meshes")
+
+ def cleanup_mesh_attributes_based_on_selected_order(
+ self, selected_order: list[int], graph: MeshDirectedGraph
+ ) -> None:
"""
Based on the selected order of split meshes, it rearranges their attributes, so that they can then be used
with an interpolation slicer that requires data['boundary'] to be filled for every vertex.
@@ -220,43 +250,9 @@ def cleanup_mesh_attributes_based_on_selected_order(self, selected_order, graph)
pts_boundary_LOW = utils.get_mesh_vertex_coords_with_attribute(mesh, 'boundary', 1)
pts_boundary_HIGH = utils.get_mesh_vertex_coords_with_attribute(mesh, 'boundary', 2)
utils.save_to_json(utils.point_list_to_dict(pts_boundary_LOW), self.OUTPUT_PATH,
- 'pts_boundary_LOW_%d.json' % index)
+ f'pts_boundary_LOW_{index}.json')
utils.save_to_json(utils.point_list_to_dict(pts_boundary_HIGH), self.OUTPUT_PATH,
- 'pts_boundary_HIGH_%d.json' % index)
-
-
-# ---- utils
-
-def get_union_method(params_dict):
- """
- Read input params_dict and return union method id and its parameters.
- target_type: LOW/HIGH
- """
- smooth_union_data = get_param(params_dict, key='target_HIGH_smooth_union', defaults_type='interpolation_slicing')
- chamfer_union_data = get_param(params_dict, key='target_HIGH_chamfer_union', defaults_type='interpolation_slicing')
- stairs_union_data = get_param(params_dict, key='target_HIGH_stairs_union', defaults_type='interpolation_slicing')
- if smooth_union_data[0]:
- method = 'smooth'
- params = smooth_union_data[1]
- assert not chamfer_union_data[0] and not stairs_union_data[0], 'You can only select one union method.'
- assert len(params) == 1, 'Wrong number of union params'
- return method, params
- elif chamfer_union_data[0]:
- method = 'chamfer'
- params = chamfer_union_data[1]
- assert not smooth_union_data[0] and not stairs_union_data[0], 'You can only select one union method.'
- assert len(params) == 1, 'Wrong number of union params'
- return method, params
- elif stairs_union_data[0]:
- method = 'stairs'
- params = stairs_union_data[1]
- assert not smooth_union_data[0] and not chamfer_union_data[0], 'You can only select one union method.'
- assert len(params) == 2, 'Wrong number of union params'
- return method, params
- else:
- method = 'min'
- params = []
- return method, params
+ f'pts_boundary_HIGH_{index}.json')
if __name__ == "__main__":
diff --git a/src/compas_slicer/pre_processing/positioning.py b/src/compas_slicer/pre_processing/positioning.py
index cd081feb..ef1e4912 100644
--- a/src/compas_slicer/pre_processing/positioning.py
+++ b/src/compas_slicer/pre_processing/positioning.py
@@ -1,16 +1,20 @@
-from compas.geometry import Frame, Point
-from compas.geometry import Transformation
-from compas.datastructures import mesh_bounding_box
+from __future__ import annotations
-import logging
+from typing import TYPE_CHECKING
+
+from compas.geometry import Frame, Point, Transformation, bounding_box
+from loguru import logger
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
-logger = logging.getLogger('logger')
__all__ = ['move_mesh_to_point',
- 'get_mid_pt_base']
+ 'get_mid_pt_base',
+ 'remesh_mesh']
-def move_mesh_to_point(mesh, target_point):
+def move_mesh_to_point(mesh: Mesh, target_point: Point) -> Mesh:
"""Moves (translates) a mesh to a target point.
Parameters
@@ -29,12 +33,12 @@ def move_mesh_to_point(mesh, target_point):
T = Transformation.from_frame_to_frame(mesh_frame, target_frame)
mesh.transform(T)
- logger.info("Mesh moved to: " + str(target_point))
+ logger.info(f"Mesh moved to: {target_point}")
return mesh
-def get_mid_pt_base(mesh):
+def get_mid_pt_base(mesh: Mesh) -> Point:
"""Gets the middle point of the base (bottom) of the mesh.
Parameters
@@ -49,7 +53,8 @@ def get_mid_pt_base(mesh):
"""
# get center bottom point of mesh model
- bbox = mesh_bounding_box(mesh)
+ vertices = list(mesh.vertices_attributes('xyz'))
+ bbox = bounding_box(vertices)
corner_pts = [bbox[0], bbox[2]]
x = [p[0] for p in corner_pts]
@@ -61,5 +66,66 @@ def get_mid_pt_base(mesh):
return mesh_mid_pt
+def remesh_mesh(
+ mesh: Mesh,
+ target_edge_length: float,
+ number_of_iterations: int = 10,
+ do_project: bool = True
+) -> Mesh:
+ """Remesh a triangle mesh to achieve uniform edge lengths.
+
+ Uses CGAL's isotropic remeshing to improve mesh quality for slicing.
+ This can help with curved slicing and geodesic computations.
+
+ Parameters
+ ----------
+ mesh : Mesh
+ A compas mesh (must be triangulated).
+ target_edge_length : float
+ Target edge length for the remeshed output.
+ number_of_iterations : int
+ Number of remeshing iterations (default: 10).
+ do_project : bool
+ Reproject vertices onto original surface (default: True).
+
+ Returns
+ -------
+ Mesh
+ Remeshed compas mesh.
+
+ Raises
+ ------
+ ImportError
+ If compas_cgal is not available.
+
+ Examples
+ --------
+ >>> from compas.datastructures import Mesh
+ >>> from compas_slicer.pre_processing import remesh_mesh
+ >>> mesh = Mesh.from_stl('model.stl')
+ >>> remeshed = remesh_mesh(mesh, target_edge_length=2.0)
+ """
+ try:
+ from compas_cgal.meshing import trimesh_remesh
+ except ImportError as e:
+ raise ImportError(
+ "remesh_mesh requires compas_cgal. Install with: pip install compas_cgal"
+ ) from e
+
+ from compas.datastructures import Mesh as CompasMesh
+
+ M = mesh.to_vertices_and_faces()
+ V, F = trimesh_remesh(M, target_edge_length, number_of_iterations, do_project)
+
+ result = CompasMesh.from_vertices_and_faces(V.tolist(), F.tolist())
+
+ logger.info(
+ f"Remeshed: {mesh.number_of_vertices()} -> {result.number_of_vertices()} vertices, "
+ f"target edge length: {target_edge_length}"
+ )
+
+ return result
+
+
if __name__ == "__main__":
pass
diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/__init__.py b/src/compas_slicer/pre_processing/preprocessing_utils/__init__.py
index 36fd8b72..66d4792d 100644
--- a/src/compas_slicer/pre_processing/preprocessing_utils/__init__.py
+++ b/src/compas_slicer/pre_processing/preprocessing_utils/__init__.py
@@ -1,12 +1,8 @@
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
+from .assign_vertex_distance import * # noqa: F401 F403
+from .compound_target import * # noqa: F401 F403
+from .geodesics import * # noqa: F401 F403
+from .gradient import * # noqa: F401 F403
+from .mesh_attributes_handling import * # noqa: F401 F403
+from .region_split import * # noqa: F401 F403
-from .mesh_attributes_handling import * # noqa: F401 E402 F403
-from .compound_target import * # noqa: F401 E402 F403
-from .geodesics import * # noqa: F401 E402 F403
-from .assign_vertex_distance import * # noqa: F401 E402 F403
-from .gradient import * # noqa: F401 E402 F403
-from .region_split import * # noqa: F401 E402 F403
-
-__all__ = [name for name in dir() if not name.startswith('_')]
+__all__ = [name for name in dir() if not name.startswith("_")]
diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py b/src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py
index 6dc2a3e2..ebd1014e 100644
--- a/src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py
+++ b/src/compas_slicer/pre_processing/preprocessing_utils/assign_vertex_distance.py
@@ -1,15 +1,29 @@
-import logging
-from compas_slicer.pre_processing.preprocessing_utils import blend_union_list, stairs_union_list, chamfer_union_list
-from compas_slicer.utilities.utils import remap_unbound
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
import numpy as np
-logger = logging.getLogger('logger')
+from compas_slicer.pre_processing.preprocessing_utils.compound_target import (
+ blend_union_list,
+ chamfer_union_list,
+ stairs_union_list,
+)
+from compas_slicer.utilities.utils import remap_unbound
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
+
+ from compas_slicer.pre_processing.preprocessing_utils.compound_target import CompoundTarget
+
__all__ = ['assign_interpolation_distance_to_mesh_vertices',
'assign_interpolation_distance_to_mesh_vertex']
-def assign_interpolation_distance_to_mesh_vertices(mesh, weight, target_LOW, target_HIGH):
+def assign_interpolation_distance_to_mesh_vertices(
+ mesh: Mesh, weight: float, target_LOW: CompoundTarget, target_HIGH: CompoundTarget | None
+) -> None:
"""
Fills in the 'get_distance' attribute of every vertex of the mesh.
@@ -23,12 +37,71 @@ def assign_interpolation_distance_to_mesh_vertices(mesh, weight, target_LOW, tar
target_HIGH: :class: 'compas_slicer.pre_processing.CompoundTarget'
The upper compound target.
"""
- for i, vkey in enumerate(mesh.vertices()):
- d = assign_interpolation_distance_to_mesh_vertex(vkey, weight, target_LOW, target_HIGH)
- mesh.vertex[vkey]['scalar_field'] = d
+ # Vectorized computation for all vertices at once
+ distances = _compute_all_distances_vectorized(weight, target_LOW, target_HIGH)
+ for vkey, d in zip(mesh.vertices(), distances):
+ mesh.vertex[vkey]['scalar_field'] = float(d)
+
+
+def _compute_all_distances_vectorized(
+ weight: float, target_LOW: CompoundTarget, target_HIGH: CompoundTarget | None
+) -> np.ndarray:
+ """Compute weighted distances for all vertices at once."""
+ if target_LOW and target_HIGH:
+ return _get_weighted_distances_vectorized(weight, target_LOW, target_HIGH)
+ elif target_LOW:
+ offset = weight * target_LOW.get_max_dist()
+ return target_LOW.get_all_distances() - offset
+ else:
+ raise ValueError('You need to provide at least one target')
+
+
+def _get_weighted_distances_vectorized(
+ weight: float, target_LOW: CompoundTarget, target_HIGH: CompoundTarget
+) -> np.ndarray:
+ """Vectorized weighted distance computation for all vertices."""
+ d_low = target_LOW.get_all_distances() # (n_vertices,)
+
+ if target_HIGH.has_uneven_weights:
+ # (n_boundaries, n_vertices)
+ ds_high = target_HIGH.get_all_distances_array()
+
+ if target_HIGH.number_of_boundaries > 1:
+ weights = np.array([
+ remap_unbound(weight, 0, wmax, 0, 1)
+ for wmax in target_HIGH.weight_max_per_cluster
+ ]) # (n_boundaries,)
+ else:
+ weights = np.array([weight])
+
+ # Broadcast: (n_boundaries, n_vertices)
+ distances = (weights[:, None] - 1) * d_low + weights[:, None] * ds_high
+
+ if target_HIGH.union_method == 'min':
+ return np.min(distances, axis=0)
+ elif target_HIGH.union_method == 'smooth':
+ return np.array([
+ blend_union_list(distances[:, i].tolist(), target_HIGH.union_params[0])
+ for i in range(distances.shape[1])
+ ])
+ elif target_HIGH.union_method == 'chamfer':
+ return np.array([
+ chamfer_union_list(distances[:, i].tolist(), target_HIGH.union_params[0])
+ for i in range(distances.shape[1])
+ ])
+ elif target_HIGH.union_method == 'stairs':
+ return np.array([
+ stairs_union_list(distances[:, i].tolist(), target_HIGH.union_params[0], target_HIGH.union_params[1])
+ for i in range(distances.shape[1])
+ ])
+ else:
+ d_high = target_HIGH.get_all_distances()
+ return d_low * (1 - weight) - d_high * weight
-def assign_interpolation_distance_to_mesh_vertex(vkey, weight, target_LOW, target_HIGH):
+def assign_interpolation_distance_to_mesh_vertex(
+ vkey: int, weight: float, target_LOW: CompoundTarget, target_HIGH: CompoundTarget | None
+) -> float:
"""
Fills in the 'get_distance' attribute for a single vertex with vkey.
@@ -53,7 +126,9 @@ def assign_interpolation_distance_to_mesh_vertex(vkey, weight, target_LOW, targe
return d
-def get_weighted_distance(vkey, weight, target_LOW, target_HIGH):
+def get_weighted_distance(
+ vkey: int, weight: float, target_LOW: CompoundTarget, target_HIGH: CompoundTarget
+) -> float:
"""
Computes the weighted get_distance for a single vertex with vkey.
diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py b/src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
index 1c56cafe..74c14fdc 100644
--- a/src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
+++ b/src/compas_slicer/pre_processing/preprocessing_utils/compound_target.py
@@ -1,16 +1,37 @@
-import numpy as np
-import math
-from compas.datastructures import Mesh
-import compas_slicer.utilities as utils
-import logging
-import networkx as nx
-from compas_slicer.slicers.slice_utilities import create_graph_from_mesh_vkeys
-from compas_slicer.pre_processing.preprocessing_utils.geodesics import get_igl_EXACT_geodesic_distances, \
- get_custom_HEAT_geodesic_distances
+from __future__ import annotations
+import math
import statistics
+from typing import Any, Literal
-logger = logging.getLogger('logger')
+import networkx as nx
+import numpy as np
+from compas.datastructures import Mesh
+from loguru import logger
+from numpy.typing import NDArray
+
+import compas_slicer.utilities as utils
+from compas_slicer.pre_processing.preprocessing_utils.geodesics import (
+ get_cgal_HEAT_geodesic_distances,
+ get_custom_HEAT_geodesic_distances,
+ get_igl_EXACT_geodesic_distances,
+ get_igl_HEAT_geodesic_distances,
+)
+
+GeodesicsMethod = Literal['exact_igl', 'heat_igl', 'heat_cgal', 'heat']
+UnionMethod = Literal['min', 'smooth', 'chamfer', 'stairs']
+
+
+def _create_graph_from_mesh_vkeys(mesh: Mesh, v_keys: list[int]) -> nx.Graph:
+ """Creates a graph with one node for every vertex, and edges between neighboring vertices."""
+ G = nx.Graph()
+ [G.add_node(v) for v in v_keys]
+ for v in v_keys:
+ v_neighbors = mesh.vertex_neighbors(v)
+ for other_v in v_neighbors:
+ if other_v != v and other_v in v_keys:
+ G.add_edge(v, other_v)
+ return G
__all__ = ['CompoundTarget',
'blend_union_list',
@@ -37,17 +58,28 @@ class CompoundTarget:
has_blend_union: bool
blend_radius : float
geodesics_method: str
- 'exact_igl' exact igl geodesic distances
- 'heat' custom heat geodesic distances
+ 'heat_cgal' CGAL heat geodesic distances (recommended)
+ 'heat' custom heat geodesic distances
anisotropic_scaling: bool
This is not yet implemented
"""
- def __init__(self, mesh, v_attr, value, DATA_PATH, union_method='min', union_params=[],
- geodesics_method='exact_igl', anisotropic_scaling=False):
-
- logger.info('Creating target with attribute : ' + v_attr + '=%d' % value)
- logger.info('union_method : ' + union_method + ', union_params = ' + str(union_params))
+ def __init__(
+ self,
+ mesh: Mesh,
+ v_attr: str,
+ value: int,
+ DATA_PATH: str,
+ union_method: UnionMethod = 'min',
+ union_params: list[Any] | None = None,
+ geodesics_method: GeodesicsMethod = 'heat_cgal',
+ anisotropic_scaling: bool = False,
+ ) -> None:
+
+ if union_params is None:
+ union_params = []
+ logger.info(f'Creating target with attribute : {v_attr}={value}')
+ logger.info(f'union_method: {union_method}, union_params: {union_params}')
self.mesh = mesh
self.v_attr = v_attr
self.value = value
@@ -64,25 +96,25 @@ def __init__(self, mesh, v_attr, value, DATA_PATH, union_method='min', union_par
self.VN = len(list(self.mesh.vertices()))
# filled in by function 'self.find_targets_connected_components()'
- self.all_target_vkeys = [] # flattened list with all vi_starts
- self.clustered_vkeys = [] # nested list with all vi_starts
- self.number_of_boundaries = None # int
+ self.all_target_vkeys: list[int] = [] # flattened list with all vi_starts
+ self.clustered_vkeys: list[list[int]] = [] # nested list with all vi_starts
+ self.number_of_boundaries: int = 0
- self.weight_max_per_cluster = []
+ self.weight_max_per_cluster: list[float] = []
# geodesic distances
# filled in by function 'self.update_distances_lists()'
- self._distances_lists = [] # nested list. Shape: number_of_boundaries x number_of_vertices
- self._distances_lists_flipped = [] # nested list. Shape: number_of_vertices x number_of_boundaries
- self._np_distances_lists_flipped = np.array([]) # numpy array of self._distances_lists_flipped
- self._max_dist = None # maximum get_distance value from the target on any vertex of the mesh
+ self._distances_lists: list[list[float]] = [] # Shape: number_of_boundaries x number_of_vertices
+ self._distances_lists_flipped: list[list[float]] = [] # Shape: number_of_vertices x number_of_boundaries
+ self._np_distances_lists_flipped: NDArray[np.floating] = np.array([])
+ self._max_dist: float | None = None # maximum distance from target on any mesh vertex
# compute
self.find_targets_connected_components()
self.compute_geodesic_distances()
# --- Neighborhoods clustering
- def find_targets_connected_components(self):
+ def find_targets_connected_components(self) -> None:
"""
Clusters all the vertices that belong to the target into neighborhoods using a graph.
Each target can have an arbitrary number of neighborhoods/clusters.
@@ -90,20 +122,25 @@ def find_targets_connected_components(self):
"""
self.all_target_vkeys = [vkey for vkey, data in self.mesh.vertices(data=True) if
data[self.v_attr] == self.value]
- assert len(self.all_target_vkeys) > 0, "There are no vertices in the mesh with the attribute : " \
- + self.v_attr + ", value : %d" % self.value + " .Probably you made a " \
- "mistake while creating the targets. "
- G = create_graph_from_mesh_vkeys(self.mesh, self.all_target_vkeys)
- assert len(list(G.nodes())) == len(self.all_target_vkeys)
+ if len(self.all_target_vkeys) == 0:
+ raise ValueError(
+ f"No vertices in mesh with attribute '{self.v_attr}'={self.value}. "
+ "Check your target creation."
+ )
+ G = _create_graph_from_mesh_vkeys(self.mesh, self.all_target_vkeys)
+ if len(list(G.nodes())) != len(self.all_target_vkeys):
+ raise RuntimeError("Graph node count doesn't match target vertex count.")
self.number_of_boundaries = len(list(nx.connected_components(G)))
- for i, cp in enumerate(nx.connected_components(G)):
+ for _i, cp in enumerate(nx.connected_components(G)):
self.clustered_vkeys.append(list(cp))
- logger.info("Compound target with 'boundary'=%d. Number of connected_components : %d" % (
- self.value, len(list(nx.connected_components(G)))))
+ logger.info(
+ f"Compound target with 'boundary'={self.value}. Number of connected_components : "
+ f"{len(list(nx.connected_components(G)))}"
+ )
# --- Geodesic distances
- def compute_geodesic_distances(self):
+ def compute_geodesic_distances(self) -> None:
"""
Computes the geodesic distances from each of the target's neighborhoods to all the mesh vertices.
Fills in the distances attributes.
@@ -111,8 +148,14 @@ def compute_geodesic_distances(self):
if self.geodesics_method == 'exact_igl':
distances_lists = [get_igl_EXACT_geodesic_distances(self.mesh, vstarts) for vstarts in
self.clustered_vkeys]
+ elif self.geodesics_method == 'heat_igl':
+ distances_lists = [get_igl_HEAT_geodesic_distances(self.mesh, vstarts) for vstarts in
+ self.clustered_vkeys]
+ elif self.geodesics_method == 'heat_cgal':
+ distances_lists = [get_cgal_HEAT_geodesic_distances(self.mesh, vstarts) for vstarts in
+ self.clustered_vkeys]
elif self.geodesics_method == 'heat':
- distances_lists = [get_custom_HEAT_geodesic_distances(self.mesh, vstarts, self.OUTPUT_PATH) for vstarts in
+ distances_lists = [get_custom_HEAT_geodesic_distances(self.mesh, vstarts, str(self.OUTPUT_PATH)) for vstarts in
self.clustered_vkeys]
else:
raise ValueError('Unknown geodesics method : ' + self.geodesics_method)
@@ -120,7 +163,7 @@ def compute_geodesic_distances(self):
distances_lists = [list(dl) for dl in distances_lists] # number_of_boundaries x #V
self.update_distances_lists(distances_lists)
- def update_distances_lists(self, distances_lists):
+ def update_distances_lists(self, distances_lists: list[list[float]]) -> None:
"""
Fills in the distances attributes.
"""
@@ -134,11 +177,11 @@ def update_distances_lists(self, distances_lists):
# --- Uneven weights
@property
- def has_uneven_weights(self):
+ def has_uneven_weights(self) -> bool:
""" Returns True if the target has uneven_weights calculated, False otherwise. """
return len(self.weight_max_per_cluster) > 0
- def compute_uneven_boundaries_weight_max(self, other_target):
+ def compute_uneven_boundaries_weight_max(self, other_target: CompoundTarget) -> None:
"""
If the target has multiple neighborhoods/clusters of vertices, then it computes their maximum distance from
the other_target. Based on that it calculates their weight_max for the interpolation process
@@ -151,12 +194,14 @@ def compute_uneven_boundaries_weight_max(self, other_target):
ds_avg_HIGH[i] = d + self.offset
self.weight_max_per_cluster = [d / max_param for d in ds_avg_HIGH]
- logger.info('weight_max_per_cluster : ' + str(self.weight_max_per_cluster))
+ logger.info(f'weight_max_per_cluster: {self.weight_max_per_cluster}')
else:
logger.info("Did not compute_norm_of_gradient uneven boundaries, target consists of single component")
# --- Relation to other target
- def get_boundaries_rel_dist_from_other_target(self, other_target, avg_type='median'):
+ def get_boundaries_rel_dist_from_other_target(
+ self, other_target: CompoundTarget, avg_type: Literal['mean', 'median'] = 'median'
+ ) -> list[float]:
"""
Returns a list, one relative distance value per connected boundary neighborhood.
That is the average of the distances of the vertices of that boundary neighborhood from the other_target.
@@ -170,43 +215,67 @@ def get_boundaries_rel_dist_from_other_target(self, other_target, avg_type='medi
distances.append(statistics.median(ds))
return distances
- def get_avg_distances_from_other_target(self, other_target):
+ def get_avg_distances_from_other_target(self, other_target: CompoundTarget) -> float:
"""
Returns the minimum and maximum distance of the vertices of this target from the other_target
"""
extreme_distances = []
for v_index in other_target.all_target_vkeys:
extreme_distances.append(self.get_all_distances()[v_index])
- return np.average(np.array(extreme_distances))
+ return float(np.average(np.array(extreme_distances)))
#############################
# --- get all distances
- # All distances
- def get_all_distances(self):
- """ Returns the resulting distances per every vertex. """
- return [self.get_distance(i) for i in range(self.VN)]
-
- def get_all_clusters_distances_dict(self):
+ def get_all_clusters_distances_dict(self) -> dict[int, list[float]]:
""" Returns dict. keys: index of connected target neighborhood, value: list, distances (one per vertex). """
return {i: self._distances_lists[i] for i in range(self.number_of_boundaries)}
- def get_max_dist(self):
+ def get_max_dist(self) -> float | None:
""" Returns the maximum distance that the target has on a mesh vertex. """
return self._max_dist
+ #############################
+ # --- vectorized distances (all vertices at once)
+
+ def get_all_distances(self) -> np.ndarray:
+ """Return distances for all vertices as 1D array, applying union method."""
+ if self.union_method == 'min':
+ return np.min(self._np_distances_lists_flipped, axis=1)
+ elif self.union_method == 'smooth':
+ return np.array([
+ blend_union_list(row.tolist(), self.union_params[0])
+ for row in self._np_distances_lists_flipped
+ ])
+ elif self.union_method == 'chamfer':
+ return np.array([
+ chamfer_union_list(row.tolist(), self.union_params[0])
+ for row in self._np_distances_lists_flipped
+ ])
+ elif self.union_method == 'stairs':
+ return np.array([
+ stairs_union_list(row.tolist(), self.union_params[0], self.union_params[1])
+ for row in self._np_distances_lists_flipped
+ ])
+ else:
+ raise ValueError(f"Unknown union method: {self.union_method}")
+
+ def get_all_distances_array(self) -> np.ndarray:
+ """Return raw distances as (n_boundaries, n_vertices) array."""
+ return np.array(self._distances_lists)
+
#############################
# --- per vkey distances
- def get_all_distances_for_vkey(self, i):
+ def get_all_distances_for_vkey(self, i: int) -> list[float]:
""" Returns distances from each cluster separately for vertex i. Smooth union doesn't play here any role. """
return [self._distances_lists[list_index][i] for list_index in range(self.number_of_boundaries)]
- def get_distance(self, i):
+ def get_distance(self, i: int) -> float:
""" Return get_distance for vertex with vkey i. """
if self.union_method == 'min':
# --- simple union
- return np.min(self._np_distances_lists_flipped[i])
+ return float(np.min(self._np_distances_lists_flipped[i]))
elif self.union_method == 'smooth':
# --- blend (smooth) union
return blend_union_list(values=self._np_distances_lists_flipped[i], r=self.union_params[0])
@@ -223,13 +292,13 @@ def get_distance(self, i):
#############################
# --- scalar field smoothing
- def laplacian_smoothing(self, iterations, strength):
+ def laplacian_smoothing(self, iterations: int, strength: float) -> None:
""" Smooth the distances on the mesh, using iterative laplacian smoothing. """
L = utils.get_mesh_cotmatrix_igl(self.mesh, fix_boundaries=True)
new_distances_lists = []
logger.info('Laplacian smoothing of all distances')
- for i, a in enumerate(self._distances_lists):
+ for _i, a in enumerate(self._distances_lists):
a = np.array(a) # a: numpy array containing the attribute to be smoothed
for _ in range(iterations): # iterative smoothing
a_prime = a + strength * L * a
@@ -239,7 +308,7 @@ def laplacian_smoothing(self, iterations, strength):
#############################
# ------ output
- def save_distances(self, name):
+ def save_distances(self, name: str) -> None:
"""
Save distances to json.
Saves one list with distance values (one per vertex).
@@ -248,10 +317,10 @@ def save_distances(self, name):
----------
name: str, name of json to be saved
"""
- utils.save_to_json(self.get_all_distances(), self.OUTPUT_PATH, name)
+ utils.save_to_json(self.get_all_distances().tolist(), self.OUTPUT_PATH, name)
# ------ assign new Mesh
- def assign_new_mesh(self, mesh):
+ def assign_new_mesh(self, mesh: Mesh) -> None:
""" When the base mesh changes, a new mesh needs to be assigned. """
mesh.to_json(self.OUTPUT_PATH + "/temp.obj")
mesh = Mesh.from_json(self.OUTPUT_PATH + "/temp.obj")
@@ -262,44 +331,44 @@ def assign_new_mesh(self, mesh):
####################
# unions on lists
-def blend_union_list(values, r):
+def blend_union_list(values: NDArray[np.floating] | list[float], r: float) -> float:
""" Returns a smooth union of all the elements in the list, with blend radius blend_radius. """
- d_result = 9999999 # very big number
+ d_result: float = 9999999.0 # very big number
for d in values:
- d_result = blend_union(d_result, d, r)
+ d_result = blend_union(d_result, float(d), r)
return d_result
-def stairs_union_list(values, r, n):
+def stairs_union_list(values: NDArray[np.floating] | list[float], r: float, n: int) -> float:
""" Returns a stairs union of all the elements in the list, with blend radius r and number of peaks n-1."""
- d_result = 9999999 # very big number
- for i, d in enumerate(values):
- d_result = stairs_union(d_result, d, r, n)
+ d_result: float = 9999999.0 # very big number
+ for _i, d in enumerate(values):
+ d_result = stairs_union(d_result, float(d), r, n)
return d_result
-def chamfer_union_list(values, r):
- d_result = 9999999 # very big number
- for i, d in enumerate(values):
- d_result = chamfer_union(d_result, d, r)
+def chamfer_union_list(values: NDArray[np.floating] | list[float], r: float) -> float:
+ d_result: float = 9999999.0 # very big number
+ for _i, d in enumerate(values):
+ d_result = chamfer_union(d_result, float(d), r)
return d_result
####################
# unions on pairs
-def blend_union(da, db, r):
+def blend_union(da: float, db: float, r: float) -> float:
""" Returns a smooth union of the two elements da, db with blend radius blend_radius. """
e = max(r - abs(da - db), 0)
return min(da, db) - e * e * 0.25 / r
-def chamfer_union(a, b, r):
+def chamfer_union(a: float, b: float, r: float) -> float:
""" Returns a chamfer union of the two elements da, db with radius r. """
return min(min(a, b), (a - r + b) * math.sqrt(0.5))
-def stairs_union(a, b, r, n):
+def stairs_union(a: float, b: float, r: float, n: int) -> float:
""" Returns a stairs union of the two elements da, db with radius r. """
s = r / n
u = b - r
diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py b/src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
index d7c23d5f..fa736d40 100644
--- a/src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
+++ b/src/compas_slicer/pre_processing/preprocessing_utils/geodesics.py
@@ -1,42 +1,147 @@
+from __future__ import annotations
+
+import math
+from typing import TYPE_CHECKING
+
import numpy as np
-import logging
-import compas_slicer.utilities as utils
-from compas_slicer.pre_processing.preprocessing_utils.gradient import get_scalar_field_from_gradient, \
- get_face_gradient_from_scalar_field, normalize_gradient
import scipy
-import math
+from loguru import logger
+from numpy.typing import NDArray
+
+import compas_slicer.utilities as utils
+from compas_slicer.pre_processing.preprocessing_utils.gradient import (
+ get_face_gradient_from_scalar_field,
+ get_scalar_field_from_gradient,
+ normalize_gradient,
+)
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
+
+
+__all__ = ['get_heat_geodesic_distances',
+ 'get_custom_HEAT_geodesic_distances',
+ 'GeodesicsCache']
-logger = logging.getLogger('logger')
-__all__ = ['get_igl_EXACT_geodesic_distances',
- 'get_custom_HEAT_geodesic_distances']
+# CGAL heat method solver cache (for precomputation reuse)
+_cgal_solver_cache: dict[int, object] = {}
-def get_igl_EXACT_geodesic_distances(mesh, vertices_start):
+def get_heat_geodesic_distances(
+ mesh: Mesh, vertices_start: list[int]
+) -> NDArray[np.floating]:
"""
- Calculate geodesic distances using libigl.
+ Calculate geodesic distances using CGAL heat method.
+
+ Uses compas_cgal's HeatGeodesicSolver which provides CGAL's Heat_method_3
+ implementation with intrinsic Delaunay triangulation.
Parameters
----------
- mesh: :class: 'compas.datastructures.Mesh'
- vertices_start: list, int
+ mesh : Mesh
+ A compas mesh (must be triangulated).
+ vertices_start : list[int]
+ Source vertex indices.
+
+ Returns
+ -------
+ NDArray
+ Minimum distance from any source to each vertex.
"""
- # utils.check_package_is_installed('igl')
- import igl
+ from compas_cgal.geodesics import HeatGeodesicSolver
+
+ # Check if we have a cached solver for this mesh
+ mesh_hash = hash((len(list(mesh.vertices())), len(list(mesh.faces()))))
+ if mesh_hash not in _cgal_solver_cache:
+ _cgal_solver_cache.clear() # Clear old solvers
+ _cgal_solver_cache[mesh_hash] = HeatGeodesicSolver(mesh)
+
+ solver = _cgal_solver_cache[mesh_hash]
+
+ # Compute distances for each source and take minimum
+ all_distances = []
+ for source in vertices_start:
+ distances = solver.solve([source])
+ all_distances.append(distances)
+
+ return np.min(np.array(all_distances), axis=0)
- v, f = mesh.to_vertices_and_faces()
- v = np.array(v)
- f = np.array(f)
- vertices_target = np.arange(len(v)) # all vertices are targets
- vstart = np.array(vertices_start)
- distances = igl.exact_geodesic(v, f, vstart, vertices_target)
- return distances
+# Backwards compatibility aliases
+get_cgal_HEAT_geodesic_distances = get_heat_geodesic_distances
+get_igl_HEAT_geodesic_distances = get_heat_geodesic_distances
+get_igl_EXACT_geodesic_distances = get_heat_geodesic_distances
+
+
+class GeodesicsCache:
+ """Cache for geodesic distances to avoid redundant computations.
+
+ Note: This class is kept for backwards compatibility but now uses CGAL.
+ The CGAL solver has its own internal caching via _cgal_solver_cache.
+ """
+
+ def __init__(self) -> None:
+ self._cache: dict[tuple[int, str], NDArray[np.floating]] = {}
+ self._mesh_hash: int | None = None
-def get_custom_HEAT_geodesic_distances(mesh, vi_sources, OUTPUT_PATH, v_equalize=None, anisotropic_scaling=False):
- """ Calculate geodesic distances using the heat method. """
+ def clear(self) -> None:
+ """Clear the cache."""
+ self._cache.clear()
+ self._mesh_hash = None
+
+ def get_distances(
+ self, mesh: Mesh, sources: list[int], method: str = 'heat'
+ ) -> NDArray[np.floating]:
+ """Get geodesic distances from sources, using cache when possible.
+
+ Parameters
+ ----------
+ mesh : Mesh
+ The mesh to compute distances on.
+ sources : list[int]
+ Source vertex indices.
+ method : str
+ Geodesic method (ignored, always uses CGAL heat method).
+
+ Returns
+ -------
+ NDArray
+ Minimum distance from any source to each vertex.
+ """
+ return get_heat_geodesic_distances(mesh, sources)
+
+
+def get_custom_HEAT_geodesic_distances(
+ mesh: Mesh,
+ vi_sources: list[int],
+ OUTPUT_PATH: str,
+ v_equalize: list[int] | None = None,
+) -> NDArray[np.floating]:
+ """Calculate geodesic distances using the custom heat method.
+
+ This is a pure Python implementation of the heat method (Crane et al., 2013).
+ For production use, prefer CGAL's implementation via get_heat_geodesic_distances()
+ which uses intrinsic Delaunay triangulation for better accuracy.
+
+ Parameters
+ ----------
+ mesh : Mesh
+ A compas mesh (must be triangulated).
+ vi_sources : list[int]
+ Source vertex indices.
+ OUTPUT_PATH : str
+ Path to save intermediate results.
+ v_equalize : list[int] | None
+ Vertices to equalize (for saddle point handling).
+
+ Returns
+ -------
+ NDArray
+ Geodesic distance from sources to each vertex.
+ """
geodesics_solver = GeodesicsSolver(mesh, OUTPUT_PATH)
- u = geodesics_solver.diffuse_heat(vi_sources, v_equalize, method='simulation')
+ u = geodesics_solver.diffuse_heat(vi_sources, v_equalize)
geodesic_dist = geodesics_solver.get_geodesic_distances(u, vi_sources, v_equalize)
return geodesic_dist
@@ -44,9 +149,9 @@ def get_custom_HEAT_geodesic_distances(mesh, vi_sources, OUTPUT_PATH, v_equalize
######################################
# --- GeodesicsSolver
-USE_FORWARDS_EULER = False
+# Heat diffusion parameters for custom solver
HEAT_DIFFUSION_ITERATIONS = 250
-DELTA = 0.1
+DELTA = 0.1 # Time step for backward Euler
class GeodesicsSolver:
@@ -60,34 +165,41 @@ class GeodesicsSolver:
OUTPUT_PATH: str
"""
- def __init__(self, mesh, OUTPUT_PATH):
- # utils.check_package_is_installed('igl')
- import igl
-
+ def __init__(self, mesh: Mesh, OUTPUT_PATH: str) -> None:
logger.info('GeodesicsSolver')
self.mesh = mesh
self.OUTPUT_PATH = OUTPUT_PATH
self.use_forwards_euler = True
- v, f = mesh.to_vertices_and_faces()
- v = np.array(v)
- f = np.array(f)
+ # Compute matrices using NumPy implementations
+ self.cotans = utils.get_mesh_cotans(mesh)
+ self.L = utils.get_mesh_cotmatrix(mesh, fix_boundaries=False)
+ self.M = utils.get_mesh_massmatrix(mesh)
- # compute necessary data
- self.cotans = igl.cotmatrix_entries(v, f) # compute_cotan_field(self.mesh)
- self.L = igl.cotmatrix(v, f) # assemble_laplacian_matrix(self.mesh, self.cotans)
- self.M = igl.massmatrix(v, f) # create_mass_matrix(mesh)
-
- def diffuse_heat(self, vi_sources, v_equalize=None, method='simulation'):
+ def diffuse_heat(
+ self,
+ vi_sources: list[int],
+ v_equalize: list[int] | None = None,
+ ) -> NDArray[np.floating]:
"""
- Heat diffusion.
+ Heat diffusion using iterative backward Euler.
+
+ This is a custom Python implementation of the heat method. For production use,
+ prefer CGAL's heat method (geodesics_method='heat_cgal') which uses intrinsic
+ Delaunay triangulation for better accuracy.
- Attributes
+ Parameters
----------
- vi_sources: list, int, the vertex indices of the sources
- v_equalize: list, int, the vertex indices whose value should be equalized
- method: str (Currently only 'simulation' works.)
+ vi_sources : list[int]
+ The vertex indices of the heat sources.
+ v_equalize : list[int] | None
+ Vertex indices whose values should be equalized (for handling saddle points).
+
+ Returns
+ -------
+ NDArray
+ Heat distribution u, with sources at 0 and increasing away from them.
"""
if not v_equalize:
v_equalize = []
@@ -97,37 +209,30 @@ def diffuse_heat(self, vi_sources, v_equalize=None, method='simulation'):
u0[vi_sources] = 1.0
u = u0
- if method == 'default': # This is buggy, does not keep boundary exactly on 0. TODO: INVESTIGATE
- t_mult = 1
- t = t_mult * np.mean(np.array([self.mesh.face_area(fkey) for fkey in self.mesh.faces()])) # avg face area
- solver = scipy.sparse.linalg.factorized(self.M - t * self.L) # pre-factor solver
- u = solver(u0) # solve the heat equation: u = (VA - t * Lc) * u0
-
- elif method == 'simulation':
- u = u0
+ # Pre-factor the matrix ONCE outside the loop (major speedup)
+ # Using backward Euler: (M - δL)u' = M·u
+ S = self.M - DELTA * self.L
+ solver = scipy.sparse.linalg.factorized(S)
- for i in range(HEAT_DIFFUSION_ITERATIONS):
- if USE_FORWARDS_EULER: # Forwards Euler (doesn't work so well)
- u_prime = u + DELTA * self.L * u
- else: # Backwards Euler
- # (M-delta*L) * u_prime = M*U
- S = (self.M - DELTA * self.L)
- b = self.M * u
- u_prime = scipy.sparse.linalg.spsolve(S, b)
+ for _i in range(HEAT_DIFFUSION_ITERATIONS):
+ b = self.M * u
+ u_prime = solver(b)
- if len(v_equalize) > 0:
- u_prime[v_equalize] = np.min(u_prime[v_equalize])
+ if len(v_equalize) > 0:
+ u_prime[v_equalize] = np.min(u_prime[v_equalize])
- u = u_prime
- u[vi_sources] = 1.0 # make sure sources remain fixed to 1
+ u = u_prime
+ u[vi_sources] = 1.0 # enforce Dirichlet boundary: sources remain fixed
- # reverse values (to make vstarts on 0)
+ # reverse values (to make sources at 0, increasing outward)
u = ([np.max(u)] * len(u)) - u
utils.save_to_json([float(value) for value in u], self.OUTPUT_PATH, 'diffused_heat.json')
return u
- def get_geodesic_distances(self, u, vi_sources, v_equalize=None):
+ def get_geodesic_distances(
+ self, u: NDArray[np.floating], vi_sources: list[int], v_equalize: list[int] | None = None
+ ) -> NDArray[np.floating]:
"""
Finds geodesic distances from heat distribution u. I
@@ -140,8 +245,8 @@ def get_geodesic_distances(self, u, vi_sources, v_equalize=None):
X = get_face_gradient_from_scalar_field(self.mesh, u)
X = normalize_gradient(X)
geodesic_dist = get_scalar_field_from_gradient(self.mesh, X, self.L, self.cotans)
- assert not math.isnan(geodesic_dist[0]), \
- "Attention, the 'get_scalar_field_from_gradient' function returned Nan. "
+ if math.isnan(geodesic_dist[0]):
+ raise RuntimeError("get_scalar_field_from_gradient returned NaN - check mesh quality.")
geodesic_dist[vi_sources] = 0 # coerce boundary vertices to be on 0 (fixes small boundary imprecision)
return geodesic_dist
diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/gradient.py b/src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
index 2e7a7af6..5fad68ea 100644
--- a/src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
+++ b/src/compas_slicer/pre_processing/preprocessing_utils/gradient.py
@@ -1,8 +1,19 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
import numpy as np
-import logging
import scipy
+from loguru import logger
+from numpy.typing import NDArray
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
-logger = logging.getLogger('logger')
+from compas_slicer._numpy_ops import edge_gradient_from_vertex_gradient as _edge_gradient_vectorized
+from compas_slicer._numpy_ops import face_gradient_from_scalar_field as _face_gradient_vectorized
+from compas_slicer._numpy_ops import per_vertex_divergence as _divergence_vectorized
+from compas_slicer._numpy_ops import vertex_gradient_from_face_gradient as _vertex_gradient_vectorized
__all__ = ['get_vertex_gradient_from_face_gradient',
'get_edge_gradient_from_vertex_gradient',
@@ -12,7 +23,16 @@
'get_scalar_field_from_gradient']
-def get_vertex_gradient_from_face_gradient(mesh, face_gradient):
+def _mesh_to_arrays(mesh: Mesh) -> tuple[NDArray[np.floating], NDArray[np.intp]]:
+ """Convert COMPAS mesh to numpy arrays for vectorized operations."""
+ V = np.array([mesh.vertex_coordinates(v) for v in mesh.vertices()], dtype=np.float64)
+ F = np.array([mesh.face_vertices(f) for f in mesh.faces()], dtype=np.intp)
+ return V, F
+
+
+def get_vertex_gradient_from_face_gradient(
+ mesh: Mesh, face_gradient: NDArray[np.floating]
+) -> NDArray[np.floating]:
"""
Finds vertex gradient given an already calculated per face gradient.
@@ -26,20 +46,14 @@ def get_vertex_gradient_from_face_gradient(mesh, face_gradient):
np.array (dimensions : #V x 3) one gradient vector per vertex.
"""
logger.info('Computing per vertex gradient')
- vertex_gradient = []
- for v_key in mesh.vertices():
- faces_total_area = 0
- faces_total_grad = np.array([0.0, 0.0, 0.0])
- for f_key in mesh.vertex_faces(v_key):
- face_area = mesh.face_area(f_key)
- faces_total_area += face_area
- faces_total_grad += face_area * face_gradient[f_key, :]
- v_grad = faces_total_grad / faces_total_area
- vertex_gradient.append(v_grad)
- return np.array(vertex_gradient)
-
-
-def get_edge_gradient_from_vertex_gradient(mesh, vertex_gradient):
+ V, F = _mesh_to_arrays(mesh)
+ face_areas = np.array([mesh.face_area(f) for f in mesh.faces()], dtype=np.float64)
+ return _vertex_gradient_vectorized(V, F, face_gradient, face_areas)
+
+
+def get_edge_gradient_from_vertex_gradient(
+ mesh: Mesh, vertex_gradient: NDArray[np.floating]
+) -> NDArray[np.floating]:
"""
Finds edge gradient given an already calculated per vertex gradient.
@@ -52,14 +66,13 @@ def get_edge_gradient_from_vertex_gradient(mesh, vertex_gradient):
----------
np.array (dimensions : #E x 3) one gradient vector per edge.
"""
- edge_gradient = []
- for u, v in mesh.edges():
- thisEdgeGradient = vertex_gradient[u] + vertex_gradient[v]
- edge_gradient.append(thisEdgeGradient)
- return np.array(edge_gradient)
+ edges = np.array(list(mesh.edges()), dtype=np.intp)
+ return _edge_gradient_vectorized(edges, vertex_gradient)
-def get_face_gradient_from_scalar_field(mesh, u, use_igl=True):
+def get_face_gradient_from_scalar_field(
+ mesh: Mesh, u: NDArray[np.floating]
+) -> NDArray[np.floating]:
"""
Finds face gradient from scalar field u.
Scalar field u is given per vertex.
@@ -74,40 +87,16 @@ def get_face_gradient_from_scalar_field(mesh, u, use_igl=True):
np.array (dimensions : #F x 3) one gradient vector per face.
"""
logger.info('Computing per face gradient')
- if use_igl:
- try:
- import igl
- v, f = mesh.to_vertices_and_faces()
- G = igl.grad(np.array(v), np.array(f))
- X = G * u
- nf = len(list(mesh.faces()))
- X = np.array([[X[i], X[i + nf], X[i + 2 * nf]] for i in range(nf)])
- return X
- except ModuleNotFoundError:
- print("Could not calculate gradient with IGL because it is not installed. Falling back to default function")
-
- grad = []
- for fkey in mesh.faces():
- A = mesh.face_area(fkey)
- N = mesh.face_normal(fkey)
- edge_0, edge_1, edge_2 = get_face_edge_vectors(mesh, fkey)
- v0, v1, v2 = mesh.face_vertices(fkey)
- u0 = u[v0]
- u1 = u[v1]
- u2 = u[v2]
- vc0 = np.array(mesh.vertex_coordinates(v0))
- vc1 = np.array(mesh.vertex_coordinates(v1))
- vc2 = np.array(mesh.vertex_coordinates(v2))
- # grad_u = -1 * ((u1-u0) * np.cross(vc0-vc2, N) + (u2-u0) * np.cross(vc1-vc0, N)) / (2 * A)
- grad_u = ((u1-u0) * np.cross(vc0-vc2, N) + (u2-u0) * np.cross(vc1-vc0, N)) / (2 * A)
- # grad_u = (np.cross(N, edge_0) * u2 +
- # np.cross(N, edge_1) * u0 +
- # np.cross(N, edge_2) * u1) / (2 * A)
- grad.append(grad_u)
- return np.array(grad)
-
-
-def get_face_edge_vectors(mesh, fkey):
+ V, F = _mesh_to_arrays(mesh)
+ scalar_field = np.asarray(u, dtype=np.float64)
+ face_normals = np.array([mesh.face_normal(f) for f in mesh.faces()], dtype=np.float64)
+ face_areas = np.array([mesh.face_area(f) for f in mesh.faces()], dtype=np.float64)
+ return _face_gradient_vectorized(V, F, scalar_field, face_normals, face_areas)
+
+
+def get_face_edge_vectors(
+ mesh: Mesh, fkey: int
+) -> tuple[NDArray[np.floating], NDArray[np.floating], NDArray[np.floating]]:
""" Returns the edge vectors of the face with fkey. """
e0, e1, e2 = mesh.face_halfedges(fkey)
edge_0 = np.array(mesh.vertex_coordinates(e0[0])) - np.array(mesh.vertex_coordinates(e0[1]))
@@ -116,7 +105,9 @@ def get_face_edge_vectors(mesh, fkey):
return edge_0, edge_1, edge_2
-def get_per_vertex_divergence(mesh, X, cotans):
+def get_per_vertex_divergence(
+ mesh: Mesh, X: NDArray[np.floating], cotans: NDArray[np.floating]
+) -> NDArray[np.floating]:
"""
Computes the divergence of the gradient X for the mesh, using cotangent weights.
@@ -130,26 +121,23 @@ def get_per_vertex_divergence(mesh, X, cotans):
----------
np.array (dimensions : #V x 1) one float (divergence value) per vertex.
"""
+ V, F = _mesh_to_arrays(mesh)
cotans = cotans.reshape(-1, 3)
- div_X = np.zeros(len(list(mesh.vertices())))
- for fi, fkey in enumerate(mesh.faces()):
- x_fi = X[fi]
- edges = np.array(get_face_edge_vectors(mesh, fkey))
- for i in range(3):
- j = (i + 1) % 3
- k = (i + 2) % 3
- div_X[mesh.face_vertices(fkey)[i]] += cotans[fi, k] * np.dot(x_fi, edges[i]) / 2.0
- div_X[mesh.face_vertices(fkey)[i]] += cotans[fi, j] * np.dot(x_fi, -edges[k]) / 2.0
- return div_X
-
-
-def normalize_gradient(X):
+ return _divergence_vectorized(V, F, X, cotans)
+
+
+def normalize_gradient(X: NDArray[np.floating]) -> NDArray[np.floating]:
""" Returns normalized gradient X. """
norm = np.linalg.norm(X, axis=1)[..., np.newaxis]
return X / norm # normalize
-def get_scalar_field_from_gradient(mesh, X, C, cotans):
+def get_scalar_field_from_gradient(
+ mesh: Mesh,
+ X: NDArray[np.floating],
+ C: scipy.sparse.csr_matrix,
+ cotans: NDArray[np.floating],
+) -> NDArray[np.floating]:
"""
Find scalar field u that best explains gradient X.
Laplacian(u) = Divergence(X).
@@ -169,7 +157,7 @@ def get_scalar_field_from_gradient(mesh, X, C, cotans):
"""
div_X = get_per_vertex_divergence(mesh, X, cotans)
u = scipy.sparse.linalg.spsolve(C, div_X)
- logger.info('Solved Δ(u) = div(X). Linear system error |Δ(u) - div(X)| = ' + str(np.linalg.norm(C * u - div_X)))
+ logger.info(f'Solved Δ(u) = div(X). Linear system error |Δ(u) - div(X)| = {np.linalg.norm(C * u - div_X):.6e}')
u = u - np.amin(u) # make start value equal 0
u = 2*u
return u
diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py b/src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
index b18cf30b..695917b7 100644
--- a/src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
+++ b/src/compas_slicer/pre_processing/preprocessing_utils/mesh_attributes_handling.py
@@ -1,5 +1,15 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+import numpy as np
+from compas.geometry import Point
+from scipy.spatial import cKDTree
+
import compas_slicer.utilities as utils
-from compas.geometry import Point, distance_point_point_sqrd
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
__all__ = ['create_mesh_boundary_attributes',
'get_existing_cut_indices',
@@ -10,7 +20,9 @@
'replace_mesh_vertex_attribute']
-def create_mesh_boundary_attributes(mesh, low_boundary_vs, high_boundary_vs):
+def create_mesh_boundary_attributes(
+ mesh: Mesh, low_boundary_vs: list[int], high_boundary_vs: list[int]
+) -> None:
"""
Creates a default vertex attribute data['boundary']=0. Then it gives the value 1 to the vertices that belong
to the lower boundary, and the value 2 to the vertices that belong to the higher boundary.
@@ -26,7 +38,7 @@ def create_mesh_boundary_attributes(mesh, low_boundary_vs, high_boundary_vs):
###############################################
# --- Mesh existing attributes on vertices
-def get_existing_cut_indices(mesh):
+def get_existing_cut_indices(mesh: Mesh) -> list[int]:
"""
Returns
----------
@@ -34,15 +46,14 @@ def get_existing_cut_indices(mesh):
The cut indices (data['cut']>0) that exist on the mesh vertices.
"""
cut_indices = []
- for vkey, data in mesh.vertices(data=True):
- if data['cut'] > 0:
- if data['cut'] not in cut_indices:
- cut_indices.append(data['cut'])
+ for _vkey, data in mesh.vertices(data=True):
+ if data['cut'] > 0 and data['cut'] not in cut_indices:
+ cut_indices.append(data['cut'])
cut_indices = sorted(cut_indices)
return cut_indices
-def get_existing_boundary_indices(mesh):
+def get_existing_boundary_indices(mesh: Mesh) -> list[int]:
"""
Returns
----------
@@ -50,45 +61,47 @@ def get_existing_boundary_indices(mesh):
The boundary indices (data['boundary']>0) that exist on the mesh vertices.
"""
indices = []
- for vkey, data in mesh.vertices(data=True):
- if data['boundary'] > 0:
- if data['boundary'] not in indices:
- indices.append(data['boundary'])
+ for _vkey, data in mesh.vertices(data=True):
+ if data['boundary'] > 0 and data['boundary'] not in indices:
+ indices.append(data['boundary'])
boundary_indices = sorted(indices)
return boundary_indices
-def get_vertices_that_belong_to_cuts(mesh, cut_indices):
+def get_vertices_that_belong_to_cuts(
+ mesh: Mesh, cut_indices: list[int]
+) -> dict[int, dict[int, list[float]]]:
"""
Returns
----------
dict, key: int, the index of each cut
- value: list, the points that belong to this cut
+ value: dict, the points that belong to this cut (point_list_to_dict format)
"""
- cuts_dict = {i: [] for i in cut_indices}
+ cuts_dict: dict[int, list[list[float]]] = {i: [] for i in cut_indices}
for vkey, data in mesh.vertices(data=True):
if data['cut'] > 0:
cut_index = data['cut']
cuts_dict[cut_index].append(mesh.vertex_coordinates(vkey))
+ result: dict[int, dict[int, list[float]]] = {}
for cut_index in cuts_dict:
- cuts_dict[cut_index] = utils.point_list_to_dict(cuts_dict[cut_index])
+ result[cut_index] = utils.point_list_to_dict(cuts_dict[cut_index])
- return cuts_dict
+ return result
###############################################
# --- Save and restore attributes
-def save_vertex_attributes(mesh):
+def save_vertex_attributes(mesh: Mesh) -> dict[str, Any]:
"""
Saves the boundary and cut attributes that are on the mesh on a dictionary.
"""
- v_attributes_dict = {'boundary_1': [], 'boundary_2': [], 'cut': {}}
+ v_attributes_dict: dict[str, Any] = {'boundary_1': [], 'boundary_2': [], 'cut': {}}
cut_indices = []
- for vkey, data in mesh.vertices(data=True):
+ for _vkey, data in mesh.vertices(data=True):
cut_index = data['cut']
if cut_index not in cut_indices:
cut_indices.append(cut_index)
@@ -114,7 +127,7 @@ def save_vertex_attributes(mesh):
return v_attributes_dict
-def restore_mesh_attributes(mesh, v_attributes_dict):
+def restore_mesh_attributes(mesh: Mesh, v_attributes_dict: dict[str, Any]) -> None:
"""
Restores the cut and boundary attributes on the mesh vertices from the dictionary of the previously saved attributes
"""
@@ -123,35 +136,33 @@ def restore_mesh_attributes(mesh, v_attributes_dict):
D_THRESHOLD = 0.01
- welded_mesh_vertices = []
- indices_to_vkeys = {}
- for i, vkey in enumerate(mesh.vertices()):
- v_coords = mesh.vertex_coordinates(vkey)
- pt = Point(x=v_coords[0], y=v_coords[1], z=v_coords[2])
- welded_mesh_vertices.append(pt)
- indices_to_vkeys[i] = vkey
-
- for v_coords in v_attributes_dict['boundary_1']:
- closest_index = utils.get_closest_pt_index(pt=v_coords, pts=welded_mesh_vertices)
- c_vkey = indices_to_vkeys[closest_index]
- if distance_point_point_sqrd(v_coords, mesh.vertex_coordinates(c_vkey)) < D_THRESHOLD:
- mesh.vertex_attribute(c_vkey, 'boundary', value=1)
-
- for v_coords in v_attributes_dict['boundary_2']:
- closest_index = utils.get_closest_pt_index(pt=v_coords, pts=welded_mesh_vertices)
- c_vkey = indices_to_vkeys[closest_index]
- if distance_point_point_sqrd(v_coords, mesh.vertex_coordinates(c_vkey)) < D_THRESHOLD:
- mesh.vertex_attribute(c_vkey, 'boundary', value=2)
+ # Build KDTree once for all queries
+ vkeys = list(mesh.vertices())
+ welded_mesh_vertices = np.array([mesh.vertex_coordinates(vkey) for vkey in vkeys], dtype=np.float64)
+ indices_to_vkeys = dict(enumerate(vkeys))
+ tree = cKDTree(welded_mesh_vertices)
+
+ def _restore_attribute_batch(pts_list, attr_name, attr_value):
+ """Restore attribute for a batch of points using KDTree."""
+ if not pts_list:
+ return
+ query_pts = np.array([[p.x, p.y, p.z] if hasattr(p, 'x') else p for p in pts_list], dtype=np.float64)
+ distances, indices = tree.query(query_pts)
+ for dist, idx in zip(distances, indices):
+ if dist ** 2 < D_THRESHOLD:
+ c_vkey = indices_to_vkeys[idx]
+ mesh.vertex_attribute(c_vkey, attr_name, value=attr_value)
+
+ _restore_attribute_batch(v_attributes_dict['boundary_1'], 'boundary', 1)
+ _restore_attribute_batch(v_attributes_dict['boundary_2'], 'boundary', 2)
for cut_index in v_attributes_dict['cut']:
- for v_coords in v_attributes_dict['cut'][cut_index]:
- closest_index = utils.get_closest_pt_index(pt=v_coords, pts=welded_mesh_vertices)
- c_vkey = indices_to_vkeys[closest_index]
- if distance_point_point_sqrd(v_coords, mesh.vertex_coordinates(c_vkey)) < D_THRESHOLD:
- mesh.vertex_attribute(c_vkey, 'cut', value=int(cut_index))
+ _restore_attribute_batch(v_attributes_dict['cut'][cut_index], 'cut', int(cut_index))
-def replace_mesh_vertex_attribute(mesh, old_attr, old_val, new_attr, new_val):
+def replace_mesh_vertex_attribute(
+ mesh: Mesh, old_attr: str, old_val: int, new_attr: str, new_val: int
+) -> None:
"""
Replaces one vertex attribute with a new one. For all the vertices where data[old_attr]=old_val, then the
old_val is replaced with 0, and data[new_attr]=new_val.
diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/region_split.py b/src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
index f3ad49af..67f563f9 100644
--- a/src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
+++ b/src/compas_slicer/pre_processing/preprocessing_utils/region_split.py
@@ -1,22 +1,21 @@
-import os
-import logging
-import numpy as np
import copy
-import compas
-import compas_slicer.utilities as utils
-from compas_slicer.pre_processing.preprocessing_utils import restore_mesh_attributes, save_vertex_attributes
+from pathlib import Path
+
+import numpy as np
+import scipy.sparse
from compas.datastructures import Mesh
-from compas_slicer.pre_processing.preprocessing_utils import assign_interpolation_distance_to_mesh_vertex
-from compas_slicer.slicers.slice_utilities import ScalarFieldContours
-from compas_slicer.pre_processing.preprocessing_utils import assign_interpolation_distance_to_mesh_vertices
-from compas_slicer.pre_processing.gradient_evaluation import GradientEvaluation
from compas.geometry import Line, distance_point_point_sqrd, project_point_line
+from loguru import logger
-packages = utils.TerminalCommand('conda list').get_split_output_strings()
-if 'igl' in packages:
- import igl
-
-logger = logging.getLogger('logger')
+import compas_slicer.utilities as utils
+from compas_slicer.pre_processing.preprocessing_utils.assign_vertex_distance import (
+ assign_interpolation_distance_to_mesh_vertex,
+ assign_interpolation_distance_to_mesh_vertices,
+)
+from compas_slicer.pre_processing.preprocessing_utils.mesh_attributes_handling import (
+ restore_mesh_attributes,
+ save_vertex_attributes,
+)
__all__ = ['MeshSplitter']
@@ -58,6 +57,9 @@ def __init__(self, mesh, target_LOW, target_HIGH, DATA_PATH):
assign_interpolation_distance_to_mesh_vertices(self.mesh, weight=0.5, target_LOW=self.target_LOW,
target_HIGH=self.target_HIGH)
+ # Late import to avoid circular dependency
+ from compas_slicer.pre_processing.gradient_evaluation import GradientEvaluation
+
g_evaluation = GradientEvaluation(self.mesh, self.DATA_PATH)
g_evaluation.find_critical_points() # First estimation of saddle points with weight = 0.5
self.saddles = g_evaluation.saddles
@@ -85,21 +87,23 @@ def run(self):
# (1) first rough estimation of split params
split_params = self.identify_positions_to_split(self.saddles)
# TODO: merge params that are too close together to avoid creation of very thin neighborhoods.
- logger.info("%d Split params. First rough estimation : " % len(split_params) + str(split_params))
+ logger.info(f"{len(split_params)} Split params. First rough estimation : {split_params}")
# split mesh at params
logger.info('Splitting mesh at split params')
current_cut_index = 1
for i, param_first_estimation in enumerate(split_params):
- print('')
- logger.info('cut_index : %d, param_first_estimation : %.6f' % (current_cut_index, param_first_estimation))
+ logger.info(f'cut_index : {current_cut_index}, param_first_estimation : {param_first_estimation:.6f}')
# --- (1) More exact estimation of intersecting weight. Recompute gradient evaluation.
# Find exact saddle point and the weight that intersects it.
assign_interpolation_distance_to_mesh_vertices(self.mesh, weight=param_first_estimation,
target_LOW=self.target_LOW, target_HIGH=self.target_HIGH)
+ # Late import to avoid circular dependency
+ from compas_slicer.pre_processing.gradient_evaluation import GradientEvaluation
+
g_evaluation = GradientEvaluation(self.mesh, self.DATA_PATH)
g_evaluation.find_critical_points()
saddles_ds_tupples = [(vkey, abs(g_evaluation.mesh.vertex_attribute(vkey, 'scalar_field'))) for vkey in
@@ -107,10 +111,13 @@ def run(self):
saddles_ds_tupples = sorted(saddles_ds_tupples, key=lambda saddle_tupple: saddle_tupple[1])
vkey = saddles_ds_tupples[0][0]
t = self.identify_positions_to_split([vkey])[0]
- logger.info('vkey_exact : %d , t_exact : %.6f' % (vkey, t))
+ logger.info(f'vkey_exact : {vkey} , t_exact : {t:.6f}')
# --- (2) find zero-crossing points
assign_interpolation_distance_to_mesh_vertices(self.mesh, t, self.target_LOW, self.target_HIGH)
+ # Late import to avoid circular dependency
+ from compas_slicer.slicers.slice_utilities import ScalarFieldContours
+
zero_contours = ScalarFieldContours(self.mesh)
zero_contours.compute()
keys_of_clusters_to_keep = merge_clusters_saddle_point(zero_contours, saddle_vkeys=[vkey])
@@ -124,7 +131,7 @@ def run(self):
# save to json intermediary results
zero_contours.save_point_clusters_as_polylines_to_json(self.OUTPUT_PATH,
- 'point_clusters_polylines_%d.json' % int(i))
+ f'point_clusters_polylines_{int(i)}.json')
# --- (4) Create cut
logger.info("Creating cut on mesh")
@@ -143,7 +150,7 @@ def run(self):
logger.info('Updating targets, recomputing geodesic distances')
self.update_targets()
- self.mesh.to_obj(os.path.join(self.OUTPUT_PATH, 'most_recent_cut_mesh.obj'))
+ self.mesh.to_obj(str(Path(self.OUTPUT_PATH) / 'most_recent_cut_mesh.obj'))
def update_targets(self):
"""
@@ -247,7 +254,7 @@ def find_weight_intersecting_vkey(self, vkey, threshold, resolution):
next_d = assign_interpolation_distance_to_mesh_vertex(vkey, weight_list[i + 1], self.target_LOW, self.target_HIGH)
if abs(current_d) < abs(next_d) and current_d < threshold:
return weight
- raise ValueError('Could NOT find param for saddle vkey %d!' % vkey)
+ raise ValueError(f'Could NOT find param for saddle vkey {vkey}!')
###############################################
@@ -259,13 +266,147 @@ def get_weights_list(n, start=0.03, end=1.0):
return list(np.arange(start=start, stop=end, step=(end - start) / n))
+###############################################
+# --- Mesh cutting utilities (pure Python replacements for libigl)
+
+
+def _trimesh_cut_mesh(
+ vertices: np.ndarray,
+ faces: np.ndarray,
+ cut_flags: np.ndarray,
+) -> tuple[np.ndarray, np.ndarray]:
+ """Cut a mesh along flagged edges by duplicating vertices.
+
+ This is a pure Python replacement for compas_libigl.trimesh_cut_mesh.
+
+ Parameters
+ ----------
+ vertices : np.ndarray
+ Vertex coordinates (V x 3).
+ faces : np.ndarray
+ Face indices (F x 3).
+ cut_flags : np.ndarray
+ Per-face edge flags (F x 3). 1 = cut this edge, 0 = don't cut.
+ Edge i of face f is the edge from vertex f[i] to f[(i+1)%3].
+
+ Returns
+ -------
+ tuple[np.ndarray, np.ndarray]
+ New vertices and faces with duplicated vertices along cut edges.
+ """
+ n_faces = len(faces)
+
+ # Build a map from (vertex, face) -> new vertex index
+ # Vertices that are on cut edges need to be duplicated per face
+ vertex_face_to_new_index: dict[tuple[int, int], int] = {}
+ new_vertices = list(vertices)
+
+ # For each face, determine which vertices need to be duplicated
+ for fi in range(n_faces):
+ face = faces[fi]
+ for ei in range(3):
+ v0, v1 = face[ei], face[(ei + 1) % 3]
+
+ # Check if this edge is cut
+ if cut_flags[fi, ei] == 1:
+ # Both endpoints of cut edges need their own copy for this face
+ for v in [v0, v1]:
+ key = (v, fi)
+ if key not in vertex_face_to_new_index:
+ # Create a new vertex (duplicate)
+ new_idx = len(new_vertices)
+ new_vertices.append(vertices[v])
+ vertex_face_to_new_index[key] = new_idx
+
+ # Build new faces with updated vertex indices
+ new_faces = []
+ for fi in range(n_faces):
+ face = faces[fi]
+ new_face = []
+ for vi in range(3):
+ v = face[vi]
+ key = (v, fi)
+ if key in vertex_face_to_new_index:
+ # Use the duplicated vertex
+ new_face.append(vertex_face_to_new_index[key])
+ else:
+ # Use original vertex, but need to check if any adjacent face
+ # on a cut edge shares this vertex
+ new_face.append(v)
+ new_faces.append(new_face)
+
+ return np.array(new_vertices), np.array(new_faces)
+
+
+def _trimesh_face_components(
+ vertices: np.ndarray,
+ faces: np.ndarray,
+) -> np.ndarray:
+ """Find connected components of faces based on shared vertices.
+
+ This is a pure Python replacement for compas_libigl.trimesh_face_components.
+
+ Parameters
+ ----------
+ vertices : np.ndarray
+ Vertex coordinates (V x 3).
+ faces : np.ndarray
+ Face indices (F x 3).
+
+ Returns
+ -------
+ np.ndarray
+ Component label for each face.
+ """
+ n_faces = len(faces)
+
+ if n_faces == 0:
+ return np.array([], dtype=np.int32)
+
+ # Build face adjacency based on shared edges
+ # Two faces are adjacent if they share an edge (two vertices)
+ edge_to_faces: dict[tuple[int, int], list[int]] = {}
+
+ for fi, face in enumerate(faces):
+ for ei in range(3):
+ v0, v1 = int(face[ei]), int(face[(ei + 1) % 3])
+ edge = (min(v0, v1), max(v0, v1))
+ if edge not in edge_to_faces:
+ edge_to_faces[edge] = []
+ edge_to_faces[edge].append(fi)
+
+ # Build sparse adjacency matrix for faces
+ row, col = [], []
+ for _edge, face_list in edge_to_faces.items():
+ if len(face_list) == 2:
+ f0, f1 = face_list
+ row.extend([f0, f1])
+ col.extend([f1, f0])
+
+ if len(row) == 0:
+ # No adjacencies - each face is its own component
+ return np.arange(n_faces, dtype=np.int32)
+
+ data = np.ones(len(row), dtype=np.int32)
+ adjacency = scipy.sparse.csr_matrix(
+ (data, (row, col)), shape=(n_faces, n_faces)
+ )
+
+ # Find connected components
+ n_components, labels = scipy.sparse.csgraph.connected_components(
+ adjacency, directed=False
+ )
+
+ return labels
+
+
###############################################
# --- Separate disconnected components
def separate_disconnected_components(mesh, attr, values, OUTPUT_PATH):
"""
Given a mesh with cuts that have already been created, it separates the disconnected
- components using the igl function. Then it welds them and restores their attributes.
+ components by cutting along marked edges. Then it welds them and restores their attributes.
Parameters
----------
@@ -284,7 +425,7 @@ def separate_disconnected_components(mesh, attr, values, OUTPUT_PATH):
v, f = mesh.to_vertices_and_faces()
v, f = np.array(v), np.array(f)
- # --- create cut flags for igl
+ # --- create cut flags for edges
cut_flags = []
for fkey in mesh.faces():
edges = mesh.face_halfedges(fkey)
@@ -297,26 +438,28 @@ def separate_disconnected_components(mesh, attr, values, OUTPUT_PATH):
current_face_flags.append(0)
cut_flags.append(current_face_flags)
cut_flags = np.array(cut_flags)
- assert cut_flags.shape == f.shape
+ if cut_flags.shape != f.shape:
+ raise RuntimeError(f"Cut flags shape {cut_flags.shape} doesn't match face array shape {f.shape}")
- # --- cut mesh
- v_cut, f_cut = igl.cut_mesh(v, f, cut_flags)
- connected_components = igl.face_components(f_cut)
+ # --- cut mesh by duplicating vertices along cut edges
+ v_cut, f_cut = _trimesh_cut_mesh(v, f, cut_flags)
+ connected_components = _trimesh_face_components(v_cut, f_cut)
- f_dict = {}
+ f_dict: dict[int, list[list[int]]] = {}
for i in range(max(connected_components) + 1):
f_dict[i] = []
- for f_index, f in enumerate(f_cut):
+ for f_index, face in enumerate(f_cut):
component = connected_components[f_index]
- f_dict[component].append(f)
+ f_dict[component].append(face.tolist() if hasattr(face, 'tolist') else list(face))
cut_meshes = []
for component in f_dict:
- cut_mesh = Mesh.from_vertices_and_faces(v_cut, f_dict[component])
+ cut_mesh = Mesh.from_vertices_and_faces(v_cut.tolist(), f_dict[component])
cut_mesh.cull_vertices()
if len(list(cut_mesh.faces())) > 2:
- cut_mesh.to_obj(os.path.join(OUTPUT_PATH, 'temp.obj'))
- cut_mesh = Mesh.from_obj(os.path.join(OUTPUT_PATH, 'temp.obj')) # get rid of too many empty keys
+ temp_path = Path(OUTPUT_PATH) / 'temp.obj'
+ cut_mesh.to_obj(str(temp_path))
+ cut_mesh = Mesh.from_obj(str(temp_path)) # get rid of too many empty keys
cut_meshes.append(cut_mesh)
for mesh in cut_meshes:
@@ -360,9 +503,10 @@ def merge_clusters_saddle_point(zero_contours, saddle_vkeys):
Parameters
----------
- zero_contours: :class: 'compas_slicer.pre_processing.ScalarFieldContours'
- saddle_vkeys: list, int, the vertex keys of the current saddle points.
- (Currently this can only be a single saddle point)
+ zero_contours : ScalarFieldContours
+ Contours object.
+ saddle_vkeys : list[int]
+ Vertex keys of the current saddle points (currently only single saddle point supported).
Returns
----------
@@ -376,7 +520,7 @@ def merge_clusters_saddle_point(zero_contours, saddle_vkeys):
if saddle_vkey in e:
zero_contours.sorted_point_clusters[cluster_key][i] = \
zero_contours.mesh.vertex_coordinates(saddle_vkey) # merge point with saddle point
- print('Found edge to merge: ' + str(e))
+ logger.debug(f'Found edge to merge: {e}')
if cluster_key not in keys_of_clusters_to_keep:
keys_of_clusters_to_keep.append(cluster_key)
@@ -396,7 +540,7 @@ def cleanup_unrelated_isocontour_neighborhoods(zero_contours, keys_of_clusters_t
logger.error("No common vertex found! Skipping this split_param")
return None
else:
- logger.info('keys_of_clusters_to_keep : ' + str(keys_of_clusters_to_keep))
+ logger.info(f'keys_of_clusters_to_keep: {keys_of_clusters_to_keep}')
# empty all other clusters that are not in the matching_pair
sorted_point_clusters_clean = copy.deepcopy(zero_contours.sorted_point_clusters)
sorted_edge_clusters_clean = copy.deepcopy(zero_contours.sorted_edge_clusters)
@@ -418,10 +562,11 @@ def weld_mesh(mesh, OUTPUT_PATH, precision='2f'):
if len(mesh.face_vertices(f_key)) < 3:
mesh.delete_face(f_key)
- welded_mesh = compas.datastructures.mesh_weld(mesh, precision=precision)
+ welded_mesh = mesh.weld(precision=precision)
- welded_mesh.to_obj(os.path.join(OUTPUT_PATH, 'temp.obj')) # make sure there's no empty f_keys
- welded_mesh = Mesh.from_obj(os.path.join(OUTPUT_PATH, 'temp.obj')) # TODO: find a better way to do this
+ temp_path = Path(OUTPUT_PATH) / 'temp.obj'
+ welded_mesh.to_obj(str(temp_path)) # make sure there's no empty f_keys
+ welded_mesh = Mesh.from_obj(str(temp_path)) # TODO: find a better way to do this
try:
welded_mesh.unify_cycles()
diff --git a/src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py b/src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
index ea1d8c0b..05ce6f96 100644
--- a/src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
+++ b/src/compas_slicer/pre_processing/preprocessing_utils/topological_sorting.py
@@ -1,13 +1,22 @@
-import networkx as nx
-from compas.geometry import distance_point_point, distance_point_point_sqrd
-import compas_slicer.utilities as utils
-import logging
+from __future__ import annotations
+
import copy
-from compas_slicer.pre_processing.preprocessing_utils import get_existing_cut_indices, \
- get_existing_boundary_indices
from abc import abstractmethod
+from typing import TYPE_CHECKING, Any
+
+import networkx as nx
+import numpy as np
+from compas.datastructures import Mesh
+from compas.geometry import Point
+from loguru import logger
+
+import compas_slicer.utilities as utils
+from compas_slicer._numpy_ops import min_distances_to_set
+from compas_slicer.pre_processing.preprocessing_utils import get_existing_boundary_indices, get_existing_cut_indices
+
+if TYPE_CHECKING:
+ from compas_slicer.geometry import VerticalLayer
-logger = logging.getLogger('logger')
__all__ = ['MeshDirectedGraph',
'SegmentsDirectedGraph']
@@ -16,7 +25,7 @@
#################################
# DirectedGraph
-class DirectedGraph(object):
+class DirectedGraph:
"""
Base class for topological sorting of prints that consist of several parts that lie on each other.
For example the graph A->B->C would represent a print that consists of three parts; A, B, C
@@ -24,57 +33,59 @@ class DirectedGraph(object):
This graph cannot have cycles; cycles would represent an unfeasible print.
"""
- def __init__(self):
+ def __init__(self) -> None:
logger.info('Topological sorting')
- self.G = nx.DiGraph()
+ self.G: nx.DiGraph = nx.DiGraph()
self.create_graph_nodes()
self.root_indices = self.find_roots()
- logger.info('Graph roots : ' + str(self.root_indices))
- assert len(self.root_indices) > 0, "No root nodes where found. At least one root node is needed."
+ logger.info(f'Graph roots: {self.root_indices}')
+ if len(self.root_indices) == 0:
+ raise ValueError("No root nodes were found. At least one root node is needed.")
self.end_indices = self.find_ends()
- logger.info('Graph ends : ' + str(self.end_indices))
- assert len(self.end_indices) > 0, "No end nodes where found. At least one end node is needed."
+ logger.info(f'Graph ends: {self.end_indices}')
+ if len(self.end_indices) == 0:
+ raise ValueError("No end nodes were found. At least one end node is needed.")
self.create_directed_graph_edges(copy.deepcopy(self.root_indices))
- logger.info('Nodes : ' + str(self.G.nodes(data=True)))
- logger.info('Edges : ' + str(self.G.edges(data=True)))
+ logger.info(f'Nodes: {list(self.G.nodes(data=True))}')
+ logger.info(f'Edges: {list(self.G.edges(data=True))}')
- self.N = len(list(self.G.nodes()))
- self.adj_list = self.get_adjacency_list() # Nested list where adj_list[i] is a list of all the neighbors
+ self.N: int = len(list(self.G.nodes()))
+ self.adj_list: list[list[int]] = self.get_adjacency_list() # Nested list where adj_list[i] is a list of all the neighbors
# of the i-th component
self.check_that_all_nodes_found_their_connectivity()
- logger.info('Adjacency list : ' + str(self.adj_list))
- self.in_degree = self.get_in_degree() # Nested list where adj_list[i] is a list of all the edges pointing
+ logger.info(f'Adjacency list: {self.adj_list}')
+ self.in_degree: list[int] = self.get_in_degree() # Nested list where adj_list[i] is a list of all the edges pointing
# to the i-th node.
- self.all_orders = []
+ self.all_orders: list[list[int]] = []
- def __repr__(self):
- return "" % len(list(self.G.nodes()))
+ def __repr__(self) -> str:
+ return f""
# ------------------------------------ Methods to be implemented by inheriting classes
@abstractmethod
- def find_roots(self):
+ def find_roots(self) -> list[int]:
""" Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first. """
pass
@abstractmethod
- def find_ends(self):
+ def find_ends(self) -> list[int]:
""" Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last. """
pass
@abstractmethod
- def create_graph_nodes(self):
+ def create_graph_nodes(self) -> None:
""" Add the nodes to the graph with their attributes. """
pass
@abstractmethod
- def get_children_of_node(self, root):
+ def get_children_of_node(self, root: int) -> tuple[list[int], list[Any]]:
""" Find all the vertical_layers_print_data that lie on the current root segment. """
pass
# ------------------------------------ Creation of graph connectivity between different nodes
- def create_directed_graph_edges(self, root_indices):
+ def create_directed_graph_edges(self, root_indices: list[int]) -> None:
""" Create the connectivity of the directed graph using breadth-first search graph traversal. """
passed_nodes = []
queue = root_indices
@@ -85,21 +96,29 @@ def create_directed_graph_edges(self, root_indices):
queue.remove(current_node)
passed_nodes.append(current_node)
children, cut_ids = self.get_children_of_node(current_node)
- [self.G.add_edge(current_node, child_key, cut=common_cuts) for child_key, common_cuts in
- zip(children, cut_ids)]
+ for child_key, common_cuts in zip(children, cut_ids):
+ self.G.add_edge(current_node, child_key, cut=common_cuts)
+ for child_key in children:
+ if child_key in passed_nodes:
+ raise ValueError('Error: cyclic directed graph detected.')
for child_key in children:
- assert child_key not in passed_nodes, 'Error, cyclic directed graph.'
- [queue.append(child_key) for child_key in children if child_key not in queue]
+ if child_key not in queue:
+ queue.append(child_key)
- def check_that_all_nodes_found_their_connectivity(self):
+ def check_that_all_nodes_found_their_connectivity(self) -> None:
""" Assert that there is no island, i.e. no node or groups of nodes that are not connected to the base. """
- good_nodes = [r for r in self.root_indices]
+ good_nodes = list(self.root_indices)
for children_list in self.adj_list:
- [good_nodes.append(child) for child in children_list if child not in good_nodes]
- assert len(good_nodes) == self.N, 'There are floating vertical_layers_print_data on directed graph. Investigate the process of \
- the creation of the graph. '
-
- def sort_queue_with_end_targets_last(self, queue):
+ for child in children_list:
+ if child not in good_nodes:
+ good_nodes.append(child)
+ if len(good_nodes) != self.N:
+ raise ValueError(
+ f'Floating vertical layers detected: {len(good_nodes)} connected nodes vs {self.N} total. '
+ 'Check graph creation process.'
+ )
+
+ def sort_queue_with_end_targets_last(self, queue: list[int]) -> list[int]:
""" Sorts the queue so that the vertical_layers_print_data that have an end target are always at the end. """
queue_copy = copy.deepcopy(queue)
for index in queue:
@@ -109,14 +128,15 @@ def sort_queue_with_end_targets_last(self, queue):
return queue_copy
# ------------------------------------ Find all topological orders
- def get_adjacency_list(self):
+ def get_adjacency_list(self) -> list[list[int]]:
""" Returns adjacency list. Nested list where adj_list[i] is a list of all the neighbors of the ith component"""
- adj_list = [[] for _ in range(self.N)] # adjacency list , size = len(Nodes), stores nodes' neighbors
+ adj_list: list[list[int]] = [[] for _ in range(self.N)] # adjacency list , size = len(Nodes), stores nodes' neighbors
for i, adjacent_to_node in self.G.adjacency():
- [adj_list[i].append(key) for key in adjacent_to_node]
+ for key in adjacent_to_node:
+ adj_list[i].append(key)
return adj_list
- def get_in_degree(self):
+ def get_in_degree(self) -> list[int]:
""" Returns in_degree list. Nested list where adj_list[i] is a list of all the edges pointing to the node."""
in_degree = [0] * self.N # in_degree, size = len(Nodes) , stores in-degree of a node
for key_degree_tuple in self.G.in_degree:
@@ -125,7 +145,7 @@ def get_in_degree(self):
in_degree[key] = degree
return in_degree
- def get_all_topological_orders(self):
+ def get_all_topological_orders(self) -> list[list[int]]:
"""
Finds all topological orders from source to sink.
Returns
@@ -134,12 +154,12 @@ def get_all_topological_orders(self):
"""
self.all_orders = [] # make sure list is empty
discovered = [False] * self.N
- path = [] # list to store the topological order
+ path: list[int] = [] # list to store the topological order
self.get_orders(path, discovered)
- logger.info('Found %d possible orders' % len(self.all_orders))
+ logger.info(f'Found {len(self.all_orders)} possible orders')
return self.all_orders
- def get_orders(self, path, discovered):
+ def get_orders(self, path: list[int], discovered: list[bool]) -> None:
"""
Finds all topological orders from source to sink.
Sorting algorithm taken from https://www.techiedelight.com/find-all-possible-topological-orderings-of-dag/
@@ -171,7 +191,7 @@ def get_orders(self, path, discovered):
if len(path) == self.N:
self.all_orders.append(copy.deepcopy(path))
- def get_parents_of_node(self, node_index):
+ def get_parents_of_node(self, node_index: int) -> list[int]:
""" Returns the parents of node with i = node_index. """
return [j for j, adj in enumerate(self.adj_list) if node_index in adj]
@@ -184,39 +204,37 @@ class MeshDirectedGraph(DirectedGraph):
""" The MeshDirectedGraph is used for topological sorting of multiple meshes that have been
generated as a result of region split over the saddle points of the mesh scalar function """
- def __init__(self, all_meshes, DATA_PATH):
+ def __init__(self, all_meshes: list[Mesh], DATA_PATH: str) -> None:
self.all_meshes = all_meshes
self.DATA_PATH = DATA_PATH
self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
DirectedGraph.__init__(self)
- def find_roots(self):
+ def find_roots(self) -> list[int]:
""" Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first. """
- roots = []
+ roots: list[int] = []
for i, mesh in enumerate(self.all_meshes):
- for vkey, data in mesh.vertices(data=True):
- if i not in roots:
- if data['boundary'] == 1:
- roots.append(i)
+ for _vkey, data in mesh.vertices(data=True):
+ if i not in roots and data['boundary'] == 1:
+ roots.append(i)
return roots
- def find_ends(self):
+ def find_ends(self) -> list[int]:
""" Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last. """
- ends = []
+ ends: list[int] = []
for i, mesh in enumerate(self.all_meshes):
- for vkey, data in mesh.vertices(data=True):
- if i not in ends:
- if data['boundary'] == 2:
- ends.append(i)
+ for _vkey, data in mesh.vertices(data=True):
+ if i not in ends and data['boundary'] == 2:
+ ends.append(i)
return ends
- def create_graph_nodes(self):
+ def create_graph_nodes(self) -> None:
""" Add each of the split meshes to the graph as nodes. Cuts and boundaries are stored as attributes. """
for i, m in enumerate(self.all_meshes):
self.G.add_node(i, cuts=get_existing_cut_indices(m),
boundaries=get_existing_boundary_indices(m))
- def get_children_of_node(self, root):
+ def get_children_of_node(self, root: int) -> tuple[list[int], list[list[int]]]:
"""
Find all the nodes that lie on the current root.
@@ -228,8 +246,8 @@ def get_children_of_node(self, root):
----------
2 lists [child1, child2, ...], [[common cuts 1], [common cuts 2] ...]
"""
- children = []
- cut_ids = []
+ children: list[int] = []
+ cut_ids: list[list[int]] = []
parent_data = self.G.nodes(data=True)[root]
for key, data in self.G.nodes(data=True):
@@ -237,29 +255,17 @@ def get_children_of_node(self, root):
if key != root and len(common_cuts) > 0 \
and (key, root) not in self.G.edges() \
- and (root, key) not in self.G.edges():
-
- if is_true_mesh_adjacency(self.all_meshes, key, root):
- if not len(common_cuts) == 1: # if all cuts worked, this should be 1. But life is not perfect.
- logger.error('More than one common cuts between two pieces in the following split \
- meshes. ' 'Root : %d, child : %d' % (root, key) + ' . Common cuts : ' + str(common_cuts) +
- 'Probably some cut did not separate components')
- children.append(key)
- cut_ids.append(common_cuts)
+ and (root, key) not in self.G.edges() and is_true_mesh_adjacency(self.all_meshes, key, root):
+ if len(common_cuts) != 1: # if all cuts worked, this should be 1. But life is not perfect.
+ logger.error(
+ f'More than one common cuts between two pieces in the following split meshes. '
+ f'Root : {root}, child : {key} . Common cuts : {common_cuts}'
+ 'Probably some cut did not separate components'
+ )
+ children.append(key)
+ cut_ids.append(common_cuts)
# --- debugging output
- # self.all_meshes[root].to_obj(self.OUTPUT_PATH + '/root.obj')
- # for child in children:
- # self.all_meshes[child].to_obj(self.OUTPUT_PATH + '/child_%d.obj' % child)
- # for cuts_id in cut_ids:
- # for common_cut in cuts_id:
- # pts = utils.get_mesh_vertex_coords_with_attribute(self.all_meshes[root], 'cut', common_cut)
- # utils.save_to_json(utils.point_list_to_dict(pts), self.OUTPUT_PATH, 'cut_%d.json' % common_cut)
- # print('root : ', root)
- # print('children : ', children)
- # print('cut_ids : ', cut_ids)
- # utils.interrupt()
-
return children, cut_ids
@@ -269,7 +275,9 @@ def get_children_of_node(self, root):
class SegmentsDirectedGraph(DirectedGraph):
""" The SegmentsDirectedGraph is used for topological sorting of multiple vertical_layers_print_data in one mesh"""
- def __init__(self, mesh, segments, max_d_threshold, DATA_PATH):
+ def __init__(
+ self, mesh: Mesh, segments: list[VerticalLayer], max_d_threshold: float, DATA_PATH: str
+ ) -> None:
self.mesh = mesh
self.segments = segments
self.max_d_threshold = max_d_threshold
@@ -277,34 +285,34 @@ def __init__(self, mesh, segments, max_d_threshold, DATA_PATH):
self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
DirectedGraph.__init__(self)
- def find_roots(self):
+ def find_roots(self) -> list[int]:
""" Roots are vertical_layers_print_data that lie on the build platform. Like that they can be print first. """
boundary_pts = utils.get_mesh_vertex_coords_with_attribute(self.mesh, 'boundary', 1)
- root_segments = []
+ root_segments: list[int] = []
for i, segment in enumerate(self.segments):
first_curve_pts = segment.paths[0].points
if are_neighboring_point_clouds(boundary_pts, first_curve_pts, 2 * self.max_d_threshold):
root_segments.append(i)
return root_segments
- def find_ends(self):
+ def find_ends(self) -> list[int]:
""" Ends are vertical_layers_print_data that belong to exclusively one segment. Like that they can be print last. """
boundary_pts = utils.get_mesh_vertex_coords_with_attribute(self.mesh, 'boundary', 2)
- end_segments = []
+ end_segments: list[int] = []
for i, segment in enumerate(self.segments):
last_curve_pts = segment.paths[-1].points
if are_neighboring_point_clouds(boundary_pts, last_curve_pts, self.max_d_threshold):
end_segments.append(i)
return end_segments
- def create_graph_nodes(self):
+ def create_graph_nodes(self) -> None:
""" Add each segment to to the graph as a node. """
- for i, segment in enumerate(self.segments):
+ for i, _segment in enumerate(self.segments):
self.G.add_node(i)
- def get_children_of_node(self, root):
+ def get_children_of_node(self, root: int) -> tuple[list[int], list[None]]:
""" Find all the nodes that lie on the current root. """
- children = []
+ children: list[int] = []
root_segment = self.segments[root]
root_last_crv_pts = root_segment.paths[-1].points
# utils.save_to_json(utils.point_list_to_dict(root_last_crv_pts), self.OUTPUT_PATH, "root_last_crv_pts.json")
@@ -322,7 +330,7 @@ def get_children_of_node(self, root):
#################################
# --- helpers
-def are_neighboring_point_clouds(pts1, pts2, threshold):
+def are_neighboring_point_clouds(pts1: list[Point], pts2: list[Point], threshold: float) -> bool:
"""
Returns True if 3 or more points of the point clouds are closer than the threshold. False otherwise.
@@ -332,17 +340,16 @@ def are_neighboring_point_clouds(pts1, pts2, threshold):
pts2: list, :class: 'compas.geometry.Point'
threshold: float
"""
- count = 0
- for pt in pts1:
- d = distance_point_point(pt, utils.get_closest_pt(pt, pts2))
- if d < threshold:
- count += 1
- if count > 5:
- return True
- return False
+ if len(pts1) == 0 or len(pts2) == 0:
+ return False
+ # Vectorized: compute min distance from each pt in pts1 to pts2
+ arr1 = np.asarray(pts1, dtype=np.float64)
+ arr2 = np.asarray(pts2, dtype=np.float64)
+ distances = min_distances_to_set(arr1, arr2)
+ return np.sum(distances < threshold) > 5
-def is_true_mesh_adjacency(all_meshes, key1, key2):
+def is_true_mesh_adjacency(all_meshes: list[Mesh], key1: int, key2: int) -> bool:
"""
Returns True if the two meshes share 3 or more vertices. False otherwise.
@@ -352,20 +359,20 @@ def is_true_mesh_adjacency(all_meshes, key1, key2):
key1: int, index of mesh1
key2: int, index of mesh2
"""
- count = 0
mesh1 = all_meshes[key1]
mesh2 = all_meshes[key2]
pts_mesh2 = [mesh2.vertex_coordinates(vkey) for vkey, data in mesh2.vertices(data=True)
if (data['cut'] > 0 or data['boundary'] > 0)]
- for vkey, data in mesh1.vertices(data=True):
- if data['cut'] > 0 or data['boundary'] > 0:
- pt = mesh1.vertex_coordinates(vkey)
- ci = utils.get_closest_pt_index(pt, pts_mesh2)
- if distance_point_point_sqrd(pt, pts_mesh2[ci]) < 0.00001:
- count += 1
- if count == 3:
- return True
- return False
+ pts_mesh1 = [mesh1.vertex_coordinates(vkey) for vkey, data in mesh1.vertices(data=True)
+ if (data['cut'] > 0 or data['boundary'] > 0)]
+ if len(pts_mesh1) == 0 or len(pts_mesh2) == 0:
+ return False
+ # Vectorized: compute min distance from each pt in mesh1 to pts_mesh2
+ arr1 = np.asarray(pts_mesh1, dtype=np.float64)
+ arr2 = np.asarray(pts_mesh2, dtype=np.float64)
+ distances = min_distances_to_set(arr1, arr2)
+ # Count points with essentially zero distance (shared vertices)
+ return np.sum(distances ** 2 < 0.00001) >= 3
if __name__ == '__main__':
diff --git a/src/compas_slicer/print_organization/__init__.py b/src/compas_slicer/print_organization/__init__.py
index 33dafde7..3959c6a0 100644
--- a/src/compas_slicer/print_organization/__init__.py
+++ b/src/compas_slicer/print_organization/__init__.py
@@ -1,63 +1,10 @@
-"""
-********************************************************************************
-print_organization
-********************************************************************************
-
-.. currentmodule:: compas_slicer.print_organization
-
-
-Classes
-=======
-
-.. autosummary::
- :toctree: generated/
- :nosignatures:
-
- BasePrintOrganizer
-
-
-BasePrintOrganizer
-------------------
-
-.. autosummary::
- :toctree: generated/
- :nosignatures:
-
- InterpolationPrintOrganizer
-
-
-Functions
-=========
-
-.. autosummary::
- :toctree: generated/
- :nosignatures:
-
- set_extruder_toggle
- override_extruder_toggle
- set_blend_radius
- add_safety_printpoints
- set_wait_time_on_sharp_corners
- set_wait_time_based_on_extruder_toggle
- override_wait_time
- set_linear_velocity_constant
- set_linear_velocity_per_layer
- set_linear_velocity_by_range
- set_linear_velocity_by_overhang
-
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-
-from .base_print_organizer import * # noqa: F401 E402 F403
-from .planar_print_organizer import * # noqa: F401 E402 F403
-from .interpolation_print_organizer import * # noqa: F401 E402 F403
-from .scalar_field_print_organizer import * # noqa: F401 E402 F403
+"""Print organization for embedding fabrication parameters into toolpaths."""
+from .base_print_organizer import * # noqa: F401 F403
from .curved_print_organization import * # noqa: F401 E402 F403
+from .interpolation_print_organizer import * # noqa: F401 E402 F403
+from .planar_print_organizer import * # noqa: F401 E402 F403
from .print_organization_utilities import * # noqa: F401 E402 F403
+from .scalar_field_print_organizer import * # noqa: F401 E402 F403
__all__ = [name for name in dir() if not name.startswith('_')]
diff --git a/src/compas_slicer/print_organization/base_print_organizer.py b/src/compas_slicer/print_organization/base_print_organizer.py
index 485dc346..efe23f25 100644
--- a/src/compas_slicer/print_organization/base_print_organizer.py
+++ b/src/compas_slicer/print_organization/base_print_organizer.py
@@ -1,231 +1,233 @@
-import compas_slicer
-import logging
-from compas.geometry import Vector, distance_point_point, norm_vector, normalize_vector, subtract_vectors, \
- cross_vectors, scale_vector
-from compas.utilities import pairwise
-import numpy as np
+from __future__ import annotations
+
from abc import abstractmethod
+from collections.abc import Generator, Iterator
+from typing import TYPE_CHECKING, Any
-logger = logging.getLogger('logger')
+import numpy as np
+from compas.geometry import (
+ Vector,
+ cross_vectors,
+ distance_point_point,
+ norm_vector,
+ normalize_vector,
+ scale_vector,
+ subtract_vectors,
+)
+from compas.itertools import pairwise
+from loguru import logger
-__all__ = ['BasePrintOrganizer']
+from compas_slicer.config import GcodeConfig
+from compas_slicer.geometry import PrintPointsCollection
+from compas_slicer.print_organization.print_organization_utilities.gcode import create_gcode_text
+from compas_slicer.slicers.base_slicer import BaseSlicer
+if TYPE_CHECKING:
+ from compas_slicer.geometry import Path, PrintPoint
-class BasePrintOrganizer(object):
- """
- Base class for organizing the printing process.
- This class is meant to be extended for the implementation of the various print organizers.
- Do not use this class directly in your python code. Instead use PlanarPrintOrganizer or InterpolationPrintOrganizer.
+
+__all__ = ["BasePrintOrganizer"]
+
+
+class BasePrintOrganizer:
+ """Base class for organizing the printing process.
+
+ This class is meant to be extended for implementing various print organizers.
+ Do not use this class directly. Use PlanarPrintOrganizer or InterpolationPrintOrganizer.
Attributes
----------
- slicer: :class:`compas_slicer.slicers.PlanarSlicer`
- An instance of the compas_slicer.slicers.PlanarSlicer.
+ slicer : BaseSlicer
+ An instance of a slicer class.
+ printpoints : PrintPointsCollection
+ Collection of printpoints organized by layer and path.
+
"""
- def __init__(self, slicer):
- assert isinstance(slicer, compas_slicer.slicers.BaseSlicer) # check input
- logger.info('Print Organizer')
+ def __init__(self, slicer: BaseSlicer) -> None:
+ if not isinstance(slicer, BaseSlicer):
+ raise TypeError(f"slicer must be BaseSlicer, not {type(slicer)}")
+ logger.info("Print Organizer")
self.slicer = slicer
- self.printpoints_dict = {}
+ self.printpoints = PrintPointsCollection()
- def __repr__(self):
+ def __repr__(self) -> str:
return ""
- ######################
- # Abstract methods
- ######################
-
@abstractmethod
- def create_printpoints(self):
- """To be implemented by the inheriting classes"""
+ def create_printpoints(self) -> None:
+ """To be implemented by inheriting classes."""
pass
- ######################
- # Iterators
- ######################
- def printpoints_iterator(self):
- """
- Iterate over the printpoints of the print organizer.
+ def printpoints_iterator(self) -> Generator[PrintPoint, None, None]:
+ """Iterate over all printpoints.
Yields
------
- printpoint: :class: 'compas_slicer.geometry.Printpoint'
- """
- assert len(self.printpoints_dict) > 0, 'No printpoints have been created.'
- for layer_key in self.printpoints_dict:
- for path_key in self.printpoints_dict[layer_key]:
- for printpoint in self.printpoints_dict[layer_key][path_key]:
- yield printpoint
+ PrintPoint
+ Each printpoint in the organizer.
- def printpoints_indices_iterator(self):
"""
- Iterate over the printpoints of the print organizer.
+ if not self.printpoints.layers:
+ raise ValueError("No printpoints have been created.")
+ yield from self.printpoints.iter_printpoints()
+
+ def printpoints_indices_iterator(self) -> Iterator[tuple[PrintPoint, int, int, int]]:
+ """Iterate over printpoints with their indices.
Yields
------
- printpoint: :class: 'compas_slicer.geometry.Printpoint'
- i: int, layer index. To get the layer key use: layer_key = 'layer_%d' % i
- j: int, path index. To get the path key use: path_key = 'path_%d' % j
- k: int, printpoint index
- """
- assert len(self.printpoints_dict) > 0, 'No printpoints have been created.'
- for i, layer_key in enumerate(self.printpoints_dict):
- for j, path_key in enumerate(self.printpoints_dict[layer_key]):
- for k, printpoint in enumerate(self.printpoints_dict[layer_key][path_key]):
- yield printpoint, i, j, k
+ tuple[PrintPoint, int, int, int]
+ Printpoint, layer index, path index, printpoint index.
- ######################
- # Properties
- ######################
+ """
+ if not self.printpoints.layers:
+ raise ValueError("No printpoints have been created.")
+ yield from self.printpoints.iter_with_indices()
@property
- def number_of_printpoints(self):
- """int: Total number of points in the PrintOrganizer."""
- total_number_of_pts = 0
- for layer_key in self.printpoints_dict:
- for path_key in self.printpoints_dict[layer_key]:
- for _ in self.printpoints_dict[layer_key][path_key]:
- total_number_of_pts += 1
- return total_number_of_pts
+ def number_of_printpoints(self) -> int:
+ """Total number of printpoints."""
+ return self.printpoints.number_of_printpoints
@property
- def number_of_paths(self):
- total_number_of_paths = 0
- for layer_key in self.printpoints_dict:
- for _ in self.printpoints_dict[layer_key]:
- total_number_of_paths += 1
- return total_number_of_paths
+ def number_of_paths(self) -> int:
+ """Total number of paths."""
+ return self.printpoints.number_of_paths
@property
- def number_of_layers(self):
- """int: Number of layers in the PrintOrganizer."""
- return len(self.printpoints_dict)
+ def number_of_layers(self) -> int:
+ """Number of layers."""
+ return self.printpoints.number_of_layers
@property
- def total_length_of_paths(self):
- """ Returns the total length of all paths. Does not consider extruder toggle. """
- total_length = 0
- for layer_key in self.printpoints_dict:
- for path_key in self.printpoints_dict[layer_key]:
- for prev, curr in pairwise(self.printpoints_dict[layer_key][path_key]):
- length = distance_point_point(prev.pt, curr.pt)
- total_length += length
+ def total_length_of_paths(self) -> float:
+ """Total length of all paths (ignores extruder toggle)."""
+ total_length = 0.0
+ for layer in self.printpoints:
+ for path in layer:
+ for prev, curr in pairwise(path):
+ total_length += distance_point_point(prev.pt, curr.pt)
return total_length
@property
- def total_print_time(self):
- """ If the print speed is defined, it returns the total time of the print, else returns None"""
- if self.printpoints_dict['layer_0']['path_0'][0].velocity is not None: # assume that all ppts are set or none
- total_time = 0
- for layer_key in self.printpoints_dict:
- for path_key in self.printpoints_dict[layer_key]:
- for prev, curr in pairwise(self.printpoints_dict[layer_key][path_key]):
- length = distance_point_point(prev.pt, curr.pt)
- total_time += length / curr.velocity
- return total_time
-
- def number_of_paths_on_layer(self, layer_index):
- """int: Number of paths within a Layer of the PrintOrganizer."""
- return len(self.printpoints_dict['layer_%d' % layer_index])
-
- ######################
- # Utils
- ######################
-
- def remove_duplicate_points_in_path(self, layer_key, path_key, tolerance=0.0001):
- """Remove subsequent points that are within a certain threshold.
+ def total_print_time(self) -> float | None:
+ """Total print time if velocity is defined, else None."""
+ if self.printpoints[0][0][0].velocity is None:
+ return None
+
+ total_time = 0.0
+ for layer in self.printpoints:
+ for path in layer:
+ for prev, curr in pairwise(path):
+ length = distance_point_point(prev.pt, curr.pt)
+ total_time += length / curr.velocity
+ return total_time
+
+ def number_of_paths_on_layer(self, layer_index: int) -> int:
+ """Number of paths within a layer."""
+ return len(self.printpoints[layer_index])
+
+ def remove_duplicate_points_in_path(
+ self, layer_idx: int, path_idx: int, tolerance: float = 0.0001
+ ) -> None:
+ """Remove subsequent points within a threshold distance.
Parameters
----------
- layer_key: str
- They key of the layer to remove points from.
- path_key: str
- The key of the path to remove points from.
- tolerance: float, optional
- Distance between points to remove. Defaults to 0.0001.
- """
+ layer_idx : int
+ The layer index.
+ path_idx : int
+ The path index.
+ tolerance : float
+ Distance threshold for duplicate detection.
+ """
dup_index = []
- # find duplicates
duplicate_ppts = []
- for i, printpoint in enumerate(self.printpoints_dict[layer_key][path_key]):
- if i < len(self.printpoints_dict[layer_key][path_key]) - 1:
- next_ppt = self.printpoints_dict[layer_key][path_key][i + 1]
- if np.linalg.norm(np.array(printpoint.pt) - np.array(next_ppt.pt)) < tolerance:
- dup_index.append(i)
- duplicate_ppts.append(printpoint)
-
- # warn user
- if len(duplicate_ppts) > 0:
- logger.warning(
- 'Attention! %d Duplicate printpoint(s) ' % len(duplicate_ppts) + 'on ' + layer_key + ', ' + path_key +
- ', indices: ' + str(dup_index) + '. They will be removed.')
- # remove duplicates
- if len(duplicate_ppts) > 0:
+ path = self.printpoints[layer_idx][path_idx]
+ for i, printpoint in enumerate(path.printpoints[:-1]):
+ next_ppt = path.printpoints[i + 1]
+ if np.linalg.norm(np.array(printpoint.pt) - np.array(next_ppt.pt)) < tolerance:
+ dup_index.append(i)
+ duplicate_ppts.append(printpoint)
+
+ if duplicate_ppts:
+ logger.warning(
+ f"Attention! {len(duplicate_ppts)} Duplicate printpoint(s) on "
+ f"layer {layer_idx}, path {path_idx}, indices: {dup_index}. They will be removed."
+ )
for ppt in duplicate_ppts:
- self.printpoints_dict[layer_key][path_key].remove(ppt)
+ path.printpoints.remove(ppt)
- def get_printpoint_neighboring_items(self, layer_key, path_key, i):
- """
- layer_key: str
- They key of the layer the current printpoint belongs to.
- path_key: str
- They key of the path the current printpoint belongs to.
- i: int
- The index of the current printpoint.
+ def get_printpoint_neighboring_items(
+ self, layer_idx: int, path_idx: int, i: int
+ ) -> list[PrintPoint | None]:
+ """Get neighboring printpoints.
- Returns
+ Parameters
----------
- list, :class: 'compas_slicer.geometry.PrintPoint'
+ layer_idx : int
+ The layer index.
+ path_idx : int
+ The path index.
+ i : int
+ Index of current printpoint.
+
+ Returns
+ -------
+ list[PrintPoint | None]
+ Previous and next printpoints (None if at boundary).
+
"""
- neighboring_items = []
- if i > 0:
- neighboring_items.append(self.printpoints_dict[layer_key][path_key][i - 1])
- else:
- neighboring_items.append(None)
- if i < len(self.printpoints_dict[layer_key][path_key]) - 1:
- neighboring_items.append(self.printpoints_dict[layer_key][path_key][i + 1])
- else:
- neighboring_items.append(None)
- return neighboring_items
-
- def printout_info(self):
- """Prints out information from the PrintOrganizer"""
- ppts_attributes = {}
- for key in self.printpoints_dict['layer_0']['path_0'][0].attributes:
- ppts_attributes[key] = str(type(self.printpoints_dict['layer_0']['path_0'][0].attributes[key]))
-
- print("\n---- PrintOrganizer Info ----")
- print("Number of layers: %d" % self.number_of_layers)
- print("Number of paths: %d" % self.number_of_paths)
- print("Number of PrintPoints: %d" % self.number_of_printpoints)
- print("PrintPoints attributes: ")
- for key in ppts_attributes:
- print(' % s : % s' % (str(key), ppts_attributes[key]))
- print("Toolpath length: %d mm" % self.total_length_of_paths)
+ path = self.printpoints[layer_idx][path_idx]
+ prev_pt = path[i - 1] if i > 0 else None
+ next_pt = path[i + 1] if i < len(path) - 1 else None
+ return [prev_pt, next_pt]
+
+ def printout_info(self) -> None:
+ """Print information about the PrintOrganizer."""
+ ppts_attributes = {
+ key: str(type(val))
+ for key, val in self.printpoints[0][0][0].attributes.items()
+ }
+
+ logger.info("---- PrintOrganizer Info ----")
+ logger.info(f"Number of layers: {self.number_of_layers}")
+ logger.info(f"Number of paths: {self.number_of_paths}")
+ logger.info(f"Number of PrintPoints: {self.number_of_printpoints}")
+ logger.info("PrintPoints attributes: ")
+ for key, val in ppts_attributes.items():
+ logger.info(f" {key} : {val}")
+ logger.info(f"Toolpath length: {self.total_length_of_paths:.0f} mm")
print_time = self.total_print_time
if print_time:
- minutes, sec = divmod(self.total_print_time, 60)
+ minutes, sec = divmod(print_time, 60)
hour, minutes = divmod(minutes, 60)
- print("Total print time: %d hours, %d minutes, %d seconds" % (hour, minutes, sec))
+ logger.info(f"Total print time: {int(hour)} hours, {int(minutes)} minutes, {int(sec)} seconds")
else:
- print("Print Velocity has not been assigned, thus print time is not calculated.")
- print("")
+ logger.info("Print Velocity has not been assigned, thus print time is not calculated.")
- def get_printpoint_up_vector(self, path, k, normal):
- """
- Returns the printpoint up-vector so that it is orthogonal to the path direction and the normal
+ def get_printpoint_up_vector(self, path: Path, k: int, normal: Vector) -> Vector:
+ """Get printpoint up-vector orthogonal to path direction and normal.
Parameters
----------
- path: :class:`compas_slicer.geometry.Path`
- k: the index of the point in path.points that the PrintPoint represents
- normal: :class:`compas.geometry.Vector`
- """
+ path : Path
+ The path containing the point.
+ k : int
+ Index of the point in path.points.
+ normal : Vector
+ The normal vector.
+
+ Returns
+ -------
+ Vector
+ The up vector.
+ """
p = path.points[k]
if k < len(path.points) - 1:
negative = False
@@ -233,97 +235,111 @@ def get_printpoint_up_vector(self, path, k, normal):
else:
negative = True
other_pt = path.points[k - 1]
+
diff = normalize_vector(subtract_vectors(p, other_pt))
up_vec = normalize_vector(cross_vectors(normal, diff))
+
if negative:
up_vec = scale_vector(up_vec, -1.0)
if norm_vector(up_vec) == 0:
up_vec = Vector(0, 0, 1)
- return Vector(*up_vec)
- ######################
- # Output data
- ######################
+ return Vector(*up_vec)
- def output_printpoints_dict(self):
- """Creates a flattened PrintPoints as a dictionary.
+ def output_printpoints_dict(self) -> dict[int, dict[str, Any]]:
+ """Create a flattened printpoints dictionary.
Returns
- ----------
- dict, with printpoints that can be saved as json
+ -------
+ dict
+ Flattened printpoints data for JSON serialization.
+
"""
data = {}
-
count = 0
- for layer_key in self.printpoints_dict:
- for path_key in self.printpoints_dict[layer_key]:
- self.remove_duplicate_points_in_path(layer_key, path_key)
- for printpoint in self.printpoints_dict[layer_key][path_key]:
- data[count] = printpoint.to_data()
+ for i, layer in enumerate(self.printpoints):
+ for j, path in enumerate(layer):
+ self.remove_duplicate_points_in_path(i, j)
+ for printpoint in path:
+ data[count] = printpoint.to_data()
count += 1
- logger.info("Generated %d print points" % count)
+
+ logger.info(f"Generated {count} print points")
return data
- def output_nested_printpoints_dict(self):
- """Creates a nested PrintPoints as a dictionary.
+ def output_nested_printpoints_dict(self) -> dict[str, dict[str, dict[int, dict[str, Any]]]]:
+ """Create a nested printpoints dictionary.
Returns
- ----------
- dict, with printpoints that can be saved as json
- """
- data = {}
+ -------
+ dict
+ Nested printpoints data for JSON serialization.
+ """
+ data: dict[str, dict[str, dict[int, dict[str, Any]]]] = {}
count = 0
- for layer_key in self.printpoints_dict:
+
+ for i, layer in enumerate(self.printpoints):
+ layer_key = f"layer_{i}"
data[layer_key] = {}
- for path_key in self.printpoints_dict[layer_key]:
+ for j, path in enumerate(layer):
+ path_key = f"path_{j}"
data[layer_key][path_key] = {}
- self.remove_duplicate_points_in_path(layer_key, path_key)
- for i, printpoint in enumerate(self.printpoints_dict[layer_key][path_key]):
- data[layer_key][path_key][i] = printpoint.to_data()
-
+ self.remove_duplicate_points_in_path(i, j)
+ for k, printpoint in enumerate(path):
+ data[layer_key][path_key][k] = printpoint.to_data()
count += 1
- logger.info("Generated %d print points" % count)
+ logger.info(f"Generated {count} print points")
return data
- def output_gcode(self, parameters):
- """ Gets a gcode text file using the function that creates gcode
+ def output_gcode(self, config: GcodeConfig | None = None) -> str:
+ """Generate G-code text.
+
Parameters
----------
- parameters: dict with gcode parameters
+ config : GcodeConfig | None
+ G-code configuration. If None, uses defaults.
Returns
- ----------
- str, gcode text file
- """
- # check print organizer: Should have horizontal layers, ideally should be planar
- # ...
- gcode = compas_slicer.print_organization.create_gcode_text(self, parameters)
- return gcode
+ -------
+ str
+ G-code text.
- def get_printpoints_attribute(self, attr_name):
"""
- Returns a list of printpoint attributes that have key=attr_name.
+ return create_gcode_text(self, config)
+
+ def get_printpoints_attribute(self, attr_name: str) -> list[Any]:
+ """Get a list of attribute values from all printpoints.
Parameters
----------
- attr_name: str
+ attr_name : str
+ Name of the attribute.
Returns
-------
- list of size len(ppts) with whatever type the ppts.attribute[attr_name] is.
+ list
+ Attribute values from all printpoints.
+
"""
attr_values = []
- for layer_key in self.printpoints_dict:
- for path_key in self.printpoints_dict[layer_key]:
- for ppt in self.printpoints_dict[layer_key][path_key]:
- assert attr_name in ppt.attributes, \
- "The attribute '%s' is not in the printpoint.attributes" % attr_name
- attr_values.append(ppt.attributes[attr_name])
+ for pp in self.printpoints.iter_printpoints():
+ if attr_name not in pp.attributes:
+ raise KeyError(f"Attribute '{attr_name}' not in printpoint.attributes")
+ attr_values.append(pp.attributes[attr_name])
return attr_values
-
-if __name__ == "__main__":
- pass
+ # Legacy compatibility: provide printpoints_dict property that builds the old dict format
+ @property
+ def printpoints_dict(self) -> dict[str, dict[str, list[PrintPoint]]]:
+ """Legacy accessor for the old dict format. Prefer using self.printpoints directly."""
+ result: dict[str, dict[str, list[PrintPoint]]] = {}
+ for i, layer in enumerate(self.printpoints):
+ layer_key = f"layer_{i}"
+ result[layer_key] = {}
+ for j, path in enumerate(layer):
+ path_key = f"path_{j}"
+ result[layer_key][path_key] = list(path.printpoints)
+ return result
diff --git a/src/compas_slicer/print_organization/curved_print_organization/__init__.py b/src/compas_slicer/print_organization/curved_print_organization/__init__.py
index 4a487012..06d96939 100644
--- a/src/compas_slicer/print_organization/curved_print_organization/__init__.py
+++ b/src/compas_slicer/print_organization/curved_print_organization/__init__.py
@@ -1,7 +1,3 @@
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
+from .base_boundary import * # noqa: F401 F403
-from .base_boundary import * # noqa: F401 E402 F403
-
-__all__ = [name for name in dir() if not name.startswith('_')]
+__all__ = [name for name in dir() if not name.startswith("_")]
diff --git a/src/compas_slicer/print_organization/curved_print_organization/base_boundary.py b/src/compas_slicer/print_organization/curved_print_organization/base_boundary.py
index 940faa09..a4516af0 100644
--- a/src/compas_slicer/print_organization/curved_print_organization/base_boundary.py
+++ b/src/compas_slicer/print_organization/curved_print_organization/base_boundary.py
@@ -1,9 +1,12 @@
-import logging
-from compas.geometry import Vector, normalize_vector
-from compas_slicer.geometry import PrintPoint
-import compas_slicer.utilities as utils
+from __future__ import annotations
+
+from typing import Any
-logger = logging.getLogger('logger')
+from compas.datastructures import Mesh
+from compas.geometry import Point, Vector, normalize_vector
+
+import compas_slicer.utilities as utils
+from compas_slicer.geometry import PrintPoint
__all__ = ['BaseBoundary']
@@ -21,11 +24,13 @@ class BaseBoundary:
override_vector :
"""
- def __init__(self, mesh, points, override_vector=None):
+ def __init__(
+ self, mesh: Mesh, points: list[Point], override_vector: Vector | None = None
+ ) -> None:
self.mesh = mesh
self.points = points
self.override_vector = override_vector
- closest_fks, projected_pts = utils.pull_pts_to_mesh_faces(self.mesh, [pt for pt in self.points])
+ closest_fks, projected_pts = utils.pull_pts_to_mesh_faces(self.mesh, list(self.points))
self.normals = [Vector(*self.mesh.face_normal(fkey)) for fkey in closest_fks]
if self.override_vector:
@@ -40,10 +45,10 @@ def __init__(self, mesh, points, override_vector=None):
for i, pp in enumerate(self.printpoints):
pp.up_vector = self.up_vectors[i]
- def __repr__(self):
- return "" % len(self.points)
+ def __repr__(self) -> str:
+ return f""
- def get_up_vectors(self):
+ def get_up_vectors(self) -> list[Vector]:
""" Finds the up_vectors of each point of the boundary. A smoothing step is also included. """
up_vectors = []
for i, p in enumerate(self.points):
@@ -56,7 +61,7 @@ def get_up_vectors(self):
up_vectors = utils.smooth_vectors(up_vectors, strength=0.4, iterations=3)
return up_vectors
- def to_data(self):
+ def to_data(self) -> dict[str, Any]:
""" Returns a dictionary with the data of the class. """
return {"points": utils.point_list_to_dict(self.points),
"up_vectors": utils.point_list_to_dict(self.up_vectors)}
diff --git a/src/compas_slicer/print_organization/interpolation_print_organizer.py b/src/compas_slicer/print_organization/interpolation_print_organizer.py
index d3798c6b..05d520d0 100644
--- a/src/compas_slicer/print_organization/interpolation_print_organizer.py
+++ b/src/compas_slicer/print_organization/interpolation_print_organizer.py
@@ -1,48 +1,127 @@
-from compas_slicer.print_organization import BasePrintOrganizer
+from __future__ import annotations
+
+from pathlib import Path as FilePath
+from typing import TYPE_CHECKING
+
+import numpy as np
+from compas.geometry import (
+ Point,
+ Polyline,
+ Vector,
+ closest_point_on_polyline,
+ distance_point_point,
+ dot_vectors,
+ scale_vector,
+ subtract_vectors,
+)
+from loguru import logger
+from numpy.typing import NDArray
+
+import compas_slicer.utilities as utils
+from compas_slicer.config import InterpolationConfig
+from compas_slicer.geometry import PrintLayer, PrintPath, PrintPoint, VerticalLayer
from compas_slicer.pre_processing.preprocessing_utils import topological_sorting as topo_sort
+from compas_slicer.print_organization.base_print_organizer import BasePrintOrganizer
from compas_slicer.print_organization.curved_print_organization import BaseBoundary
-import compas_slicer
-from compas.geometry import closest_point_on_polyline, distance_point_point, Polyline, Vector, Point, subtract_vectors, dot_vectors, scale_vector
-import logging
-from compas_slicer.geometry import Path, PrintPoint
-import compas_slicer.utilities as utils
-from compas_slicer.parameters import get_param
-logger = logging.getLogger('logger')
+if TYPE_CHECKING:
+ from compas_slicer.slicers import InterpolationSlicer
+
+# Check for CGAL availability at module load
+_USE_CGAL = False
+try:
+ from compas_cgal.polylines import closest_points_on_polyline as _cgal_closest
+ _USE_CGAL = True
+except ImportError:
+ _cgal_closest = None
+
+
+def _batch_closest_points_on_polyline(
+ query_points: list[Point], polyline_points: list[Point]
+) -> tuple[NDArray[np.floating], NDArray[np.floating]]:
+ """Find closest points on polyline for batch of query points.
+
+ Returns closest points and distances.
+ Uses CGAL if available, otherwise falls back to compas.
+ """
+ if _USE_CGAL and len(query_points) > 10:
+ # Use CGAL batch query for larger sets
+ queries = [[p[0], p[1], p[2]] for p in query_points]
+ polyline = [[p[0], p[1], p[2]] for p in polyline_points]
+ closest = _cgal_closest(queries, polyline)
+ # Compute distances
+ queries_np = np.array(queries)
+ distances = np.linalg.norm(closest[:, :2] - queries_np[:, :2], axis=1)
+ return closest, distances
+ else:
+ # Fall back to per-point compas queries
+ polyline = Polyline(polyline_points)
+ closest = []
+ distances = []
+ for p in query_points:
+ cp = closest_point_on_polyline(p, polyline)
+ closest.append([cp[0], cp[1], cp[2]])
+ distances.append(distance_point_point(cp, p))
+ return np.array(closest), np.array(distances)
+
__all__ = ['InterpolationPrintOrganizer']
class InterpolationPrintOrganizer(BasePrintOrganizer):
- """
- Organizing the printing process for the realization of non-planar contours.
+ """Organize the printing process for non-planar contours.
Attributes
----------
- slicer: :class:`compas_slicer.slicers.PlanarSlicer`
- An instance of the compas_slicer.slicers.PlanarSlicer.
- parameters: dict
- DATA_PATH: str
+ slicer : InterpolationSlicer
+ An instance of InterpolationSlicer.
+ config : InterpolationConfig
+ Interpolation configuration.
+ DATA_PATH : str | Path
+ Data directory path.
+ vertical_layers : list[VerticalLayer]
+ Vertical layers from slicer.
+ horizontal_layers : list[Layer]
+ Horizontal layers from slicer.
+ base_boundaries : list[BaseBoundary]
+ Base boundaries for each vertical layer.
+
"""
- def __init__(self, slicer, parameters, DATA_PATH):
- assert isinstance(slicer, compas_slicer.slicers.InterpolationSlicer), 'Please provide an InterpolationSlicer'
+ slicer: InterpolationSlicer
+
+ def __init__(
+ self,
+ slicer: InterpolationSlicer,
+ config: InterpolationConfig | None = None,
+ DATA_PATH: str | FilePath = ".",
+ ) -> None:
+ from compas_slicer.slicers import InterpolationSlicer
+
+ if not isinstance(slicer, InterpolationSlicer):
+ raise TypeError('Please provide an InterpolationSlicer')
BasePrintOrganizer.__init__(self, slicer)
self.DATA_PATH = DATA_PATH
self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
- self.parameters = parameters
+ self.config = config if config else InterpolationConfig()
self.vertical_layers = slicer.vertical_layers
self.horizontal_layers = slicer.horizontal_layers
- assert len(self.vertical_layers) + len(self.horizontal_layers) == len(slicer.layers)
+ if len(self.vertical_layers) + len(self.horizontal_layers) != len(slicer.layers):
+ raise ValueError(
+ f"Layer count mismatch: {len(self.vertical_layers)} vertical + "
+ f"{len(self.horizontal_layers)} horizontal != {len(slicer.layers)} total"
+ )
if len(self.horizontal_layers) > 0:
- assert len(self.horizontal_layers) == 1, "Only one brim horizontal layer is currently supported."
- assert self.horizontal_layers[0].is_brim, "Only one brim horizontal layer is currently supported."
+ if len(self.horizontal_layers) != 1:
+ raise ValueError("Only one brim horizontal layer is currently supported.")
+ if not self.horizontal_layers[0].is_brim:
+ raise ValueError("Only one brim horizontal layer is currently supported.")
logger.info('Slicer has one horizontal brim layer.')
# topological sorting of vertical layers depending on their connectivity
- self.topo_sort_graph = None
+ self.topo_sort_graph: topo_sort.SegmentsDirectedGraph | None = None
if len(self.vertical_layers) > 1:
try:
self.topological_sorting()
@@ -51,30 +130,33 @@ def __init__(self, slicer, parameters, DATA_PATH):
logger.critical("integrity of the output data ")
# TODO: perhaps its better to be even more explicit and add a
# FAILED-timestamp.txt file?
- self.selected_order = None
+ self.selected_order: list[int] | None = None
# creation of one base boundary per vertical_layer
- self.base_boundaries = self.create_base_boundaries()
+ self.base_boundaries: list[BaseBoundary] = self.create_base_boundaries()
+
+ def __repr__(self) -> str:
+ return f""
- def __repr__(self):
- return "" % len(self.vertical_layers)
+ def topological_sorting(self) -> None:
+ """Create directed graph of parts with connectivity.
- def topological_sorting(self):
- """ When the print consists of various paths, this function initializes a class that creates
- a directed graph with all these parts, with the connectivity of each part reflecting which
- other parts it lies on, and which other parts lie on it."""
- avg_layer_height = get_param(self.parameters, key='avg_layer_height', defaults_type='layers')
+ Creates a directed graph where each part's connectivity reflects which
+ other parts it lies on and which other parts lie on it.
+
+ """
+ avg_layer_height = self.config.avg_layer_height
self.topo_sort_graph = topo_sort.SegmentsDirectedGraph(self.slicer.mesh, self.vertical_layers,
4 * avg_layer_height, DATA_PATH=self.DATA_PATH)
- def create_base_boundaries(self):
- """ Creates one BaseBoundary per vertical_layer."""
- bs = []
+ def create_base_boundaries(self) -> list[BaseBoundary]:
+ """Create one BaseBoundary per vertical_layer."""
+ bs: list[BaseBoundary] = []
root_vs = utils.get_mesh_vertex_coords_with_attribute(self.slicer.mesh, 'boundary', 1)
root_boundary = BaseBoundary(self.slicer.mesh, [Point(*v) for v in root_vs])
- if len(self.vertical_layers) > 1:
- for i, vertical_layer in enumerate(self.vertical_layers):
+ if len(self.vertical_layers) > 1 and self.topo_sort_graph is not None:
+ for i, _vertical_layer in enumerate(self.vertical_layers):
parents_of_current_node = self.topo_sort_graph.get_parents_of_node(i)
if len(parents_of_current_node) == 0:
boundary = root_boundary
@@ -94,24 +176,31 @@ def create_base_boundaries(self):
return bs
- def create_printpoints(self):
- """
- Create the print points of the fabrication process
+ def create_printpoints(self) -> None:
+ """Create the print points of the fabrication process.
+
Based on the directed graph, select one topological order.
- From each path collection in that order copy PrintPoints dictionary in the correct order.
+ From each path collection in that order, copy PrintPoints in the correct order.
+
"""
current_layer_index = 0
# (1) --- First add the printpoints of the horizontal brim layer (first layer of print)
- self.printpoints_dict['layer_0'] = {}
if len(self.horizontal_layers) > 0: # first add horizontal brim layers
+ print_layer = PrintLayer()
paths = self.horizontal_layers[0].paths
- for j, path in enumerate(paths):
- self.printpoints_dict['layer_0']['path_%d' % j] = \
- [PrintPoint(pt=point, layer_height=get_param(self.parameters, 'avg_layer_height', 'layers'),
- mesh_normal=utils.get_normal_of_path_on_xy_plane(k, point, path, self.slicer.mesh))
- for k, point in enumerate(path.points)]
+ for _j, path in enumerate(paths):
+ print_path = PrintPath(printpoints=[
+ PrintPoint(pt=point, layer_height=self.config.avg_layer_height,
+ mesh_normal=utils.get_normal_of_path_on_xy_plane(k, point, path, self.slicer.mesh))
+ for k, point in enumerate(path.points)
+ ])
+ print_layer.paths.append(print_path)
+ self.printpoints.layers.append(print_layer)
current_layer_index += 1
+ else:
+ # Add empty first layer placeholder if no horizontal layers
+ pass
# (2) --- Select order of vertical layers
if len(self.vertical_layers) > 1: # then you need to select one topological order
@@ -126,36 +215,43 @@ def create_printpoints(self):
self.selected_order = [0] # there is only one segment, only this option
# (3) --- Then create the printpoints of all the vertical layers in the selected order
- for index, i in enumerate(self.selected_order):
+ if self.selected_order is None:
+ raise RuntimeError("selected_order must be set before creating printpoints")
+ for _index, i in enumerate(self.selected_order):
layer = self.vertical_layers[i]
- self.printpoints_dict['layer_%d' % current_layer_index] = self.get_layer_ppts(layer, self.base_boundaries[i])
+ print_layer = self.get_layer_ppts(layer, self.base_boundaries[i])
+ self.printpoints.layers.append(print_layer)
current_layer_index += 1
- def get_layer_ppts(self, layer, base_boundary):
- """ Creates the PrintPoints of a single layer."""
- max_layer_height = get_param(self.parameters, key='max_layer_height', defaults_type='layers')
- min_layer_height = get_param(self.parameters, key='min_layer_height', defaults_type='layers')
- avg_layer_height = get_param(self.parameters, 'avg_layer_height', 'layers')
+ def get_layer_ppts(self, layer: VerticalLayer, base_boundary: BaseBoundary) -> PrintLayer:
+ """Create the PrintPoints of a single layer."""
+ max_layer_height = self.config.max_layer_height
+ min_layer_height = self.config.min_layer_height
+ avg_layer_height = self.config.avg_layer_height
all_pts = [pt for path in layer.paths for pt in path.points]
closest_fks, projected_pts = utils.pull_pts_to_mesh_faces(self.slicer.mesh, all_pts)
normals = [Vector(*self.slicer.mesh.face_normal(fkey)) for fkey in closest_fks]
count = 0
- crv_to_check = Path(base_boundary.points, True) # creation of fake path for the lower boundary
+ support_polyline_pts = base_boundary.points # Start with base boundary
- layer_ppts = {}
- for i, path in enumerate(layer.paths):
- layer_ppts['path_%d' % i] = []
+ print_layer = PrintLayer()
+ for _i, path in enumerate(layer.paths):
+ # Batch query: find closest points for all points in this path at once
+ closest_pts, distances = _batch_closest_points_on_polyline(
+ path.points, support_polyline_pts
+ )
+ print_path = PrintPath()
for k, p in enumerate(path.points):
- cp = closest_point_on_polyline(p, Polyline(crv_to_check.points))
- d = distance_point_point(cp, p)
+ cp = closest_pts[k]
+ d = distances[k]
normal = normals[count]
ppt = PrintPoint(pt=p, layer_height=avg_layer_height, mesh_normal=normal)
- ppt.closest_support_pt = Point(*cp)
+ ppt.closest_support_pt = Point(cp[0], cp[1], cp[2])
ppt.distance_to_support = d
ppt.layer_height = max(min(d, max_layer_height), min_layer_height)
ppt.up_vector = self.get_printpoint_up_vector(path, k, normal)
@@ -163,12 +259,13 @@ def get_layer_ppts(self, layer, base_boundary):
ppt.up_vector = Vector(*scale_vector(ppt.up_vector, -1))
ppt.frame = ppt.get_frame()
- layer_ppts['path_%d' % i].append(ppt)
+ print_path.printpoints.append(ppt)
count += 1
- crv_to_check = path
+ print_layer.paths.append(print_path)
+ support_polyline_pts = path.points # Next path checks against this one
- return layer_ppts
+ return print_layer
if __name__ == "__main__":
diff --git a/src/compas_slicer/print_organization/planar_print_organizer.py b/src/compas_slicer/print_organization/planar_print_organizer.py
index 7ee206dd..c1739f2c 100644
--- a/src/compas_slicer/print_organization/planar_print_organizer.py
+++ b/src/compas_slicer/print_organization/planar_print_organizer.py
@@ -1,41 +1,52 @@
-import logging
-from compas_slicer.print_organization import BasePrintOrganizer
-import compas_slicer.utilities as utils
-from compas_slicer.geometry import PrintPoint
-from compas.geometry import Vector
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
import progressbar
-import compas_slicer
+from compas.geometry import Vector
+from loguru import logger
+
+import compas_slicer.utilities as utils
+from compas_slicer.geometry import PrintLayer, PrintPath, PrintPoint
+from compas_slicer.print_organization.base_print_organizer import BasePrintOrganizer
+
+if TYPE_CHECKING:
+ from compas_slicer.slicers import PlanarSlicer
-logger = logging.getLogger('logger')
__all__ = ['PlanarPrintOrganizer']
class PlanarPrintOrganizer(BasePrintOrganizer):
- """
- Organizing the printing process for the realization of planar contours.
+ """Organize the printing process for planar contours.
Attributes
----------
- slicer: :class:`compas_slicer.slicers.PlanarSlicer`
- An instance of the compas_slicer.slicers.PlanarSlicer.
+ slicer : PlanarSlicer
+ An instance of PlanarSlicer.
+
"""
- def __init__(self, slicer):
- assert isinstance(slicer, compas_slicer.slicers.PlanarSlicer), 'Please provide a PlanarSlicer'
+ slicer: PlanarSlicer
+
+ def __init__(self, slicer: PlanarSlicer) -> None:
+ from compas_slicer.slicers import PlanarSlicer
+
+ if not isinstance(slicer, PlanarSlicer):
+ raise TypeError('Please provide a PlanarSlicer')
BasePrintOrganizer.__init__(self, slicer)
- def __repr__(self):
- return "" % len(self.slicer.layers)
+ def __repr__(self) -> str:
+ return f""
- def create_printpoints(self, generate_mesh_normals=True):
- """Create the print points of the fabrication process
+ def create_printpoints(self, generate_mesh_normals: bool = True) -> None:
+ """Create the print points of the fabrication process.
Parameters
----------
- generate_mesh_normals: bool
- Boolean toggle that controls whether to generate mesh normals or not.
- If False, mesh normals will be set to Vector(0, 0, 1)
+ generate_mesh_normals : bool
+ If True, compute mesh normals. If False, use Vector(0, 1, 0).
+
"""
count = 0
@@ -49,26 +60,31 @@ def create_printpoints(self, generate_mesh_normals=True):
closest_fks, projected_pts = utils.pull_pts_to_mesh_faces(self.slicer.mesh, all_pts)
normals = [Vector(*self.slicer.mesh.face_normal(fkey)) for fkey in closest_fks]
- for i, layer in enumerate(self.slicer.layers):
- self.printpoints_dict['layer_%d' % i] = {}
+ for _i, layer in enumerate(self.slicer.layers):
+ print_layer = PrintLayer()
- for j, path in enumerate(layer.paths):
- self.printpoints_dict['layer_%d' % i]['path_%d' % j] = []
+ for _j, path in enumerate(layer.paths):
+ print_path = PrintPath()
for k, point in enumerate(path.points):
n = normals[count] if generate_mesh_normals else Vector(0, 1, 0)
- printpoint = PrintPoint(pt=point, layer_height=self.slicer.layer_height, mesh_normal=n)
+ layer_h = self.slicer.layer_height if self.slicer.layer_height else 2.0
+ printpoint = PrintPoint(pt=point, layer_height=layer_h, mesh_normal=n)
if layer.is_brim or layer.is_raft:
printpoint.up_vector = Vector(0, 0, 1)
else:
printpoint.up_vector = self.get_printpoint_up_vector(path, k, n)
- self.printpoints_dict['layer_%d' % i]['path_%d' % j].append(printpoint)
+ print_path.printpoints.append(printpoint)
bar.update(count)
count += 1
+ print_layer.paths.append(print_path)
+
+ self.printpoints.layers.append(print_layer)
+
if __name__ == "__main__":
pass
diff --git a/src/compas_slicer/print_organization/print_organization_utilities/__init__.py b/src/compas_slicer/print_organization/print_organization_utilities/__init__.py
index 7517e12c..e3451ca6 100644
--- a/src/compas_slicer/print_organization/print_organization_utilities/__init__.py
+++ b/src/compas_slicer/print_organization/print_organization_utilities/__init__.py
@@ -1,14 +1,9 @@
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
+from .blend_radius import * # noqa: F401 F403
+from .data_smoothing import * # noqa: F401 F403
+from .extruder_toggle import * # noqa: F401 F403
+from .gcode import * # noqa: F401 F403
+from .linear_velocity import * # noqa: F401 F403
+from .safety_printpoints import * # noqa: F401 F403
+from .wait_time import * # noqa: F401 F403
-from .safety_printpoints import * # noqa: F401 E402 F403
-from .blend_radius import * # noqa: F401 E402 F403
-from .linear_velocity import * # noqa: F401 E402 F403
-from .extruder_toggle import * # noqa: F401 E402 F403
-from .wait_time import * # noqa: F401 E402 F403
-from .gcode import * # noqa: F401 E402 F403
-from .data_smoothing import * # noqa: F401 E402 F403
-
-
-__all__ = [name for name in dir() if not name.startswith('_')]
+__all__ = [name for name in dir() if not name.startswith("_")]
diff --git a/src/compas_slicer/print_organization/print_organization_utilities/blend_radius.py b/src/compas_slicer/print_organization/print_organization_utilities/blend_radius.py
index c0fe5731..4264275c 100644
--- a/src/compas_slicer/print_organization/print_organization_utilities/blend_radius.py
+++ b/src/compas_slicer/print_organization/print_organization_utilities/blend_radius.py
@@ -1,12 +1,20 @@
-from compas.geometry import norm_vector, Vector
-import logging
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from compas.geometry import Vector, norm_vector
+from loguru import logger
+
+if TYPE_CHECKING:
+ from compas_slicer.print_organization import BasePrintOrganizer
-logger = logging.getLogger('logger')
__all__ = ['set_blend_radius']
-def set_blend_radius(print_organizer, d_fillet=10, buffer=0.3):
+def set_blend_radius(
+ print_organizer: BasePrintOrganizer, d_fillet: float = 10.0, buffer: float = 0.3
+) -> None:
"""Sets the blend radius (filleting) for the robotic motion.
Parameters
@@ -21,12 +29,10 @@ def set_blend_radius(print_organizer, d_fillet=10, buffer=0.3):
logger.info("Setting blend radius")
- extruder_state = 0
+ extruder_state: bool | None = None
for printpoint, i, j, k in print_organizer.printpoints_indices_iterator():
- layer_key = 'layer_%d' % i
- path_key = 'path_%d' % j
- neighboring_items = print_organizer.get_printpoint_neighboring_items(layer_key, path_key, k)
+ neighboring_items = print_organizer.get_printpoint_neighboring_items(i, j, k)
if not printpoint.wait_time:
diff --git a/src/compas_slicer/print_organization/print_organization_utilities/data_smoothing.py b/src/compas_slicer/print_organization/print_organization_utilities/data_smoothing.py
index 7aa34dea..63002685 100644
--- a/src/compas_slicer/print_organization/print_organization_utilities/data_smoothing.py
+++ b/src/compas_slicer/print_organization/print_organization_utilities/data_smoothing.py
@@ -1,14 +1,27 @@
-import logging
-from copy import deepcopy
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Callable
+
+import numpy as np
+from compas.geometry import Vector
+
+if TYPE_CHECKING:
+ from compas_slicer.geometry import PrintPoint
+ from compas_slicer.print_organization import BasePrintOrganizer
-logger = logging.getLogger('logger')
__all__ = ['smooth_printpoint_attribute',
'smooth_printpoints_up_vectors',
'smooth_printpoints_layer_heights']
-def smooth_printpoint_attribute(print_organizer, iterations, strength, get_attr_value, set_attr_value):
+def smooth_printpoint_attribute(
+ print_organizer: BasePrintOrganizer,
+ iterations: int,
+ strength: float,
+ get_attr_value: Callable[[PrintPoint], Any],
+ set_attr_value: Callable[[PrintPoint, Any], None],
+) -> None:
"""
Iterative smoothing of the printpoints attribute.
The attribute is accessed using the function 'get_attr_value(ppt)', and is set using the function
@@ -33,25 +46,28 @@ def smooth_printpoint_attribute(print_organizer, iterations, strength, get_attr_
# first smoothen the values
for ppt in print_organizer.printpoints_iterator():
- assert get_attr_value(ppt), 'The attribute you are trying to smooth has not been assigned a value'
+ if get_attr_value(ppt) is None:
+ raise ValueError('The attribute you are trying to smooth has not been assigned a value')
- attrs = [get_attr_value(ppt) for ppt in print_organizer.printpoints_iterator()]
- new_values = deepcopy(attrs)
+ attrs = np.array([get_attr_value(ppt) for ppt in print_organizer.printpoints_iterator()])
- for iteration in range(iterations):
- for i, ppt in enumerate(print_organizer.printpoints_iterator()):
- if 0 < i < len(attrs) - 1: # ignore first and last element
- mid = (attrs[i - 1] + attrs[i + 1]) * 0.5
- new_values[i] = mid * strength + attrs[i] * (1 - strength)
- attrs = new_values
+ # Vectorized smoothing: use numpy slicing instead of per-element loop
+ for _ in range(iterations):
+ # mid = 0.5 * (attrs[i-1] + attrs[i+1]) for interior points
+ mid = 0.5 * (attrs[:-2] + attrs[2:]) # shape: (n-2,)
+ # new_val = mid * strength + attrs[1:-1] * (1 - strength)
+ attrs[1:-1] = mid * strength + attrs[1:-1] * (1 - strength)
- # in the end assign the new (smoothened) values to the printpoints
- if iteration == iterations - 1:
- for i, ppt in enumerate(print_organizer.printpoints_iterator()):
- set_attr_value(ppt, attrs[i])
+ # Assign the smoothened values back to the printpoints
+ for i, ppt in enumerate(print_organizer.printpoints_iterator()):
+ val = attrs[i]
+ # Convert back from numpy type if needed
+ set_attr_value(ppt, val.tolist() if hasattr(val, 'tolist') else float(val))
-def smooth_printpoints_layer_heights(print_organizer, iterations, strength):
+def smooth_printpoints_layer_heights(
+ print_organizer: BasePrintOrganizer, iterations: int, strength: float
+) -> None:
""" This function is an example for how the 'smooth_printpoint_attribute' function can be used. """
def get_ppt_layer_height(printpoint):
@@ -63,14 +79,17 @@ def set_ppt_layer_height(printpoint, v):
smooth_printpoint_attribute(print_organizer, iterations, strength, get_ppt_layer_height, set_ppt_layer_height)
-def smooth_printpoints_up_vectors(print_organizer, iterations, strength):
+def smooth_printpoints_up_vectors(
+ print_organizer: BasePrintOrganizer, iterations: int, strength: float
+) -> None:
""" This function is an example for how the 'smooth_printpoint_attribute' function can be used. """
def get_ppt_up_vec(printpoint):
return printpoint.up_vector # get value
def set_ppt_up_vec(printpoint, v):
- printpoint.up_vector = v # set value
+ # Convert list back to Vector for proper serialization
+ printpoint.up_vector = Vector(*v) if isinstance(v, list) else v
smooth_printpoint_attribute(print_organizer, iterations, strength, get_ppt_up_vec, set_ppt_up_vec)
# finally update any values in the printpoints that are affected by the changed attribute
diff --git a/src/compas_slicer/print_organization/print_organization_utilities/extruder_toggle.py b/src/compas_slicer/print_organization/print_organization_utilities/extruder_toggle.py
index 13eb9839..3ad8a4cd 100644
--- a/src/compas_slicer/print_organization/print_organization_utilities/extruder_toggle.py
+++ b/src/compas_slicer/print_organization/print_organization_utilities/extruder_toggle.py
@@ -1,14 +1,22 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from loguru import logger
+
import compas_slicer
-import logging
-logger = logging.getLogger('logger')
+if TYPE_CHECKING:
+ from compas_slicer.print_organization import BasePrintOrganizer
+ from compas_slicer.slicers import BaseSlicer
+
__all__ = ['set_extruder_toggle',
'override_extruder_toggle',
'check_assigned_extruder_toggle']
-def set_extruder_toggle(print_organizer, slicer):
+def set_extruder_toggle(print_organizer: BasePrintOrganizer, slicer: BaseSlicer) -> None:
"""Sets the extruder_toggle value for the printpoints.
Parameters
@@ -19,15 +27,11 @@ def set_extruder_toggle(print_organizer, slicer):
logger.info("Setting extruder toggle")
- pp_dict = print_organizer.printpoints_dict
-
for i, layer in enumerate(slicer.layers):
- layer_key = 'layer_%d' % i
is_vertical_layer = isinstance(layer, compas_slicer.geometry.VerticalLayer)
is_brim_layer = layer.is_brim
for j, path in enumerate(layer.paths):
- path_key = 'path_%d' % j
is_closed_path = path.is_closed
# --- decide if the path should be interrupted at the end
@@ -48,15 +52,14 @@ def set_extruder_toggle(print_organizer, slicer):
interrupt_path = True
# the last path of a vertical layer should be interrupted
- if i < len(slicer.layers)-1:
- if not slicer.layers[i+1].paths[0].is_closed:
- interrupt_path = True
+ if i < len(slicer.layers)-1 and not slicer.layers[i+1].paths[0].is_closed:
+ interrupt_path = True
# --- create extruder toggles
try:
- path_printpoints = pp_dict[layer_key][path_key]
- except KeyError:
- logger.exception("no path found for layer %s" % layer_key)
+ path_printpoints = print_organizer.printpoints[i][j]
+ except (KeyError, IndexError):
+ logger.exception(f"no path found for layer {i}")
else:
for k, printpoint in enumerate(path_printpoints):
@@ -68,16 +71,14 @@ def set_extruder_toggle(print_organizer, slicer):
else:
printpoint.extruder_toggle = True
- # set extruder toggle of last print point to false
- last_layer_key = 'layer_%d' % (len(pp_dict) - 1)
- last_path_key = 'path_%d' % (len(pp_dict[last_layer_key]) - 1)
- try:
- pp_dict[last_layer_key][last_path_key][-1].extruder_toggle = False
- except KeyError as e:
- logger.exception(e)
+ # set extruder toggle of last print point to false
+ try:
+ print_organizer.printpoints[-1][-1][-1].extruder_toggle = False
+ except (KeyError, IndexError) as e:
+ logger.exception(e)
-def override_extruder_toggle(print_organizer, override_value):
+def override_extruder_toggle(print_organizer: BasePrintOrganizer, override_value: bool) -> None:
"""Overrides the extruder_toggle value for the printpoints with a user-defined value.
Parameters
@@ -87,12 +88,13 @@ def override_extruder_toggle(print_organizer, override_value):
Value to override the extruder_toggle values with.
"""
- assert isinstance(override_value, bool), "Override value must be of type bool"
+ if not isinstance(override_value, bool):
+ raise TypeError("Override value must be of type bool")
for printpoint in print_organizer.printpoints_iterator():
printpoint.extruder_toggle = override_value
-def check_assigned_extruder_toggle(print_organizer):
+def check_assigned_extruder_toggle(print_organizer: BasePrintOrganizer) -> bool:
""" Checks that all the printpoints have an assigned extruder toggle. """
all_toggles_assigned = True
for printpoint in print_organizer.printpoints_iterator():
diff --git a/src/compas_slicer/print_organization/print_organization_utilities/gcode.py b/src/compas_slicer/print_organization/print_organization_utilities/gcode.py
index f43cd699..6789e1aa 100644
--- a/src/compas_slicer/print_organization/print_organization_utilities/gcode.py
+++ b/src/compas_slicer/print_organization/print_organization_utilities/gcode.py
@@ -1,182 +1,326 @@
-import logging
+"""G-code generation for compas_slicer.
+
+This module generates G-code for FDM 3D printing from organized print points.
+"""
+
+from __future__ import annotations
+
import math
-from compas_slicer.parameters import get_param
-from compas.geometry import Point, Vector
-from compas_slicer.geometry import PrintPoint
from datetime import datetime
+from typing import TYPE_CHECKING
+
+from compas.geometry import Point
+from loguru import logger
+
+from compas_slicer.config import GcodeConfig
+
+if TYPE_CHECKING:
+ from compas_slicer.print_organization import BasePrintOrganizer
+
+__all__ = ["create_gcode_text", "GcodeBuilder"]
+
+# =============================================================================
+# Constants
+# =============================================================================
+
+PURGE_LENGTH = 145.0 # mm, Y travel for purge line
+PURGE_START_X = 5.0 # mm
+PURGE_START_Y = 5.0 # mm
+PURGE_HEIGHT = 0.2 # mm, first layer height for purge
+STARTUP_FEEDRATE = 4500 # mm/min
+SHUTDOWN_FEEDRATE = 1000 # mm/min
+SHUTDOWN_ACCEL = 500 # mm/s^2
+
+
+# =============================================================================
+# GcodeBuilder
+# =============================================================================
+
+
+class GcodeBuilder:
+ """Builder for constructing G-code output efficiently.
+
+ Uses a list internally and joins at the end for better performance
+ than repeated string concatenation.
+ """
+
+ def __init__(self) -> None:
+ self._lines: list[str] = []
+
+ def comment(self, text: str) -> None:
+ """Add a comment line."""
+ self._lines.append(f";{text}")
+
+ def cmd(self, gcode: str, comment: str = "") -> None:
+ """Add a G-code command with optional inline comment."""
+ if comment:
+ self._lines.append(f"{gcode:<30} ;{comment}")
+ else:
+ self._lines.append(gcode)
+
+ def blank(self) -> None:
+ """Add a blank line."""
+ self._lines.append("")
+
+ def build(self) -> str:
+ """Return the complete G-code as a string."""
+ return "\n".join(self._lines)
-logger = logging.getLogger('logger')
-__all__ = ['create_gcode_text']
+# =============================================================================
+# Helper Functions
+# =============================================================================
-def create_gcode_text(print_organizer, parameters):
- """ Creates a gcode text file
+def _calc_extrusion(
+ distance: float,
+ layer_height: float,
+ path_width: float,
+ filament_diameter: float,
+ flowrate: float = 1.0,
+) -> float:
+ """Calculate extrusion amount using volumetric math.
+
Parameters
----------
- print_organizer: :class: compas_slicer.print_organization.PrintOrganizer
- parameters : dict with gcode parameters.
- The defaults for those parameters are in the file compas_slicer.parameters.defaults_gcode.
+ distance : float
+ Travel distance in mm.
+ layer_height : float
+ Layer height in mm.
+ path_width : float
+ Extrusion width in mm.
+ filament_diameter : float
+ Filament diameter in mm.
+ flowrate : float
+ Flow multiplier (default 1.0).
+
Returns
- ----------
- str, gcode text file
+ -------
+ float
+ Extrusion length in mm.
+
"""
- n_l = chr(10) # new line
- # get time stamp
- now = datetime.now()
- datetimestamp = now.strftime("%H:%M:%S - %d %B %Y")
- logger.info('Generating gcode')
- gcode = ''
-
- #######################################################################
- # get all the necessary parameters:
- # Physical parameters
- # nozzle_diameter = get_param(parameters, key='nozzle_diameter', defaults_type='gcode') # in mm
- filament_diameter = get_param(parameters, key='filament diameter', defaults_type='gcode') # in mm
-
- # Dimensional parameters
- path_width = get_param(parameters, key='layer_width', defaults_type='gcode') # in mm
-
- # Temperature parameters
- extruder_temperature = get_param(parameters, key='extruder_temperature', defaults_type='gcode') # in °C
- bed_temperature = get_param(parameters, key='bed_temperature', defaults_type='gcode') # in °C
- fan_speed = get_param(parameters, key='bed_temperature', defaults_type='gcode') # 0-255
- fan_start_z = get_param(parameters, key='fan_start_z', defaults_type='gcode') # in mm
-
- # Movement parameters
- flowrate = get_param(parameters, key='flowrate', defaults_type='gcode') # as fraction; this is a global flow multiplier
- feedrate = get_param(parameters, key='feedrate', defaults_type='gcode') # in mm/s
- feedrate_travel = get_param(parameters, key='feedrate_travel', defaults_type='gcode') # in mm/s
- feedrate_low = get_param(parameters, key='feedrate_low', defaults_type='gcode') # in mm/s, for z < min_over_z
- feedrate_retraction = get_param(parameters, key='feedrate_retraction', defaults_type='gcode') # in mm/s
- acceleration = get_param(parameters, key='acceleration', defaults_type='gcode') # in mm/s²; ignored if 0
- jerk = get_param(parameters, key='jerk', defaults_type='gcode') # in mm/s; if 0, the default driver value is used
-
- # Retraction and hop parameters
- z_hop = get_param(parameters, key='z_hop', defaults_type='gcode') # in mm
- retraction_length = get_param(parameters, key='retraction_length', defaults_type='gcode') # in mm
- retraction_min_travel = get_param(parameters, key='retraction_min_travel', defaults_type='gcode') # in mm
-
- # Adhesion parameters
- flow_over = get_param(parameters, key='flow_over', defaults_type='gcode') # as fraction > 1
- min_over_z = get_param(parameters, key='min_over_z', defaults_type='gcode') # in mm
- # ______________________________________________________________________/ get parmeters
-
- # ######################################################################
- # gcode header
- gcode += ";Gcode with compas_slicer " + n_l
- gcode += ";Ioana Mitropolou @ioannaMitropoulou" + n_l
- gcode += ";Joris Burger @joburger" + n_l
- gcode += ";Andrei Jipa @stratocaster" + n_l
- gcode += ";MIT License" + n_l
- gcode += ";" + n_l
- gcode += ";generated " + datetimestamp + n_l
- gcode += ";" + n_l
- gcode += "T0 ;set tool" + n_l # for printing with multiple nozzles this will be useful
- gcode += "G21 ;metric values" + n_l
- gcode += "G90 ;absolute positioning" + n_l
- gcode += "M107 ;start with the fan off" + n_l
- gcode += "M140 S" + str(bed_temperature) + " ;set bed temperature fast" + n_l
- gcode += "M104 S" + str(extruder_temperature) + " ;set extruder temperature fast" + n_l
- gcode += "M109 S" + str(extruder_temperature) + " ;set extruder temperature and wait" + n_l
- gcode += "M190 S" + str(bed_temperature) + " ;set bed temperature and wait" + n_l
- gcode += "G21 ;metric values" + n_l
- gcode += "G90 ;absolute positioning" + n_l
- gcode += "M83 ;set e-values to relative while in absolute mode" + n_l
- if acceleration != 0:
- gcode += "M201 X" + str(acceleration) + " Y" + str(acceleration) + " ;set max acceleration in xy" + n_l
- if jerk != 0:
- gcode += "M207 X" + str(jerk) + " ;set max jerk" + n_l # TODO: check firmware compatibility of M207
- gcode += "G28 X0 Y0 ;home x and y axes" + n_l
- gcode += "G28 Z0 ;home z axis independently" + n_l
- gcode += "G1 F4500 ;set feedrate to 4,500 mm/min (75 mm/s)" + n_l
- gcode += "G1 Z0.2 ;move nozzle up 0.2mm" + n_l
- gcode += "G1 X5 Y5 ;move nozzle up 0.2mm" + n_l
- ex_val = 560 * 0.2 * path_width / (math.pi * (filament_diameter ** 2))
- gcode += "G1 Y150 E" + '{:.3f}'.format(ex_val) + " ;extrude a line of filament" + n_l
- gcode += "G1 X" + '{:.3f}'.format(5 + path_width) + " ;move nozzle away from the first line" + n_l
- gcode += "G1 Y5 E" + '{:.3f}'.format(ex_val) + " ;extrude a second line of filament" + n_l
- gcode += "G1 Z2 ;move nozzle up 1.8mm" + n_l
- gcode += "G92 E0 ;reset the extruded length" + n_l # useless after M83, otherwise needed
- gcode += "G1 F" + str(feedrate_travel) + " ;set initial Feedrate" + n_l
- gcode += "M117 compas gcode print... ;show up text on LCD" + n_l
- gcode += ";" + n_l
- # ______________________________________________________________________/ header
-
- # ######################################################################
- # global parameters
- # retraction_on = True # boolean; is true when retraction is toggled
- fan_on = False # boolean; is true when fan is toggled
- prev_point = PrintPoint(Point(0, 0, 0), layer_height=1.0,
- mesh_normal=Vector(1.0, 0.0, 0.0)) # dummy print_point that is overwritten
- layer_height = 0.2 # dummy value that is overwritten
- # ______________________________________________________________________/ global parameters
-
- # ######################################################################
- # iterate all layers, paths
- print('')
- for point_v, i, j, k in print_organizer.printpoints_indices_iterator(): # i: layer; j: path; k: point index
- layer_height = point_v.layer_height
- # Calculate relative length
- re_l = ((point_v.pt.x - prev_point.pt.x) ** 2 + (point_v.pt.y - prev_point.pt.y) ** 2 + (
- point_v.pt.z - prev_point.pt.z) ** 2) ** 0.5
- if k == 0: # 'First point
- # retract before moving to first point in path if necessary
- if (retraction_min_travel < re_l) and (point_v.extruder_toggle is False):
- gcode += "G1 F" + str(feedrate_retraction) + " ;set retraction feedrate" + n_l
- gcode += "G1" + " E-" + str(retraction_length) + " ;retract" + n_l
- # ZHOP
- gcode += "G1" + " Z" + '{:.3f}'.format(prev_point.pt.z + z_hop) + " ;z-hop" + n_l
- # move to first point in path:
- gcode += "G1" + " F" + str(feedrate_travel) + " ;set travel feedrate" + n_l
- if prev_point.pt.z != point_v.pt.z:
- gcode += "G1 X" + '{:.3f}'.format(point_v.pt.x) + " Y" + '{:.3f}'.format(point_v.pt.y) + " Z" + '{:.3f}'.format(point_v.pt.z) + n_l
- else:
- gcode += "G1 X" + '{:.3f}'.format(point_v.pt.x) + " Y" + '{:.3f}'.format(point_v.pt.y) + n_l
- # reverse z-hop after reaching the first point
- gcode += "G1 F" + str(feedrate_retraction) + " ;set retraction feedrate" + n_l
- gcode += "G1" + " Z" + '{:.3f}'.format(point_v.pt.z) + " ;reverse z-hop" + n_l
- # reverse retract after reaching the first point
- gcode += "G1" + " E" + str(retraction_length) + " ;reverse retraction" + n_l
- else:
- if prev_point.pt.z != point_v.pt.z:
- gcode += "G1 X" + '{:.3f}'.format(point_v.pt.x) + " Y" + '{:.3f}'.format(
- point_v.pt.y) + " Z" + '{:.3f}'.format(point_v.pt.z) + n_l
- else:
- gcode += "G1 X" + '{:.3f}'.format(point_v.pt.x) + " Y" + '{:.3f}'.format(point_v.pt.y) + n_l
- # set extrusion feedrate: low for adhesion to bed and normal otherwise
- if point_v.pt.z < min_over_z:
- gcode += "G1" + " F" + str(feedrate_low) + " ;set low feedrate" + n_l
+ cross_section = layer_height * path_width
+ filament_area = math.pi * (filament_diameter / 2) ** 2
+ return flowrate * distance * cross_section / filament_area
+
+
+def _distance_3d(p1: Point, p2: Point) -> float:
+ """Calculate 3D Euclidean distance between two points."""
+ return math.sqrt((p2.x - p1.x) ** 2 + (p2.y - p1.y) ** 2 + (p2.z - p1.z) ** 2)
+
+
+# =============================================================================
+# G-code Sections
+# =============================================================================
+
+
+def _write_header(gb: GcodeBuilder, config: GcodeConfig, timestamp: str) -> None:
+ """Write G-code header with machine setup commands."""
+ # File info
+ gb.comment("G-code generated by compas_slicer")
+ gb.comment(f"Generated: {timestamp}")
+ gb.comment("MIT License")
+ gb.blank()
+
+ # Machine setup
+ gb.cmd("T0", "select tool 0")
+ gb.cmd("G21", "metric units")
+ gb.cmd("G90", "absolute positioning")
+ gb.cmd("M107", "fan off")
+ gb.blank()
+
+ # Heating
+ gb.cmd(f"M140 S{config.bed_temperature}", "set bed temp (no wait)")
+ gb.cmd(f"M104 S{config.extruder_temperature}", "set extruder temp (no wait)")
+ gb.cmd(f"M109 S{config.extruder_temperature}", "wait for extruder temp")
+ gb.cmd(f"M190 S{config.bed_temperature}", "wait for bed temp")
+ gb.blank()
+
+ # Motion setup
+ gb.cmd("G21", "metric units")
+ gb.cmd("G90", "absolute positioning")
+ gb.cmd("M83", "relative extrusion")
+
+ if config.acceleration > 0:
+ gb.cmd(f"M201 X{config.acceleration} Y{config.acceleration}", "set max acceleration")
+ if config.jerk > 0:
+ gb.cmd(f"M205 X{config.jerk} Y{config.jerk}", "set jerk")
+
+ gb.blank()
+
+ # Homing
+ gb.cmd("G28 X0 Y0", "home X and Y")
+ gb.cmd("G28 Z0", "home Z")
+ gb.blank()
+
+
+def _write_purge_line(gb: GcodeBuilder, config: GcodeConfig) -> None:
+ """Write purge/prime line sequence to prepare the nozzle."""
+ gb.comment("Purge line")
+ gb.cmd(f"G1 F{STARTUP_FEEDRATE}", "set feedrate")
+ gb.cmd(f"G1 Z{PURGE_HEIGHT}", "move to purge height")
+ gb.cmd(f"G1 X{PURGE_START_X} Y{PURGE_START_Y}", "move to purge start")
+
+ # Calculate extrusion for purge lines
+ e_purge = _calc_extrusion(
+ PURGE_LENGTH, PURGE_HEIGHT, config.layer_width, config.filament_diameter
+ )
+
+ # First purge line
+ gb.cmd(f"G1 Y{PURGE_START_Y + PURGE_LENGTH} E{e_purge:.3f}", "purge line 1")
+ # Move over
+ gb.cmd(f"G1 X{PURGE_START_X + config.layer_width:.3f}", "step over")
+ # Second purge line
+ gb.cmd(f"G1 Y{PURGE_START_Y} E{e_purge:.3f}", "purge line 2")
+
+ # Prepare for print
+ gb.cmd("G1 Z2", "lift nozzle")
+ gb.cmd("G92 E0", "reset extruder position")
+ gb.cmd(f"G1 F{config.feedrate_travel}", "set travel feedrate")
+ gb.cmd("M117 Printing...", "LCD status")
+ gb.blank()
+
+
+def _write_toolpath(
+ gb: GcodeBuilder, print_organizer: BasePrintOrganizer, config: GcodeConfig
+) -> float:
+ """Write the main toolpath G-code.
+
+ Returns the final Z height for use in footer.
+ """
+ gb.comment("Begin toolpath")
+
+ fan_on = False
+ prev_pt = Point(0, 0, 0)
+ prev_z = 0.0
+ layer_height = PURGE_HEIGHT
+
+ for ppt, layer_idx, _path_idx, point_idx in print_organizer.printpoints_indices_iterator():
+ pt = ppt.pt
+ layer_height = ppt.layer_height
+ distance = _distance_3d(prev_pt, pt)
+
+ if point_idx == 0:
+ # First point in path - handle travel move
+ _write_travel_to_path_start(gb, config, prev_pt, pt, distance)
+
+ # Set feedrate based on Z height (slower near bed for adhesion)
+ if pt.z < config.min_over_z:
+ gb.cmd(f"G1 F{config.feedrate_low}", "slow feedrate for adhesion")
else:
- gcode += "G1" + " F" + str(feedrate) + " ;set extrusion feedrate" + n_l
- else: # from 2nd point in each path onwards
- # Calculate feedrate : TODO: just a basic formula for now, better ones in the future
- e_val = flowrate * 4 * re_l * layer_height * path_width / (math.pi * (filament_diameter ** 2))
- if point_v.pt.z < min_over_z:
- e_val *= flow_over
- gcode += "G1 X" + '{:.3f}'.format(point_v.pt.x) + " Y" + '{:.3f}'.format(
- point_v.pt.y) + " E" + '{:.3f}'.format(e_val) + n_l
- prev_point = point_v
- if fan_on is False:
- if i * layer_height >= fan_start_z: # 'Fan On:
- gcode += "M106 S" + str(fan_speed) + " ;set fan on to set speed" + n_l
- fan_on = True
-
- # 'retract after last path
- gcode += "G1 F" + str(feedrate_retraction) + " ;set ret spd" + n_l
- gcode += "G1" + " E-" + str(retraction_length) + " ;ret fil" + n_l
- gcode += "G1" + " Z" + '{:.3f}'.format(3 * (prev_point.pt.z + z_hop)) + " ;ZHop" + n_l
- gcode += "G1 F" + str(feedrate_travel) + " ;set ret spd" + n_l
-
- #######################################################################
- # Footer
- gcode += "M201 X500 Y500 ;set acceleration to 500mm/s^2" + n_l
- gcode += "G1" + " F 1000 ;set feedrate to 1000 mm/min" + n_l
- gcode += "G1 X0 Y0 ;home x and y axes" + n_l
- gcode += "M104 S0 ;turn extruder heater off" + n_l
- gcode += "M140 S0 ;turn bed heater off (if it exists)" + n_l
- gcode += "M84 ;turn steppers off" + n_l
- gcode += "M106 S0 ;turn fan off" + n_l
- # ______________________________________________________________________/ footer
-
- return gcode
+ gb.cmd(f"G1 F{config.feedrate}", "print feedrate")
+ else:
+ # Subsequent points - extrude
+ e_val = _calc_extrusion(
+ distance,
+ layer_height,
+ config.layer_width,
+ config.filament_diameter,
+ config.flowrate,
+ )
+ # Apply overextrusion near bed
+ if pt.z < config.min_over_z:
+ e_val *= config.flow_over
+
+ gb.cmd(f"G1 X{pt.x:.3f} Y{pt.y:.3f} E{e_val:.3f}")
+
+ # Fan control
+ if not fan_on and layer_idx * layer_height >= config.fan_start_z:
+ gb.cmd(f"M106 S{config.fan_speed}", "fan on")
+ fan_on = True
+
+ prev_pt = pt
+ prev_z = pt.z
+
+ gb.blank()
+ return prev_z
+
+
+def _write_travel_to_path_start(
+ gb: GcodeBuilder,
+ config: GcodeConfig,
+ prev_pt: Point,
+ target_pt: Point,
+ distance: float,
+) -> None:
+ """Write travel move to start of a new path, with retraction if needed."""
+ needs_retract = distance > config.retraction_min_travel
+
+ if needs_retract:
+ # Retract
+ gb.cmd(f"G1 F{config.feedrate_retraction}", "retraction feedrate")
+ gb.cmd(f"G1 E-{config.retraction_length}", "retract")
+ # Z-hop
+ gb.cmd(f"G1 Z{prev_pt.z + config.z_hop:.3f}", "z-hop")
+ # Travel
+ gb.cmd(f"G1 F{config.feedrate_travel}", "travel feedrate")
+
+ # Move to target
+ if prev_pt.z != target_pt.z or needs_retract:
+ gb.cmd(f"G1 X{target_pt.x:.3f} Y{target_pt.y:.3f} Z{target_pt.z:.3f}")
+ else:
+ gb.cmd(f"G1 X{target_pt.x:.3f} Y{target_pt.y:.3f}")
+
+ if needs_retract:
+ # Reverse z-hop and retraction
+ gb.cmd(f"G1 F{config.feedrate_retraction}", "retraction feedrate")
+ gb.cmd(f"G1 Z{target_pt.z:.3f}", "reverse z-hop")
+ gb.cmd(f"G1 E{config.retraction_length}", "unretract")
+
+
+def _write_footer(gb: GcodeBuilder, config: GcodeConfig, final_z: float) -> None:
+ """Write G-code footer with shutdown sequence."""
+ gb.comment("End of print")
+
+ # Final retract and lift
+ gb.cmd(f"G1 F{config.feedrate_retraction}", "retraction feedrate")
+ gb.cmd(f"G1 E-{config.retraction_length}", "final retract")
+ gb.cmd(f"G1 Z{min(final_z + config.z_hop * 3, config.print_volume_z):.3f}", "lift nozzle")
+ gb.blank()
+
+ # Shutdown sequence
+ gb.cmd(f"M201 X{SHUTDOWN_ACCEL} Y{SHUTDOWN_ACCEL}", "reduce acceleration")
+ gb.cmd(f"G1 F{SHUTDOWN_FEEDRATE}", "slow feedrate")
+ gb.cmd("G1 X0 Y0", "move to home")
+ gb.cmd("M104 S0", "extruder heater off")
+ gb.cmd("M140 S0", "bed heater off")
+ gb.cmd("M84", "motors off")
+ gb.cmd("M106 S0", "fan off")
+
+
+# =============================================================================
+# Main Function
+# =============================================================================
+
+
+def create_gcode_text(
+ print_organizer: BasePrintOrganizer, config: GcodeConfig | None = None
+) -> str:
+ """Create G-code text from organized print points.
+
+ Parameters
+ ----------
+ print_organizer : BasePrintOrganizer
+ The print organizer containing printpoints.
+ config : GcodeConfig | None
+ G-code configuration. If None, uses defaults.
+
+ Returns
+ -------
+ str
+ Complete G-code file content.
+
+ """
+ config = config or GcodeConfig()
+ logger.info("Generating G-code")
+
+ gb = GcodeBuilder()
+ timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+
+ _write_header(gb, config, timestamp)
+ _write_purge_line(gb, config)
+ final_z = _write_toolpath(gb, print_organizer, config)
+ _write_footer(gb, config, final_z)
+
+ return gb.build()
diff --git a/src/compas_slicer/print_organization/print_organization_utilities/linear_velocity.py b/src/compas_slicer/print_organization/print_organization_utilities/linear_velocity.py
index 6a88e1b2..8cc868dd 100644
--- a/src/compas_slicer/print_organization/print_organization_utilities/linear_velocity.py
+++ b/src/compas_slicer/print_organization/print_organization_utilities/linear_velocity.py
@@ -1,8 +1,16 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Callable
+
from compas.geometry import Vector, dot_vectors
+from loguru import logger
+
from compas_slicer.utilities import remap, remap_unbound
-import logging
-logger = logging.getLogger('logger')
+if TYPE_CHECKING:
+ from compas_slicer.geometry import PrintPoint
+ from compas_slicer.print_organization import BasePrintOrganizer
+
__all__ = ['set_linear_velocity_constant',
'set_linear_velocity_per_layer',
@@ -10,7 +18,7 @@
'set_linear_velocity_by_overhang']
-def set_linear_velocity_constant(print_organizer, v=25.0):
+def set_linear_velocity_constant(print_organizer: BasePrintOrganizer, v: float = 25.0) -> None:
"""Sets the linear velocity parameter of the printpoints depending on the selected type.
Parameters
@@ -24,7 +32,9 @@ def set_linear_velocity_constant(print_organizer, v=25.0):
printpoint.velocity = v
-def set_linear_velocity_per_layer(print_organizer, per_layer_velocities):
+def set_linear_velocity_per_layer(
+ print_organizer: BasePrintOrganizer, per_layer_velocities: list[float]
+) -> None:
"""Sets the linear velocity parameter of the printpoints depending on the selected type.
Parameters
@@ -35,14 +45,22 @@ def set_linear_velocity_per_layer(print_organizer, per_layer_velocities):
"""
logger.info("Setting per-layer linear velocity")
- assert len(per_layer_velocities) == print_organizer.number_of_layers, 'Wrong number of velocity values. You need \
- to provide one velocity value per layer, on the "per_layer_velocities" list.'
- for printpoint, i, j, k in print_organizer.printpoints_indices_iterator():
+ if len(per_layer_velocities) != print_organizer.number_of_layers:
+ raise ValueError(
+ f'Wrong number of velocity values: got {len(per_layer_velocities)}, '
+ f'need {print_organizer.number_of_layers} (one per layer)'
+ )
+ for printpoint, i, _j, _k in print_organizer.printpoints_indices_iterator():
printpoint.velocity = per_layer_velocities[i]
-def set_linear_velocity_by_range(print_organizer, param_func, parameter_range, velocity_range,
- bound_remapping=True):
+def set_linear_velocity_by_range(
+ print_organizer: BasePrintOrganizer,
+ param_func: Callable[[PrintPoint], float],
+ parameter_range: tuple[float, float],
+ velocity_range: tuple[float, float],
+ bound_remapping: bool = True,
+) -> None:
"""Sets the linear velocity parameter of the printpoints depending on the selected type.
Parameters
@@ -61,7 +79,8 @@ def set_linear_velocity_by_range(print_organizer, param_func, parameter_range, v
logger.info("Setting linear velocity based on parameter range")
for printpoint in print_organizer.printpoints_iterator():
param = param_func(printpoint)
- assert param, 'The param_func does not return any value for calculating the velocity range.'
+ if param is None:
+ raise ValueError('The param_func does not return any value for calculating the velocity range.')
if bound_remapping:
v = remap(param, parameter_range[0], parameter_range[1], velocity_range[0], velocity_range[1])
else:
@@ -69,7 +88,12 @@ def set_linear_velocity_by_range(print_organizer, param_func, parameter_range, v
printpoint.velocity = v
-def set_linear_velocity_by_overhang(print_organizer, overhang_range, velocity_range, bound_remapping=True):
+def set_linear_velocity_by_overhang(
+ print_organizer: BasePrintOrganizer,
+ overhang_range: tuple[float, float],
+ velocity_range: tuple[float, float],
+ bound_remapping: bool = True,
+) -> None:
"""Set velocity by overhang by using set_linear_velocity_by_range.
An example function for how to use the 'set_linear_velocity_by_range'. In this case the parameter that controls the
diff --git a/src/compas_slicer/print_organization/print_organization_utilities/safety_printpoints.py b/src/compas_slicer/print_organization/print_organization_utilities/safety_printpoints.py
index 0710da20..5523a6c9 100644
--- a/src/compas_slicer/print_organization/print_organization_utilities/safety_printpoints.py
+++ b/src/compas_slicer/print_organization/print_organization_utilities/safety_printpoints.py
@@ -1,15 +1,23 @@
+from __future__ import annotations
+
+import copy
+from typing import TYPE_CHECKING
+
from compas.geometry import Vector
+from loguru import logger
+
+from compas_slicer.geometry import PrintLayer, PrintPath, PrintPoint
from compas_slicer.print_organization.print_organization_utilities.extruder_toggle import check_assigned_extruder_toggle
from compas_slicer.utilities import find_next_printpoint
-import copy
-import logging
-logger = logging.getLogger('logger')
+if TYPE_CHECKING:
+ from compas_slicer.print_organization import BasePrintOrganizer
+
__all__ = ['add_safety_printpoints']
-def add_safety_printpoints(print_organizer, z_hop=10.0):
+def add_safety_printpoints(print_organizer: BasePrintOrganizer, z_hop: float = 10.0) -> None:
"""Generates a safety print point at the interruptions of the print paths.
Parameters
@@ -19,47 +27,51 @@ def add_safety_printpoints(print_organizer, z_hop=10.0):
z_hop: float
Vertical distance (in millimeters) of the safety point above the PrintPoint.
"""
- assert check_assigned_extruder_toggle(print_organizer), \
- 'You need to set the extruder toggles first, before you can create safety points'
- logger.info("Generating safety print points with height " + str(z_hop) + " mm")
+ if not check_assigned_extruder_toggle(print_organizer):
+ raise ValueError('You need to set the extruder toggles first, before you can create safety points')
+ logger.info(f"Generating safety print points with height {z_hop} mm")
+
+ from compas_slicer.geometry import PrintPointsCollection
- pp_dict = print_organizer.printpoints_dict
- pp_copy_dict = {} # should not be altering the dict that we are iterating through > copy
+ new_collection = PrintPointsCollection()
- for i, layer_key in enumerate(pp_dict):
- pp_copy_dict[layer_key] = {}
+ for i, layer in enumerate(print_organizer.printpoints):
+ new_layer = PrintLayer()
- for j, path_key in enumerate(pp_dict[layer_key]):
- pp_copy_dict[layer_key][path_key] = []
+ for j, path in enumerate(layer):
+ new_path = PrintPath()
- for k, printpoint in enumerate(pp_dict[layer_key][path_key]):
+ for k, printpoint in enumerate(path):
# add regular printing points
- pp_copy_dict[layer_key][path_key].append(printpoint)
+ new_path.printpoints.append(printpoint)
# add safety printpoints if there is an interruption
if printpoint.extruder_toggle is False:
# safety ppt after current printpoint
- pp_copy_dict[layer_key][path_key].append(create_safety_printpoint(printpoint, z_hop, False))
+ new_path.printpoints.append(create_safety_printpoint(printpoint, z_hop, False))
# safety ppt before next printpoint (if there exists one)
- next_ppt = find_next_printpoint(pp_dict, i, j, k)
- if next_ppt:
- if next_ppt.extruder_toggle is True: # if it is a printing ppt
- pp_copy_dict[layer_key][path_key].append(create_safety_printpoint(next_ppt, z_hop, False))
+ next_ppt = find_next_printpoint(print_organizer.printpoints, i, j, k)
+ if next_ppt and next_ppt.extruder_toggle is True: # if it is a printing ppt
+ new_path.printpoints.append(create_safety_printpoint(next_ppt, z_hop, False))
+
+ new_layer.paths.append(new_path)
+
+ new_collection.layers.append(new_layer)
# finally, insert a safety print point at the beginning of the entire print
try:
- safety_printpoint = create_safety_printpoint(pp_dict['layer_0']['path_0'][0], z_hop, False)
- pp_copy_dict['layer_0']['path_0'].insert(0, safety_printpoint)
- except KeyError as e:
+ safety_printpoint = create_safety_printpoint(new_collection[0][0][0], z_hop, False)
+ new_collection[0][0].printpoints.insert(0, safety_printpoint)
+ except (KeyError, IndexError) as e:
logger.exception(e)
# the safety printpoint has already been added at the end since the last printpoint extruder_toggle_type is False
- print_organizer.printpoints_dict = pp_copy_dict
+ print_organizer.printpoints = new_collection
-def create_safety_printpoint(printpoint, z_hop, extruder_toggle):
+def create_safety_printpoint(printpoint: PrintPoint, z_hop: float, extruder_toggle: bool) -> PrintPoint:
"""
Parameters
@@ -76,7 +88,8 @@ def create_safety_printpoint(printpoint, z_hop, extruder_toggle):
pt0 = printpoint.pt
safety_printpoint = copy.deepcopy(printpoint)
safety_printpoint.pt = pt0 + Vector(0, 0, z_hop)
- safety_printpoint.frame.point = safety_printpoint.pt
+ if safety_printpoint.frame is not None:
+ safety_printpoint.frame.point = safety_printpoint.pt
safety_printpoint.extruder_toggle = extruder_toggle
return safety_printpoint
diff --git a/src/compas_slicer/print_organization/print_organization_utilities/wait_time.py b/src/compas_slicer/print_organization/print_organization_utilities/wait_time.py
index 4350248b..a160da5c 100644
--- a/src/compas_slicer/print_organization/print_organization_utilities/wait_time.py
+++ b/src/compas_slicer/print_organization/print_organization_utilities/wait_time.py
@@ -1,16 +1,32 @@
-import logging
-from compas_slicer.utilities import find_next_printpoint
+from __future__ import annotations
+
import math
+from typing import TYPE_CHECKING, Literal
+
from compas.geometry import Vector, normalize_vector
+from loguru import logger
+
+from compas_slicer.utilities import find_next_printpoint
+
+if TYPE_CHECKING:
+ from compas_slicer.print_organization import BasePrintOrganizer
-logger = logging.getLogger('logger')
__all__ = ['set_wait_time_on_sharp_corners',
'set_wait_time_based_on_extruder_toggle',
'override_wait_time']
+WaitType = Literal[
+ 'wait_before_extrusion',
+ 'wait_after_extrusion',
+ 'wait_before_and_after_extrusion',
+ 'wait_at_sharp_corners',
+]
+
-def set_wait_time_on_sharp_corners(print_organizer, threshold=0.5 * math.pi, wait_time=0.3):
+def set_wait_time_on_sharp_corners(
+ print_organizer: BasePrintOrganizer, threshold: float = 0.5 * math.pi, wait_time: float = 0.3
+) -> None:
"""
Sets a wait time at the sharp corners of the path, based on the angle threshold.
@@ -24,7 +40,7 @@ def set_wait_time_on_sharp_corners(print_organizer, threshold=0.5 * math.pi, wai
"""
number_of_wait_points = 0
for printpoint, i, j, k in print_organizer.printpoints_indices_iterator():
- neighbors = print_organizer.get_printpoint_neighboring_items('layer_%d' % i, 'path_%d' % j, k)
+ neighbors = print_organizer.get_printpoint_neighboring_items(i, j, k)
prev_ppt = neighbors[0]
next_ppt = neighbors[1]
@@ -37,10 +53,12 @@ def set_wait_time_on_sharp_corners(print_organizer, threshold=0.5 * math.pi, wai
printpoint.wait_time = wait_time
printpoint.blend_radius = 0.0 # 0.0 blend radius for points where the robot will wait
number_of_wait_points += 1
- logger.info('Added wait times for %d points' % number_of_wait_points)
+ logger.info(f'Added wait times for {number_of_wait_points} points')
-def set_wait_time_based_on_extruder_toggle(print_organizer, wait_type, wait_time=0.3):
+def set_wait_time_based_on_extruder_toggle(
+ print_organizer: BasePrintOrganizer, wait_type: WaitType, wait_time: float = 0.3
+) -> None:
"""
Sets a wait time for the printpoints, either before extrusion starts,
after extrusion finishes, or in both cases.
@@ -58,14 +76,14 @@ def set_wait_time_based_on_extruder_toggle(print_organizer, wait_type, wait_time
"""
for printpoint in print_organizer.printpoints_iterator():
- assert printpoint.extruder_toggle is not None, \
- 'You need to set the extruder toggles first, before you can automatically set the wait time'
+ if printpoint.extruder_toggle is None:
+ raise ValueError('You need to set the extruder toggles first, before you can automatically set the wait time')
logger.info("Setting wait time")
for printpoint, i, j, k in print_organizer.printpoints_indices_iterator():
number_of_wait_points = 0
- next_ppt = find_next_printpoint(print_organizer.printpoints_dict, i, j, k)
+ next_ppt = find_next_printpoint(print_organizer.printpoints, i, j, k)
# for the brim layer don't add any wait times
if not print_organizer.slicer.layers[i].is_brim and next_ppt:
@@ -89,12 +107,12 @@ def set_wait_time_based_on_extruder_toggle(print_organizer, wait_type, wait_time
next_ppt.blend_radius = 0.0
number_of_wait_points += 1
else:
- logger.error('Unknown wait type : ' + str(wait_type))
+ logger.error(f'Unknown wait type: {wait_type}')
- logger.info('Added wait times for %d points' % number_of_wait_points)
+ logger.info(f'Added wait times for {number_of_wait_points} points')
-def override_wait_time(print_organizer, override_value):
+def override_wait_time(print_organizer: BasePrintOrganizer, override_value: float) -> None:
"""
Overrides the wait_time value for the printpoints with a user-defined value.
diff --git a/src/compas_slicer/print_organization/scalar_field_print_organizer.py b/src/compas_slicer/print_organization/scalar_field_print_organizer.py
index 9cb1a9b7..3af4bfa4 100644
--- a/src/compas_slicer/print_organization/scalar_field_print_organizer.py
+++ b/src/compas_slicer/print_organization/scalar_field_print_organizer.py
@@ -1,81 +1,116 @@
+from __future__ import annotations
+
+from pathlib import Path as FilePath
+from typing import TYPE_CHECKING
+
+import progressbar
from compas.geometry import Vector, normalize_vector
-from compas_slicer.print_organization import BasePrintOrganizer
+from loguru import logger
+
import compas_slicer.utilities as utils
-from compas_slicer.geometry import PrintPoint
-import progressbar
-import logging
+from compas_slicer.config import InterpolationConfig
+from compas_slicer.geometry import PrintLayer, PrintPath, PrintPoint
from compas_slicer.pre_processing import GradientEvaluation
+from compas_slicer.print_organization.base_print_organizer import BasePrintOrganizer
from compas_slicer.utilities.attributes_transfer import transfer_mesh_attributes_to_printpoints
-from compas_slicer.parameters import get_param
-import compas_slicer
-logger = logging.getLogger('logger')
+if TYPE_CHECKING:
+ from compas_slicer.slicers import ScalarFieldSlicer
+
__all__ = ['ScalarFieldPrintOrganizer']
class ScalarFieldPrintOrganizer(BasePrintOrganizer):
- """
- Organizing the printing process for the realization of planar contours.
+ """Organize the printing process for scalar field contours.
Attributes
----------
- slicer: :class:`compas_slicer.slicers.PlanarSlicer`
- An instance of the compas_slicer.slicers.PlanarSlicer.
+ slicer : ScalarFieldSlicer
+ An instance of ScalarFieldSlicer.
+ config : InterpolationConfig
+ Configuration parameters.
+ DATA_PATH : str | Path
+ Data directory path.
+ vertical_layers : list[VerticalLayer]
+ Vertical layers from slicer.
+ horizontal_layers : list[Layer]
+ Horizontal layers from slicer.
+ g_evaluation : GradientEvaluation
+ Gradient evaluation object.
+
"""
- def __init__(self, slicer, parameters, DATA_PATH):
- assert isinstance(slicer, compas_slicer.slicers.ScalarFieldSlicer), 'Please provide a ScalarFieldSlicer'
+ slicer: ScalarFieldSlicer
+
+ def __init__(
+ self,
+ slicer: ScalarFieldSlicer,
+ config: InterpolationConfig | None = None,
+ DATA_PATH: str | FilePath = ".",
+ ) -> None:
+ from compas_slicer.slicers import ScalarFieldSlicer
+
+ if not isinstance(slicer, ScalarFieldSlicer):
+ raise TypeError('Please provide a ScalarFieldSlicer')
BasePrintOrganizer.__init__(self, slicer)
self.DATA_PATH = DATA_PATH
self.OUTPUT_PATH = utils.get_output_directory(DATA_PATH)
- self.parameters = parameters
+ self.config = config if config else InterpolationConfig()
self.vertical_layers = slicer.vertical_layers
self.horizontal_layers = slicer.horizontal_layers
- assert len(self.vertical_layers) + len(self.horizontal_layers) == len(slicer.layers)
+ if len(self.vertical_layers) + len(self.horizontal_layers) != len(slicer.layers):
+ raise ValueError(
+ f"Layer count mismatch: {len(self.vertical_layers)} vertical + "
+ f"{len(self.horizontal_layers)} horizontal != {len(slicer.layers)} total"
+ )
if len(self.horizontal_layers) > 0:
- assert len(self.horizontal_layers) == 1, "Only one brim horizontal layer is currently supported."
- assert self.horizontal_layers[0].is_brim, "Only one brim horizontal layer is currently supported."
+ if len(self.horizontal_layers) != 1:
+ raise ValueError("Only one brim horizontal layer is currently supported.")
+ if not self.horizontal_layers[0].is_brim:
+ raise ValueError("Only one brim horizontal layer is currently supported.")
logger.info('Slicer has one horizontal brim layer.')
- self.g_evaluation = self.add_gradient_to_vertices()
+ self.g_evaluation: GradientEvaluation = self.add_gradient_to_vertices()
- def __repr__(self):
- return "" % len(self.slicer.layers)
+ def __repr__(self) -> str:
+ return f""
- def create_printpoints(self):
- """ Create the print points of the fabrication process """
+ def create_printpoints(self) -> None:
+ """Create the print points of the fabrication process."""
count = 0
logger.info('Creating print points ...')
with progressbar.ProgressBar(max_value=self.slicer.number_of_points) as bar:
- for i, layer in enumerate(self.slicer.layers):
- self.printpoints_dict['layer_%d' % i] = {}
+ for _i, layer in enumerate(self.slicer.layers):
+ print_layer = PrintLayer()
- for j, path in enumerate(layer.paths):
- self.printpoints_dict['layer_%d' % i]['path_%d' % j] = []
+ for _j, path in enumerate(layer.paths):
+ print_path = PrintPath()
for k, point in enumerate(path.points):
normal = utils.get_normal_of_path_on_xy_plane(k, point, path, self.slicer.mesh)
- h = get_param(self.parameters, 'avg_layer_height', defaults_type='layers')
+ h = self.config.avg_layer_height
printpoint = PrintPoint(pt=point, layer_height=h, mesh_normal=normal)
- self.printpoints_dict['layer_%d' % i]['path_%d' % j].append(printpoint)
+ print_path.printpoints.append(printpoint)
bar.update(count)
count += 1
+ print_layer.paths.append(print_path)
+
+ self.printpoints.layers.append(print_layer)
+
# transfer gradient information to printpoints
- transfer_mesh_attributes_to_printpoints(self.slicer.mesh, self.printpoints_dict)
+ transfer_mesh_attributes_to_printpoints(self.slicer.mesh, self.printpoints)
# add non-planar print data to printpoints
- for i, layer in enumerate(self.slicer.layers):
- layer_key = 'layer_%d' % i
- for j, path in enumerate(layer.paths):
- path_key = 'path_%d' % j
- for pp in self.printpoints_dict[layer_key][path_key]:
+ for layer in self.printpoints:
+ for path in layer:
+ for pp in path:
grad_norm = pp.attributes['gradient_norm']
grad = pp.attributes['gradient']
pp.distance_to_support = grad_norm
@@ -83,7 +118,7 @@ def create_printpoints(self):
pp.up_vector = Vector(*normalize_vector(grad))
pp.frame = pp.get_frame()
- def add_gradient_to_vertices(self):
+ def add_gradient_to_vertices(self) -> GradientEvaluation:
g_evaluation = GradientEvaluation(self.slicer.mesh, self.DATA_PATH)
g_evaluation.compute_gradient()
g_evaluation.compute_gradient_norm()
@@ -93,7 +128,7 @@ def add_gradient_to_vertices(self):
self.slicer.mesh.update_default_vertex_attributes({'gradient': 0.0})
self.slicer.mesh.update_default_vertex_attributes({'gradient_norm': 0.0})
- for i, (v_key, data) in enumerate(self.slicer.mesh.vertices(data=True)):
+ for i, (_v_key, data) in enumerate(self.slicer.mesh.vertices(data=True)):
data['gradient'] = g_evaluation.vertex_gradient[i]
data['gradient_norm'] = g_evaluation.vertex_gradient_norm[i]
return g_evaluation
diff --git a/src/compas_slicer/slicers/__init__.py b/src/compas_slicer/slicers/__init__.py
index 5cf5ce9c..9a9d2362 100644
--- a/src/compas_slicer/slicers/__init__.py
+++ b/src/compas_slicer/slicers/__init__.py
@@ -1,43 +1,11 @@
-"""
-********************************************************************************
-slicers
-********************************************************************************
+"""Mesh slicing algorithms."""
-.. currentmodule:: compas_slicer.slicers
-
-Classes
-=======
-
-.. autosummary::
- :toctree: generated/
- :nosignatures:
-
- BaseSlicer
-
-
-BaseSlicer
-----------
-
-.. autosummary::
- :toctree: generated/
- :nosignatures:
-
- PlanarSlicer
- InterpolationSlicer
-"""
-
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from .base_slicer import * # noqa: F401 E402 F403
-from .planar_slicer import * # noqa: F401 E402 F403
+from .base_slicer import * # noqa: F401 F403
from .interpolation_slicer import * # noqa: F401 E402 F403
+from .planar_slicer import * # noqa: F401 E402 F403
from .planar_slicing import * # noqa: F401 E402 F403
from .scalar_field_slicer import * # noqa: F401 E402 F403
from .uv_slicer import * # noqa: F401 E402 F403
-
__all__ = [name for name in dir() if not name.startswith('_')]
diff --git a/src/compas_slicer/slicers/base_slicer.py b/src/compas_slicer/slicers/base_slicer.py
index ed148d27..e4e9030f 100644
--- a/src/compas_slicer/slicers/base_slicer.py
+++ b/src/compas_slicer/slicers/base_slicer.py
@@ -1,219 +1,204 @@
-import compas
+from __future__ import annotations
+
+from abc import abstractmethod
+from pathlib import Path as FilePath
+from typing import TYPE_CHECKING, Any
+
import numpy as np
from compas.datastructures import Mesh
-from compas_slicer.utilities import utils
+from compas.geometry import bounding_box, distance_point_point_sqrd
+from loguru import logger
+
from compas_slicer.geometry import Layer, VerticalLayer
-from compas_slicer.post_processing import seams_align
-from compas_slicer.post_processing import unify_paths_orientation
-import logging
-from abc import abstractmethod
-from compas.datastructures import mesh_bounding_box
-from compas.geometry import distance_point_point_sqrd
+from compas_slicer.post_processing.seams_align import seams_align
+from compas_slicer.post_processing.unify_paths_orientation import unify_paths_orientation
+from compas_slicer.utilities import utils
-logger = logging.getLogger('logger')
+if TYPE_CHECKING:
+ from compas_slicer.geometry import Path
-__all__ = ['BaseSlicer']
+__all__ = ["BaseSlicer"]
-class BaseSlicer(object):
- """
- This is an organizational class that holds all the information for the slice process.
- Do not use this class directly in your python code. Instead use PlanarSlicer or InterpolationSlicer.
- This class is meant to be extended for the implementation of the various slicers.
- See :class:`compas.slicer.slicers.PlanarSlicer` and :class:`compas.slicer.slicers.InterpolationSlicer` as examples.
+
+class BaseSlicer:
+ """Base class for slicers that holds all information for the slice process.
+
+ Do not use this class directly. Instead use PlanarSlicer or InterpolationSlicer.
+ This class is meant to be extended for implementing various slicers.
Attributes
----------
- mesh: :class:`compas.datastructures.Mesh`
- Input mesh, has to be a triangular mesh (i.e. no quads or n-gons allowed)
+ mesh : Mesh
+ Input mesh, must be triangular (no quads or n-gons allowed).
+ layer_height : float | None
+ Height between layers.
+ layers : list[Layer]
+ List of layers generated by slicing.
+
"""
- def __init__(self, mesh):
- # check input
- assert isinstance(mesh, compas.datastructures.Mesh), \
- "Input mesh must be of type , not " + str(type(mesh))
+ def __init__(self, mesh: Mesh) -> None:
+ if not isinstance(mesh, Mesh):
+ raise TypeError(f"Input mesh must be Mesh, not {type(mesh)}")
utils.check_triangular_mesh(mesh)
- # input
self.mesh = mesh
- logger.info("Input Mesh with : %d vertices, %d Faces"
- % (len(list(self.mesh.vertices())), len(list(self.mesh.faces()))))
-
- self.layer_height = None
- self.layers = [] # any class inheriting from Layer(horizontal sorting)
+ logger.info(f"Input Mesh with: {len(list(self.mesh.vertices()))} vertices, {len(list(self.mesh.faces()))} faces")
- ##############################
- # --- Properties
+ self.layer_height: float | None = None
+ self.layers: list[Layer] = []
@property
- def number_of_points(self):
- """ Returns int: Total number of points in the slicer."""
- total_number_of_pts = 0
- for layer in self.layers:
- for path in layer.paths:
- total_number_of_pts += len(path.points)
- return total_number_of_pts
+ def number_of_points(self) -> int:
+ """Total number of points in the slicer."""
+ return sum(len(path.points) for layer in self.layers for path in layer.paths)
@property
- def number_of_layers(self):
- """ Returns int: Total number of layers."""
+ def number_of_layers(self) -> int:
+ """Total number of layers."""
return len(self.layers)
@property
- def number_of_paths(self):
- """ Returns tuple (int, int, int): Total number of paths, number of open paths, number of closed paths. """
- total_number_of_paths = 0
- closed_paths = 0
+ def number_of_paths(self) -> tuple[int, int, int]:
+ """Total paths, open paths, closed paths."""
+ total = 0
+ closed = 0
open_paths = 0
for layer in self.layers:
- total_number_of_paths += len(layer.paths)
+ total += len(layer.paths)
for path in layer.paths:
if path.is_closed:
- closed_paths += 1
+ closed += 1
else:
open_paths += 1
-
- return total_number_of_paths, closed_paths, open_paths
+ return total, closed, open_paths
@property
- def vertical_layers(self):
- """ Returns a list of all the vertical layers stored in the slicer. """
+ def vertical_layers(self) -> list[VerticalLayer]:
+ """List of all vertical layers in the slicer."""
return [layer for layer in self.layers if isinstance(layer, VerticalLayer)]
@property
- def horizontal_layers(self):
- """ Returns a list of all the layers stored in the slicer that are NOT vertical. """
+ def horizontal_layers(self) -> list[Layer]:
+ """List of all non-vertical layers in the slicer."""
return [layer for layer in self.layers if not isinstance(layer, VerticalLayer)]
- ##############################
- # --- Functions
-
- def slice_model(self, *args, **kwargs):
- """Slices the model and applies standard post-processing and removing of invalid paths."""
-
+ def slice_model(self, *args: Any, **kwargs: Any) -> None:
+ """Slices the model and applies standard post-processing."""
self.generate_paths()
self.remove_invalid_paths_and_layers()
self.post_processing()
@abstractmethod
- def generate_paths(self):
- """To be implemented by the inheriting classes. """
+ def generate_paths(self) -> None:
+ """Generate paths. To be implemented by inheriting classes."""
pass
- def post_processing(self):
- """Applies standard post-processing operations: seams_align and unify_paths."""
+ def post_processing(self) -> None:
+ """Applies standard post-processing: seams_align and unify_paths."""
self.close_paths()
-
- # --- Align the seams between layers and unify orientation
- seams_align(self, align_with='next_path')
+ seams_align(self, align_with="next_path")
unify_paths_orientation(self)
-
self.close_paths()
- logger.info("Created %d Layers with %d total number of points" % (len(self.layers), self.number_of_points))
+ logger.info(f"Created {len(self.layers)} Layers with {self.number_of_points} total points")
- def close_paths(self):
- """ For paths that are labeled as closed, it makes sure that the first and the last point are identical. """
+ def close_paths(self) -> None:
+ """For closed paths, ensures first and last point are identical."""
for layer in self.layers:
for path in layer.paths:
- if path.is_closed: # if the path is closed, first and last point should be the same.
- if distance_point_point_sqrd(path.points[0], path.points[-1]) > 0.00001: # if not already the same
- path.points.append(path.points[0])
+ if path.is_closed and distance_point_point_sqrd(path.points[0], path.points[-1]) > 0.00001:
+ path.points.append(path.points[0])
- def remove_invalid_paths_and_layers(self):
+ def remove_invalid_paths_and_layers(self) -> None:
"""Removes invalid layers and paths from the slicer."""
-
paths_to_remove = []
layers_to_remove = []
for i, layer in enumerate(self.layers):
for j, path in enumerate(layer.paths):
- # check if a path has less than two points and appends to list to_remove
if len(path.points) < 2:
paths_to_remove.append(path)
- logger.warning("Invalid Path found: Layer %d, Path %d, %s" % (i, j, str(path)))
- # check if the layer that the invalid path was in has only one path
- # this means that path is now invalid, and the entire layer should be removed
+ logger.warning(f"Invalid Path: Layer {i}, Path {j}, {path}")
if len(layer.paths) == 1:
layers_to_remove.append(layer)
- logger.warning("Invalid Layer found: Layer %d, %s" % (i, str(layer)))
- # check for layers with less than one path and appends to list to_remove
+ logger.warning(f"Invalid Layer: Layer {i}, {layer}")
if len(layer.paths) < 1:
layers_to_remove.append(layer)
- logger.warning("Invalid Layer found: Layer %d, %s" % (i, str(layer)))
+ logger.warning(f"Invalid Layer: Layer {i}, {layer}")
- # compares the two lists and removes any invalid items
- for i, layer in enumerate(self.layers):
- for j, path in enumerate(layer.paths):
+ for layer in self.layers:
+ for path in list(layer.paths):
if path in paths_to_remove:
layer.paths.remove(path)
if layer in layers_to_remove:
self.layers.remove(layer)
- def find_vertical_layers_with_first_path_on_base(self):
- bbox = mesh_bounding_box(self.mesh)
- z_min = min([p[2] for p in bbox])
+ def find_vertical_layers_with_first_path_on_base(self) -> tuple[list[Path], list[int]]:
+ """Find vertical layers whose first path is on the base.
+
+ Returns
+ -------
+ tuple[list[Path], list[int]]
+ Paths on base and their vertical layer indices.
+
+ """
+ vertices = list(self.mesh.vertices_attributes('xyz'))
+ bbox = bounding_box(vertices)
+ z_min = min(p[2] for p in bbox)
paths_on_base = []
vertical_layer_indices = []
d_threshold = 30
for i, vertical_layer in enumerate(self.vertical_layers):
first_path = vertical_layer.paths[0]
- avg_z_dist_from_min = np.average(np.array([abs(pt[2] - z_min) for pt in first_path.points]))
-
- if avg_z_dist_from_min < d_threshold:
- paths_on_base.append(vertical_layer.paths[0])
+ avg_z_dist = np.average(np.array([abs(pt[2] - z_min) for pt in first_path.points]))
+ if avg_z_dist < d_threshold:
+ paths_on_base.append(first_path)
vertical_layer_indices.append(i)
return paths_on_base, vertical_layer_indices
- ##############################
- # --- Output
-
- def printout_info(self):
- """Prints out information from the slicing process."""
+ def printout_info(self) -> None:
+ """Prints out slicing information."""
no_of_paths, closed_paths, open_paths = self.number_of_paths
-
- print("\n---- Slicer Info ----")
- print("Number of layers: %d" % self.number_of_layers)
- print("Number of paths: %d, open paths: %d, closed paths: %d" % (no_of_paths, open_paths, closed_paths))
- print("Number of sampling printpoints on layers: %d" % self.number_of_points)
- print("")
-
- ##############################
- # --- To data, from data
+ logger.info("---- Slicer Info ----")
+ logger.info(f"Number of layers: {self.number_of_layers}")
+ logger.info(f"Number of paths: {no_of_paths}, open: {open_paths}, closed: {closed_paths}")
+ logger.info(f"Number of sampling printpoints: {self.number_of_points}")
@classmethod
- def from_data(cls, data):
+ def from_data(cls, data: dict[str, Any]) -> BaseSlicer:
"""Construct a slicer from its data representation.
Parameters
----------
- data: dict
+ data : dict
The data dictionary.
Returns
-------
- layer
+ BaseSlicer
The constructed slicer.
"""
- mesh = Mesh.from_data(data['mesh'])
+ mesh = Mesh.__from_data__(data["mesh"])
slicer = cls(mesh)
- layers_data = data['layers']
+ layers_data = data["layers"]
for layer_key in layers_data:
- if layers_data[layer_key]['layer_type'] == 'horizontal_layer':
+ if layers_data[layer_key]["layer_type"] == "horizontal_layer":
slicer.layers.append(Layer.from_data(layers_data[layer_key]))
- else: # 'vertical_layer'
+ else:
slicer.layers.append(VerticalLayer.from_data(layers_data[layer_key]))
- slicer.layer_height = data['layer_height']
+ slicer.layer_height = data["layer_height"]
return slicer
- def to_json(self, filepath, name):
+ def to_json(self, filepath: str | FilePath, name: str) -> None:
"""Writes the slicer to a JSON file."""
utils.save_to_json(self.to_data(), filepath, name)
- def to_data(self):
- """Returns a dictionary of structured data representing the data structure.
+ def to_data(self) -> dict[str, Any]:
+ """Returns a dictionary of structured data representing the slicer.
Returns
-------
@@ -221,39 +206,28 @@ def to_data(self):
The slicer's data.
"""
- # To avoid errors when saving to Json, create a copy of the self.mesh and remove from it
- # any non-serializable attributes (by checking a random face and a random vertex, assuming
- # that all faces and vertices share the same types of attributes).
mesh = self.mesh.copy()
- v_key = mesh.get_any_vertex()
+ v_key = next(iter(mesh.vertices()))
v_attrs = mesh.vertex_attributes(v_key)
for attr_key in v_attrs:
if not utils.is_jsonable(v_attrs[attr_key]):
- logger.error('vertex : ' + attr_key + str(v_attrs[attr_key]))
+ logger.error(f"vertex: {attr_key} {v_attrs[attr_key]}")
for v in mesh.vertices():
mesh.unset_vertex_attribute(v, attr_key)
- f_key = mesh.get_any_face()
+ f_key = next(iter(mesh.faces()))
f_attrs = mesh.face_attributes(f_key)
for attr_key in f_attrs:
if not utils.is_jsonable(f_attrs[attr_key]):
- logger.error('face : ' + attr_key, f_attrs[attr_key])
- mesh.update_default_face_attributes({attr_key: 0.0}) # just set all to 0.0
-
- # fill data dictionary with slicer info
- data = {'layers': self.get_layers_dict(),
- 'mesh': mesh.to_data(),
- 'layer_height': self.layer_height}
- return data
-
- def get_layers_dict(self):
- """Returns a dictionary consisting of the layers.
- """
- data = {}
- for i, layer in enumerate(self.layers):
- data[i] = layer.to_data()
- return data
-
-
-if __name__ == "__main__":
- pass
+ logger.error(f"face: {attr_key} {f_attrs[attr_key]}")
+ mesh.update_default_face_attributes({attr_key: 0.0})
+
+ return {
+ "layers": self.get_layers_dict(),
+ "mesh": mesh.__data__,
+ "layer_height": self.layer_height,
+ }
+
+ def get_layers_dict(self) -> dict[int, dict[str, Any]]:
+ """Returns a dictionary of layers."""
+ return {i: layer.to_data() for i, layer in enumerate(self.layers)}
diff --git a/src/compas_slicer/slicers/interpolation_slicer.py b/src/compas_slicer/slicers/interpolation_slicer.py
index 0f48f79a..4ca2e036 100644
--- a/src/compas_slicer/slicers/interpolation_slicer.py
+++ b/src/compas_slicer/slicers/interpolation_slicer.py
@@ -1,50 +1,75 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
import numpy as np
-from compas_slicer.slicers import BaseSlicer
-import logging
import progressbar
-from compas_slicer.parameters import get_param
-from compas_slicer.pre_processing import assign_interpolation_distance_to_mesh_vertices
-from compas_slicer.slicers.slice_utilities import ScalarFieldContours
+from loguru import logger
+
+from compas_slicer.config import InterpolationConfig
from compas_slicer.geometry import VerticalLayersManager
+from compas_slicer.pre_processing.preprocessing_utils.assign_vertex_distance import (
+ assign_interpolation_distance_to_mesh_vertices,
+)
+from compas_slicer.slicers import BaseSlicer
+from compas_slicer.slicers.slice_utilities import ScalarFieldContours
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
+
+ from compas_slicer.pre_processing import InterpolationSlicingPreprocessor
-logger = logging.getLogger('logger')
__all__ = ['InterpolationSlicer']
class InterpolationSlicer(BaseSlicer):
- """
- Generates non-planar contours that interpolate user-defined boundaries.
+ """Generates non-planar contours that interpolate user-defined boundaries.
Attributes
----------
- mesh: :class: 'compas.datastructures.Mesh'
- Input mesh, it must be a triangular mesh (i.e. no quads or n-gons allowed)
- Note that the topology of the mesh matters, irregular tesselation can lead to undesired results.
- We recommend to 1)re-topologize, 2) triangulate, and 3) weld your mesh in advance.
- preprocessor: :class: 'compas_slicer.pre_processing.InterpolationSlicingPreprocessor'
- parameters: dict
+ mesh : Mesh
+ Input mesh, must be triangular (no quads or n-gons allowed).
+ Topology matters; irregular tessellation can lead to undesired results.
+ Recommend: re-topologize, triangulate, and weld mesh in advance.
+ preprocessor : InterpolationSlicingPreprocessor | None
+ Preprocessor containing compound targets.
+ config : InterpolationConfig
+ Interpolation configuration.
+ n_multiplier : float
+ Multiplier for number of isocurves.
+
"""
- def __init__(self, mesh, preprocessor=None, parameters=None):
+ def __init__(
+ self,
+ mesh: Mesh,
+ preprocessor: InterpolationSlicingPreprocessor | None = None,
+ config: InterpolationConfig | None = None,
+ ) -> None:
logger.info('InterpolationSlicer')
BaseSlicer.__init__(self, mesh)
- if preprocessor: # make sure the mesh of the preprocessor and the mesh of the slicer match
- assert len(list(mesh.vertices())) == len(list(preprocessor.mesh.vertices()))
+ # make sure the mesh of the preprocessor and the mesh of the slicer match
+ if preprocessor and len(list(mesh.vertices())) != len(list(preprocessor.mesh.vertices())):
+ raise ValueError(
+ f"Mesh vertex count mismatch: slicer mesh has {len(list(mesh.vertices()))} vertices, "
+ f"preprocessor mesh has {len(list(preprocessor.mesh.vertices()))} vertices"
+ )
- self.parameters = parameters if parameters else {}
+ self.config = config if config else InterpolationConfig()
self.preprocessor = preprocessor
- self.n_multiplier = 1.0
+ self.n_multiplier: float = 1.0
- def generate_paths(self):
- """ Generates curved paths. """
- assert self.preprocessor, 'You need to provide a pre-processor in order to generate paths.'
+ def generate_paths(self) -> None:
+ """Generate curved paths."""
+ if not self.preprocessor:
+ raise ValueError('You need to provide a pre-processor in order to generate paths.')
- avg_layer_height = get_param(self.parameters, key='avg_layer_height', defaults_type='layers')
+ avg_layer_height = self.config.avg_layer_height
n = find_no_of_isocurves(self.preprocessor.target_LOW, self.preprocessor.target_HIGH, avg_layer_height)
params_list = get_interpolation_parameters_list(n)
- logger.info('%d paths will be generated' % n)
+ logger.info(f'{n} paths will be generated')
vertical_layers_manager = VerticalLayersManager(avg_layer_height)
@@ -62,18 +87,45 @@ def generate_paths(self):
self.layers = vertical_layers_manager.layers
-def find_no_of_isocurves(target_0, target_1, avg_layer_height=1.1):
- """ Returns the average number of isocurves that can cover the get_distance from target_0 to target_1. """
+def find_no_of_isocurves(target_0: Any, target_1: Any, avg_layer_height: float = 1.1) -> int:
+ """Return the number of isocurves to cover the distance from target_0 to target_1.
+
+ Parameters
+ ----------
+ target_0 : CompoundTarget
+ First target boundary.
+ target_1 : CompoundTarget
+ Second target boundary.
+ avg_layer_height : float
+ Average layer height in mm.
+
+ Returns
+ -------
+ int
+ Number of isocurves.
+
+ """
avg_ds0 = target_0.get_avg_distances_from_other_target(target_1)
avg_ds1 = target_1.get_avg_distances_from_other_target(target_0)
number_of_curves = ((avg_ds0 + avg_ds1) * 0.5) / avg_layer_height
return max(1, int(number_of_curves))
-def get_interpolation_parameters_list(number_of_curves):
- """ Returns a list of #number_of_curves floats from 0.001 to 0.997. """
- # t_list = [0.001]
- t_list = []
+def get_interpolation_parameters_list(number_of_curves: int) -> list[float]:
+ """Return list of interpolation parameters from 0.0 to 0.997.
+
+ Parameters
+ ----------
+ number_of_curves : int
+ Number of curves to generate.
+
+ Returns
+ -------
+ list[float]
+ List of interpolation parameter values.
+
+ """
+ t_list: list[float] = []
a = list(np.arange(number_of_curves + 1) / (number_of_curves + 1))
a.pop(0)
t_list.extend(a)
diff --git a/src/compas_slicer/slicers/planar_slicer.py b/src/compas_slicer/slicers/planar_slicer.py
index 1e9eb181..b6888b03 100644
--- a/src/compas_slicer/slicers/planar_slicer.py
+++ b/src/compas_slicer/slicers/planar_slicer.py
@@ -1,54 +1,53 @@
-import compas_slicer
-from compas_slicer.slicers import BaseSlicer
-from compas.geometry import Vector, Plane, Point
-import logging
+from __future__ import annotations
-logger = logging.getLogger('logger')
+from compas.datastructures import Mesh
+from compas.geometry import Plane, Point, Vector
+from loguru import logger
+
+from compas_slicer.slicers.base_slicer import BaseSlicer
+from compas_slicer.slicers.planar_slicing import create_planar_paths
__all__ = ['PlanarSlicer']
class PlanarSlicer(BaseSlicer):
- """
- Generates planar contours on a mesh that are parallel to the xy plane.
+ """Generates planar contours on a mesh that are parallel to the xy plane.
Attributes
----------
- mesh: :class:`compas.datastructures.Mesh`
- Input mesh, it must be a triangular mesh (i.e. no quads or n-gons allowed).
- slicer_type: str
- String representing which slicing method to use.
- options: 'default', 'cgal'
- layer_height: float
- Distance between layers (slices).
- slice_height_range: tuple (optional)
- Optional tuple that lets the user slice only a part of the model.
- Defaults to None which slices the entire model.
- First value is the Z height to start slicing from, second value is the Z height to end.
- The range values are not absolute height values, but relative to the current minimum height value of the mesh.
- I.e. if you want to only slice the first 100 mm of the mesh, you use (0,100) regardless of the position of the mesh.
+ mesh : Mesh
+ Input mesh, must be triangular (no quads or n-gons allowed).
+ layer_height : float
+ Distance between layers (slices) in mm.
+ slice_height_range : tuple[float, float] | None
+ Optional tuple (z_start, z_end) to slice only part of the model.
+ Values are relative to mesh minimum height.
+
"""
- def __init__(self, mesh, slicer_type="default", layer_height=2.0, slice_height_range=None):
+ def __init__(
+ self,
+ mesh: Mesh,
+ layer_height: float = 2.0,
+ slice_height_range: tuple[float, float] | None = None,
+ ) -> None:
logger.info('PlanarSlicer')
BaseSlicer.__init__(self, mesh)
self.layer_height = layer_height
- self.slicer_type = slicer_type
self.slice_height_range = slice_height_range
- def __repr__(self):
- return "" % \
- (len(self.layers), self.layer_height)
+ def __repr__(self) -> str:
+ return f""
- def generate_paths(self):
- """Generates the planar slicing paths."""
+ def generate_paths(self) -> None:
+ """Generate the planar slicing paths."""
z = [self.mesh.vertex_attribute(key, 'z') for key in self.mesh.vertices()]
min_z, max_z = min(z), max(z)
if self.slice_height_range:
if min_z <= self.slice_height_range[0] <= max_z and min_z <= self.slice_height_range[1] <= max_z:
- logger.info("Slicing mesh in range from Z = %d to Z = %d." % (self.slice_height_range[0], self.slice_height_range[1]))
+ logger.info(f"Slicing mesh in range from Z = {self.slice_height_range[0]} to Z = {self.slice_height_range[1]}.")
max_z = min_z + self.slice_height_range[1]
min_z = min_z + self.slice_height_range[0]
else:
@@ -59,19 +58,5 @@ def generate_paths(self):
normal = Vector(0, 0, 1)
planes = [Plane(Point(0, 0, min_z + i * self.layer_height), normal) for i in range(no_of_layers)]
- if self.slicer_type == "default":
- logger.info('')
- logger.info("Planar slicing using default function ...")
- self.layers = compas_slicer.slicers.create_planar_paths(self.mesh, planes)
-
- elif self.slicer_type == "cgal":
- logger.info('')
- logger.info("Planar slicing using CGAL ...")
- self.layers = compas_slicer.slicers.create_planar_paths_cgal(self.mesh, planes)
-
- else:
- raise NameError("Invalid slicing type : " + self.slicer_type)
-
-
-if __name__ == "__main__":
- pass
+ logger.info("Planar slicing using CGAL ...")
+ self.layers = create_planar_paths(self.mesh, planes)
diff --git a/src/compas_slicer/slicers/planar_slicing/__init__.py b/src/compas_slicer/slicers/planar_slicing/__init__.py
index 03788100..d4edf90c 100644
--- a/src/compas_slicer/slicers/planar_slicing/__init__.py
+++ b/src/compas_slicer/slicers/planar_slicing/__init__.py
@@ -1,8 +1,3 @@
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
+from .planar_slicing_cgal import * # noqa: F401 F403
-from .planar_slicing import * # noqa: F401 E402 F403
-from .planar_slicing_cgal import * # noqa: F401 E402 F403
-
-__all__ = [name for name in dir() if not name.startswith('_')]
+__all__ = [name for name in dir() if not name.startswith("_")]
diff --git a/src/compas_slicer/slicers/planar_slicing/planar_slicing.py b/src/compas_slicer/slicers/planar_slicing/planar_slicing.py
deleted file mode 100644
index 217ab19d..00000000
--- a/src/compas_slicer/slicers/planar_slicing/planar_slicing.py
+++ /dev/null
@@ -1,76 +0,0 @@
-from compas_slicer.geometry import Path
-from compas_slicer.geometry import Layer
-import logging
-import progressbar
-from compas.geometry import intersection_segment_plane
-from compas_slicer.slicers.slice_utilities import ContoursBase
-
-logger = logging.getLogger('logger')
-
-__all__ = ['create_planar_paths']
-
-
-def create_planar_paths(mesh, planes):
- """
- Creates planar contours. Does not rely on external libraries.
- It is currently the only method that can return identify OPEN versus CLOSED paths.
-
- Parameters
- ----------
- mesh: :class: 'compas.datastructures.Mesh'
- The mesh to be sliced
- planes: list, :class: 'compas.geometry.Plane'
- """
-
- layers = []
-
- with progressbar.ProgressBar(max_value=len(planes)) as bar:
- for i, plane in enumerate(planes):
-
- intersection = PlanarContours(mesh, plane)
- intersection.compute()
-
- paths = []
- if len(intersection.sorted_point_clusters) > 0 and intersection.is_valid:
- for key in intersection.sorted_point_clusters:
- is_closed = intersection.closed_paths_booleans[key]
- path = Path(points=intersection.sorted_point_clusters[key], is_closed=is_closed)
- paths.append(path)
-
- layers.append(Layer(paths))
-
- bar.update(i)
-
- return layers
-
-
-class PlanarContours(ContoursBase):
- """
- Finds the iso-contours of the function f(x) = vertex_coords.z - plane.z
- on the mesh.
-
- Attributes
- ----------
- mesh: :class: 'compas.datastructures.Mesh'
- plane: list, :class: 'compas.geometry.Plane'
- """
- def __init__(self, mesh, plane):
- self.plane = plane
- ContoursBase.__init__(self, mesh) # initialize from parent class
-
- def edge_is_intersected(self, u, v):
- """ Returns True if the edge u,v has a zero-crossing, False otherwise. """
- a = self.mesh.vertex_attributes(u, 'xyz')
- b = self.mesh.vertex_attributes(v, 'xyz')
- z = [a[2], b[2]] # check if the plane.z is withing the range of [a.z, b.z]
- return min(z) <= self.plane.point[2] < max(z)
-
- def find_zero_crossing_data(self, u, v):
- """ Finds the position of the zero-crossing on the edge u,v. """
- a = self.mesh.vertex_attributes(u, 'xyz')
- b = self.mesh.vertex_attributes(v, 'xyz')
- return intersection_segment_plane((a, b), self.plane)
-
-
-if __name__ == "__main__":
- pass
diff --git a/src/compas_slicer/slicers/planar_slicing/planar_slicing_cgal.py b/src/compas_slicer/slicers/planar_slicing/planar_slicing_cgal.py
index baee5d57..31667c29 100644
--- a/src/compas_slicer/slicers/planar_slicing/planar_slicing_cgal.py
+++ b/src/compas_slicer/slicers/planar_slicing/planar_slicing_cgal.py
@@ -1,18 +1,22 @@
+from __future__ import annotations
+
import itertools
-from compas.geometry import Point
-from compas_slicer.geometry import Layer
-from compas_slicer.geometry import Path
+from typing import TYPE_CHECKING, Any, Callable
+
import progressbar
-import logging
-import compas_slicer.utilities as utils
+from compas.geometry import Plane, Point
from compas.plugins import PluginNotInstalledError
-logger = logging.getLogger('logger')
+from compas_slicer.geometry import Layer, Path
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
-__all__ = ['create_planar_paths_cgal']
+__all__ = ['create_planar_paths']
-def create_planar_paths_cgal(mesh, planes):
+
+def create_planar_paths(mesh: Mesh, planes: list[Plane]) -> list[Layer]:
"""Creates planar contours very efficiently using CGAL.
Parameters
@@ -21,16 +25,14 @@ def create_planar_paths_cgal(mesh, planes):
A compas mesh.
planes: list, :class: 'compas.geometry.Plane'
"""
- packages = utils.TerminalCommand('conda list').get_split_output_strings()
-
- if 'compas-cgal' in packages or 'compas_cgal' in packages:
+ try:
from compas_cgal.slicer import slice_mesh
- else:
- raise PluginNotInstalledError("--------ATTENTION! ----------- \
- Compas_cgal library is missing! \
- You can't use this planar slicing method without it. \
- Check the README instructions for how to install it, \
- or use another planar slicing method.")
+ except ImportError as e:
+ raise PluginNotInstalledError(
+ "Compas_cgal library is missing! "
+ "You can't use this planar slicing method without it. "
+ "Install it with: pip install compas_cgal"
+ ) from e
# prepare mesh for slicing
M = mesh.to_vertices_and_faces()
@@ -68,7 +70,9 @@ def create_planar_paths_cgal(mesh, planes):
return layers
-def get_grouped_list(item_list, key_function):
+def get_grouped_list(
+ item_list: list[Any], key_function: Callable[[Any], Any]
+) -> list[list[Any]]:
""" Groups layers horizontally. """
# first sort, because grouping only groups consecutively matching items
sorted_list = sorted(item_list, key=key_function)
@@ -78,7 +82,7 @@ def get_grouped_list(item_list, key_function):
return [list(group) for _key, group in grouped_iter]
-def key_function(item):
+def key_function(item: list[list[float]]) -> float:
return item[0][2]
diff --git a/src/compas_slicer/slicers/scalar_field_slicer.py b/src/compas_slicer/slicers/scalar_field_slicer.py
index e1eb12a4..d366ad92 100644
--- a/src/compas_slicer/slicers/scalar_field_slicer.py
+++ b/src/compas_slicer/slicers/scalar_field_slicer.py
@@ -1,46 +1,65 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
import numpy as np
-from compas_slicer.slicers import BaseSlicer
-import logging
-from compas_slicer.slicers.slice_utilities import ScalarFieldContours
import progressbar
+from loguru import logger
+
+from compas_slicer.config import InterpolationConfig
from compas_slicer.geometry import VerticalLayersManager
-from compas_slicer.parameters import get_param
+from compas_slicer.slicers import BaseSlicer
+from compas_slicer.slicers.slice_utilities import ScalarFieldContours
+
+if TYPE_CHECKING:
+ from collections.abc import Sequence
+
+ from compas.datastructures import Mesh
-logger = logging.getLogger('logger')
__all__ = ['ScalarFieldSlicer']
class ScalarFieldSlicer(BaseSlicer):
- """
- Generates the isocontours of a scalar field defined on the mesh vertices.
+ """Generates the isocontours of a scalar field defined on mesh vertices.
Attributes
----------
- mesh: :class: 'compas.datastructures.Mesh'
- Input mesh, it must be a triangular mesh (i.e. no quads or n-gons allowed)
- Note that the topology of the mesh matters, irregular tesselation can lead to undesired results.
- We recommend to 1)re-topologize, 2) triangulate, and 3) weld your mesh in advance.
- scalar_field: list, Vx1 (one float per vertex that represents the scalar field)
- no_of_isocurves: int, how many isocontours to be generated
+ mesh : Mesh
+ Input mesh, must be triangular (no quads or n-gons allowed).
+ Topology matters; irregular tessellation can lead to undesired results.
+ Recommend: re-topologize, triangulate, and weld mesh in advance.
+ scalar_field : list[float]
+ One float per vertex representing the scalar field.
+ no_of_isocurves : int
+ Number of isocontours to generate.
+ config : InterpolationConfig
+ Configuration parameters.
+
"""
- def __init__(self, mesh, scalar_field, no_of_isocurves, parameters=None):
+ def __init__(
+ self,
+ mesh: Mesh,
+ scalar_field: Sequence[float],
+ no_of_isocurves: int,
+ config: InterpolationConfig | None = None,
+ ) -> None:
logger.info('ScalarFieldSlicer')
BaseSlicer.__init__(self, mesh)
self.no_of_isocurves = no_of_isocurves
- self.scalar_field = list(np.array(scalar_field) - np.min(np.array(scalar_field)))
- self.parameters = parameters if parameters else {}
+ self.scalar_field: list[float] = list(np.array(scalar_field) - np.min(np.array(scalar_field)))
+ self.config = config if config else InterpolationConfig()
mesh.update_default_vertex_attributes({'scalar_field': 0})
- def generate_paths(self):
- """ Generates isocontours. """
+ def generate_paths(self) -> None:
+ """Generate isocontours."""
start_domain, end_domain = min(self.scalar_field), max(self.scalar_field)
step = (end_domain - start_domain) / (self.no_of_isocurves + 1)
- max_dist = get_param(self.parameters, key='vertical_layers_max_centroid_dist', defaults_type='layers')
+ max_dist = self.config.vertical_layers_max_centroid_dist
vertical_layers_manager = VerticalLayersManager(max_dist)
# create paths + layers
diff --git a/src/compas_slicer/slicers/slice_utilities/__init__.py b/src/compas_slicer/slicers/slice_utilities/__init__.py
index 1a2e2fd2..e84292c0 100644
--- a/src/compas_slicer/slicers/slice_utilities/__init__.py
+++ b/src/compas_slicer/slicers/slice_utilities/__init__.py
@@ -1,10 +1,6 @@
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
+from .contours_base import * # noqa: F401 F403
+from .graph_connectivity import * # noqa: F401 F403
+from .scalar_field_contours import * # noqa: F401 F403
+from .uv_contours import * # noqa: F401 F403
-from .graph_connectivity import * # noqa: F401 E402 F403
-from .contours_base import * # noqa: F401 E402 F403
-from .scalar_field_contours import * # noqa: F401 E402 F403
-from .uv_contours import * # noqa: F401 E402 F403
-
-__all__ = [name for name in dir() if not name.startswith('_')]
+__all__ = [name for name in dir() if not name.startswith("_")]
diff --git a/src/compas_slicer/slicers/slice_utilities/contours_base.py b/src/compas_slicer/slicers/slice_utilities/contours_base.py
index 1e43a2e6..9b2faccc 100644
--- a/src/compas_slicer/slicers/slice_utilities/contours_base.py
+++ b/src/compas_slicer/slicers/slice_utilities/contours_base.py
@@ -1,17 +1,27 @@
+from __future__ import annotations
+
+from abc import abstractmethod
+from pathlib import Path as FilePath
+from typing import TYPE_CHECKING, Any
+
from compas.geometry import Point, distance_point_point_sqrd
-from compas.utilities.itertools import pairwise
-from compas_slicer.slicers.slice_utilities import create_graph_from_mesh_edges, sort_graph_connected_components
+from compas.itertools import pairwise
+
import compas_slicer.utilities as utils
-import logging
-from abc import abstractmethod
-from compas_slicer.geometry import Path
+from compas_slicer.geometry import Path, VerticalLayersManager
+from compas_slicer.slicers.slice_utilities.graph_connectivity import (
+ create_graph_from_mesh_edges,
+ sort_graph_connected_components,
+)
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
-logger = logging.getLogger('logger')
-__all__ = ['ContoursBase']
+__all__ = ["ContoursBase"]
-class ContoursBase(object):
+class ContoursBase:
"""
This is meant to be extended by all classes that generate isocontours of a scalar function on a mesh.
This class handles the two steps of iso-contouring of a triangular mesh consists of two steps;
@@ -26,25 +36,25 @@ class ContoursBase(object):
"""
- def __init__(self, mesh):
+ def __init__(self, mesh: Mesh) -> None:
self.mesh = mesh
- self.intersection_data = {} # dict: (ui,vi) : {compas.Point}
+ self.intersection_data: dict[tuple[int, int], Point] = {}
# key: tuple (int, int), The edge from which the intersection point originates.
# value: :class: 'compas.geometry.Point', The zero-crossing point.
- self.edge_to_index = {} # dict that stores node_index and edge relationship
+ self.edge_to_index: dict[tuple[int, int], int] = {}
# key: tuple (int, int) edge
# value: int, index of the intersection point
- self.sorted_point_clusters = {} # dict
+ self.sorted_point_clusters: dict[int, list[Point]] = {}
# key: int, The index of the connected component
# value: list, :class: 'compas.geometry.Point', The sorted zero-crossing points.
- self.sorted_edge_clusters = {} # dict
+ self.sorted_edge_clusters: dict[int, list[tuple[int, int]]] = {}
# key: int, The index of the connected component.
# value: list, tuple (int, int), The sorted intersected edges.
- self.closed_paths_booleans = {} # dict
+ self.closed_paths_booleans: dict[int, bool] = {}
# key: int, The index of the connected component.
# value: bool, True if path is closed, False otherwise.
- def compute(self):
+ def compute(self) -> None:
self.find_intersections()
G = create_graph_from_mesh_edges(self.mesh, self.intersection_data, self.edge_to_index)
sorted_indices_dict = sort_graph_connected_components(G)
@@ -57,14 +67,14 @@ def compute(self):
self.label_closed_paths()
- def label_closed_paths(self):
+ def label_closed_paths(self) -> None:
for key in self.sorted_edge_clusters:
first_edge = self.sorted_edge_clusters[key][0]
last_edge = self.sorted_edge_clusters[key][-1]
u, v = first_edge
self.closed_paths_booleans[key] = u in last_edge or v in last_edge
- def find_intersections(self):
+ def find_intersections(self) -> None:
"""
Fills in the
dict self.intersection_data: key=(ui,vi) : [xi,yi,zi],
@@ -72,8 +82,7 @@ def find_intersections(self):
for edge in list(self.mesh.edges()):
if self.edge_is_intersected(edge[0], edge[1]):
point = self.find_zero_crossing_data(edge[0], edge[1])
- if point: # Sometimes the result can be None
- if edge not in self.intersection_data and tuple(reversed(edge)) not in self.intersection_data:
+ if point and edge not in self.intersection_data and tuple(reversed(edge)) not in self.intersection_data:
# create [edge - point] dictionary
self.intersection_data[edge] = {}
self.intersection_data[edge] = Point(point[0], point[1], point[2])
@@ -82,26 +91,30 @@ def find_intersections(self):
for i, e in enumerate(self.intersection_data):
self.edge_to_index[e] = i
- def save_point_clusters_as_polylines_to_json(self, DATA_PATH, name):
- all_points = {}
+ def save_point_clusters_as_polylines_to_json(
+ self, DATA_PATH: str | FilePath, name: str
+ ) -> None:
+ all_points: dict[str, Any] = {}
for i, key in enumerate(self.sorted_point_clusters):
- all_points[i] = utils.point_list_to_dict(self.sorted_point_clusters[key])
+ all_points[str(i)] = utils.point_list_to_dict(self.sorted_point_clusters[key])
utils.save_to_json(all_points, DATA_PATH, name)
# --- Abstract methods
@abstractmethod
- def edge_is_intersected(self, u, v):
+ def edge_is_intersected(self, u: int, v: int) -> bool:
""" Returns True if the edge u,v has a zero-crossing, False otherwise. """
# to be implemented by the inheriting classes
pass
@abstractmethod
- def find_zero_crossing_data(self, u, v):
+ def find_zero_crossing_data(self, u: int, v: int) -> list[float] | None:
""" Finds the position of the zero-crossing on the edge u,v. """
# to be implemented by the inheriting classes
pass
- def add_to_vertical_layers_manager(self, vertical_layers_manager):
+ def add_to_vertical_layers_manager(
+ self, vertical_layers_manager: VerticalLayersManager
+ ) -> None:
for key in self.sorted_point_clusters:
pts = self.sorted_point_clusters[key]
if len(pts) > 3: # discard curves that are too small
diff --git a/src/compas_slicer/slicers/slice_utilities/graph_connectivity.py b/src/compas_slicer/slicers/slice_utilities/graph_connectivity.py
index 03fdb385..93695062 100644
--- a/src/compas_slicer/slicers/slice_utilities/graph_connectivity.py
+++ b/src/compas_slicer/slicers/slice_utilities/graph_connectivity.py
@@ -1,11 +1,23 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
import networkx as nx
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
+ from compas.geometry import Point
+
__all__ = ['create_graph_from_mesh_edges',
'sort_graph_connected_components',
'create_graph_from_mesh_vkeys']
-def create_graph_from_mesh_edges(mesh, intersection_data, edge_to_index):
+def create_graph_from_mesh_edges(
+ mesh: Mesh,
+ intersection_data: dict[tuple[int, int], Point],
+ edge_to_index: dict[tuple[int, int], int],
+) -> nx.Graph:
"""
Creates a graph with one node for every intersected edge.
The connectivity of nodes (i.e. edges between them) is based on their neighboring on the mesh.
@@ -35,7 +47,7 @@ def create_graph_from_mesh_edges(mesh, intersection_data, edge_to_index):
# find current neighboring edges that are also intersected
current_edge_connections = []
- for f in mesh.edge_faces(u=mesh_edge[0], v=mesh_edge[1]):
+ for f in mesh.edge_faces(mesh_edge):
if f is not None:
face_edges = mesh.face_halfedges(f)
for e in face_edges:
@@ -54,7 +66,7 @@ def create_graph_from_mesh_edges(mesh, intersection_data, edge_to_index):
return G
-def create_graph_from_mesh_vkeys(mesh, v_keys):
+def create_graph_from_mesh_vkeys(mesh: Mesh, v_keys: list[int]) -> nx.Graph:
"""
Creates a graph with one node for every vertex, and edges between neighboring vertices.
@@ -78,7 +90,7 @@ def create_graph_from_mesh_vkeys(mesh, v_keys):
return G
-def sort_graph_connected_components(G):
+def sort_graph_connected_components(G: nx.Graph) -> dict[int, list[int]]:
"""
For every connected component of the graph G:
1) It finds a start node. For open paths it is on one of its ends, for closed paths it can be any of its points.
@@ -103,7 +115,7 @@ def sort_graph_connected_components(G):
current_index = 0
- for j, cp in enumerate(nx.connected_components(G)):
+ for _j, cp in enumerate(nx.connected_components(G)):
if len(cp) > 1: # we need at least 2 elements to have an edge
sorted_node_indices = []
@@ -127,7 +139,8 @@ def sort_graph_connected_components(G):
if node_index_2 not in sorted_node_indices:
sorted_node_indices.append(node_index_2)
- assert len(sorted_node_indices) == len(cp), 'Attention. len(sorted_node_indices) != len(G.nodes())'
+ if len(sorted_node_indices) != len(cp):
+ raise RuntimeError(f'Node sorting error: {len(sorted_node_indices)} sorted != {len(cp)} in component')
sorted_indices_dict[current_index] = sorted_node_indices
current_index += 1
diff --git a/src/compas_slicer/slicers/slice_utilities/scalar_field_contours.py b/src/compas_slicer/slicers/slice_utilities/scalar_field_contours.py
index 9ff72017..1124f3fe 100644
--- a/src/compas_slicer/slicers/slice_utilities/scalar_field_contours.py
+++ b/src/compas_slicer/slicers/slice_utilities/scalar_field_contours.py
@@ -1,5 +1,14 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import numpy as np
+from compas.geometry import Point, Vector, add_vectors, scale_vector
+
from compas_slicer.slicers.slice_utilities import ContoursBase
-from compas.geometry import Vector, add_vectors, scale_vector
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
__all__ = ['ScalarFieldContours']
@@ -13,27 +22,85 @@ class ScalarFieldContours(ContoursBase):
----------
mesh: :class: 'compas.datastructures.Mesh'
"""
- def __init__(self, mesh):
+ def __init__(self, mesh: Mesh) -> None:
ContoursBase.__init__(self, mesh) # initialize from parent class
- def edge_is_intersected(self, u, v):
+ def find_intersections(self) -> None:
+ """Vectorized intersection finding for scalar field contours.
+
+ Overrides parent method for ~10x speedup on large meshes.
+ """
+ # Get all edges as numpy array
+ edges = np.array(list(self.mesh.edges()))
+ n_edges = len(edges)
+
+ if n_edges == 0:
+ return
+
+ # Get scalar field values for all vertices
+ scalar_field = np.array([
+ self.mesh.vertex[v]['scalar_field']
+ for v in range(len(list(self.mesh.vertices())))
+ ])
+
+ # Get scalar values at edge endpoints
+ d1 = scalar_field[edges[:, 0]]
+ d2 = scalar_field[edges[:, 1]]
+
+ # Vectorized intersection test: sign change across edge
+ intersected = (d1 * d2) <= 0 # different signs or zero
+
+ # Get vertex coordinates
+ vertices = np.array([self.mesh.vertex_coordinates(v) for v in self.mesh.vertices()])
+
+ # Compute zero crossings for intersected edges
+ intersected_edges = edges[intersected]
+ d1_int = d1[intersected]
+ d2_int = d2[intersected]
+
+ # Interpolation parameter (avoid division by zero)
+ abs_d1 = np.abs(d1_int)
+ abs_d2 = np.abs(d2_int)
+ denom = abs_d1 + abs_d2
+ valid = denom > 0
+
+ # Compute intersection points
+ v1 = vertices[intersected_edges[:, 0]]
+ v2 = vertices[intersected_edges[:, 1]]
+
+ # Linear interpolation: pt = v1 + t * (v2 - v1) where t = |d1| / (|d1| + |d2|)
+ t = np.zeros(len(intersected_edges))
+ t[valid] = abs_d1[valid] / denom[valid]
+ pts = v1 + t[:, np.newaxis] * (v2 - v1)
+
+ # Store results
+ for edge, pt, is_valid in zip(intersected_edges, pts, valid):
+ if is_valid:
+ edge_tuple = (int(edge[0]), int(edge[1]))
+ rev_edge = (int(edge[1]), int(edge[0]))
+ if edge_tuple not in self.intersection_data and rev_edge not in self.intersection_data:
+ self.intersection_data[edge_tuple] = Point(pt[0], pt[1], pt[2])
+
+ # Build edge to index mapping
+ for i, e in enumerate(self.intersection_data):
+ self.edge_to_index[e] = i
+
+ def edge_is_intersected(self, u: int, v: int) -> bool:
""" Returns True if the edge u,v has a zero-crossing, False otherwise. """
d1 = self.mesh.vertex[u]['scalar_field']
d2 = self.mesh.vertex[v]['scalar_field']
- if (d1 > 0 and d2 > 0) or (d1 < 0 and d2 < 0):
- return False
- else:
- return True
+ return not (d1 > 0 and d2 > 0 or d1 < 0 and d2 < 0)
- def find_zero_crossing_data(self, u, v):
+ def find_zero_crossing_data(self, u: int, v: int) -> list[float] | None:
""" Finds the position of the zero-crossing on the edge u,v. """
dist_a, dist_b = self.mesh.vertex[u]['scalar_field'], self.mesh.vertex[v]['scalar_field']
if abs(dist_a) + abs(dist_b) > 0:
v_coords_a, v_coords_b = self.mesh.vertex_coordinates(u), self.mesh.vertex_coordinates(v)
vec = Vector.from_start_end(v_coords_a, v_coords_b)
vec = scale_vector(vec, abs(dist_a) / (abs(dist_a) + abs(dist_b)))
- pt = add_vectors(v_coords_a, vec)
+ pt: list[float] = add_vectors(v_coords_a, vec)
return pt
+ return None
if __name__ == "__main__":
diff --git a/src/compas_slicer/slicers/slice_utilities/uv_contours.py b/src/compas_slicer/slicers/slice_utilities/uv_contours.py
index 066f857a..9c838bc5 100644
--- a/src/compas_slicer/slicers/slice_utilities/uv_contours.py
+++ b/src/compas_slicer/slicers/slice_utilities/uv_contours.py
@@ -1,61 +1,45 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from compas.geometry import (
+ add_vectors,
+ distance_point_point_xy,
+ intersection_line_line_xy,
+ is_point_on_segment_xy,
+ scale_vector,
+)
+
from compas_slicer.slicers.slice_utilities import ContoursBase
-from compas.geometry import intersection_line_line_xy, distance_point_point_xy, scale_vector, add_vectors
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
+
+__all__ = ['UVContours']
class UVContours(ContoursBase):
- def __init__(self, mesh, p1, p2):
+ def __init__(self, mesh: Mesh, p1: tuple[float, float], p2: tuple[float, float]) -> None:
ContoursBase.__init__(self, mesh) # initialize from parent class
self.p1 = p1 # tuple (u,v); first point in uv domain defining the cutting line
self.p2 = p2 # tuple (u,v); second point in uv domain defining the cutting line
- def uv(self, vkey):
- return self.mesh.vertex[vkey]['uv']
+ def uv(self, vkey: int) -> tuple[float, float]:
+ uv: tuple[float, float] = self.mesh.vertex[vkey]['uv']
+ return uv
- def edge_is_intersected(self, v1, v2):
+ def edge_is_intersected(self, v1: int, v2: int) -> bool:
""" Returns True if the edge v1,v2 intersects the line in the uv domain, False otherwise. """
p = intersection_line_line_xy((self.p1, self.p2), (self.uv(v1), self.uv(v2)))
- if p:
- if is_point_on_line_xy(p, (self.uv(v1), self.uv(v2))):
- if is_point_on_line_xy(p, (self.p1, self.p2)):
- return True
- return False
+ return bool(p and is_point_on_segment_xy(p, (self.uv(v1), self.uv(v2))) and is_point_on_segment_xy(p, (self.p1, self.p2)))
- def find_zero_crossing_data(self, v1, v2):
+ def find_zero_crossing_data(self, v1: int, v2: int) -> list[float] | None:
""" Finds the position of the zero-crossing on the edge u,v. """
p = intersection_line_line_xy((self.p1, self.p2), (self.uv(v1), self.uv(v2)))
d1, d2 = distance_point_point_xy(self.uv(v1), p), distance_point_point_xy(self.uv(v2), p)
if d1 + d2 > 0:
vec = self.mesh.edge_vector(v1, v2)
vec = scale_vector(vec, d1 / (d1 + d2))
- pt = add_vectors(self.mesh.vertex_coordinates(v1), vec)
+ pt: list[float] = add_vectors(self.mesh.vertex_coordinates(v1), vec)
return pt
-
-
-# utility function
-
-def is_point_on_line_xy(c, line, epsilon=1e-6):
- """
- Not using the equivalent function of compas, because for some reason it always returns True.
-
- c: list that represents a point with 2 coordinates [x.y] of [x,y,0]
- line: (p1, p2) where each pt represents a point with 2 coordinates [x.y] of [x,y,0]
- """
- a, b = line[0], line[1]
- cross_product = (c[1] - a[1]) * (b[0] - a[0]) - (c[0] - a[0]) * (b[1] - a[1])
-
- if abs(cross_product) > epsilon:
- return False
-
- dot_product = (c[0] - a[0]) * (b[0] - a[0]) + (c[1] - a[1]) * (b[1] - a[1])
- if dot_product < 0:
- return False
-
- squared_length_ba = (b[0] - a[0]) * (b[0] - a[0]) + (b[1] - a[1]) * (b[1] - a[1])
- if dot_product > squared_length_ba:
- return False
-
- return True
-
-
-if __name__ == "__main__":
- pass
+ return None
diff --git a/src/compas_slicer/slicers/uv_slicer.py b/src/compas_slicer/slicers/uv_slicer.py
index ae9827d3..7f7c34bc 100644
--- a/src/compas_slicer/slicers/uv_slicer.py
+++ b/src/compas_slicer/slicers/uv_slicer.py
@@ -1,66 +1,84 @@
-from compas_slicer.slicers import BaseSlicer
-import logging
-from compas_slicer.slicers.slice_utilities import UVContours
-import numpy as np
+from __future__ import annotations
+from typing import TYPE_CHECKING
+
+import numpy as np
import progressbar
+from loguru import logger
+
+from compas_slicer.config import InterpolationConfig
from compas_slicer.geometry import VerticalLayersManager
-from compas_slicer.parameters import get_param
+from compas_slicer.slicers import BaseSlicer
+from compas_slicer.slicers.slice_utilities import UVContours
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
-logger = logging.getLogger('logger')
__all__ = ['UVSlicer']
class UVSlicer(BaseSlicer):
- """
- Generates the contours on the mesh that correspond to straight lines on the plane,
- using on a UV map (from 3D space to the plane) defined on the mesh vertices.
+ """Generates contours on the mesh corresponding to straight lines on the UV plane.
+
+ Uses a UV map (from 3D space to plane) defined on mesh vertices.
Attributes
----------
- mesh: :class: 'compas.datastructures.Mesh'
- Input mesh, it must be a triangular mesh (i.e. no quads or n-gons allowed)
- Note that the topology of the mesh matters, irregular tesselation can lead to undesired results.
- We recommend to 1)re-topologize, 2) triangulate, and 3) weld your mesh in advance.
- vkey_to_uv: dict {vkey : tuple (u,v)}. U,V coordinates should be in the domain [0,1]. The U coordinate
- no_of_isocurves: int, how many levels to be generated
+ mesh : Mesh
+ Input mesh, must be triangular (no quads or n-gons allowed).
+ Topology matters; irregular tessellation can lead to undesired results.
+ Recommend: re-topologize, triangulate, and weld mesh in advance.
+ vkey_to_uv : dict[int, tuple[float, float]]
+ Mapping from vertex key to UV coordinates. UV should be in [0,1].
+ no_of_isocurves : int
+ Number of levels to generate.
+ config : InterpolationConfig
+ Configuration parameters.
+
"""
- def __init__(self, mesh, vkey_to_uv, no_of_isocurves, parameters=None):
+ def __init__(
+ self,
+ mesh: Mesh,
+ vkey_to_uv: dict[int, tuple[float, float]],
+ no_of_isocurves: int,
+ config: InterpolationConfig | None = None,
+ ) -> None:
logger.info('UVSlicer')
BaseSlicer.__init__(self, mesh)
self.vkey_to_uv = vkey_to_uv
self.no_of_isocurves = no_of_isocurves
- self.parameters = parameters if parameters else {}
+ self.config = config if config else InterpolationConfig()
u = [self.vkey_to_uv[vkey][0] for vkey in mesh.vertices()]
v = [self.vkey_to_uv[vkey][1] for vkey in mesh.vertices()]
- u = np.array(u) * float(no_of_isocurves + 1)
+ u_arr = np.array(u) * float(no_of_isocurves + 1)
vkey_to_i = self.mesh.key_index()
mesh.update_default_vertex_attributes({'uv': 0})
for vkey in mesh.vertices():
- mesh.vertex_attribute(vkey, 'uv', (u[vkey_to_i[vkey]], v[vkey_to_i[vkey]]))
+ mesh.vertex_attribute(vkey, 'uv', (u_arr[vkey_to_i[vkey]], v[vkey_to_i[vkey]]))
- def generate_paths(self):
- """ Generates isocontours. """
+ def generate_paths(self) -> None:
+ """Generate isocontours."""
paths_type = 'flat' # 'spiral' # 'zigzag'
v_left, v_right = 0.0, 1.0 - 1e-5
- max_dist = get_param(self.parameters, key='vertical_layers_max_centroid_dist', defaults_type='layers')
+ max_dist = self.config.vertical_layers_max_centroid_dist
vertical_layers_manager = VerticalLayersManager(max_dist)
# create paths + layers
with progressbar.ProgressBar(max_value=self.no_of_isocurves) as bar:
for i in range(0, self.no_of_isocurves + 1):
+ u_val = float(i)
if i == 0:
- i += 0.05 # contours are a bit tricky in the edges
+ u_val += 0.05 # contours are a bit tricky in the edges
if paths_type == 'spiral':
- u1, u2 = i, i + 1.0
+ u1, u2 = u_val, u_val + 1.0
else: # 'flat'
- u1 = u2 = i
+ u1 = u2 = u_val
p1 = (u1, v_left)
p2 = (u2, v_right)
diff --git a/src/compas_slicer/utilities/__init__.py b/src/compas_slicer/utilities/__init__.py
index 3b39559a..16bd8fb9 100644
--- a/src/compas_slicer/utilities/__init__.py
+++ b/src/compas_slicer/utilities/__init__.py
@@ -1,41 +1,7 @@
-"""
-********************************************************************************
-utilities
-********************************************************************************
+"""Helper utilities for I/O, geometry operations, and more."""
-.. currentmodule:: compas_slicer.utilities
-
-
-utils
-=========
-
-.. autosummary::
- :toctree: generated/
- :nosignatures:
-
- save_to_json
- load_from_json
- flattened_list_of_dictionary
- interrupt
- point_list_to_dict
- get_closest_mesh_normal_to_pt
- get_closest_pt_index
- get_closest_pt
- plot_networkx_graph
- get_mesh_vertex_coords_with_attribute
- get_dict_key_from_value
- get_closest_mesh_normal_to_pt
- smooth_vectors
- get_normal_of_path_on_xy_plane
-
-"""
-
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
-from .terminal_command import * # noqa: F401 E402 F403
-from .utils import * # noqa: F401 E402 F403
from .attributes_transfer import * # noqa: F401 E402 F403
+from .terminal_command import * # noqa: F401 F403
+from .utils import * # noqa: F401 E402 F403
__all__ = [name for name in dir() if not name.startswith('_')]
diff --git a/src/compas_slicer/utilities/attributes_transfer.py b/src/compas_slicer/utilities/attributes_transfer.py
index e84f746f..cda72f0d 100644
--- a/src/compas_slicer/utilities/attributes_transfer.py
+++ b/src/compas_slicer/utilities/attributes_transfer.py
@@ -1,9 +1,18 @@
-from compas.geometry import barycentric_coordinates
-import logging
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
import progressbar
+from compas.geometry import barycentric_coordinates
+from loguru import logger
+
from compas_slicer.utilities.utils import pull_pts_to_mesh_faces
-logger = logging.getLogger('logger')
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
+
+ from compas_slicer.geometry import PrintPointsCollection
+
__all__ = ['transfer_mesh_attributes_to_printpoints']
@@ -13,7 +22,10 @@
# PrintPoints Attributes
######################
-def transfer_mesh_attributes_to_printpoints(mesh, printpoints_dict):
+def transfer_mesh_attributes_to_printpoints(
+ mesh: Mesh,
+ printpoints: PrintPointsCollection,
+) -> None:
"""
Transfers face and vertex attributes from the mesh to the printpoints.
Each printpoint is projected to the closest mesh face. It takes directly all the face attributes.
@@ -24,37 +36,39 @@ def transfer_mesh_attributes_to_printpoints(mesh, printpoints_dict):
with scalars and np.arrays.
The reserved attribute names (see 'is_reserved_attribute(attr)') are not passed on to the printpoints.
+
+ Parameters
+ ----------
+ mesh : Mesh
+ The mesh to transfer attributes from.
+ printpoints : PrintPointsCollection
+ The collection of printpoints to transfer attributes to.
+
"""
logger.info('Transferring mesh attributes to the printpoints.')
- all_pts = []
- for layer_key in printpoints_dict:
- for path_key in printpoints_dict[layer_key]:
- for ppt in printpoints_dict[layer_key][path_key]:
- all_pts.append(ppt.pt)
+ all_pts = [ppt.pt for ppt in printpoints.iter_printpoints()]
closest_fks, projected_pts = pull_pts_to_mesh_faces(mesh, all_pts)
i = 0
with progressbar.ProgressBar(max_value=len(all_pts)) as bar:
- for layer_key in printpoints_dict:
- for path_key in printpoints_dict[layer_key]:
- for ppt in printpoints_dict[layer_key][path_key]:
- fkey = closest_fks[i]
- proj_pt = projected_pts[i]
- ppt.attributes = transfer_mesh_attributes_to_point(mesh, fkey, proj_pt)
- i += 1
- bar.update(i)
+ for pp in printpoints.iter_printpoints():
+ fkey = closest_fks[i]
+ proj_pt = projected_pts[i]
+ pp.attributes = transfer_mesh_attributes_to_point(mesh, fkey, proj_pt)
+ i += 1
+ bar.update(i)
-def is_reserved_attribute(attr):
+def is_reserved_attribute(attr: str) -> bool:
""" Returns True if the attribute name is a reserved, false otherwise. """
taken_attributes = ['x', 'y', 'z', 'uv',
'scalar_field']
return attr in taken_attributes
-def transfer_mesh_attributes_to_point(mesh, fkey, proj_pt):
+def transfer_mesh_attributes_to_point(mesh: Mesh, fkey: int, proj_pt: list[float]) -> dict[str, Any]:
"""
It projects the point on the closest face of the mesh. Then if finds
all the vertex and face attributes of the face and its attributes and transfers them to the point.
@@ -84,11 +98,11 @@ def transfer_mesh_attributes_to_point(mesh, fkey, proj_pt):
# get vertex attributes using barycentric coordinates
vs = mesh.face_vertices(fkey)
- vertex_attrs = {}
- checked_attrs = []
+ vertex_attrs: dict[str, Any] = {}
+ checked_attrs: list[str] = []
for attr in mesh.vertex_attributes(vs[0]):
if not is_reserved_attribute(attr):
- if not (attr in checked_attrs):
+ if attr not in checked_attrs:
check_that_attribute_can_be_multiplied(attr, mesh.vertex_attributes(vs[0])[attr])
checked_attrs.append(attr)
vertex_attrs[attr] = 0
@@ -100,10 +114,11 @@ def transfer_mesh_attributes_to_point(mesh, fkey, proj_pt):
return vertex_attrs
-def check_that_attribute_can_be_multiplied(attr_name, value):
+def check_that_attribute_can_be_multiplied(attr_name: str, value: Any) -> bool:
try:
value * 1.0
return True
- except TypeError:
- raise ValueError('Attention! The following vertex attribute cannot be multiplied with a scalar. %s : %s '
- % (attr_name, str(type(value))))
+ except TypeError as err:
+ raise ValueError(
+ f'Attention! The following vertex attribute cannot be multiplied with a scalar. {attr_name} : {type(value)!s} '
+ ) from err
diff --git a/src/compas_slicer/utilities/terminal_command.py b/src/compas_slicer/utilities/terminal_command.py
index b269075e..9686ae9c 100644
--- a/src/compas_slicer/utilities/terminal_command.py
+++ b/src/compas_slicer/utilities/terminal_command.py
@@ -37,7 +37,9 @@ def is_failure(self):
def get_split_output_strings(self):
lines = self.stdout.splitlines()
words = []
- [words.append(word) for line in lines for word in line.split()]
+ for line in lines:
+ for word in line.split():
+ words.append(word)
return words
diff --git a/src/compas_slicer/utilities/utils.py b/src/compas_slicer/utilities/utils.py
index 09ee04f8..c6de5543 100644
--- a/src/compas_slicer/utilities/utils.py
+++ b/src/compas_slicer/utilities/utils.py
@@ -1,16 +1,33 @@
-import os
+from __future__ import annotations
+
import json
-import logging
-from compas.geometry import Point, distance_point_point_sqrd, normalize_vector
-from compas.geometry import Vector, length_vector, closest_point_in_cloud, closest_point_on_plane
-import matplotlib.pyplot as plt
-import networkx as nx
+from pathlib import Path
+from typing import TYPE_CHECKING, Any
+
import numpy as np
import scipy
+from compas.geometry import (
+ Point,
+ Vector,
+ closest_point_in_cloud,
+ closest_point_on_plane,
+ distance_point_point_sqrd,
+ length_vector,
+ normalize_vector,
+)
from compas.plugins import PluginNotInstalledError
-from compas_slicer.utilities import TerminalCommand
+from loguru import logger
+
+from compas_slicer.utilities.terminal_command import TerminalCommand
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
+ from numpy.typing import NDArray
+ from scipy.sparse import csr_matrix
+
+ from compas_slicer.geometry import Path as SlicerPath
+ from compas_slicer.geometry import PrintPoint, PrintPointsCollection
-logger = logging.getLogger('logger')
__all__ = ['remap',
'remap_unbound',
@@ -25,12 +42,14 @@
'point_list_to_dict',
'point_list_from_dict',
'get_closest_mesh_vkey_to_pt',
+ 'get_mesh_cotmatrix',
+ 'get_mesh_cotans',
+ 'get_mesh_massmatrix',
'get_mesh_cotmatrix_igl',
'get_mesh_cotans_igl',
'get_closest_pt_index',
'get_closest_pt',
'pull_pts_to_mesh_faces',
- 'plot_networkx_graph',
'get_mesh_vertex_coords_with_attribute',
'get_dict_key_from_value',
'find_next_printpoint',
@@ -42,8 +61,8 @@
'check_package_is_installed']
-def remap(input_val, in_from, in_to, out_from, out_to):
- """ Bounded remap. """
+def remap(input_val: float, in_from: float, in_to: float, out_from: float, out_to: float) -> float:
+ """Bounded remap from source domain to target domain."""
if input_val <= in_from:
return out_from
elif input_val >= in_to:
@@ -52,12 +71,8 @@ def remap(input_val, in_from, in_to, out_from, out_to):
return remap_unbound(input_val, in_from, in_to, out_from, out_to)
-def remap_unbound(input_val, in_from, in_to, out_from, out_to):
- """
- Remaps input_val from source domain to target domain.
- No clamping is performed, the result can be outside of the target domain
- if the input is outside of the source domain.
- """
+def remap_unbound(input_val: float, in_from: float, in_to: float, out_from: float, out_to: float) -> float:
+ """Remap input_val from source domain to target domain (no clamping)."""
out_range = out_to - out_from
in_range = in_to - in_from
in_val = input_val - in_from
@@ -66,103 +81,108 @@ def remap_unbound(input_val, in_from, in_to, out_from, out_to):
return out_val
-def get_output_directory(path):
- """
- Checks if a directory with the name 'output' exists in the path. If not it creates it.
+def get_output_directory(path: str | Path) -> Path:
+ """Get or create 'output' directory in the given path.
Parameters
----------
- path: str
- The path where the 'output' directory will be created
+ path : str | Path
+ The path where the 'output' directory will be created.
Returns
- ----------
- str
- The path to the new (or already existing) 'output' directory
+ -------
+ Path
+ The path to the 'output' directory.
+
"""
- output_dir = os.path.join(path, 'output')
- if not os.path.exists(output_dir):
- os.mkdir(output_dir)
+ output_dir = Path(path) / 'output'
+ output_dir.mkdir(exist_ok=True)
return output_dir
-def get_closest_pt_index(pt, pts):
- """
- Finds the index of the closest point of 'pt' in the point cloud 'pts'.
+def get_closest_pt_index(pt: Point | NDArray, pts: list[Point] | NDArray) -> int:
+ """Find the index of the closest point to pt in pts.
Parameters
----------
- pt: compas.geometry.Point3d
- pts: list, compas.geometry.Point3d
+ pt : Point | NDArray
+ Query point.
+ pts : list[Point] | NDArray
+ Point cloud to search.
Returns
- ----------
+ -------
int
- The index of the closest point
+ Index of the closest point.
+
"""
- ci = closest_point_in_cloud(point=pt, cloud=pts)[2]
- # distances = [distance_point_point_sqrd(p, pt) for p in pts]
- # ci = distances.index(min(distances))
+ ci: int = closest_point_in_cloud(point=pt, cloud=pts)[2]
return ci
-def get_closest_pt(pt, pts):
- """
- Finds the closest point of 'pt' in the point cloud 'pts'.
+def get_closest_pt(pt: Point | NDArray, pts: list[Point]) -> Point:
+ """Find the closest point to pt in pts.
Parameters
----------
- pt: :class: 'compas.geometry.Point'
- pts: list, :class: 'compas.geometry.Point3d'
+ pt : Point | NDArray
+ Query point.
+ pts : list[Point]
+ Point cloud to search.
Returns
- ----------
- compas.geometry.Point3d
- The closest point
+ -------
+ Point
+ The closest point.
+
"""
ci = closest_point_in_cloud(point=pt, cloud=pts)[2]
return pts[ci]
-def pull_pts_to_mesh_faces(mesh, points):
- """
- Very fast method for projecting a list of points on a mesh, and finding their closest face keys.
+def pull_pts_to_mesh_faces(mesh: Mesh, points: list[Point]) -> tuple[list[int], list[Point]]:
+ """Project points to mesh and find their closest face keys.
Parameters
----------
- mesh: :class: compas.datastructures.Mesh
- points: list, compas.geometry.Point
+ mesh : Mesh
+ The mesh to project onto.
+ points : list[Point]
+ Points to project.
Returns
-------
- closest_fks: a list of the closest face keys
- projected_pts: a list of the projected points on the mesh
+ tuple[list[int], list[Point]]
+ Closest face keys and projected points.
+
"""
- points = np.array(points, dtype=np.float64).reshape((-1, 3))
- fi_fk = {index: fkey for index, fkey in enumerate(mesh.faces())}
+ points_arr = np.array(points, dtype=np.float64).reshape((-1, 3))
+ fi_fk = dict(enumerate(mesh.faces()))
f_centroids = np.array([mesh.face_centroid(fkey) for fkey in mesh.faces()], dtype=np.float64)
- closest_fis = np.argmin(scipy.spatial.distance_matrix(points, f_centroids), axis=1)
+ closest_fis = np.argmin(scipy.spatial.distance_matrix(points_arr, f_centroids), axis=1)
closest_fks = [fi_fk[fi] for fi in closest_fis]
- projected_pts = [closest_point_on_plane(point, mesh.face_plane(fi)) for point, fi in zip(points, closest_fis)]
+ projected_pts = [closest_point_on_plane(point, mesh.face_plane(fi)) for point, fi in zip(points_arr, closest_fis)]
return closest_fks, projected_pts
-def smooth_vectors(vectors, strength, iterations):
- """
- Smooths the vector iteratively, with the given number of iterations and strength per iteration
+def smooth_vectors(vectors: list[Vector], strength: float, iterations: int) -> list[Vector]:
+ """Smooth vectors iteratively.
Parameters
----------
- vectors: list, :class: 'compas.geometry.Vector'
- strength: float
- iterations: int
+ vectors : list[Vector]
+ Vectors to smooth.
+ strength : float
+ Smoothing strength (0-1).
+ iterations : int
+ Number of smoothing iterations.
Returns
- ----------
- list, :class: 'compas.geometry.Vector3d'
- The smoothened vectors
- """
+ -------
+ list[Vector]
+ Smoothed vectors.
+ """
for _ in range(iterations):
for i, n in enumerate(vectors):
if 0 < i < len(vectors) - 1:
@@ -176,42 +196,50 @@ def smooth_vectors(vectors, strength, iterations):
#######################################
# json
-def save_to_json(data, filepath, name):
- """
- Save the provided data to json on the filepath, with the given name
+def save_to_json(
+ data: dict[str, Any] | dict[int, Any] | list[Any], filepath: str | Path, name: str
+) -> None:
+ """Save data to JSON file.
Parameters
----------
- data: dict_or_list
- filepath: str
- name: str
- """
+ data : dict | list
+ Data to save.
+ filepath : str | Path
+ Directory path.
+ name : str
+ Filename.
- filename = os.path.join(filepath, name)
- logger.info("Saving to json: " + filename)
- with open(filename, 'w') as f:
- f.write(json.dumps(data, indent=3, sort_keys=True))
+ """
+ filename = Path(filepath) / name
+ logger.info(f"Saving to json: {filename}")
+ filename.write_text(json.dumps(data, indent=3, sort_keys=True))
-def load_from_json(filepath, name):
- """
- Loads json from the filepath
+def load_from_json(filepath: str | Path, name: str) -> Any:
+ """Load data from JSON file.
Parameters
----------
- filepath: str
- name: str
- """
+ filepath : str | Path
+ Directory path.
+ name : str
+ Filename.
+
+ Returns
+ -------
+ Any
+ Loaded data.
- filename = os.path.join(filepath, name)
- with open(filename, 'r') as f:
- data = json.load(f)
- logger.info("Loaded json: " + filename)
+ """
+ filename = Path(filepath) / name
+ data = json.loads(filename.read_text())
+ logger.info(f"Loaded json: {filename}")
return data
-def is_jsonable(x):
- """ Returns True if x can be json-serialized, False otherwise. """
+def is_jsonable(x: Any) -> bool:
+ """Return True if x can be JSON-serialized."""
try:
json.dumps(x)
return True
@@ -219,8 +247,9 @@ def is_jsonable(x):
return False
-def get_jsonable_attributes(attributes_dict):
- jsonable_attr = {}
+def get_jsonable_attributes(attributes_dict: dict[str, Any]) -> dict[str, Any]:
+ """Convert attributes dict to JSON-serializable form."""
+ jsonable_attr: dict[str, Any] = {}
for attr_key in attributes_dict:
attr = attributes_dict[attr_key]
if is_jsonable(attr):
@@ -230,130 +259,141 @@ def get_jsonable_attributes(attributes_dict):
jsonable_attr[attr_key] = list(attr)
else:
jsonable_attr[attr_key] = 'non serializable attribute'
-
return jsonable_attr
#######################################
# text file
-def save_to_text_file(data, filepath, name):
- """
- Save the provided text on the filepath, with the given name
+def save_to_text_file(data: str, filepath: str | Path, name: str) -> None:
+ """Save text to file.
Parameters
----------
- data: str
- filepath: str
- name: str
- """
+ data : str
+ Text to save.
+ filepath : str | Path
+ Directory path.
+ name : str
+ Filename.
- filename = os.path.join(filepath, name)
- logger.info("Saving to text file: " + filename)
- with open(filename, 'w') as f:
- f.write(data)
+ """
+ filename = Path(filepath) / name
+ logger.info(f"Saving to text file: {filename}")
+ filename.write_text(data)
#######################################
# mesh utils
-def check_triangular_mesh(mesh):
- """
- Checks if the mesh is triangular. If not, then it raises an error
+def check_triangular_mesh(mesh: Mesh) -> None:
+ """Check if mesh is triangular, raise TypeError if not.
Parameters
----------
- mesh: :class: 'compas.datastructures.Mesh'
- """
+ mesh : Mesh
+ The mesh to check.
+ Raises
+ ------
+ TypeError
+ If any face is not a triangle.
+
+ """
for f_key in mesh.faces():
vs = mesh.face_vertices(f_key)
if len(vs) != 3:
- raise TypeError("Found a quad at face key: " + str(f_key) + " ,number of face vertices:" + str(
- len(vs)) + ". \nOnly triangular meshes supported.")
+ raise TypeError(f"Found quad at face {f_key}, vertices: {len(vs)}. Only triangular meshes supported.")
-def get_closest_mesh_vkey_to_pt(mesh, pt):
- """
- Finds the vertex key that is the closest to the point.
+def get_closest_mesh_vkey_to_pt(mesh: Mesh, pt: Point) -> int:
+ """Find the vertex key closest to the point.
Parameters
----------
- mesh: :class: 'compas.datastructures.Mesh'
- pt: :class: 'compas.geometry.Point'
+ mesh : Mesh
+ The mesh.
+ pt : Point
+ Query point.
Returns
- ----------
+ -------
int
- the closest vertex key
+ Closest vertex key.
+
"""
- # cloud = [Point(data['x'], data['y'], data['z']) for v_key, data in mesh.vertices(data=True)]
- # closest_index = compas.geometry.closest_point_in_cloud(pt, cloud)[2]
vertex_tupples = [(v_key, Point(data['x'], data['y'], data['z'])) for v_key, data in mesh.vertices(data=True)]
vertex_tupples = sorted(vertex_tupples, key=lambda v_tupple: distance_point_point_sqrd(pt, v_tupple[1]))
- closest_vkey = vertex_tupples[0][0]
+ closest_vkey: int = vertex_tupples[0][0]
return closest_vkey
-def get_closest_mesh_normal_to_pt(mesh, pt):
- """
- Finds the closest vertex normal to the point.
+def get_closest_mesh_normal_to_pt(mesh: Mesh, pt: Point) -> Vector:
+ """Find the closest vertex normal to the point.
Parameters
----------
- mesh: :class: 'compas.datastructures.Mesh'
- pt: :class: 'compas.geometry.Point'
+ mesh : Mesh
+ The mesh.
+ pt : Point
+ Query point.
Returns
- ----------
- :class: 'compas.geometry.Vector'
- The closest normal of the mesh.
- """
+ -------
+ Vector
+ Normal at closest vertex.
+ """
closest_vkey = get_closest_mesh_vkey_to_pt(mesh, pt)
v = mesh.vertex_normal(closest_vkey)
return Vector(v[0], v[1], v[2])
-def get_mesh_vertex_coords_with_attribute(mesh, attr, value):
- """
- Finds the coordinates of all the vertices that have an attribute with key=attr that equals the value.
+def get_mesh_vertex_coords_with_attribute(mesh: Mesh, attr: str, value: Any) -> list[Point]:
+ """Get coordinates of vertices where attribute equals value.
Parameters
----------
- mesh: :class: 'compas.datastructures.Mesh'
- attr: str
- value: anything that can be stored into a dictionary
+ mesh : Mesh
+ The mesh.
+ attr : str
+ Attribute name.
+ value : Any
+ Value to match.
Returns
- ----------
- list, :class: 'compas.geometry.Point'
- the closest vertex key
- """
+ -------
+ list[Point]
+ Points of matching vertices.
- pts = []
+ """
+ pts: list[Point] = []
for vkey, data in mesh.vertices(data=True):
if data[attr] == value:
pts.append(Point(*mesh.vertex_coordinates(vkey)))
return pts
-def get_normal_of_path_on_xy_plane(k, point, path, mesh):
- """
- Finds the normal of the curve that lies on the xy plane at the point with index k
+def get_normal_of_path_on_xy_plane(k: int, point: Point, path: SlicerPath, mesh: Mesh) -> Vector:
+ """Find the normal of the curve on xy plane at point with index k.
Parameters
----------
- k: int, index of the point
- point: :class: 'compas.geometry.Point'
- path: :class: 'compas_slicer.geometry.Path'
- mesh: :class: 'compas.datastructures.Mesh'
+ k : int
+ Index of the point.
+ point : Point
+ The point.
+ path : SlicerPath
+ The path containing the point.
+ mesh : Mesh
+ The mesh (fallback for degenerate cases).
Returns
- ----------
- :class: 'compas.geometry.Vector'
- """
+ -------
+ Vector
+ Normal vector.
+ """
# find mesh normal is not really needed in the 2D case of planar slicer
# instead we only need the normal of the curve based on the neighboring pts
if (0 < k < len(path.points) - 1) or path.is_closed:
@@ -386,201 +426,340 @@ def get_normal_of_path_on_xy_plane(k, point, path, mesh):
#######################################
-# igl utils
+# mesh matrix utils (NumPy implementations)
-def get_mesh_cotmatrix_igl(mesh, fix_boundaries=True):
- """
- Gets the laplace operator of the mesh
+def get_mesh_cotmatrix(mesh: Mesh, fix_boundaries: bool = True) -> csr_matrix:
+ """Get the cotangent Laplacian matrix of the mesh.
+
+ Computes L_ij = (cot α_ij + cot β_ij) / 2 for adjacent vertices,
+ with L_ii = -sum_j L_ij (row sum = 0).
Parameters
----------
- mesh: :class: 'compas.datastructures.Mesh'
+ mesh : Mesh
+ The mesh (must be triangulated).
fix_boundaries : bool
+ If True, zero out rows for boundary vertices.
Returns
- ----------
- :class: 'scipy.sparse.csr_matrix'
- sparse matrix (dimensions: #V x #V), laplace operator, each row i corresponding to v(i, :)
+ -------
+ csr_matrix
+ Sparse matrix (V x V), cotangent Laplacian.
+
"""
- # check_package_is_installed('igl')
- import igl
- v, f = mesh.to_vertices_and_faces()
- C = igl.cotmatrix(np.array(v), np.array(f))
+ V, F = mesh.to_vertices_and_faces()
+ vertices = np.array(V, dtype=np.float64)
+ faces = np.array(F, dtype=np.int32)
+
+ n_vertices = len(vertices)
+
+ # Get cotangent weights for each half-edge
+ # For each face, compute cotangents of all three angles
+ i0, i1, i2 = faces[:, 0], faces[:, 1], faces[:, 2]
+ v0, v1, v2 = vertices[i0], vertices[i1], vertices[i2]
+
+ # Edge vectors
+ e0 = v2 - v1 # opposite to vertex 0
+ e1 = v0 - v2 # opposite to vertex 1
+ e2 = v1 - v0 # opposite to vertex 2
+
+ # Cotangent of angle at vertex i = dot(e_j, e_k) / |cross(e_j, e_k)|
+ # where e_j and e_k are edges adjacent to vertex i
+ def cotangent(a: NDArray, b: NDArray) -> NDArray:
+ cross = np.cross(a, b)
+ cross_norm = np.linalg.norm(cross, axis=1)
+ dot = np.sum(a * b, axis=1)
+ # Avoid division by zero
+ cross_norm = np.maximum(cross_norm, 1e-10)
+ return dot / cross_norm
+
+ # Cotangent at each vertex of each face
+ cot0 = cotangent(-e2, e1) # angle at vertex 0
+ cot1 = cotangent(-e0, e2) # angle at vertex 1
+ cot2 = cotangent(-e1, e0) # angle at vertex 2
+
+ # Build sparse matrix
+ # L_ij += 0.5 * cot(angle opposite to edge ij)
+ row = np.concatenate([i0, i1, i1, i2, i2, i0])
+ col = np.concatenate([i1, i0, i2, i1, i0, i2])
+ data = np.concatenate([cot2, cot2, cot0, cot0, cot1, cot1]) * 0.5
+
+ L = csr_matrix((data, (row, col)), shape=(n_vertices, n_vertices))
+
+ # Make symmetric and set diagonal to negative row sum
+ L = L + L.T
+ L = L - scipy.sparse.diags(np.array(L.sum(axis=1)).flatten())
if fix_boundaries:
- # fix boundaries by putting the corresponding columns of the sparse matrix to 0
- C_dense = C.toarray()
- for i, (vkey, data) in enumerate(mesh.vertices(data=True)):
- if data['boundary'] > 0:
- C_dense[i][:] = np.zeros(len(v))
- C = scipy.sparse.csr_matrix(C_dense)
- return C
+ # Zero out rows for boundary vertices
+ boundary_mask = np.zeros(n_vertices, dtype=bool)
+ for i, (_vkey, vdata) in enumerate(mesh.vertices(data=True)):
+ if vdata.get('boundary', 0) > 0:
+ boundary_mask[i] = True
+ if np.any(boundary_mask):
+ L = L.tolil()
+ for i in np.where(boundary_mask)[0]:
+ L[i, :] = 0
+ L = L.tocsr()
-def get_mesh_cotans_igl(mesh):
- """
- Gets the cotangent entries of the mesh
+ return L
+def get_mesh_cotans(mesh: Mesh) -> NDArray:
+ """Get the cotangent entries of the mesh.
+
Parameters
----------
- mesh: :class: 'compas.datastructures.Mesh'
+ mesh : Mesh
+ The mesh (must be triangulated).
Returns
- ----------
- :class: 'np.array'
- Dimensions: F by 3 list of 1/2*cotangents corresponding angles
+ -------
+ NDArray
+ F x 3 array of 1/2*cotangents for corresponding angles.
+ Column i contains cotangent of angle at vertex i of each face.
+
"""
- # check_package_is_installed('igl')
- import igl
- v, f = mesh.to_vertices_and_faces()
- return igl.cotmatrix_entries(np.array(v), np.array(f))
+ V, F = mesh.to_vertices_and_faces()
+ vertices = np.array(V, dtype=np.float64)
+ faces = np.array(F, dtype=np.int32)
+ i0, i1, i2 = faces[:, 0], faces[:, 1], faces[:, 2]
+ v0, v1, v2 = vertices[i0], vertices[i1], vertices[i2]
-#######################################
-# networkx graph
+ e0 = v2 - v1
+ e1 = v0 - v2
+ e2 = v1 - v0
-def plot_networkx_graph(G):
- """
- Plots the graph G
+ def cotangent(a: NDArray, b: NDArray) -> NDArray:
+ cross = np.cross(a, b)
+ cross_norm = np.linalg.norm(cross, axis=1)
+ dot = np.sum(a * b, axis=1)
+ cross_norm = np.maximum(cross_norm, 1e-10)
+ return dot / cross_norm
+
+ cot0 = cotangent(-e2, e1)
+ cot1 = cotangent(-e0, e2)
+ cot2 = cotangent(-e1, e0)
+
+ return np.column_stack([cot0, cot1, cot2]) * 0.5
+
+
+def get_mesh_massmatrix(mesh: Mesh) -> csr_matrix:
+ """Get the mass matrix of the mesh (Voronoi area weights).
Parameters
----------
- G: networkx.Graph
+ mesh : Mesh
+ The mesh (must be triangulated).
+
+ Returns
+ -------
+ csr_matrix
+ Sparse diagonal matrix (V x V), vertex areas.
+
"""
+ V, F = mesh.to_vertices_and_faces()
+ vertices = np.array(V, dtype=np.float64)
+ faces = np.array(F, dtype=np.int32)
- plt.subplot(121)
- nx.draw(G, with_labels=True, font_weight='bold', node_color=range(len(list(G.nodes()))))
- plt.show()
+ n_vertices = len(vertices)
+
+ # Compute face areas
+ i0, i1, i2 = faces[:, 0], faces[:, 1], faces[:, 2]
+ v0, v1, v2 = vertices[i0], vertices[i1], vertices[i2]
+
+ cross = np.cross(v1 - v0, v2 - v0)
+ face_areas = 0.5 * np.linalg.norm(cross, axis=1)
+
+ # Distribute 1/3 of each face area to each vertex
+ vertex_areas = np.zeros(n_vertices)
+ np.add.at(vertex_areas, i0, face_areas / 3)
+ np.add.at(vertex_areas, i1, face_areas / 3)
+ np.add.at(vertex_areas, i2, face_areas / 3)
+
+ return scipy.sparse.diags(vertex_areas)
+
+
+# Backwards compatibility aliases
+get_mesh_cotmatrix_igl = get_mesh_cotmatrix
+get_mesh_cotans_igl = get_mesh_cotans
#######################################
# dict utils
-def point_list_to_dict(pts_list):
- """
- Turns a list of compas.geometry.Point into a dictionary, so that it can be saved to Json. Works identically for
- 3D vectors.
+def point_list_to_dict(pts_list: list[Point | Vector]) -> dict[int, list[float]]:
+ """Convert list of points/vectors to dict for JSON.
Parameters
----------
- pts_list: list, :class:`compas.geometry.Point` / :class:`compas.geometry.Vector`
+ pts_list : list[Point | Vector]
+ List of points or vectors.
Returns
- ----------
- dict: The dictionary of pts in the form { key=index : [x,y,z] }
+ -------
+ dict[int, list[float]]
+ Dict mapping index to [x, y, z].
+
"""
- data = {}
+ data: dict[int, list[float]] = {}
for i in range(len(pts_list)):
data[i] = list(pts_list[i])
return data
-def point_list_from_dict(data):
- """
- Turns a dictionary of pts to a list of Compas.geometry.Point. Works identically for 3D vectors.
+def point_list_from_dict(data: dict[Any, list[float]]) -> list[list[float]]:
+ """Convert dict of points to list of [x, y, z].
Parameters
----------
- dict: The dictionary of pts in the form { key=index : [x,y,z] }
+ data : dict[Any, list[float]]
+ Dict mapping keys to [x, y, z].
Returns
- ----------
- 2D list, [[x1, y1, z1], ... , [xn, yn, zn]]
+ -------
+ list[list[float]]
+ List of [x, y, z] coordinates.
+
"""
return [[data[i][0], data[i][1], data[i][2]] for i in data]
-# --- Flattened list of dictionary
-def flattened_list_of_dictionary(dictionary):
- """
- Turns the dictionary into a flat list
+def flattened_list_of_dictionary(dictionary: dict[Any, list[Any]]) -> list[Any]:
+ """Flatten dictionary values into a single list.
Parameters
----------
- dictionary: dict
+ dictionary : dict[Any, list[Any]]
+ Dictionary with list values.
Returns
- ----------
- list
+ -------
+ list[Any]
+ Flattened list.
+
"""
- flattened_list = []
+ flattened_list: list[Any] = []
for key in dictionary:
- [flattened_list.append(item) for item in dictionary[key]]
+ for item in dictionary[key]:
+ flattened_list.append(item)
return flattened_list
-def get_dict_key_from_value(dictionary, val):
- """
- Return the key of a dictionary that stores the val
+def get_dict_key_from_value(dictionary: dict[Any, Any], val: Any) -> Any | None:
+ """Return the key of a dictionary that stores the value.
Parameters
----------
- dictionary: dict
- val: anything that can be stored in a dictionary
- """
+ dictionary : dict
+ The dictionary to search.
+ val : Any
+ Value to find.
+ Returns
+ -------
+ Any | None
+ The key, or None if not found.
+
+ """
for key in dictionary:
value = dictionary[key]
if val == value:
return key
- return "key doesn't exist"
+ return None
-def find_next_printpoint(pp_dict, i, j, k):
+def find_next_printpoint(
+ printpoints: PrintPointsCollection, i: int, j: int, k: int
+) -> PrintPoint | None:
"""
Returns the next printpoint from the current printpoint if it exists, otherwise returns None.
+
+ Parameters
+ ----------
+ printpoints : PrintPointsCollection
+ The collection of printpoints.
+ i : int
+ Layer index.
+ j : int
+ Path index.
+ k : int
+ Printpoint index within the path.
+
+ Returns
+ -------
+ PrintPoint | None
+ The next printpoint or None if at the end.
+
"""
next_ppt = None
- layer_key, path_key = 'layer_%d' % i, 'path_%d' % j
- if k < len(pp_dict[layer_key][path_key]) - 1: # If there are more ppts in the current path, then take the next ppt
- next_ppt = pp_dict[layer_key][path_key][k + 1]
+ if k < len(printpoints[i][j]) - 1: # If there are more ppts in the current path
+ next_ppt = printpoints[i][j][k + 1]
else:
- if j < len(pp_dict[layer_key]) - 1: # Otherwise take the next path if there are more paths in the current layer
- next_ppt = pp_dict[layer_key]['path_%d' % (j + 1)][0]
+ if j < len(printpoints[i]) - 1: # Otherwise take the next path
+ next_ppt = printpoints[i][j + 1][0]
else:
- if i < len(pp_dict) - 1: # Otherwise take the next layer if there are more layers in the current slicer
- next_ppt = pp_dict['layer_%d' % (i + 1)]['path_0'][0]
+ if i < len(printpoints) - 1: # Otherwise take the next layer
+ next_ppt = printpoints[i + 1][0][0]
return next_ppt
-def find_previous_printpoint(pp_dict, layer_key, path_key, i, j, k):
+def find_previous_printpoint(
+ printpoints: PrintPointsCollection, i: int, j: int, k: int
+) -> PrintPoint | None:
"""
Returns the previous printpoint from the current printpoint if it exists, otherwise returns None.
+
+ Parameters
+ ----------
+ printpoints : PrintPointsCollection
+ The collection of printpoints.
+ i : int
+ Layer index.
+ j : int
+ Path index.
+ k : int
+ Printpoint index within the path.
+
+ Returns
+ -------
+ PrintPoint | None
+ The previous printpoint or None if at the start.
+
"""
prev_ppt = None
- if k > 0: # If not the first point in a path, take the previous point in the path
- prev_ppt = pp_dict[layer_key][path_key][k - 1]
+ if k > 0: # If not the first point in a path
+ prev_ppt = printpoints[i][j][k - 1]
else:
- if j > 0: # Otherwise take the last point of the previous path, if there are more paths in the current layer
- prev_ppt = pp_dict[layer_key]['path_%d' % (j - 1)][-1]
+ if j > 0: # Otherwise take the last point of the previous path
+ prev_ppt = printpoints[i][j - 1][-1]
else:
- if i > 0: # Otherwise take the last path of the previous layer if there are more layers in the current slicer
- last_path_key = len(pp_dict[layer_key]) - 1
- prev_ppt = pp_dict['layer_%d' % (i - 1)]['path_%d' % (last_path_key)][-1]
+ if i > 0: # Otherwise take the last path of the previous layer
+ prev_ppt = printpoints[i - 1][-1][-1]
return prev_ppt
#######################################
# control flow
-def interrupt():
+def interrupt() -> None:
"""
Interrupts the flow of the code while it is running.
It asks for the user to press a enter to continue or abort.
"""
-
value = input("Press enter to continue, Press 1 to abort ")
- print("")
- if isinstance(value, str):
- if value == '1':
- raise ValueError("Aborted")
+ if isinstance(value, str) and value == '1':
+ raise ValueError("Aborted")
#######################################
# load all files with name
-def get_all_files_with_name(startswith, endswith, DATA_PATH):
+def get_all_files_with_name(
+ startswith: str, endswith: str, DATA_PATH: str | Path
+) -> list[str]:
"""
Finds all the filenames in the DATA_PATH that start and end with the provided strings
@@ -588,20 +767,16 @@ def get_all_files_with_name(startswith, endswith, DATA_PATH):
----------
startswith: str
endswith: str
- DATA_PATH: str
+ DATA_PATH: str | Path
Returns
----------
- list, str
+ list[str]
All the filenames
"""
-
- files = []
- for file in os.listdir(DATA_PATH):
- if file.startswith(startswith) and file.endswith(endswith):
- files.append(file)
- print('')
- logger.info('Reloading : ' + str(files))
+ files = [f.name for f in Path(DATA_PATH).iterdir()
+ if f.name.startswith(startswith) and f.name.endswith(endswith)]
+ logger.info(f'Reloading: {files}')
return files
@@ -609,7 +784,7 @@ def get_all_files_with_name(startswith, endswith, DATA_PATH):
# check installation
-def check_package_is_installed(package_name):
+def check_package_is_installed(package_name: str) -> None:
""" Throws an error if igl python bindings are not installed in the current environment. """
packages = TerminalCommand('conda list').get_split_output_strings()
if package_name not in packages:
diff --git a/src/compas_slicer/visualization/__init__.py b/src/compas_slicer/visualization/__init__.py
new file mode 100644
index 00000000..2258e303
--- /dev/null
+++ b/src/compas_slicer/visualization/__init__.py
@@ -0,0 +1,9 @@
+"""Visualization utilities for slicing results."""
+
+from .visualization import plot_networkx_graph, should_visualize, visualize_slicer
+
+__all__ = [
+ "plot_networkx_graph",
+ "should_visualize",
+ "visualize_slicer",
+]
diff --git a/src/compas_slicer/visualization/visualization.py b/src/compas_slicer/visualization/visualization.py
new file mode 100644
index 00000000..5f488ce8
--- /dev/null
+++ b/src/compas_slicer/visualization/visualization.py
@@ -0,0 +1,91 @@
+"""Visualization utilities for compas_slicer using compas_viewer."""
+from __future__ import annotations
+
+import sys
+from typing import TYPE_CHECKING
+
+import networkx as nx
+
+if TYPE_CHECKING:
+ from compas.datastructures import Mesh
+
+ from compas_slicer.slicers import BaseSlicer
+
+__all__ = ["should_visualize", "visualize_slicer", "plot_networkx_graph"]
+
+
+def should_visualize() -> bool:
+ """Check if visualization should run.
+
+ Returns False when running under pytest.
+
+ Returns
+ -------
+ bool
+ True if visualization should be shown.
+
+ """
+ return "pytest" not in sys.modules
+
+
+def visualize_slicer(
+ slicer: BaseSlicer,
+ mesh: Mesh | None = None,
+ show_mesh: bool = True,
+ mesh_opacity: float = 0.3,
+) -> None:
+ """Visualize slicer toolpaths in compas_viewer.
+
+ Parameters
+ ----------
+ slicer : BaseSlicer
+ Slicer with layers containing paths.
+ mesh : Mesh, optional
+ Mesh to display alongside paths.
+ show_mesh : bool
+ If True, display the mesh.
+ mesh_opacity : float
+ Opacity for mesh display (0-1).
+
+ """
+ from compas.colors import Color
+ from compas.geometry import Polyline
+ from compas_viewer import Viewer
+
+ viewer = Viewer()
+
+ # Add mesh if provided
+ if mesh and show_mesh:
+ viewer.scene.add(mesh, opacity=mesh_opacity)
+
+ # Add paths as polylines with color gradient by layer
+ n_layers = len(slicer.layers)
+ for i, layer in enumerate(slicer.layers):
+ t = i / max(n_layers - 1, 1)
+ color = Color(t, 0.5, 1 - t) # Blue -> Purple gradient
+
+ for path in layer.paths:
+ if len(path.points) > 1:
+ pts = list(path.points)
+ if path.is_closed and pts[0] != pts[-1]:
+ pts.append(pts[0])
+ polyline = Polyline(pts)
+ viewer.scene.add(polyline, linecolor=color, linewidth=1)
+
+ viewer.show()
+
+
+def plot_networkx_graph(G: nx.Graph) -> None:
+ """Plot a networkx graph.
+
+ Parameters
+ ----------
+ G : nx.Graph
+ The graph to plot.
+
+ """
+ import matplotlib.pyplot as plt
+
+ plt.subplot(121)
+ nx.draw(G, with_labels=True, font_weight='bold', node_color=range(len(list(G.nodes()))))
+ plt.show()
diff --git a/src/compas_slicer_ghpython/__init__.py b/src/compas_slicer_ghpython/__init__.py
index c0b8ef69..4874d886 100644
--- a/src/compas_slicer_ghpython/__init__.py
+++ b/src/compas_slicer_ghpython/__init__.py
@@ -1,3 +1 @@
-from __future__ import absolute_import
-
-__all_plugins__ = ['compas_slicer_ghpython.install']
+__all_plugins__ = ["compas_slicer_ghpython.install"]
diff --git a/src/compas_slicer_ghpython/install.py b/src/compas_slicer_ghpython/install.py
index a94ac6dc..c17b6653 100644
--- a/src/compas_slicer_ghpython/install.py
+++ b/src/compas_slicer_ghpython/install.py
@@ -1,50 +1,58 @@
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-
import argparse
-import glob
-import os
import shutil
+from pathlib import Path
-import compas
import compas.plugins
-import compas_ghpython
-import compas_rhino
+
+try:
+ import compas_ghpython
+ import compas_rhino # noqa: F401
+
+ HAS_RHINO_DEPS = True
+except ImportError:
+ HAS_RHINO_DEPS = False
-@compas.plugins.plugin(category='install')
+@compas.plugins.plugin(category="install")
def installable_rhino_packages():
- return ['compas_slicer_ghpython']
+ return ["compas_slicer_ghpython"]
-@compas.plugins.plugin(category='install')
+@compas.plugins.plugin(category="install")
def after_rhino_install(installed_packages):
- if 'compas_slicer_ghpython' not in installed_packages:
+ if "compas_slicer_ghpython" not in installed_packages:
return []
+ if not HAS_RHINO_DEPS:
+ return [("compas_slicer_ghpython", "compas_rhino not installed, skipping GH components", False)]
+
results = []
try:
version = _get_version_from_args()
dstdir = _get_grasshopper_userobjects_path(version)
- srcdir = os.path.join(os.path.dirname(__file__), 'gh_components')
- userobjects = glob.glob(os.path.join(srcdir, '*.ghuser'))
+ srcdir = Path(__file__).parent / "gh_components"
+ userobjects = list(srcdir.glob("*.ghuser"))
for src in userobjects:
- dst = os.path.join(dstdir, os.path.basename(src))
+ dst = Path(dstdir) / src.name
shutil.copyfile(src, dst)
- results.append(('compas_slicer_ghpython', 'Installed {} GH User Objects on {}'.format(len(userobjects), dstdir), True))
- except PermissionError:
- raise Exception('Please close first all instances of Rhino and then rerun the command')
+ results.append(
+ ("compas_slicer_ghpython", f"Installed {len(userobjects)} GH User Objects on {dstdir}", True)
+ )
+ except PermissionError as err:
+ raise Exception("Please close all instances of Rhino first and then rerun the command") from err
return results
-@compas.plugins.plugin(category='install')
+@compas.plugins.plugin(category="install")
def after_rhino_uninstall(installed_packages):
- if 'compas_slicer_ghpython' not in installed_packages:
+ if "compas_slicer_ghpython" not in installed_packages:
+ return []
+
+ if not HAS_RHINO_DEPS:
return []
results = []
@@ -52,30 +60,29 @@ def after_rhino_uninstall(installed_packages):
try:
version = _get_version_from_args()
dstdir = _get_grasshopper_userobjects_path(version)
- srcdir = os.path.join(os.path.dirname(__file__), 'gh_components')
- userobjects = glob.glob(os.path.join(srcdir, '*.ghuser'))
+ srcdir = Path(__file__).parent / "gh_components"
+ userobjects = list(srcdir.glob("*.ghuser"))
for src in userobjects:
- dst = os.path.join(dstdir, os.path.basename(src))
- os.remove(dst)
+ dst = Path(dstdir) / src.name
+ if dst.exists():
+ dst.unlink()
- results.append(('compas_slicer_ghpython', 'Uninstalled {} GH User Objects'.format(len(userobjects)), True))
- except PermissionError:
- raise Exception('Please close first all instances of Rhino and then rerun the command')
+ results.append(("compas_slicer_ghpython", f"Uninstalled {len(userobjects)} GH User Objects", True))
+ except PermissionError as err:
+ raise Exception("Please close all instances of Rhino first and then rerun the command") from err
return results
def _get_version_from_args():
parser = argparse.ArgumentParser()
- parser.add_argument('-v', '--version', choices=['5.0', '6.0', '7.0'], default='6.0')
- args = parser.parse_args()
- return compas_rhino._check_rhino_version(args.version)
+ parser.add_argument("-v", "--version", choices=["6.0", "7.0", "8.0"], default="8.0")
+ args, _ = parser.parse_known_args()
+ return args.version
-# TODO: Remove once this PR is released: https://github.com/compas-dev/compas/pull/802
-# For now, we just fake it get_grasshopper_library_path()
def _get_grasshopper_userobjects_path(version):
lib_path = compas_ghpython.get_grasshopper_library_path(version)
- userobjects_path = lib_path.split(os.path.sep)[:-1] + ['UserObjects']
- return os.path.sep.join(userobjects_path)
+ userobjects_path = Path(lib_path).parent / "UserObjects"
+ return str(userobjects_path)
diff --git a/src/compas_slicer_ghpython/visualization.py b/src/compas_slicer_ghpython/visualization.py
index 67cb7e82..c1af17fd 100644
--- a/src/compas_slicer_ghpython/visualization.py
+++ b/src/compas_slicer_ghpython/visualization.py
@@ -1,13 +1,13 @@
-import os
import json
+from pathlib import Path
+
+import Rhino.Geometry as rg
import rhinoscriptsyntax as rs
from compas.datastructures import Mesh
-import Rhino.Geometry as rg
-from compas_ghpython.artists import MeshArtist
from compas.geometry import Frame
+from compas_ghpython.artists import MeshArtist
from compas_ghpython.utilities import list_to_ghtree
-
#######################################
# --- Slicer
@@ -23,7 +23,7 @@ def load_slicer(path, folder_name, json_name):
if data:
if 'mesh' in data:
- compas_mesh = Mesh.from_data(data['mesh'])
+ compas_mesh = Mesh.__from_data__(data['mesh'])
artist = MeshArtist(compas_mesh)
artist.show_mesh = True
artist.show_vertices = False
@@ -59,7 +59,7 @@ def load_slicer(path, folder_name, json_name):
else:
print('No layers have been saved in the json file. Is this the correct json?')
- print('The slicer contains %d layers. ' % len(paths_nested_list))
+ print(f'The slicer contains {len(paths_nested_list)} layers. ')
paths_nested_list = list_to_ghtree(paths_nested_list)
return mesh, paths_nested_list, are_closed, all_points
@@ -112,7 +112,7 @@ def load_nested_printpoints(path, folder_name, json_name, load_frames, load_laye
ppt = PrintPointGH(rg.Point3d(ppt_data["point"][0], ppt_data["point"][1], ppt_data["point"][2]))
if load_frames:
- compas_frame = Frame.from_data(ppt_data["frame"])
+ compas_frame = Frame.__from_data__(ppt_data["frame"])
pt, x_axis, y_axis = compas_frame.point, compas_frame.xaxis, compas_frame.yaxis
ppt.frame = rs.PlaneFromFrame(pt, x_axis, y_axis)
@@ -173,7 +173,7 @@ def load_printpoints(path, folder_name, json_name):
point = rg.Point3d(data_point["point"][0], data_point["point"][1], data_point["point"][2])
points.append(point)
- compas_frame = Frame.from_data(data_point["frame"])
+ compas_frame = Frame.__from_data__(data_point["frame"])
pt, x_axis, y_axis = compas_frame.point, compas_frame.xaxis, compas_frame.yaxis
frame = rs.PlaneFromFrame(pt, x_axis, y_axis)
frames.append(frame)
@@ -322,8 +322,9 @@ def tool_visualization(origin_coords, mesh, planes, i):
def load_multiple_meshes(starts_with, ends_with, path, folder_name):
""" Load all the meshes that have the specified name, and print them in different colors. """
- filenames = get_files_with_name(starts_with, ends_with, os.path.join(path, folder_name, 'output'))
- meshes = [Mesh.from_obj(os.path.join(path, folder_name, 'output', filename)) for filename in filenames]
+ output_dir = Path(path) / folder_name / 'output'
+ filenames = get_files_with_name(starts_with, ends_with, str(output_dir))
+ meshes = [Mesh.from_obj(str(output_dir / filename)) for filename in filenames]
loaded_meshes = []
for i, m in enumerate(meshes):
@@ -451,29 +452,27 @@ def missing_input():
def load_json_file(path, folder_name, json_name, in_output_folder=True):
""" Loads data from json. """
-
+ base = Path(path) / folder_name
if in_output_folder:
- filename = os.path.join(os.path.join(path), folder_name, 'output', json_name)
+ filename = base / 'output' / json_name
else:
- filename = os.path.join(os.path.join(path), folder_name, json_name)
+ filename = base / json_name
data = None
- if os.path.isfile(filename):
- with open(filename, 'r') as f:
- data = json.load(f)
- print("Loaded Json: '" + filename + "'")
+ if filename.is_file():
+ data = json.loads(filename.read_text())
+ print(f"Loaded Json: '{filename}'")
else:
- print("Attention! Filename: '" + filename + "' does not exist. ")
+ print(f"Attention! Filename: '{filename}' does not exist. ")
return data
def save_json_file(data, path, folder_name, json_name):
""" Saves data to json. """
- filename = os.path.join(path, folder_name, json_name)
- with open(filename, 'w') as f:
- f.write(json.dumps(data, indent=3, sort_keys=True))
- print("Saved to Json: '" + filename + "'")
+ filename = Path(path) / folder_name / json_name
+ filename.write_text(json.dumps(data, indent=3, sort_keys=True))
+ print(f"Saved to Json: '{filename}'")
def get_closest_point_index(pt, pts):
@@ -492,11 +491,9 @@ def distance_of_pt_from_crv(pt, crv):
def get_files_with_name(startswith, endswith, DATA_PATH):
""" Find all files with the specified start and end in the data path. """
- files = []
- for file in os.listdir(DATA_PATH):
- if file.startswith(startswith) and file.endswith(endswith):
- files.append(file)
- print('Found %d files with the given criteria : ' % len(files) + str(files))
+ files = [f.name for f in Path(DATA_PATH).iterdir()
+ if f.name.startswith(startswith) and f.name.endswith(endswith)]
+ print(f'Found {len(files)} files with the given criteria : {files}')
return files
diff --git a/src/compas_slicer_ghpython/yak_template/icon.png b/src/compas_slicer_ghpython/yak_template/icon.png
new file mode 100644
index 00000000..1cf13b9f
Binary files /dev/null and b/src/compas_slicer_ghpython/yak_template/icon.png differ
diff --git a/src/compas_slicer_ghpython/yak_template/manifest.yml b/src/compas_slicer_ghpython/yak_template/manifest.yml
new file mode 100644
index 00000000..cd36a42b
--- /dev/null
+++ b/src/compas_slicer_ghpython/yak_template/manifest.yml
@@ -0,0 +1,9 @@
+name: compas_slicer
+version: {{ version }}
+authors:
+ - Ioanna Mitropoulou
+ - Joris Burger
+ - Jelle Feringa
+description: Slicing package for FDM 3D Printing with COMPAS - Grasshopper components for visualization and data loading.
+url: "https://github.com/compas-dev/compas_slicer"
+keywords: ["COMPAS", "slicer", "3D printing", "FDM", "Grasshopper", "fabrication"]
diff --git a/tasks.py b/tasks.py
index 4bb951c0..8618917a 100644
--- a/tasks.py
+++ b/tasks.py
@@ -1,25 +1,17 @@
# -*- coding: utf-8 -*-
-from __future__ import print_function
-
import contextlib
import glob
import os
import sys
from shutil import rmtree
-from invoke import Exit
-from invoke import task
-
-try:
- input = raw_input
-except NameError:
- pass
-
+from compas_invocations2.grasshopper import publish_yak, yakerize
+from invoke import Collection, Exit, task
BASE_FOLDER = os.path.dirname(__file__)
-class Log(object):
+class Log:
def __init__(self, out=sys.stdout, err=sys.stderr):
self.out = out
self.err = err
@@ -30,14 +22,14 @@ def flush(self):
def write(self, message):
self.flush()
- self.out.write(message + '\n')
+ self.out.write(message + "\n")
self.out.flush()
def info(self, message):
- self.write('[INFO] %s' % message)
+ self.write(f"[INFO] {message}")
def warn(self, message):
- self.write('[WARN] %s' % message)
+ self.write(f"[WARN] {message}")
log = Log()
@@ -47,78 +39,76 @@ def confirm(question):
while True:
response = input(question).lower().strip()
- if not response or response in ('n', 'no'):
+ if not response or response in ("n", "no"):
return False
- if response in ('y', 'yes'):
+ if response in ("y", "yes"):
return True
- print('Focus, kid! It is either (y)es or (n)o', file=sys.stderr)
+ print("Focus, kid! It is either (y)es or (n)o", file=sys.stderr)
@task(default=True)
def help(ctx):
"""Lists available tasks and usage."""
- ctx.run('invoke --list')
+ ctx.run("invoke --list")
log.write('Use "invoke -h " to get detailed help for a task.')
-@task(help={
- 'docs': 'True to clean up generated documentation, otherwise False',
- 'bytecode': 'True to clean up compiled python files, otherwise False.',
- 'builds': 'True to clean up build/packaging artifacts, otherwise False.'})
+@task(
+ help={
+ "docs": "True to clean up generated documentation, otherwise False",
+ "bytecode": "True to clean up compiled python files, otherwise False.",
+ "builds": "True to clean up build/packaging artifacts, otherwise False.",
+ }
+)
def clean(ctx, docs=True, bytecode=True, builds=True):
"""Cleans the local copy from compiled artifacts."""
with chdir(BASE_FOLDER):
- if builds:
- ctx.run('python setup.py clean')
-
if bytecode:
for root, dirs, files in os.walk(BASE_FOLDER):
for f in files:
- if f.endswith('.pyc'):
+ if f.endswith(".pyc"):
os.remove(os.path.join(root, f))
- if '.git' in dirs:
- dirs.remove('.git')
+ if ".git" in dirs:
+ dirs.remove(".git")
folders = []
if docs:
- folders.append('docs/api/generated')
+ folders.append("docs/api/generated")
- folders.append('dist/')
+ folders.append("dist/")
if bytecode:
- for t in ('src', 'tests'):
- folders.extend(glob.glob('{}/**/__pycache__'.format(t), recursive=True))
+ for t in ("src", "tests"):
+ folders.extend(glob.glob(f"{t}/**/__pycache__", recursive=True))
if builds:
- folders.append('build/')
- folders.append('src/compas_slicer.egg-info/')
+ folders.append("build/")
+ folders.append("src/compas_slicer.egg-info/")
for folder in folders:
rmtree(os.path.join(BASE_FOLDER, folder), ignore_errors=True)
-@task(help={
- 'rebuild': 'True to clean all previously built docs before starting, otherwise False.',
- 'doctest': 'True to run doctests, otherwise False.',
- 'check_links': 'True to check all web links in docs for validity, otherwise False.'})
-def docs(ctx, doctest=False, rebuild=True, check_links=False):
- """Builds package's HTML documentation."""
-
- if rebuild:
- clean(ctx)
-
+@task(
+ help={
+ "serve": "True to serve docs locally, otherwise just build.",
+ "strict": "True to fail on warnings, otherwise False.",
+ }
+)
+def docs(ctx, serve=False, strict=True):
+ """Builds package's HTML documentation using MkDocs."""
with chdir(BASE_FOLDER):
- if doctest:
- ctx.run('sphinx-build -b doctest docs dist/docs')
-
- ctx.run('sphinx-build -b html -E docs dist/docs')
-
- if check_links:
- ctx.run('sphinx-build -b linkcheck docs dist/docs')
+ if serve:
+ ctx.run("mkdocs serve")
+ else:
+ cmd = "mkdocs build"
+ if strict:
+ cmd += " --strict"
+ ctx.run(cmd)
@task()
@@ -126,80 +116,94 @@ def check(ctx):
"""Check the consistency of documentation, coding style and a few other things."""
with chdir(BASE_FOLDER):
- log.write('Checking MANIFEST.in...')
- ctx.run('check-manifest')
-
- log.write('Checking metadata...')
- ctx.run('python setup.py check --strict --metadata')
-
- log.write('Running flake8 python linter...')
- ctx.run('flake8 --count --statistics src tests')
+ log.write("Running ruff linter...")
+ ctx.run("ruff check src tests")
- # log.write('Checking python imports...')
- # ctx.run('isort --check-only --diff --recursive src tests setup.py')
+ log.write("Running ruff formatter check...")
+ ctx.run("ruff format --check src tests")
-@task(help={
- 'checks': 'True to run all checks before testing, otherwise False.'})
+@task(help={"checks": "True to run all checks before testing, otherwise False."})
def test(ctx, checks=False, doctest=False):
"""Run all tests."""
if checks:
check(ctx)
with chdir(BASE_FOLDER):
- cmd = ['pytest']
+ cmd = ["pytest"]
if doctest:
- cmd.append('--doctest-modules')
+ cmd.append("--doctest-modules")
+
+ ctx.run(" ".join(cmd))
- ctx.run(' '.join(cmd))
@task()
def lint(ctx):
- """Check the consistency of coding style."""
- log.write('Running flake8 python linter...')
- ctx.run('flake8 src')
-
+ """Check the consistency of coding style with ruff."""
+ log.write("Running ruff linter...")
+ ctx.run("ruff check src/")
+
+
+@task()
+def format(ctx):
+ """Format code with ruff."""
+ log.write("Running ruff formatter...")
+ ctx.run("ruff format src/ tests/")
+ ctx.run("ruff check --fix src/ tests/")
+
+
+@task()
+def typecheck(ctx):
+ """Run type checking with mypy."""
+ log.write("Running mypy type checker...")
+ ctx.run("mypy src/compas_slicer --ignore-missing-imports")
+
@task
def prepare_changelog(ctx):
"""Prepare changelog for next release."""
- UNRELEASED_CHANGELOG_TEMPLATE = '\nUnreleased\n----------\n\n**Added**\n\n**Changed**\n\n**Fixed**\n\n**Deprecated**\n\n**Removed**\n'
+ UNRELEASED_CHANGELOG_TEMPLATE = "\nUnreleased\n----------\n\n**Added**\n\n**Changed**\n\n**Fixed**\n\n**Deprecated**\n\n**Removed**\n"
with chdir(BASE_FOLDER):
# Preparing changelog for next release
- with open('CHANGELOG.rst', 'r+') as changelog:
+ with open("CHANGELOG.rst", "r+") as changelog:
content = changelog.read()
- start_index = content.index('----------')
- start_index = content.rindex('\n', 0, start_index - 1)
- last_version = content[start_index:start_index + 11].strip()
+ start_index = content.index("----------")
+ start_index = content.rindex("\n", 0, start_index - 1)
+ last_version = content[start_index : start_index + 11].strip()
- if last_version == 'Unreleased':
- log.write('Already up-to-date')
+ if last_version == "Unreleased":
+ log.write("Already up-to-date")
return
changelog.seek(0)
- changelog.write(content[0:start_index] + UNRELEASED_CHANGELOG_TEMPLATE + content[start_index:])
+ changelog.write(
+ content[0:start_index]
+ + UNRELEASED_CHANGELOG_TEMPLATE
+ + content[start_index:]
+ )
ctx.run('git add CHANGELOG.rst && git commit -m "Prepare changelog for next release"')
-
-
-@task(help={
- 'release_type': 'Type of release follows semver rules. Must be one of: major, minor, patch.'})
+@task(
+ help={
+ "release_type": "Type of release follows semver rules. Must be one of: major, minor, patch."
+ }
+)
def release(ctx, release_type):
"""Releases the project in one swift command!"""
- if release_type not in ('patch', 'minor', 'major'):
- raise Exit('The release type parameter is invalid.\nMust be one of: major, minor, patch')
+ if release_type not in ("patch", "minor", "major"):
+ raise Exit("The release type parameter is invalid.\nMust be one of: major, minor, patch")
# Run checks
- ctx.run('invoke check test')
+ ctx.run("invoke check test")
# Bump version and git tag it
- ctx.run('bump2version %s --verbose' % release_type)
+ ctx.run(f"bump2version {release_type} --verbose")
# Build project
- ctx.run('python setup.py clean --all sdist bdist_wheel')
+ ctx.run("python -m build")
# Prepare changelog for next release
prepare_changelog(ctx)
@@ -208,10 +212,12 @@ def release(ctx, release_type):
clean(ctx)
# Upload to pypi
- if confirm('Everything is ready. You are about to push to git which will trigger a release to pypi.org. Are you sure? [y/N]'):
- ctx.run('git push --tags && git push')
+ if confirm(
+ "Everything is ready. You are about to push to git which will trigger a release to pypi.org. Are you sure? [y/N]"
+ ):
+ ctx.run("git push --tags && git push")
else:
- raise Exit('You need to manually revert the tag/commits created.')
+ raise Exit("You need to manually revert the tag/commits created.")
@contextlib.contextmanager
@@ -223,3 +229,19 @@ def chdir(dirname=None):
yield
finally:
os.chdir(current_dir)
+
+
+# Create collection and add compas_invocations2 tasks
+ns = Collection()
+ns.add_task(yakerize)
+ns.add_task(publish_yak)
+ns.add_task(help)
+ns.add_task(clean)
+ns.add_task(docs)
+ns.add_task(check)
+ns.add_task(test)
+ns.add_task(lint)
+ns.add_task(format)
+ns.add_task(typecheck)
+ns.add_task(prepare_changelog)
+ns.add_task(release)
diff --git a/temp/PLACEHOLDER b/temp/PLACEHOLDER
deleted file mode 100644
index 0a0022e9..00000000
--- a/temp/PLACEHOLDER
+++ /dev/null
@@ -1,2 +0,0 @@
-# container for temorary files
-# these will be ignored by the version control system
diff --git a/tests/test_examples.py b/tests/test_examples.py
new file mode 100644
index 00000000..b20cb06d
--- /dev/null
+++ b/tests/test_examples.py
@@ -0,0 +1,27 @@
+import sys
+from pathlib import Path
+
+import pytest
+
+EXAMPLES_DIR = Path(__file__).parent.parent / 'examples'
+
+examples = [
+ ('1_planar_slicing_simple', 'example_1_planar_slicing_simple'),
+ ('2_curved_slicing', 'ex2_curved_slicing'),
+ ('3_planar_slicing_vertical_sorting', 'example_3_planar_vertical_sorting'),
+ ('4_gcode_generation', 'example_4_gcode'),
+ ('5_non_planar_slicing_on_custom_base', 'scalar_field_slicing'),
+ ('6_attributes_transfer', 'example_6_attributes_transfer'),
+]
+
+
+@pytest.mark.parametrize('folder,module', examples)
+def test_example(folder, module):
+ """Run example as integration test."""
+ example_path = str(EXAMPLES_DIR / folder)
+ sys.path.insert(0, example_path)
+ try:
+ mod = __import__(module)
+ mod.main()
+ finally:
+ sys.path.remove(example_path)
diff --git a/tests/test_performance.py b/tests/test_performance.py
new file mode 100644
index 00000000..4e6b825d
--- /dev/null
+++ b/tests/test_performance.py
@@ -0,0 +1,295 @@
+"""Performance benchmarks and regression tests for compas_slicer.
+
+Run benchmarks:
+ pytest tests/test_performance.py --benchmark-only
+
+Save baseline:
+ pytest tests/test_performance.py --benchmark-save=baseline
+
+Compare to baseline:
+ pytest tests/test_performance.py --benchmark-compare=baseline
+
+Fail on regression (>20% slower):
+ pytest tests/test_performance.py --benchmark-compare=baseline --benchmark-compare-fail=mean:20%
+"""
+
+import numpy as np
+import pytest
+from compas.datastructures import Mesh
+from compas.geometry import Sphere
+
+from compas_slicer._numpy_ops import (
+ batch_closest_points,
+ edge_gradient_from_vertex_gradient,
+ face_gradient_from_scalar_field,
+ min_distances_to_set,
+ per_vertex_divergence,
+ vectorized_distances,
+ vertex_gradient_from_face_gradient,
+)
+
+
+# =============================================================================
+# Fixtures
+# =============================================================================
+
+
+@pytest.fixture
+def small_mesh():
+ """Small mesh for quick tests (~200 faces)."""
+ sphere = Sphere(5)
+ mesh = Mesh.from_shape(sphere, u=10, v=10)
+ mesh.quads_to_triangles()
+ return mesh
+
+
+@pytest.fixture
+def medium_mesh():
+ """Medium mesh for benchmarks (~2k faces)."""
+ sphere = Sphere(5)
+ mesh = Mesh.from_shape(sphere, u=32, v=32)
+ mesh.quads_to_triangles()
+ return mesh
+
+
+@pytest.fixture
+def large_mesh():
+ """Large mesh for stress testing (~8k faces)."""
+ sphere = Sphere(5)
+ mesh = Mesh.from_shape(sphere, u=64, v=64)
+ mesh.quads_to_triangles()
+ return mesh
+
+
+def mesh_to_arrays(mesh):
+ """Convert COMPAS mesh to numpy arrays."""
+ V = np.array([mesh.vertex_coordinates(v) for v in mesh.vertices()], dtype=np.float64)
+ F = np.array([mesh.face_vertices(f) for f in mesh.faces()], dtype=np.intp)
+ edges = np.array(list(mesh.edges()), dtype=np.intp)
+ face_normals = np.array([mesh.face_normal(f) for f in mesh.faces()], dtype=np.float64)
+ face_areas = np.array([mesh.face_area(f) for f in mesh.faces()], dtype=np.float64)
+ return V, F, edges, face_normals, face_areas
+
+
+# =============================================================================
+# Correctness Tests (run always)
+# =============================================================================
+
+
+class TestNumpyOpsCorrectness:
+ """Test that vectorized ops produce correct results."""
+
+ def test_batch_closest_points(self):
+ """Test KDTree-based closest point search."""
+ query = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]], dtype=np.float64)
+ target = np.array([[0.1, 0, 0], [1, 1, 1.1], [5, 5, 5]], dtype=np.float64)
+
+ indices, distances = batch_closest_points(query, target)
+
+ assert indices[0] == 0 # closest to [0.1, 0, 0]
+ assert indices[1] == 1 # closest to [1, 1, 1.1]
+ assert distances[0] == pytest.approx(0.1, abs=1e-6)
+
+ def test_vectorized_distances(self):
+ """Test distance matrix computation."""
+ p1 = np.array([[0, 0, 0], [1, 0, 0]], dtype=np.float64)
+ p2 = np.array([[0, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=np.float64)
+
+ dists = vectorized_distances(p1, p2)
+
+ assert dists.shape == (2, 3)
+ assert dists[0, 0] == pytest.approx(0.0)
+ assert dists[0, 1] == pytest.approx(1.0)
+ assert dists[1, 0] == pytest.approx(1.0)
+
+ def test_vertex_gradient_from_face_gradient(self, small_mesh):
+ """Test vertex gradient computation."""
+ V, F, _, _, face_areas = mesh_to_arrays(small_mesh)
+
+ # Create simple face gradients (pointing up)
+ face_gradient = np.zeros((len(F), 3), dtype=np.float64)
+ face_gradient[:, 2] = 1.0 # all gradients point up
+
+ result = vertex_gradient_from_face_gradient(V, F, face_gradient, face_areas)
+
+ assert result.shape == (len(V), 3)
+ # All vertex gradients should point up (z-component positive)
+ assert np.all(result[:, 2] > 0)
+
+ def test_edge_gradient_from_vertex_gradient(self, small_mesh):
+ """Test edge gradient computation."""
+ V, F, edges, _, _ = mesh_to_arrays(small_mesh)
+
+ # Create vertex gradients
+ vertex_gradient = np.ones((len(V), 3), dtype=np.float64)
+
+ result = edge_gradient_from_vertex_gradient(edges, vertex_gradient)
+
+ assert result.shape == (len(edges), 3)
+ # Each edge gradient should be sum of two vertex gradients = [2, 2, 2]
+ np.testing.assert_array_almost_equal(result, np.full_like(result, 2.0))
+
+ def test_face_gradient_from_scalar_field(self, small_mesh):
+ """Test face gradient from scalar field."""
+ V, F, _, face_normals, face_areas = mesh_to_arrays(small_mesh)
+
+ # Use z-coordinate as scalar field (gradient should point in z)
+ scalar_field = V[:, 2].copy()
+
+ result = face_gradient_from_scalar_field(V, F, scalar_field, face_normals, face_areas)
+
+ assert result.shape == (len(F), 3)
+ # Gradient of z should have significant z-component
+ assert np.mean(np.abs(result[:, 2])) > 0
+
+ def test_per_vertex_divergence(self, small_mesh):
+ """Test divergence computation."""
+ V, F, _, _, _ = mesh_to_arrays(small_mesh)
+
+ # Create uniform gradient field
+ X = np.ones((len(F), 3), dtype=np.float64)
+ cotans = np.ones((len(F), 3), dtype=np.float64) * 0.5
+
+ result = per_vertex_divergence(V, F, X, cotans)
+
+ assert result.shape == (len(V),)
+ # Result should be finite
+ assert np.all(np.isfinite(result))
+
+
+# =============================================================================
+# Benchmark Tests
+# =============================================================================
+
+
+class TestBenchmarkDistances:
+ """Benchmark distance computations."""
+
+ def test_batch_closest_1k_points(self, benchmark):
+ """Benchmark: find closest points for 1k queries in 1k targets."""
+ np.random.seed(42)
+ query = np.random.rand(1000, 3).astype(np.float64)
+ target = np.random.rand(1000, 3).astype(np.float64)
+
+ result = benchmark(batch_closest_points, query, target)
+
+ assert len(result[0]) == 1000
+
+ def test_min_distances_1k_points(self, benchmark):
+ """Benchmark: minimum distances for 1k points."""
+ np.random.seed(42)
+ query = np.random.rand(1000, 3).astype(np.float64)
+ target = np.random.rand(1000, 3).astype(np.float64)
+
+ result = benchmark(min_distances_to_set, query, target)
+
+ assert len(result) == 1000
+
+ def test_distance_matrix_500x500(self, benchmark):
+ """Benchmark: full distance matrix 500x500."""
+ np.random.seed(42)
+ p1 = np.random.rand(500, 3).astype(np.float64)
+ p2 = np.random.rand(500, 3).astype(np.float64)
+
+ result = benchmark(vectorized_distances, p1, p2)
+
+ assert result.shape == (500, 500)
+
+
+class TestBenchmarkGradients:
+ """Benchmark gradient computations."""
+
+ def test_vertex_gradient_medium_mesh(self, benchmark, medium_mesh):
+ """Benchmark: vertex gradient on medium mesh."""
+ V, F, _, _, face_areas = mesh_to_arrays(medium_mesh)
+ face_gradient = np.random.rand(len(F), 3).astype(np.float64)
+
+ result = benchmark(vertex_gradient_from_face_gradient, V, F, face_gradient, face_areas)
+
+ assert result.shape == (len(V), 3)
+
+ def test_face_gradient_medium_mesh(self, benchmark, medium_mesh):
+ """Benchmark: face gradient from scalar field on medium mesh."""
+ V, F, _, face_normals, face_areas = mesh_to_arrays(medium_mesh)
+ scalar_field = V[:, 2].copy()
+
+ result = benchmark(
+ face_gradient_from_scalar_field, V, F, scalar_field, face_normals, face_areas
+ )
+
+ assert result.shape == (len(F), 3)
+
+ def test_divergence_medium_mesh(self, benchmark, medium_mesh):
+ """Benchmark: divergence on medium mesh."""
+ V, F, _, _, _ = mesh_to_arrays(medium_mesh)
+ X = np.random.rand(len(F), 3).astype(np.float64)
+ cotans = np.random.rand(len(F), 3).astype(np.float64)
+
+ result = benchmark(per_vertex_divergence, V, F, X, cotans)
+
+ assert result.shape == (len(V),)
+
+
+class TestBenchmarkLargeMesh:
+ """Stress tests on large meshes."""
+
+ def test_vertex_gradient_large_mesh(self, benchmark, large_mesh):
+ """Benchmark: vertex gradient on large mesh (~8k faces)."""
+ V, F, _, _, face_areas = mesh_to_arrays(large_mesh)
+ face_gradient = np.random.rand(len(F), 3).astype(np.float64)
+
+ result = benchmark(vertex_gradient_from_face_gradient, V, F, face_gradient, face_areas)
+
+ assert result.shape[0] == len(V)
+
+ def test_batch_closest_5k_points(self, benchmark):
+ """Benchmark: closest points for 5k queries."""
+ np.random.seed(42)
+ query = np.random.rand(5000, 3).astype(np.float64)
+ target = np.random.rand(5000, 3).astype(np.float64)
+
+ result = benchmark(batch_closest_points, query, target)
+
+ assert len(result[0]) == 5000
+
+
+# =============================================================================
+# Regression Guards
+# =============================================================================
+
+
+class TestPerformanceRegression:
+ """Tests that fail if performance regresses significantly.
+
+ These use explicit timing assertions as a fallback when
+ pytest-benchmark comparison is not available.
+ """
+
+ def test_batch_closest_should_be_fast(self):
+ """Closest point search for 1k points should complete in < 50ms."""
+ import time
+
+ np.random.seed(42)
+ query = np.random.rand(1000, 3).astype(np.float64)
+ target = np.random.rand(1000, 3).astype(np.float64)
+
+ start = time.perf_counter()
+ for _ in range(10):
+ batch_closest_points(query, target)
+ elapsed = (time.perf_counter() - start) / 10
+
+ assert elapsed < 0.05, f"batch_closest_points too slow: {elapsed*1000:.1f}ms"
+
+ def test_vertex_gradient_should_be_fast(self, medium_mesh):
+ """Vertex gradient on 2k face mesh should complete in < 20ms."""
+ import time
+
+ V, F, _, _, face_areas = mesh_to_arrays(medium_mesh)
+ face_gradient = np.random.rand(len(F), 3).astype(np.float64)
+
+ start = time.perf_counter()
+ for _ in range(10):
+ vertex_gradient_from_face_gradient(V, F, face_gradient, face_areas)
+ elapsed = (time.perf_counter() - start) / 10
+
+ assert elapsed < 0.02, f"vertex_gradient too slow: {elapsed*1000:.1f}ms"
diff --git a/tests/test_planar_print_organization_horizontal_layers.py b/tests/test_planar_print_organization_horizontal_layers.py
index 6d2bb28f..607397d5 100644
--- a/tests/test_planar_print_organization_horizontal_layers.py
+++ b/tests/test_planar_print_organization_horizontal_layers.py
@@ -1,6 +1,8 @@
-import os
-import compas_slicer
+from pathlib import Path
+
import numpy as np
+
+import compas_slicer
from compas_slicer.slicers import PlanarSlicer
from compas_slicer.post_processing import generate_brim
from compas_slicer.post_processing import simplify_paths_rdp
@@ -10,20 +12,17 @@
from compas_slicer.print_organization.print_organization_utilities.extruder_toggle import check_assigned_extruder_toggle
from compas.datastructures import Mesh
-HERE = os.path.dirname(__file__)
-DATA = os.path.join(HERE, 'tests_data')
-stl_to_test = ['distorted_v_closed_low_res.obj'] # , 'distorted_a_closed_low_res.obj']
+DATA_PATH = Path(__file__).parent / 'tests_data'
+stl_to_test = ['distorted_v_closed_low_res.obj']
def create_setup(filename):
""" Setting up the stage for testing. """
- FILE = os.path.abspath(os.path.join(DATA, filename))
- compas_mesh = Mesh.from_obj(FILE)
- slicer = PlanarSlicer(compas_mesh, slicer_type="default", layer_height=20)
+ compas_mesh = Mesh.from_obj(DATA_PATH / filename)
+ slicer = PlanarSlicer(compas_mesh, layer_height=20)
slicer.slice_model()
generate_brim(slicer, layer_width=3.0, number_of_brim_offsets=3)
simplify_paths_rdp(slicer, threshold=1.3)
- # seams_smooth(slicer, smooth_distance=10)
slicer.printout_info()
print_organizer = PlanarPrintOrganizer(slicer)
print_organizer.create_printpoints()
@@ -103,7 +102,7 @@ def test_planar_add_safety_printpoints_for_horizontal_layers():
for filename in stl_to_test:
slicer, print_organizer = create_setup(filename)
- set_extruder_toggle(print_organizer, slicer) # has already been don
+ set_extruder_toggle(print_organizer, slicer)
pp_dict = print_organizer.printpoints_dict
@@ -129,26 +128,12 @@ def test_planar_add_safety_printpoints_for_horizontal_layers():
def test_planar_set_linear_velocity_constant_for_horizontal_layers():
""" Tests set_linear_velocity on planar slicer, with constant value. """
- #
- # # copy to avoid altering the classes, so that all test functions can start from same setup
- # print_organizer_copy = copy.deepcopy(print_organizer)
- # slicer_copy = copy.deepcopy(slicer)
- #
- # set_linear_velocity(print_organizer_copy, "constant", v=25.0)
pass
- # TODO check results
def test_planar_set_blend_radius_for_horizontal_layers():
""" Tests set_blend_radius on planar slicer. """
- #
- # # copy to avoid altering the classes, so that all test functions can start from same setup
- # print_organizer_copy = copy.deepcopy(print_organizer)
- # slicer_copy = copy.deepcopy(slicer)
- #
- # set_blend_radius(print_organizer_copy, d_fillet=10.0)
pass
- # TODO check results
if __name__ == '__main__':
diff --git a/tests/test_planar_slicing.py b/tests/test_planar_slicing.py
index 1e9c1f1a..6bb5bbca 100644
--- a/tests/test_planar_slicing.py
+++ b/tests/test_planar_slicing.py
@@ -1,13 +1,14 @@
-from compas_slicer.geometry import Layer
-from compas_slicer.geometry import Path
-import os
+from pathlib import Path
+
from compas.datastructures import Mesh
+
+from compas_slicer.geometry import Layer
+from compas_slicer.geometry import Path as SlicerPath
from compas_slicer.slicers import PlanarSlicer
-DATA = os.path.join(os.path.dirname(__file__), 'tests_data')
-FILE = os.path.abspath(os.path.join(DATA, 'cylinder.obj'))
+DATA_PATH = Path(__file__).parent / 'tests_data'
-compas_mesh = Mesh.from_obj(os.path.join(DATA, FILE))
+compas_mesh = Mesh.from_obj(DATA_PATH / 'cylinder.obj')
layer_height = 15.0
z = [compas_mesh.vertex_attribute(key, 'z') for key in compas_mesh.vertices()]
@@ -18,7 +19,7 @@
def test_planar_slicing_success():
""" Tests simple planar slicing. """
- slicer = PlanarSlicer(compas_mesh, slicer_type="default", layer_height=layer_height)
+ slicer = PlanarSlicer(compas_mesh, layer_height=layer_height)
slicer.slice_model()
assert isinstance(slicer.layers, list), "The layers are not a list"
@@ -27,11 +28,10 @@ def test_planar_slicing_success():
assert isinstance(slicer.layers[0], Layer), "The slicer does not contain layers of type 'compas_slicer.Layer'"
for i in range(len(slicer.layers)):
assert len(slicer.layers[i].paths) == 1, "There is a layer with empty Contours list at index %d" % i
- assert isinstance(slicer.layers[i].paths[0], Path), "Wrong class type in Layer.Contour list"
+ assert isinstance(slicer.layers[i].paths[0], SlicerPath), "Wrong class type in Layer.Contour list"
assert slicer.layers[i].paths[0].is_closed, "Path resulting from slicing of cylinder using 'planar_compas' is " \
"open. It should be closed "
-# test inclined cylinder. How many paths open, how many paths closed
if __name__ == '__main__':
pass