
Commit ddd132b: Remove post-process
1 parent 6a6ca0f

21 files changed: 25 additions & 5470 deletions

EyeFlow.spec

Lines changed: 0 additions & 3 deletions
@@ -5,17 +5,14 @@ from PyInstaller.utils.hooks import collect_submodules
 datas = []
 hiddenimports = []
 datas += collect_data_files('pipelines')
-datas += collect_data_files('postprocess')
 datas += collect_data_files('sv_ttk')
 datas += collect_data_files('tkinterdnd2')
 datas += [('EyeFlow_logo.png', '.')]
 datas += [('EyeFlow.ico', '.')]
 datas += [('default_settings.json', '.')]
 datas += [('pyproject.toml', '.')]
 hiddenimports += collect_submodules('pipelines')
-hiddenimports += collect_submodules('postprocess')
 hiddenimports += collect_submodules('tkinterdnd2')
-hiddenimports += ['matplotlib.backends.backend_ps']
 
 
 a = Analysis(
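For orientation, a minimal sketch of how the collected `datas` and `hiddenimports` lists typically feed the `Analysis` block this hunk truncates. The entry-script path and the omission of the remaining arguments are assumptions for illustration, not taken from this spec:

```python
# Hypothetical continuation of the spec (PyInstaller executes .spec files as Python).
a = Analysis(
    ['src/eye_flow.py'],          # assumed entry script, not shown in this diff
    datas=datas,                  # data files bundled alongside the executable
    hiddenimports=hiddenimports,  # modules PyInstaller's static analysis would miss
)
```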

README.md

Lines changed: 2 additions & 59 deletions
@@ -35,16 +35,13 @@ pip install -e .
 
 # Installs pipeline-specific dependencies (optional)
 pip install -e ".[pipelines]"
-
-# Installs postprocess-specific dependencies such as the graphics dashboard (optional)
-pip install -e ".[postprocess]"
 ```
 
 ### 2. Development Setup (Contributor)
 
 ```sh
 # Install all dependencies including dev tools (ruff, pre-commit, pyinstaller)
-pip install -e ".[dev,pipelines,postprocess]"
+pip install -e ".[dev,pipelines]"
 
 # Initialize pre-commit hooks (optional)
 pre-commit install
@@ -76,10 +73,7 @@ Launch the main application to process files interactively:
 
 The GUI handles batch processing for folders, single .h5/.hdf5 files, or .zip archives and lets you run multiple pipelines at once. Batch outputs preserve the input subfolder layout under the chosen output directory (one combined `.h5` per input file).
 
-You can also select batch-level postprocess steps. These run after the selected pipelines finish and before optional zipping, so any generated dashboards, PNGs, or summaries are included in the final output folder or archive.
-
 Use the Pipeline Library tab to select which pipelines run. Selection preferences are saved per user between app launches, including installed builds.
-Use the Postprocess Library tab the same way for postprocess steps.
 
 ```sh
 # Via the entry point
@@ -91,7 +85,7 @@ python src/eye_flow.py
 
 When you run `eyeflow` from inside the repository checkout, the launcher prefers the local `src/` tree so newly added or edited pipelines are picked up without needing a full reinstall.
 
-Installed builds expose editable `pipelines/` and `postprocess/` folders next to `EyeFlow.exe`; use the Library tabs' Open folder and Reload buttons to edit and refresh them.
+Installed builds expose an editable `pipelines/` folder next to `EyeFlow.exe`; use the Pipeline Library tab's Open folder and Reload buttons to edit and refresh it.
 
 ### CLI
 
@@ -145,54 +139,3 @@ class MyAnalysis(ProcessPipeline):
             attrs=attrs
         )
 ```
-
-## Postprocess System
-
-Postprocess steps are discovered from `src/postprocess/` in the same spirit as pipelines, but they run once per batch over the generated pipeline output folder.
-
-Use `@registerPostprocess(...)` to declare:
-
-- optional Python package dependencies with `required_deps`
-- required pipeline outputs with `required_pipelines`
-
-### Simple Postprocess Structure
-
-```python
-from postprocess.core.base import (
-    BatchPostprocess,
-    PostprocessContext,
-    PostprocessResult,
-    registerPostprocess,
-)
-
-
-@registerPostprocess(
-    name="My Batch Summary",
-    description="Aggregate metrics across the generated batch outputs.",
-    required_pipelines=["Basic Stats"],
-)
-class MyBatchSummary(BatchPostprocess):
-    def run(self, context: PostprocessContext) -> PostprocessResult:
-        report_path = context.output_dir / "my_batch_summary.json"
-        report_path.write_text("{}", encoding="utf-8")
-
-        return PostprocessResult(
-            summary="Generated my_batch_summary.json.",
-            generated_paths=[str(report_path)],
-            metadata={"file_count": len(context.processed_files)},
-        )
-```
-
-Inside a postprocess, you can:
-
-- read `context.output_dir`
-- read `context.processed_files`
-- read `context.selected_pipelines`
-- read `context.input_path`
-- read `context.zip_outputs`
-- write extra artifacts into `context.output_dir` before optional zipping
-- return a short `summary`, explicit `generated_paths`, and structured `metadata`
-
-The included `Graphics Dashboard` postprocess shows the intended pattern: it consumes the `arterial_waveform_shape_metrics` output and generates a cohort dashboard plus PNG exports after the batch finishes.
-`Pipeline Metrics Manifest` is a lighter built-in example that writes a JSON inventory of the generated pipeline metric datasets for the batch.
-`Postprocess Tutorial` is the minimal reference example: it writes a single JSON file showing every `PostprocessContext` field and the `PostprocessResult` output format.
default_settings.json

Lines changed: 0 additions & 7 deletions
@@ -7,13 +7,6 @@
     "waveform_harmonic_organization_SVD": false,
     "waveform_shape_metrics": true
   },
-  "postprocess_visibility": {
-    "Pipeline Metrics Manifest": false,
-    "Postprocess Tutorial": false,
-    "QC_Windkessel_RC": false,
-    "Variability and heterogeneity tables": false,
-    "groups comparison dashboard": false
-  },
   "trim_h5source": true,
   "ui_mode": "minimal"
 }
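Loaded into Python, the trimmed defaults reduce to pipeline visibility plus two flags. A sketch of the resulting structure; the enclosing `pipeline_visibility` key is inferred from the helpers in `src/app_settings.py`, and entries above the diff context are abbreviated:

```python
# Sketch of default_settings.json after this change, expressed as a Python dict.
default_settings = {
    "pipeline_visibility": {
        # ...earlier pipeline entries not shown by this hunk...
        "waveform_harmonic_organization_SVD": False,
        "waveform_shape_metrics": True,
    },
    "trim_h5source": True,
    "ui_mode": "minimal",
}
```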

hooks/hook-postprocess.py

Lines changed: 0 additions & 4 deletions
This file was deleted.
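The deleted hook's contents are not shown (0 additions, 4 deletions). A four-line PyInstaller hook for this package would plausibly have mirrored the `collect_*` calls removed from EyeFlow.spec above; a hypothetical reconstruction, not the deleted source:

```python
# hooks/hook-postprocess.py -- hypothetical reconstruction of the deleted 4-line hook.
from PyInstaller.utils.hooks import collect_data_files, collect_submodules

datas = collect_data_files('postprocess')
hiddenimports = collect_submodules('postprocess')
```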

pyproject.toml

Lines changed: 0 additions & 1 deletion
@@ -17,7 +17,6 @@ dependencies = ["numpy>=1.24", "h5py>=3.9", "sv-ttk>=2.6", "tkinterdnd2"]
 [project.optional-dependencies]
 # For specific pipelines
 pipelines = ["torch>=2.2", "pandas>=2.1"]
-postprocess = ["matplotlib>=3.8", "pandas>=2.1", "plotly>=5.18"]
 
 # For developers
 dev = ["ruff", "pre-commit", "pyinstaller"]

src/app_settings.py

Lines changed: 0 additions & 12 deletions
@@ -137,12 +137,6 @@ def normalize_pipeline_visibility(
     return normalize_named_visibility(pipeline_names, stored_visibility)
 
 
-def normalize_postprocess_visibility(
-    postprocess_names: Iterable[str], stored_visibility: Mapping[str, bool] | None
-) -> tuple[dict[str, bool], bool]:
-    return normalize_named_visibility(postprocess_names, stored_visibility)
-
-
 class AppSettingsStore:
     def __init__(
         self,
@@ -207,12 +201,6 @@ def load_pipeline_visibility(self) -> dict[str, bool]:
     def save_pipeline_visibility(self, visibility: Mapping[str, bool]) -> None:
         self.save_named_visibility("pipeline_visibility", visibility)
 
-    def load_postprocess_visibility(self) -> dict[str, bool]:
-        return self.load_named_visibility("postprocess_visibility")
-
-    def save_postprocess_visibility(self, visibility: Mapping[str, bool]) -> None:
-        self.save_named_visibility("postprocess_visibility", visibility)
-
     def load_ui_mode(self) -> str:
         mode = self.load().get("ui_mode")
         return mode if mode in {"minimal", "advanced"} else "minimal"
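With the postprocess helpers gone, callers are left with the pipeline-visibility API. A usage sketch of the surviving module-level function; the reconciliation behavior described in the comment is an assumption based on the `tuple[dict[str, bool], bool]` return annotation, and the import assumes `src/` is on `sys.path`:

```python
# Sketch: reconciling stored visibility with the currently discovered pipelines.
from app_settings import normalize_pipeline_visibility

stored = {"waveform_shape_metrics": True}  # as read from the settings file
names = ["waveform_shape_metrics", "waveform_harmonic_organization_SVD"]

# Assumed behavior: unknown names are dropped, newly discovered names get a
# default, and `changed` reports whether the stored mapping needed updating.
visibility, changed = normalize_pipeline_visibility(names, stored)
```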

src/cli.py

Lines changed: 6 additions & 113 deletions
@@ -2,12 +2,11 @@
 Command-line interface to run EyeFlow pipelines over a collection of HDF5 files.
 
 Usage example:
-    python cli.py --data data/ --pipelines pipelines.txt --postprocess postprocess.txt --output ./results --zip --zip-name my_run.zip
+    python cli.py --data data/ --pipelines pipelines.txt --output ./results --zip --zip-name my_run.zip
 
 Inputs:
   --data / -d       Path to a directory (recursively scanned), a single .h5/.hdf5 file, or a .zip archive of .h5 files.
   --pipelines / -p  Text file listing pipeline names (one per line, '#' and blank lines ignored).
-  --postprocess     Optional text file listing postprocess names (one per line, '#' and blank lines ignored).
   --output / -o     Base directory where results will be written (input subfolder layout is preserved).
   --zip / -z        When set, compress the outputs into a .zip archive after completion.
   --zip-name        Optional filename for the archive (default: outputs.zip).
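For reference, a hedged example of the `--pipelines` list file described above; the pipeline names are illustrative, borrowed from elsewhere in this commit:

```
# pipelines.txt -- one pipeline name per line; '#' and blank lines are ignored
Basic Stats
waveform_shape_metrics
```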
@@ -33,22 +32,11 @@
 )
 from pipelines.core.errors import format_pipeline_exception
 from pipelines.core.utils import write_combined_results_h5
-from postprocess import (
-    PostprocessContext,
-    PostprocessDescriptor,
-    load_postprocess_catalog,
-)
 
 
 def _build_pipeline_registry() -> dict[str, PipelineDescriptor]:
     available, _ = load_pipeline_catalog()
-    # pipelines = load_all_pipelines()
-    return {p.name: p for p in available}
-
-
-def _build_postprocess_registry() -> dict[str, PostprocessDescriptor]:
-    available, _ = load_postprocess_catalog()
-    return {p.name: p for p in available}
+    return {pipeline.name: pipeline for pipeline in available}
 
 
 def _load_pipeline_list(
@@ -78,48 +66,6 @@ def _load_pipeline_list(
     return selected
 
 
-def _load_postprocess_list(
-    path: Path, registry: dict[str, PostprocessDescriptor]
-) -> list[PostprocessDescriptor]:
-    raw_lines = path.read_text(encoding="utf-8").splitlines()
-    selected: list[PostprocessDescriptor] = []
-    missing: list[str] = []
-    for line in raw_lines:
-        name = line.strip()
-        if not name or name.startswith("#"):
-            continue
-        postprocess = registry.get(name)
-        if postprocess is None:
-            missing.append(name)
-        else:
-            selected.append(postprocess)
-    if missing:
-        available = ", ".join(registry.keys())
-        raise ValueError(
-            f"Unknown postprocess step(s): {', '.join(missing)}. Available: {available}"
-        )
-    return selected
-
-
-def _validate_postprocess_selection(
-    postprocesses: Sequence[PostprocessDescriptor],
-    selected_pipeline_names: Sequence[str],
-) -> None:
-    selected_set = set(selected_pipeline_names)
-    errors = []
-    for postprocess in postprocesses:
-        missing_required = [
-            name for name in postprocess.required_pipelines if name not in selected_set
-        ]
-        if missing_required:
-            errors.append(
-                f"{postprocess.name} requires pipeline(s): "
-                f"{', '.join(missing_required)}"
-            )
-    if errors:
-        raise ValueError("\n".join(errors))
-
-
 def _find_h5_inputs(path: Path) -> list[Path]:
     if path.is_file():
         if path.suffix.lower() in {".h5", ".hdf5"}:
@@ -131,13 +77,6 @@ def _find_h5_inputs(path: Path) -> list[Path]:
     raise FileNotFoundError(f"Input path does not exist: {path}")
 
 
-def _safe_pipeline_suffix(name: str) -> str:
-    cleaned = "".join(ch if ch.isalnum() else "_" for ch in name.lower())
-    while "__" in cleaned:
-        cleaned = cleaned.replace("__", "_")
-    return cleaned.strip("_") or "pipeline"
-
-
 def _prepare_data_root(
     data_path: Path,
 ) -> tuple[Path, tempfile.TemporaryDirectory | None]:
@@ -230,23 +169,12 @@ def _zip_output_dir(
 def run_cli(
     data_path: Path,
     pipelines_file: Path,
-    postprocess_file: Path | None,
     output_dir: Path,
     zip_outputs: bool = False,
     zip_name: str | None = None,
 ) -> int:
     registry = _build_pipeline_registry()
     pipelines = _load_pipeline_list(pipelines_file, registry)
-    postprocess_registry = _build_postprocess_registry()
-    postprocesses = (
-        _load_postprocess_list(postprocess_file, postprocess_registry)
-        if postprocess_file is not None
-        else []
-    )
-    _validate_postprocess_selection(
-        postprocesses,
-        selected_pipeline_names=[pipeline.name for pipeline in pipelines],
-    )
     data_root, tempdir = _prepare_data_root(data_path)
     work_tempdir_path: Path | None = None
     clean_work_output = False
@@ -279,37 +207,6 @@
             failures.append(f"{h5_path}: {exc}")
             print(f"[FAIL] {h5_path.name}: {exc}", file=sys.stderr)
 
-    if postprocesses and processed_outputs:
-        context = PostprocessContext(
-            output_dir=work_root,
-            processed_files=tuple(processed_outputs),
-            selected_pipelines=tuple(pipeline.name for pipeline in pipelines),
-            input_path=data_path,
-            zip_outputs=zip_outputs,
-        )
-        for descriptor in postprocesses:
-            print(f"[POST] Running {descriptor.name}...")
-            try:
-                result = descriptor.instantiate().run(context)
-            except Exception as exc:  # noqa: BLE001
-                msg = (
-                    f"Postprocess '{descriptor.name}' failed: "
-                    f"{type(exc).__name__}: {exc}"
-                )
-                failures.append(msg)
-                print(f"[POST FAIL] {msg}", file=sys.stderr)
-                continue
-            if result.summary:
-                print(f"[POST OK] {descriptor.name}: {result.summary}")
-            else:
-                print(f"[POST OK] {descriptor.name}")
-    elif postprocesses:
-        print(
-            "[POST SKIP] No successful pipeline outputs were generated, "
-            "so postprocess steps were skipped.",
-            file=sys.stderr,
-        )
-
     if zip_outputs:
         try:
             final_name = (zip_name or "outputs.zip").strip() or "outputs.zip"
@@ -340,7 +237,10 @@ def _zip_progress(done: int, total: int, _rel_path: Path) -> None:
         )
         summary_msg = f"Outputs stored under: {work_root}"
     else:
-        summary_msg = f"Outputs stored under: {work_root}"
+        if len(processed_outputs) == 1:
+            summary_msg = f"Output file: {processed_outputs[0]}"
+        else:
+            summary_msg = f"Outputs stored under: {work_root}"
 
     print(f"Completed. {summary_msg}")
 
@@ -375,12 +275,6 @@ def main(argv: Sequence[str] | None = None) -> int:
         type=Path,
         help="Text file with pipeline names to run (one per line, '#' and blank lines ignored).",
     )
-    parser.add_argument(
-        "--postprocess",
-        type=Path,
-        default=None,
-        help="Optional text file with postprocess names to run after pipelines.",
-    )
     parser.add_argument(
         "-o",
         "--output",
@@ -406,7 +300,6 @@ def main(argv: Sequence[str] | None = None) -> int:
     return run_cli(
         args.data,
         args.pipelines,
-        args.postprocess,
         args.output,
         zip_outputs=args.zip,
         zip_name=args.zip_name,
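The trimmed `run_cli` signature is fully visible in this diff. A minimal programmatic usage sketch; the `from cli import run_cli` path assumes `src/` is importable, and the file paths are hypothetical:

```python
# Sketch: driving the trimmed CLI entry point directly.
from pathlib import Path

from cli import run_cli

exit_code = run_cli(
    Path("data/"),          # directory, single .h5/.hdf5 file, or .zip archive
    Path("pipelines.txt"),  # pipeline names, one per line
    Path("results/"),       # output root; input subfolder layout is preserved
    zip_outputs=True,
    zip_name="my_run.zip",
)
```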
