Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
49 commits
Select commit Hold shift + click to select a range
9810cff
Move lcoa changes to latest copy from master
sangeetabhatia03 Feb 5, 2026
d057065
Scenario files from old branch
sangeetabhatia03 Feb 5, 2026
b53448e
Resource file
sangeetabhatia03 Feb 5, 2026
e5e2298
Resource file
sangeetabhatia03 Feb 5, 2026
9634323
Merge branch 'master' into sb/lcoa-inputs-from-tlo
tbhallett Feb 6, 2026
5ff3732
Single scenario file and small edits to healthsystem
sangeetabhatia03 Feb 9, 2026
ab186f0
Merge branch 'sb/lcoa-inputs-from-tlo' of https://github.com/UCL/TLOm…
sangeetabhatia03 Feb 9, 2026
42c67aa
Additional imports
sangeetabhatia03 Feb 9, 2026
730542c
Prep for submission
sangeetabhatia03 Feb 10, 2026
10b70a3
Modify scenario setup
sangeetabhatia03 Feb 10, 2026
4474b1a
Uncomment actual scenarios for azure run
sangeetabhatia03 Feb 16, 2026
aa092a4
Initial figures
sangeetabhatia03 Feb 16, 2026
6ba0046
Minor edits
sangeetabhatia03 Feb 17, 2026
f8e060f
Series empty; debugging
sangeetabhatia03 Feb 23, 2026
373081a
Figures for ContraceptionRoutine only; deaths empty; debugging
sangeetabhatia03 Feb 23, 2026
3dec55b
Edits to read partially completed jobs
sangeetabhatia03 Feb 24, 2026
592fc9e
Analysis figures; WIP
sangeetabhatia03 Feb 24, 2026
06b78da
post PR 1819 merge
sangeetabhatia03 Feb 24, 2026
ab639c5
Plot population growth
sangeetabhatia03 Mar 4, 2026
3118beb
Working through figs
sangeetabhatia03 Mar 9, 2026
46fbbc3
Add multiplier as an argument to extract_results
sangeetabhatia03 Mar 9, 2026
90713c7
Use new interface for cost estimation
sangeetabhatia03 Mar 9, 2026
e396e82
reinstate autodiscover; working analysis script
sangeetabhatia03 Mar 9, 2026
63bf7e3
WIP; compute ICERs
sangeetabhatia03 Mar 10, 2026
65f63c6
Additional figures for sanity check
sangeetabhatia03 Mar 11, 2026
7d984bd
More figs
sangeetabhatia03 Mar 15, 2026
5bf0196
Additional figures
sangeetabhatia03 Mar 16, 2026
d68491a
service availability switch params
sangeetabhatia03 Mar 19, 2026
b84f06b
Debugging with a single service
sangeetabhatia03 Mar 20, 2026
f5102b0
Scenario file with a single treatment id
sangeetabhatia03 Mar 20, 2026
10a1554
Additional tests
sangeetabhatia03 Mar 26, 2026
ee1a39f
Merge remote-tracking branch 'origin/master' into sb/lcoa-inputs-from…
sangeetabhatia03 Mar 26, 2026
bbd3358
Record never ran and do not reschedule if treatment id unavailable
sangeetabhatia03 Mar 26, 2026
c3d3e76
Remove test with incorrect logic
sangeetabhatia03 Mar 26, 2026
4240252
Formatting
sangeetabhatia03 Mar 26, 2026
055303a
Test run with smaller pop size
sangeetabhatia03 Mar 26, 2026
4962fc4
Test service availability switch with a recurring HSI event
sangeetabhatia03 Mar 27, 2026
83870ab
Rechecking results processing
sangeetabhatia03 Mar 30, 2026
0be5afc
More figure edits
sangeetabhatia03 Mar 31, 2026
38369b7
Scenario without full consumables availability and no improved health…
sangeetabhatia03 Apr 1, 2026
abfba20
Combine pickle files from the two runs
sangeetabhatia03 Apr 2, 2026
dbf40a7
Costing code + working with collated suspend/resume outputs
sangeetabhatia03 Apr 16, 2026
9076f2c
Compute ICERs run by run
sangeetabhatia03 Apr 23, 2026
c4152c9
ICER figures
sangeetabhatia03 Apr 27, 2026
d452444
Extract number of HCWs and capacity used
sangeetabhatia03 Apr 28, 2026
08a8fbb
Extract capacity used per cadre
sangeetabhatia03 Apr 30, 2026
85823fa
LCOA R script + python hook
sangeetabhatia03 May 1, 2026
3ecadeb
Retrieve annual capacity by cadre
sangeetabhatia03 May 5, 2026
c6fd68e
Run LCOA through a python script
sangeetabhatia03 May 11, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ policy_name,Naive
year_mode_switch,2100
scale_to_effective_capabilities,FALSE
Service_Availability,"[""*""]"
year_service_availability_switch,2100
service_availability_postSwitch,"[""*""]"
use_funded_or_actual_staffing,funded_plus
mode_appt_constraints,1
mode_appt_constraints_postSwitch,1
Expand Down
277 changes: 206 additions & 71 deletions src/scripts/costing/cost_estimation.py

Large diffs are not rendered by default.

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -0,0 +1,132 @@
"""CLI helper to combine suspended and resumed pickle outputs."""

# python src/scripts/lcoa_inputs_from_tlo_analyses/combine_suspended_and_resumed_pickles.py --suspended_results_folder outputs/s.bhatia@imperial.ac.uk/effect_of_each_treatment_id-2026-02-12T120859Z --resumed_results_folder outputs/s.bhatia@imperial.ac.uk/effect_of_each_treatment_id-2026-02-16T154500Z_folder --output_folder outputs/s.bhatia@imperial.ac.uk/effect_of_each_treatment_id-combined


import argparse
import pickle
import warnings
from pathlib import Path
from typing import Any

import pandas as pd

def _validate_input_output_paths(
suspended_results_folder: Path,
resumed_results_folder: Path,
output_folder: Path,
) -> None:
"""Validate input/output path constraints for pickle combination helper."""
suspended_resolved = suspended_results_folder.resolve()
resumed_resolved = resumed_results_folder.resolve()
output_resolved = output_folder.resolve()

if output_resolved == suspended_resolved or output_resolved == resumed_resolved:
raise ValueError(
"output_folder must be different from both suspended_results_folder and resumed_results_folder."
)

def _combine_pickled_objects(suspended_obj: Any, resumed_obj: Any, context: str = "root") -> Any:
"""Combine suspended and resumed objects with suspended object first."""
if suspended_obj is None and resumed_obj is None:
return None
if isinstance(suspended_obj, dict) and isinstance(resumed_obj, dict):
combined = {}
for key, suspended_value in suspended_obj.items():
if key in resumed_obj:
combined[key] = _combine_pickled_objects(
suspended_value, resumed_obj[key], context=f"{context}.{key}"
)
else:
combined[key] = suspended_value
for key, resumed_value in resumed_obj.items():
if key not in combined:
combined[key] = resumed_value
return combined
if isinstance(suspended_obj, pd.DataFrame) and isinstance(resumed_obj, pd.DataFrame):
return pd.concat([suspended_obj, resumed_obj], axis=0)
if isinstance(suspended_obj, pd.Series) and isinstance(resumed_obj, pd.Series):
return pd.concat([suspended_obj, resumed_obj], axis=0)
if isinstance(suspended_obj, list) and isinstance(resumed_obj, list):
return suspended_obj + resumed_obj
if isinstance(suspended_obj, tuple) and isinstance(resumed_obj, tuple):
return suspended_obj + resumed_obj
try:
return suspended_obj + resumed_obj
except TypeError as exc:
raise TypeError(
f"Unsupported combine operation at {context}: "
f"{type(suspended_obj).__name__} and {type(resumed_obj).__name__}."
) from exc


def combine_suspended_and_resumed_pickles(
    suspended_results_folder: Path,
    resumed_results_folder: Path,
    output_folder: Path,
) -> None:
    """Combine corresponding suspended and resumed pickles into output folder.

    Walks the resumed results tree (``<draw>/<run>/<name>.pickle``), pairs
    each pickle with its suspended counterpart (looked up under draw ``"0"``
    of the suspended folder — presumably the suspended run has a single draw;
    confirm against the run setup), and writes the combined object to the same
    relative location under ``output_folder``.  Resumed pickles without a
    counterpart are copied through unchanged, with a warning.
    """
    _validate_input_output_paths(suspended_results_folder, resumed_results_folder, output_folder)

    def _sorted_subdirs(parent: Path) -> list:
        # Deterministic traversal order: directories only, by name.
        return sorted((d for d in parent.iterdir() if d.is_dir()), key=lambda d: d.name)

    for draw_dir in _sorted_subdirs(resumed_results_folder):
        print(f"Processing draw directory: {draw_dir}...")
        for run_dir in _sorted_subdirs(draw_dir):
            print(f"  Processing run directory: {run_dir}...")
            for resumed_pickle_path in sorted(run_dir.glob("*.pickle"), key=lambda p: p.name):
                print(f"    Processing pickle file: {resumed_pickle_path}...")
                # NOTE: pickle.loads on these files assumes they are trusted
                # outputs of our own runs — never point this at external data.
                resumed_obj = pickle.loads(resumed_pickle_path.read_bytes())

                suspended_pickle_path = (
                    suspended_results_folder / "0" / run_dir.name / resumed_pickle_path.name
                )
                if not suspended_pickle_path.exists():
                    warnings.warn(
                        "No suspended counterpart found for "
                        f"{resumed_pickle_path} (expected at {suspended_pickle_path}); "
                        "copying resumed object to output unchanged.",
                        stacklevel=2,
                    )
                    combined_obj = resumed_obj
                else:
                    suspended_obj = pickle.loads(suspended_pickle_path.read_bytes())
                    try:
                        combined_obj = _combine_pickled_objects(suspended_obj, resumed_obj)
                    except TypeError as exc:
                        raise TypeError(
                            "Could not combine pickled objects for "
                            f"{resumed_pickle_path} with types "
                            f"{type(suspended_obj).__name__} and {type(resumed_obj).__name__}."
                        ) from exc

                # Mirror the draw/run layout of the resumed tree in the output.
                output_pickle_path = output_folder / draw_dir.name / run_dir.name / resumed_pickle_path.name
                output_pickle_path.parent.mkdir(parents=True, exist_ok=True)
                output_pickle_path.write_bytes(pickle.dumps(combined_obj))


def main() -> None:
    """Parse command-line arguments and combine the two pickle trees.

    The three folders are accepted as required named options
    (``--suspended_results_folder`` etc.) because the documented example
    invocation at the top of this module passes them as ``--`` flags; the
    previous positional declarations made that documented command fail with
    "unrecognized arguments".
    """
    parser = argparse.ArgumentParser(
        description=(
            "Combine suspended and resumed pickle outputs into a new output folder, "
            "with suspended content prepended where counterparts exist."
        )
    )
    parser.add_argument("--suspended_results_folder", type=Path, required=True)
    parser.add_argument("--resumed_results_folder", type=Path, required=True)
    parser.add_argument("--output_folder", type=Path, required=True)
    args = parser.parse_args()

    combine_suspended_and_resumed_pickles(
        suspended_results_folder=args.suspended_results_folder,
        resumed_results_folder=args.resumed_results_folder,
        output_folder=args.output_folder,
    )


# Entry point: run the CLI when executed as a script (not on import).
if __name__ == "__main__":
    main()
Loading
Loading