@@ -105,6 +105,8 @@ async def list_user_runs(
105105 repo_id : Optional [str ],
106106 username : Optional [str ],
107107 only_active : bool ,
108+ include_jobs : bool ,
109+ job_submissions_limit : Optional [int ],
108110 prev_submitted_at : Optional [datetime ],
109111 prev_run_id : Optional [uuid .UUID ],
110112 limit : int ,
@@ -148,7 +150,14 @@ async def list_user_runs(
148150 runs = []
149151 for r in run_models :
150152 try :
151- runs .append (run_model_to_run (r , return_in_api = True ))
153+ runs .append (
154+ run_model_to_run (
155+ r ,
156+ return_in_api = True ,
157+ include_jobs = include_jobs ,
158+ job_submissions_limit = job_submissions_limit ,
159+ )
160+ )
152161 except pydantic .ValidationError :
153162 pass
154163 if len (run_models ) > len (runs ):
@@ -652,46 +661,26 @@ async def delete_runs(
652661
653662def run_model_to_run (
654663 run_model : RunModel ,
655- include_job_submissions : bool = True ,
664+ include_jobs : bool = True ,
665+ job_submissions_limit : Optional [int ] = None ,
656666 return_in_api : bool = False ,
657667 include_sensitive : bool = False ,
658668) -> Run :
659669 jobs : List [Job ] = []
660- run_jobs = sorted (run_model .jobs , key = lambda j : (j .replica_num , j .job_num , j .submission_num ))
661- for replica_num , replica_submissions in itertools .groupby (
662- run_jobs , key = lambda j : j .replica_num
663- ):
664- for job_num , job_submissions in itertools .groupby (
665- replica_submissions , key = lambda j : j .job_num
666- ):
667- submissions = []
668- job_model = None
669- for job_model in job_submissions :
670- if include_job_submissions :
671- job_submission = job_model_to_job_submission (job_model )
672- if return_in_api :
673- # Set default non-None values for 0.18 backward-compatibility
674- # Remove in 0.19
675- if job_submission .job_provisioning_data is not None :
676- if job_submission .job_provisioning_data .hostname is None :
677- job_submission .job_provisioning_data .hostname = ""
678- if job_submission .job_provisioning_data .ssh_port is None :
679- job_submission .job_provisioning_data .ssh_port = 22
680- submissions .append (job_submission )
681- if job_model is not None :
682- # Use the spec from the latest submission. Submissions can have different specs
683- job_spec = JobSpec .__response__ .parse_raw (job_model .job_spec_data )
684- if not include_sensitive :
685- _remove_job_spec_sensitive_info (job_spec )
686- jobs .append (Job (job_spec = job_spec , job_submissions = submissions ))
670+ if include_jobs :
671+ jobs = _get_run_jobs_with_submissions (
672+ run_model = run_model ,
673+ job_submissions_limit = job_submissions_limit ,
674+ return_in_api = return_in_api ,
675+ include_sensitive = include_sensitive ,
676+ )
687677
688678 run_spec = RunSpec .__response__ .parse_raw (run_model .run_spec )
689679
690680 latest_job_submission = None
691- if include_job_submissions :
681+ if len ( jobs ) > 0 and len ( jobs [ 0 ]. job_submissions ) > 0 :
692682 # TODO(egor-s): does it make sense with replicas and multi-node?
693- if jobs :
694- latest_job_submission = jobs [0 ].job_submissions [- 1 ]
683+ latest_job_submission = jobs [0 ].job_submissions [- 1 ]
695684
696685 service_spec = None
697686 if run_model .service_spec is not None :
@@ -716,6 +705,47 @@ def run_model_to_run(
716705 return run
717706
718707
def _get_run_jobs_with_submissions(
    run_model: RunModel,
    job_submissions_limit: Optional[int],
    return_in_api: bool = False,
    include_sensitive: bool = False,
) -> List[Job]:
    """Assemble the run's jobs, each with its (possibly truncated) submission history.

    Job models are grouped by (replica_num, job_num); within each group submissions
    are ordered by submission_num, and the job_spec is taken from the latest one.

    Args:
        run_model: The run whose job models are converted.
        job_submissions_limit: If None, include all submissions. If 0, include no
            submissions (the latest model is still inspected for its job_spec).
            Otherwise, include only the last `job_submissions_limit` submissions.
        return_in_api: Apply 0.18 backward-compatibility defaults to provisioning data.
        include_sensitive: Keep sensitive fields in the job spec instead of stripping them.

    Returns:
        One Job per (replica, job) group, in sorted order.
    """
    jobs: List[Job] = []
    ordered_models = sorted(
        run_model.jobs, key=lambda j: (j.replica_num, j.job_num, j.submission_num)
    )
    by_replica = itertools.groupby(ordered_models, key=lambda j: j.replica_num)
    for _replica_num, replica_models in by_replica:
        by_job = itertools.groupby(replica_models, key=lambda j: j.job_num)
        for _job_num, grouped in by_job:
            job_models = list(grouped)
            if job_submissions_limit is not None:
                # limit == 0 still keeps the latest model so its job_spec can be returned
                keep = job_submissions_limit if job_submissions_limit != 0 else 1
                job_models = job_models[-keep:]
            submissions = []
            latest_model = None
            for model in job_models:
                latest_model = model
                if job_submissions_limit == 0:
                    continue
                submission = job_model_to_job_submission(model)
                if return_in_api:
                    # Set default non-None values for 0.18 backward-compatibility
                    # Remove in 0.19
                    jpd = submission.job_provisioning_data
                    if jpd is not None:
                        if jpd.hostname is None:
                            jpd.hostname = ""
                        if jpd.ssh_port is None:
                            jpd.ssh_port = 22
                submissions.append(submission)
            if latest_model is not None:
                # Use the spec from the latest submission. Submissions can have different specs
                job_spec = JobSpec.__response__.parse_raw(latest_model.job_spec_data)
                if not include_sensitive:
                    _remove_job_spec_sensitive_info(job_spec)
                jobs.append(Job(job_spec=job_spec, job_submissions=submissions))
    return jobs
747+
748+
719749async def _get_pool_offers (
720750 session : AsyncSession ,
721751 project : ProjectModel ,
0 commit comments