@@ -7,7 +7,7 @@ Track computation progress and job status.
Show progress bar during populate:

```python
-ProcessedData.populate(display_progress=True)
+SessionAnalysis.populate(display_progress=True)
```

## Check Remaining Work
@@ -16,7 +16,7 @@ Count entries left to compute:

```python
# What's left to compute
-remaining = ProcessedData.key_source - ProcessedData
+remaining = SessionAnalysis.key_source - SessionAnalysis
print(f"{len(remaining)} entries remaining")
```

@@ -25,7 +25,7 @@ print(f"{len(remaining)} entries remaining")
Get counts by status:

```python
-progress = ProcessedData.jobs.progress()
+progress = SessionAnalysis.jobs.progress()
# {'pending': 100, 'reserved': 5, 'error': 3, 'success': 892}

for status, count in progress.items():
@@ -38,19 +38,19 @@ Access jobs by their current status:

```python
# Pending jobs (waiting to run)
-ProcessedData.jobs.pending
+SessionAnalysis.jobs.pending

# Currently running
-ProcessedData.jobs.reserved
+SessionAnalysis.jobs.reserved

# Failed jobs
-ProcessedData.jobs.errors
+SessionAnalysis.jobs.errors

# Completed jobs (if keep_completed=True)
-ProcessedData.jobs.completed
+SessionAnalysis.jobs.completed

# Skipped jobs
-ProcessedData.jobs.ignored
+SessionAnalysis.jobs.ignored
```

## View Job Details
@@ -59,10 +59,10 @@ Inspect specific jobs:

```python
# All jobs for a key
-(ProcessedData.jobs & key).fetch1()
+(SessionAnalysis.jobs & key).fetch1()

# Recent errors
-ProcessedData.jobs.errors.to_dicts(
+SessionAnalysis.jobs.errors.to_dicts(
    order_by='completed_time DESC',
    limit=10
)
@@ -73,7 +73,7 @@ ProcessedData.jobs.errors.to_dicts(
See which workers are processing:

```python
-for job in ProcessedData.jobs.reserved.to_dicts():
+for job in SessionAnalysis.jobs.reserved.to_dicts():
    print(f"Key: {job}")
    print(f"Host: {job['host']}")
    print(f"PID: {job['pid']}")
@@ -86,7 +86,7 @@ Track how long jobs take:

```python
# Average duration of completed jobs
-completed = ProcessedData.jobs.completed.to_arrays('duration')
+completed = SessionAnalysis.jobs.completed.to_arrays('duration')
print(f"Average: {np.mean(completed):.1f}s")
print(f"Median: {np.median(completed):.1f}s")
```
@@ -112,10 +112,10 @@ This adds hidden attributes to computed tables:

```python
import time
-from my_pipeline import ProcessedData
+from my_pipeline import SessionAnalysis

while True:
-    remaining, total = ProcessedData.progress()
+    remaining, total = SessionAnalysis.progress()

    print(f"\rProgress: {total - remaining}/{total} ({(total - remaining) / total:.0%})", end='')

@@ -130,10 +130,10 @@ For distributed mode with job tracking:

```python
import time
-from my_pipeline import ProcessedData
+from my_pipeline import SessionAnalysis

while True:
-    status = ProcessedData.jobs.progress()
+    status = SessionAnalysis.jobs.progress()

    print(f"\rPending: {status.get('pending', 0)} | "
          f"Running: {status.get('reserved', 0)} | "
@@ -152,7 +152,7 @@ while True:
Check multiple tables:

```python
-tables = [RawData, ProcessedData, Analysis]
+tables = [Session, SessionAnalysis, TrialStats]

for table in tables:
    total = len(table.key_source)
0 commit comments