|
6 | 6 | """ |
7 | 7 |
|
8 | 8 | import datetime |
| 9 | +import glob |
9 | 10 | import math |
10 | 11 | import os |
| 12 | +import tempfile |
11 | 13 | import time |
12 | 14 | import shutil |
13 | 15 | import unittest |
|
17 | 19 |
|
18 | 20 | from arc.common import ARC_TESTING_PATH |
19 | 21 | from arc.imports import settings |
20 | | -from arc.job.adapter import DataPoint, JobEnum, JobTypeEnum, JobExecutionTypeEnum |
| 22 | +from arc.job.adapter import DataPoint, JobAdapter, JobEnum, JobTypeEnum, JobExecutionTypeEnum |
21 | 23 | from arc.job.adapters.gaussian import GaussianAdapter |
22 | 24 | from arc.level import Level |
23 | 25 | from arc.species import ARCSpecies |
@@ -194,28 +196,34 @@ def setUpClass(cls): |
194 | 196 | species=[ARCSpecies(label='spc1', xyz=['O 0 0 1'])], |
195 | 197 | testing=True, |
196 | 198 | ) |
| 199 | + # Copy the PBS time limit fixture into the directory structure the adapter expects. |
| 200 | + stl_dir = os.path.join(ARC_TESTING_PATH, 'test_JobAdapter_ServerTimeLimit') |
| 201 | + err_dest = os.path.join(stl_dir, 'calcs', 'Species', 'spc1', 'opt_101') |
| 202 | + os.makedirs(err_dest, exist_ok=True) |
| 203 | + shutil.copy(os.path.join(ARC_TESTING_PATH, 'server', 'pbs', 'timelimit', 'err.txt'), |
| 204 | + os.path.join(err_dest, 'err.txt')) |
197 | 205 | cls.job_5 = GaussianAdapter(execution_type='queue', |
198 | | - job_name='spc1', |
| 206 | + job_name='opt_101', |
199 | 207 | job_type='opt', |
200 | 208 | job_id='123456', |
201 | 209 | job_num=101, |
202 | | - job_server_name = 'server3', |
| 210 | + job_server_name='server3', |
203 | 211 | level=Level(method='cbs-qb3'), |
204 | 212 | project='test', |
205 | | - project_directory=os.path.join(ARC_TESTING_PATH, 'test_JobAdapter_ServerTimeLimit'), |
| 213 | + project_directory=stl_dir, |
206 | 214 | species=[ARCSpecies(label='spc1', xyz=['O 0 0 1'])], |
207 | 215 | server='server3', |
208 | 216 | testing=True, |
209 | 217 | ) |
210 | 218 | cls.job_6 = GaussianAdapter(execution_type='queue', |
211 | | - job_name='spc1', |
| 219 | + job_name='opt_101', |
212 | 220 | job_type='opt', |
213 | 221 | job_id='123456', |
214 | 222 | job_num=101, |
215 | | - job_server_name = 'server1', |
| 223 | + job_server_name='server1', |
216 | 224 | level=Level(method='cbs-qb3'), |
217 | 225 | project='test', |
218 | | - project_directory=os.path.join(ARC_TESTING_PATH, 'test_JobAdapter_ServerTimeLimit'), |
| 226 | + project_directory=stl_dir, |
219 | 227 | species=[ARCSpecies(label='spc1', xyz=['O 0 0 1'])], |
220 | 228 | testing=True, |
221 | 229 | queue='short_queue', |
@@ -471,5 +479,62 @@ def tearDownClass(cls): |
471 | 479 | shutil.rmtree(os.path.join(ARC_TESTING_PATH, 'test_JobAdapter_ServerTimeLimit'), ignore_errors=True) |
472 | 480 |
|
473 | 481 |
|
class TestRotateCSV(unittest.TestCase):
    """
    Contains unit tests for the CSV rotation logic in ``JobAdapter._rotate_csv_if_needed()``.

    The rotation contract exercised here: a ``jobs.csv`` file at or above ``max_lines``
    lines is renamed to a date-stamped ``jobs.old.*.csv`` archive; files below the
    threshold (or missing files) are left untouched.
    """
    # NOTE(review): ``patch`` is expected to come from ``unittest.mock`` via the
    # module-level imports (not visible in this chunk) — confirm it is imported.

    def _make_csv(self, path, num_lines):
        """
        Helper to create a CSV file with a header and ``num_lines - 1`` data rows.

        Args:
            path (str): Destination path for the CSV file.
            num_lines (int): Total number of lines the file should contain,
                             including the header row.
        """
        with open(path, 'w') as f:
            f.write('col1,col2\n')
            for i in range(num_lines - 1):
                f.write(f'{i},data\n')

    def test_no_rotation_below_threshold(self):
        """Test that no rotation occurs when the file is below the threshold."""
        with tempfile.TemporaryDirectory() as tmp:
            csv_path = os.path.join(tmp, 'jobs.csv')
            self._make_csv(csv_path, 10)
            JobAdapter._rotate_csv_if_needed(csv_path, max_lines=50)
            # The original file must survive, and no archive may appear.
            self.assertTrue(os.path.isfile(csv_path))
            self.assertEqual(glob.glob(os.path.join(tmp, 'jobs.old.*.csv')), [])

    def test_rotation_at_threshold(self):
        """Test that the file is rotated when it reaches the threshold."""
        with tempfile.TemporaryDirectory() as tmp:
            csv_path = os.path.join(tmp, 'jobs.csv')
            self._make_csv(csv_path, 50)
            JobAdapter._rotate_csv_if_needed(csv_path, max_lines=50)
            # The source file is renamed away, leaving exactly one archive.
            self.assertFalse(os.path.isfile(csv_path))
            archives = glob.glob(os.path.join(tmp, 'jobs.old.*.csv'))
            self.assertEqual(len(archives), 1)

    def test_no_error_for_missing_file(self):
        """Test that rotation is a no-op when the file does not exist."""
        # Use a path inside a fresh temporary directory so the test is portable
        # (a hard-coded '/tmp/...' path breaks on Windows and can collide with
        # leftovers from other runs) and guaranteed not to exist.
        with tempfile.TemporaryDirectory() as tmp:
            missing = os.path.join(tmp, 'nonexistent_arc_test.csv')
            JobAdapter._rotate_csv_if_needed(missing)
            # The no-op must not create the file as a side effect.
            self.assertFalse(os.path.exists(missing))

    def test_multiple_rotations(self):
        """Test that multiple rotations produce distinct archive files."""
        with tempfile.TemporaryDirectory() as tmp:
            csv_path = os.path.join(tmp, 'jobs.csv')
            # First rotation on "day 1": mock the module's datetime so the
            # archive name is date-stamped deterministically.
            self._make_csv(csv_path, 50)
            with patch('arc.job.adapter.datetime') as mock_dt:
                mock_dt.datetime.now.return_value = datetime.datetime(2026, 1, 15)
                mock_dt.timedelta = datetime.timedelta
                JobAdapter._rotate_csv_if_needed(csv_path, max_lines=50)
            self.assertFalse(os.path.isfile(csv_path))
            # Second rotation on "day 2": a different mocked date must yield a
            # second, distinct archive rather than overwriting the first.
            self._make_csv(csv_path, 50)
            with patch('arc.job.adapter.datetime') as mock_dt:
                mock_dt.datetime.now.return_value = datetime.datetime(2026, 2, 20)
                mock_dt.timedelta = datetime.timedelta
                JobAdapter._rotate_csv_if_needed(csv_path, max_lines=50)
            self.assertFalse(os.path.isfile(csv_path))
            archives = glob.glob(os.path.join(tmp, 'jobs.old.*.csv'))
            self.assertEqual(len(archives), 2)
| 538 | + |
if __name__ == '__main__':
    # Run the suite with a verbose text runner when executed as a script.
    runner = unittest.TextTestRunner(verbosity=2)
    unittest.main(testRunner=runner)
0 commit comments