|
1 | 1 | """Pytest configuration and shared fixtures for cachier tests.""" |
2 | 2 |
|
| 3 | +import logging |
3 | 4 | import os |
4 | 5 | from urllib.parse import parse_qs, unquote, urlencode, urlparse, urlunparse |
5 | 6 |
|
6 | 7 | import pytest |
7 | 8 |
|
| 9 | +logger = logging.getLogger(__name__) |
8 | 10 |
|
@pytest.fixture(autouse=True)
def inject_worker_schema_for_sql_tests(monkeypatch, request):
    """Automatically inject worker-specific schema into SQL connection string.

    This fixture enables parallel SQL test execution by giving each
    pytest-xdist worker its own PostgreSQL schema, preventing table
    creation conflicts.

    Parameters
    ----------
    monkeypatch : pytest.MonkeyPatch
        Used so the env var and module constant overrides are undone
        automatically after each test.
    request : pytest.FixtureRequest
        Inspected for the ``sql`` marker; non-SQL tests are untouched.

    """
    # Only apply to tests marked with the `sql` keyword
    if "sql" not in request.node.keywords:
        yield
        return

    worker_id = os.environ.get("PYTEST_XDIST_WORKER", "master")

    if worker_id == "master":
        # Not running in parallel, no schema isolation needed
        yield
        return

    # Get the original SQL connection string
    original_url = os.environ.get(
        "SQLALCHEMY_DATABASE_URL", "sqlite:///:memory:"
    )

    # Schema isolation only makes sense for PostgreSQL backends
    if "postgresql" in original_url:
        # Create worker-specific schema name, e.g. "gw3" -> "test_worker_3"
        schema_name = f"test_worker_{worker_id.replace('gw', '')}"

        # Parse the URL so we can edit its query string safely
        parsed = urlparse(original_url)

        # Get existing query parameters
        query_params = parse_qs(parsed.query)

        # Add or update the libpq `options` parameter to set search_path
        if "options" in query_params:
            # Append to existing options (decode first: parse_qs keeps
            # percent-encoding inside values)
            current_options = unquote(query_params["options"][0])
            new_options = f"{current_options} -csearch_path={schema_name}"
        else:
            # Create new options
            new_options = f"-csearch_path={schema_name}"

        query_params["options"] = [new_options]

        # Rebuild the URL with updated query parameters
        new_query = urlencode(query_params, doseq=True)
        new_url = urlunparse(
            (
                parsed.scheme,
                parsed.netloc,
                parsed.path,
                parsed.params,
                new_query,
                parsed.fragment,
            )
        )

        # Override both the environment variable and the module constant
        monkeypatch.setenv("SQLALCHEMY_DATABASE_URL", new_url)

        # Also patch the SQL_CONN_STR constant used in tests
        import tests.test_sql_core

        monkeypatch.setattr(tests.test_sql_core, "SQL_CONN_STR", new_url)

        # Ensure schema creation by creating it before tests run
        try:
            from sqlalchemy import create_engine, text

            # Use original URL to create schema (without search_path,
            # which would point at the not-yet-existing schema)
            engine = create_engine(original_url)
            with engine.connect() as conn:
                conn.execute(
                    text(f"CREATE SCHEMA IF NOT EXISTS {schema_name}")
                )
                conn.commit()
            engine.dispose()
        except Exception as e:
            # Best-effort: if we can't create the schema, the test will
            # fail anyway with a clearer error. Lazy %-style args avoid
            # formatting work when DEBUG logging is off.
            logger.debug("Failed to create schema %s: %s", schema_name, e)

    yield
85 | 97 |
|
86 | 98 |
|
@@ -142,52 +154,59 @@ def isolated_cache_directory(tmp_path, monkeypatch, request, worker_id): |
@pytest.fixture(scope="session", autouse=True)
def cleanup_test_schemas(request):
    """Clean up test schemas after all tests complete.

    This fixture ensures that worker-specific PostgreSQL schemas created
    during parallel (pytest-xdist) test execution are properly dropped at
    the end of the session. On the master (non-parallel) run, or for
    non-PostgreSQL backends, it does nothing.

    Parameters
    ----------
    request : pytest.FixtureRequest
        Unused; kept for fixture-signature compatibility.

    """
    yield  # Let all tests run first

    # Cleanup after all tests
    worker_id = os.environ.get("PYTEST_XDIST_WORKER", "master")

    if worker_id != "master":
        # Clean up the worker-specific schema
        original_url = os.environ.get("SQLALCHEMY_DATABASE_URL", "")

        if "postgresql" in original_url:
            # Must mirror the naming used when the schema was created
            schema_name = f"test_worker_{worker_id.replace('gw', '')}"

            try:
                from sqlalchemy import create_engine, text

                # Parse URL to remove any schema options for cleanup,
                # so we don't connect with a search_path pointing at the
                # schema we are about to drop
                parsed = urlparse(original_url)
                query_params = parse_qs(parsed.query)

                # Remove options parameter if it exists
                query_params.pop("options", None)

                # Rebuild clean URL
                clean_query = (
                    urlencode(query_params, doseq=True) if query_params else ""
                )
                clean_url = urlunparse(
                    (
                        parsed.scheme,
                        parsed.netloc,
                        parsed.path,
                        parsed.params,
                        clean_query,
                        parsed.fragment,
                    )
                )

                engine = create_engine(clean_url)
                with engine.connect() as conn:
                    # Drop the schema and all its contents
                    conn.execute(
                        text(f"DROP SCHEMA IF EXISTS {schema_name} CASCADE")
                    )
                    conn.commit()
                engine.dispose()
            except Exception as e:
                # If cleanup fails, it's not critical — log lazily so no
                # formatting cost is paid unless DEBUG is enabled
                logger.debug(
                    "Failed to cleanup schema %s: %s", schema_name, e
                )
191 | 210 |
|
192 | 211 |
|
193 | 212 | def pytest_addoption(parser): |
|
0 commit comments