Skip to content

Commit d3b98ee

Browse files
committed
fix commit-ci errors
1 parent 419b202 commit d3b98ee

2 files changed

Lines changed: 85 additions & 64 deletions

File tree

CLAUDE.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -523,12 +523,14 @@ ______________________________________________________________________
523523
**ALWAYS run tests using `uv run ./scripts/test-local.sh`** - NEVER run pytest directly!
524524

525525
Examples:
526+
526527
- `uv run ./scripts/test-local.sh sql` - Run SQL tests
527528
- `uv run ./scripts/test-local.sh sql -p` - Run SQL tests in parallel
528529
- `uv run ./scripts/test-local.sh all -p` - Run all tests in parallel
529530
- `uv run ./scripts/test-local.sh mongo redis` - Run specific backends
530531

531532
This ensures:
533+
532534
- Correct virtual environment activation
533535
- Proper dependency installation
534536
- Docker container management for backend services

tests/conftest.py

Lines changed: 83 additions & 64 deletions
Original file line numberDiff line numberDiff line change
@@ -1,86 +1,98 @@
11
"""Pytest configuration and shared fixtures for cachier tests."""
22

3+
import logging
34
import os
45
from urllib.parse import parse_qs, unquote, urlencode, urlparse, urlunparse
56

67
import pytest
78

9+
logger = logging.getLogger(__name__)
810

9-
@pytest.fixture(autouse=True, scope="function")
11+
12+
@pytest.fixture(autouse=True)
1013
def inject_worker_schema_for_sql_tests(monkeypatch, request):
1114
"""Automatically inject worker-specific schema into SQL connection string.
12-
13-
This fixture enables parallel SQL test execution by giving each pytest-xdist
14-
worker its own PostgreSQL schema, preventing table creation conflicts.
15+
16+
This fixture enables parallel SQL test execution by giving each pytest-
17+
xdist worker its own PostgreSQL schema, preventing table creation
18+
conflicts.
19+
1520
"""
1621
# Only apply to SQL tests
1722
if "sql" not in request.node.keywords:
1823
yield
1924
return
20-
21-
worker_id = os.environ.get('PYTEST_XDIST_WORKER', 'master')
22-
23-
if worker_id == 'master':
25+
26+
worker_id = os.environ.get("PYTEST_XDIST_WORKER", "master")
27+
28+
if worker_id == "master":
2429
# Not running in parallel, no schema isolation needed
2530
yield
2631
return
27-
32+
2833
# Get the original SQL connection string
29-
original_url = os.environ.get('SQLALCHEMY_DATABASE_URL', 'sqlite:///:memory:')
30-
31-
if 'postgresql' in original_url:
34+
original_url = os.environ.get(
35+
"SQLALCHEMY_DATABASE_URL", "sqlite:///:memory:"
36+
)
37+
38+
if "postgresql" in original_url:
3239
# Create worker-specific schema name
3340
schema_name = f"test_worker_{worker_id.replace('gw', '')}"
34-
41+
3542
# Parse the URL
3643
parsed = urlparse(original_url)
37-
44+
3845
# Get existing query parameters
3946
query_params = parse_qs(parsed.query)
40-
47+
4148
# Add or update the options parameter to set search_path
42-
if 'options' in query_params:
49+
if "options" in query_params:
4350
# Append to existing options
44-
current_options = unquote(query_params['options'][0])
51+
current_options = unquote(query_params["options"][0])
4552
new_options = f"{current_options} -csearch_path={schema_name}"
4653
else:
4754
# Create new options
4855
new_options = f"-csearch_path={schema_name}"
49-
50-
query_params['options'] = [new_options]
51-
56+
57+
query_params["options"] = [new_options]
58+
5259
# Rebuild the URL with updated query parameters
5360
new_query = urlencode(query_params, doseq=True)
54-
new_url = urlunparse((
55-
parsed.scheme,
56-
parsed.netloc,
57-
parsed.path,
58-
parsed.params,
59-
new_query,
60-
parsed.fragment
61-
))
62-
61+
new_url = urlunparse(
62+
(
63+
parsed.scheme,
64+
parsed.netloc,
65+
parsed.path,
66+
parsed.params,
67+
new_query,
68+
parsed.fragment,
69+
)
70+
)
71+
6372
# Override both the environment variable and the module constant
64-
monkeypatch.setenv('SQLALCHEMY_DATABASE_URL', new_url)
65-
73+
monkeypatch.setenv("SQLALCHEMY_DATABASE_URL", new_url)
74+
6675
# Also patch the SQL_CONN_STR constant used in tests
6776
import tests.test_sql_core
68-
monkeypatch.setattr(tests.test_sql_core, 'SQL_CONN_STR', new_url)
69-
77+
78+
monkeypatch.setattr(tests.test_sql_core, "SQL_CONN_STR", new_url)
79+
7080
# Ensure schema creation by creating it before tests run
7181
try:
7282
from sqlalchemy import create_engine, text
73-
83+
7484
# Use original URL to create schema (without search_path)
7585
engine = create_engine(original_url)
7686
with engine.connect() as conn:
77-
conn.execute(text(f"CREATE SCHEMA IF NOT EXISTS {schema_name}"))
87+
conn.execute(
88+
text(f"CREATE SCHEMA IF NOT EXISTS {schema_name}")
89+
)
7890
conn.commit()
7991
engine.dispose()
80-
except Exception:
92+
except Exception as e:
8193
# If we can't create the schema, the test will fail anyway
82-
pass
83-
94+
logger.debug(f"Failed to create schema {schema_name}: {e}")
95+
8496
yield
8597

8698

@@ -142,52 +154,59 @@ def isolated_cache_directory(tmp_path, monkeypatch, request, worker_id):
142154
@pytest.fixture(scope="session", autouse=True)
143155
def cleanup_test_schemas(request):
144156
"""Clean up test schemas after all tests complete.
145-
157+
146158
This fixture ensures that worker-specific PostgreSQL schemas created during
147159
parallel test execution are properly cleaned up.
160+
148161
"""
149162
yield # Let all tests run first
150-
163+
151164
# Cleanup after all tests
152-
worker_id = os.environ.get('PYTEST_XDIST_WORKER', 'master')
153-
154-
if worker_id != 'master':
165+
worker_id = os.environ.get("PYTEST_XDIST_WORKER", "master")
166+
167+
if worker_id != "master":
155168
# Clean up the worker-specific schema
156-
original_url = os.environ.get('SQLALCHEMY_DATABASE_URL', '')
157-
158-
if 'postgresql' in original_url:
169+
original_url = os.environ.get("SQLALCHEMY_DATABASE_URL", "")
170+
171+
if "postgresql" in original_url:
159172
schema_name = f"test_worker_{worker_id.replace('gw', '')}"
160-
173+
161174
try:
162175
from sqlalchemy import create_engine, text
163-
176+
164177
# Parse URL to remove any schema options for cleanup
165178
parsed = urlparse(original_url)
166179
query_params = parse_qs(parsed.query)
167-
180+
168181
# Remove options parameter if it exists
169-
query_params.pop('options', None)
170-
182+
query_params.pop("options", None)
183+
171184
# Rebuild clean URL
172-
clean_query = urlencode(query_params, doseq=True) if query_params else ''
173-
clean_url = urlunparse((
174-
parsed.scheme,
175-
parsed.netloc,
176-
parsed.path,
177-
parsed.params,
178-
clean_query,
179-
parsed.fragment
180-
))
181-
185+
clean_query = (
186+
urlencode(query_params, doseq=True) if query_params else ""
187+
)
188+
clean_url = urlunparse(
189+
(
190+
parsed.scheme,
191+
parsed.netloc,
192+
parsed.path,
193+
parsed.params,
194+
clean_query,
195+
parsed.fragment,
196+
)
197+
)
198+
182199
engine = create_engine(clean_url)
183200
with engine.connect() as conn:
184201
# Drop the schema and all its contents
185-
conn.execute(text(f"DROP SCHEMA IF EXISTS {schema_name} CASCADE"))
202+
conn.execute(
203+
text(f"DROP SCHEMA IF EXISTS {schema_name} CASCADE")
204+
)
186205
conn.commit()
187206
engine.dispose()
188-
except Exception:
207+
except Exception as e:
189208
# If cleanup fails, it's not critical
190-
pass
209+
logger.debug(f"Failed to cleanup schema {schema_name}: {e}")
191210

192211

193212
def pytest_addoption(parser):

0 commit comments

Comments (0)