-
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtest_performance.py
More file actions
243 lines (198 loc) · 9.07 KB
/
test_performance.py
File metadata and controls
243 lines (198 loc) · 9.07 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
#!/usr/bin/env python3
"""Performance tests for the factory pattern and optimizations."""
import os
import sys
import tempfile
import time
import pytest
# Add parent directory to path for imports
project_root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.insert(0, project_root) # For pythonLogs
# Import test utilities
from tests.core.test_log_utils import get_safe_timezone
from pythonLogs import (
LoggerFactory,
LoggerType,
LogLevel,
create_logger,
get_or_create_logger,
basic_logger,
size_rotating_logger,
clear_logger_registry,
get_registered_loggers,
)
class TestPerformance:
    """Performance tests for factory pattern and optimizations.

    All timings use ``time.perf_counter()`` — a monotonic, high-resolution
    clock — instead of ``time.time()``, whose wall-clock nature (NTP
    adjustments, coarse resolution on some platforms) can yield zero or
    even negative elapsed intervals and make these assertions flaky.
    """

    def setup_method(self):
        """Clear registry before each test so cached loggers never skew timings."""
        clear_logger_registry()

    def test_settings_caching_performance(self):
        """Test that settings caching improves performance."""
        # Create multiple loggers (should reuse cached settings)
        start_time = time.perf_counter()
        loggers = []
        for i in range(50):  # Reasonable number for CI/testing
            logger = LoggerFactory.create_logger(LoggerType.BASIC, name=f"settings_test_{i}")
            loggers.append(logger)
        elapsed_time = time.perf_counter() - start_time
        # Should complete relatively quickly (less than 1 second)
        assert elapsed_time < 1.0
        assert len(loggers) == 50
        # Verify all loggers were created with unique names
        names = {logger.name for logger in loggers}
        assert len(names) == 50

    def test_registry_caching_performance(self):
        """Test that registry caching provides significant performance improvement."""
        # Baseline: Create new loggers each time
        start_time = time.perf_counter()
        for i in range(30):
            create_logger(LoggerType.BASIC, name=f"no_cache_{i}")
        no_cache_time = time.perf_counter() - start_time
        # With caching: Reuse same logger
        clear_logger_registry()
        start_time = time.perf_counter()
        for _ in range(30):
            get_or_create_logger(LoggerType.BASIC, name="cached_logger")
        cache_time = time.perf_counter() - start_time
        # Cached should be significantly faster
        # Allow some tolerance for test environment variability and handle zero division
        if no_cache_time > 0:
            performance_improvement = (no_cache_time - cache_time) / no_cache_time
            assert performance_improvement > 0.1  # At least 10% improvement
        else:
            # If timing is too precise, just verify cache_time is not greater
            assert cache_time <= no_cache_time
        # Verify only one logger was actually created
        assert len(get_registered_loggers()) == 1

    @pytest.mark.skipif(
        sys.platform == "win32",
        reason="Windows file locking issues with TemporaryDirectory - see test_performance_windows.py",
    )
    def test_directory_permission_caching(self):
        """Test that directory permission checking is cached."""
        with tempfile.TemporaryDirectory() as temp_dir:
            # First call should check and cache directory permissions
            start_time = time.perf_counter()
            size_rotating_logger(name="dir_test_1", directory=temp_dir)
            first_call_time = time.perf_counter() - start_time
            # Subsequent calls to the same directory should be faster (cached)
            start_time = time.perf_counter()
            for i in range(10):
                # The same directory should hit the permission cache
                size_rotating_logger(name=f"dir_test_{i + 2}", directory=temp_dir)
            subsequent_calls_time = time.perf_counter() - start_time
            # The average time per subsequent call should be less than the first call
            avg_subsequent_time = subsequent_calls_time / 10
            assert avg_subsequent_time <= first_call_time

    def test_timezone_function_caching(self):
        """Test that timezone functions are cached for performance."""
        # Create multiple loggers with same timezone
        start_time = time.perf_counter()
        safe_tz = get_safe_timezone()
        loggers = []
        for i in range(20):
            # Same timezone should reuse the cached timezone function
            logger = basic_logger(name=f"tz_test_{i}", timezone=safe_tz)
            loggers.append(logger)
        elapsed_time = time.perf_counter() - start_time
        # Should complete quickly due to timezone caching
        assert elapsed_time < 0.5
        assert len(loggers) == 20

    @pytest.mark.skipif(
        sys.platform == "win32",
        reason="Windows timing precision issues - see test_performance_windows.py",
    )
    def test_enum_vs_string_performance(self):
        """Test that enum usage doesn't significantly impact performance."""
        # Test with string values
        start_time = time.perf_counter()
        for i in range(25):
            create_logger("basic", name=f"string_test_{i}", level="INFO")
        string_time = time.perf_counter() - start_time
        # Test with enum values
        start_time = time.perf_counter()
        for i in range(25):
            create_logger(LoggerType.BASIC, name=f"enum_test_{i}", level=LogLevel.INFO)
        enum_time = time.perf_counter() - start_time
        # Enum performance should be comparable to strings
        # Allow 60% tolerance for enum conversion overhead
        assert enum_time <= string_time * 1.6

    def test_large_scale_logger_creation(self):
        """Test performance with larger number of loggers."""
        start_time = time.perf_counter()
        # Create 100 different loggers
        loggers = []
        for i in range(100):
            logger = LoggerFactory.create_logger(LoggerType.BASIC, name=f"scale_test_{i}", level=LogLevel.INFO)
            loggers.append(logger)
        elapsed_time = time.perf_counter() - start_time
        # Should complete in reasonable time (less than 2 seconds)
        assert elapsed_time < 2.0
        assert len(loggers) == 100
        # Verify all loggers are unique
        names = {logger.name for logger in loggers}
        assert len(names) == 100

    @pytest.mark.skipif(
        sys.platform == "win32",
        reason="Windows file locking issues with TemporaryDirectory - see test_performance_windows.py",
    )
    def test_mixed_logger_types_performance(self):
        """Test performance when creating mixed logger types."""
        with tempfile.TemporaryDirectory() as temp_dir:
            start_time = time.perf_counter()
            loggers = []
            for i in range(30):  # 10 of each type
                if i % 3 == 0:
                    logger = create_logger(LoggerType.BASIC, name=f"mixed_basic_{i}")
                elif i % 3 == 1:
                    logger = size_rotating_logger(name=f"mixed_size_{i}", directory=temp_dir)
                else:
                    logger = create_logger(LoggerType.TIMED_ROTATING, name=f"mixed_timed_{i}", directory=temp_dir)
                loggers.append(logger)
            elapsed_time = time.perf_counter() - start_time
            # Should complete efficiently
            assert elapsed_time < 1.5
            assert len(loggers) == 30

    def test_memory_usage_with_registry(self):
        """Test that registry doesn't cause excessive memory usage."""
        # Create many loggers in registry
        for i in range(50):
            get_or_create_logger(LoggerType.BASIC, name=f"memory_test_{i}")
        # Verify registry contains expected number
        registered = get_registered_loggers()
        assert len(registered) == 50
        # Clear registry
        clear_logger_registry()
        # Verify registry is empty
        assert len(get_registered_loggers()) == 0

    @pytest.mark.slow
    @pytest.mark.skipif(
        sys.platform == "win32",
        reason="Windows file locking issues with TemporaryDirectory - see test_performance_windows.py",
    )
    def test_stress_test_factory_pattern(self):
        """Stress test the factory pattern with intensive usage."""
        with tempfile.TemporaryDirectory() as temp_dir:
            start_time = time.perf_counter()
            # Intensive mixed usage
            for i in range(200):
                if i % 4 == 0:
                    logger = get_or_create_logger(LoggerType.BASIC, name="stress_cached")
                elif i % 4 == 1:
                    logger = create_logger("basic", name=f"stress_basic_{i}")
                elif i % 4 == 2:
                    logger = size_rotating_logger(name=f"stress_size_{i}", directory=temp_dir, level=LogLevel.WARNING)
                else:
                    logger = LoggerFactory.create_logger(
                        LoggerType.TIMED_ROTATING,
                        name=f"stress_timed_{i}",
                        directory=temp_dir,
                        when="midnight",
                    )
                # Actually use the logger
                logger.info(f"Stress test message {i}")
            elapsed_time = time.perf_counter() - start_time
            # Should complete in reasonable time even under stress
            assert elapsed_time < 5.0  # 5 seconds max for 200 loggers
            # Verify registry has cached logger
            assert "stress_cached" in get_registered_loggers()