Coverage for ivatar/test_pagan_optimized.py: 98% (177 statements)
« prev ^ index » next — coverage.py v7.11.0, created at 2025-11-04 00:07 +0000
1"""
2Tests for optimized pagan avatar generator
3"""
5import hashlib
6import time
7import unittest
8from PIL import Image
9from io import BytesIO
10from django.test import TestCase
11import pagan
13from .pagan_optimized import (
14 OptimizedPagan,
15 create_optimized_pagan,
16 get_pagan_cache_info,
17 clear_pagan_cache,
18)
class TestOptimizedPagan(TestCase):
    """Test optimized pagan functionality."""

    @staticmethod
    def _png_bytes(img):
        """Serialize *img* to PNG and return the raw bytes.

        Used for pixel-perfect image comparisons throughout this class.
        """
        buf = BytesIO()
        img.save(buf, format="PNG")
        return buf.getvalue()

    def setUp(self):
        """Clear the avatar cache so every test starts with empty stats."""
        clear_pagan_cache()

    def test_pagan_generation(self):
        """Test basic optimized pagan generation"""
        digest = hashlib.md5(b"test@example.com").hexdigest()

        img = OptimizedPagan.generate_optimized(digest, 80)

        self.assertIsNotNone(img)
        self.assertIsInstance(img, Image.Image)
        self.assertEqual(img.size, (80, 80))
        self.assertEqual(img.mode, "RGBA")

    def test_deterministic_generation(self):
        """Test that same digest produces identical images"""
        digest = hashlib.md5(b"deterministic@example.com").hexdigest()

        img1 = OptimizedPagan.generate_optimized(digest, 80)
        img2 = OptimizedPagan.generate_optimized(digest, 80)

        # Pixel-perfect comparison via PNG bytes
        self.assertEqual(self._png_bytes(img1), self._png_bytes(img2))

    def test_different_digests_produce_different_images(self):
        """Test that different digests produce different images"""
        digest1 = hashlib.md5(b"user1@example.com").hexdigest()
        digest2 = hashlib.md5(b"user2@example.com").hexdigest()

        img1 = OptimizedPagan.generate_optimized(digest1, 80)
        img2 = OptimizedPagan.generate_optimized(digest2, 80)

        self.assertNotEqual(self._png_bytes(img1), self._png_bytes(img2))

    def test_compatibility_with_original(self):
        """Test that optimized version produces identical results to original"""
        digest = hashlib.md5(b"compatibility@example.com").hexdigest()

        # Generate with original pagan
        original_avatar = pagan.Avatar(digest)
        original_img = original_avatar.img.resize((80, 80), Image.LANCZOS)

        # Generate with optimized version
        optimized_img = OptimizedPagan.generate_optimized(digest, 80)

        # Images should be identical in geometry, mode and pixels
        self.assertEqual(original_img.size, optimized_img.size)
        self.assertEqual(original_img.mode, optimized_img.mode)
        self.assertEqual(
            self._png_bytes(original_img), self._png_bytes(optimized_img)
        )

    def test_caching_functionality(self):
        """Test that caching works correctly"""
        digest = hashlib.md5(b"cache_test@example.com").hexdigest()

        # Clear cache and check initial stats
        clear_pagan_cache()
        initial_stats = get_pagan_cache_info()
        self.assertEqual(initial_stats["hits"], 0)
        self.assertEqual(initial_stats["misses"], 0)

        # First generation (should be cache miss)
        img1 = OptimizedPagan.generate_optimized(digest, 80)
        stats_after_first = get_pagan_cache_info()
        self.assertEqual(stats_after_first["misses"], 1)
        self.assertEqual(stats_after_first["hits"], 0)

        # Second generation (should be cache hit)
        img2 = OptimizedPagan.generate_optimized(digest, 80)
        stats_after_second = get_pagan_cache_info()
        self.assertEqual(stats_after_second["misses"], 1)
        self.assertEqual(stats_after_second["hits"], 1)

        # The cached image must be identical to the freshly generated one
        self.assertEqual(self._png_bytes(img1), self._png_bytes(img2))

    def test_different_sizes(self):
        """Test pagan generation at different sizes"""
        digest = hashlib.md5(b"sizes@example.com").hexdigest()

        for size in (40, 80, 120, 200):
            with self.subTest(size=size):
                img = OptimizedPagan.generate_optimized(digest, size)
                self.assertEqual(img.size, (size, size))

    def test_cache_size_limit(self):
        """Test that cache respects size limits"""
        # Temporarily shrink the cache so eviction is easy to trigger
        original_size = OptimizedPagan._max_cache_size
        OptimizedPagan._max_cache_size = 3

        try:
            clear_pagan_cache()

            # Generate more avatars than the cache can hold
            for i in range(6):
                digest = hashlib.md5(
                    f"cache_limit_{i}@example.com".encode()
                ).hexdigest()
                OptimizedPagan.generate_optimized(digest, 80)

            # Cache size should not exceed limit
            stats = get_pagan_cache_info()
            self.assertLessEqual(stats["size"], 3)
        finally:
            # Restore original cache size even if an assertion failed
            OptimizedPagan._max_cache_size = original_size

    def test_create_optimized_pagan_function(self):
        """Test the convenience function"""
        digest = hashlib.md5(b"function_test@example.com").hexdigest()

        data = create_optimized_pagan(digest, 80)
        self.assertIsInstance(data, BytesIO)

        # The returned buffer must contain a loadable 80x80 RGBA image
        data.seek(0)
        img = Image.open(data)
        self.assertEqual(img.size, (80, 80))
        self.assertEqual(img.mode, "RGBA")

    def test_error_handling(self):
        """Test error handling with invalid input"""
        # An invalid (empty) digest must not raise; it may return None
        # or a valid fallback image.
        try:
            img = OptimizedPagan.generate_optimized("", 80)
            if img is not None:
                self.assertIsInstance(img, Image.Image)
        except Exception:
            self.fail("Optimized pagan should handle errors gracefully")

    def test_performance_improvement(self):
        """Test that optimization provides performance improvement

        Uses time.perf_counter() (monotonic, high resolution) instead of
        time.time(): on platforms with a coarse wall clock a cached
        generation could measure as 0ms, and the old guard
        ``if optimized_cached_avg > 0:`` then skipped the assertion
        silently.
        """
        digest = hashlib.md5(b"performance@example.com").hexdigest()
        iterations = 5

        # Baseline: original pagan, full generation every iteration
        original_times = []
        for _ in range(iterations):
            start = time.perf_counter()
            avatar = pagan.Avatar(digest)
            img = avatar.img.resize((80, 80), Image.LANCZOS)
            buf = BytesIO()
            img.save(buf, format="PNG")
            original_times.append((time.perf_counter() - start) * 1000)

        # Warm the optimized cache (these runs are cache misses)
        clear_pagan_cache()
        for _ in range(iterations):
            create_optimized_pagan(digest, 80)

        # Measure cache-hit performance
        optimized_cached_times = []
        for _ in range(iterations):
            start = time.perf_counter()
            create_optimized_pagan(digest, 80)
            optimized_cached_times.append((time.perf_counter() - start) * 1000)

        original_avg = sum(original_times) / len(original_times)
        optimized_cached_avg = sum(optimized_cached_times) / len(
            optimized_cached_times
        )

        print("\nPerformance Comparison:")
        print(f"Original average: {original_avg:.2f}ms")
        print(f"Optimized (cached): {optimized_cached_avg:.2f}ms")

        # Clamp the denominator so a sub-resolution (0ms) measurement
        # cannot skip the assertion.
        improvement = original_avg / max(optimized_cached_avg, 1e-6)
        print(f"Improvement: {improvement:.1f}x faster")

        # Should be significantly faster with caching
        self.assertGreater(
            improvement, 10, "Optimization should provide significant improvement"
        )

    def test_cache_stats(self):
        """Test cache statistics tracking"""
        clear_pagan_cache()

        digest1 = hashlib.md5(b"stats1@example.com").hexdigest()
        digest2 = hashlib.md5(b"stats2@example.com").hexdigest()

        # Two distinct digests -> two misses; repeating the first -> one hit
        OptimizedPagan.generate_optimized(digest1, 80)
        OptimizedPagan.generate_optimized(digest2, 80)
        OptimizedPagan.generate_optimized(digest1, 80)

        stats = get_pagan_cache_info()

        self.assertEqual(stats["misses"], 2)
        self.assertEqual(stats["hits"], 1)
        self.assertEqual(stats["size"], 2)
        self.assertIn("hit_rate", stats)
class TestPaganPerformance(TestCase):
    """Performance-focused tests for pagan optimization"""

    def test_bulk_generation_performance(self):
        """Test performance with multiple generations"""
        clear_pagan_cache()

        # Generate multiple pagan avatars (all cache misses)
        test_count = 20
        digests = [
            hashlib.md5(f"bulk{i}@example.com".encode()).hexdigest()
            for i in range(test_count)
        ]

        # perf_counter is monotonic and high-resolution, unlike time.time()
        start = time.perf_counter()
        for digest in digests:
            create_optimized_pagan(digest, 80)
        total_time = (time.perf_counter() - start) * 1000  # ms

        avg_time = total_time / test_count

        print(f"Bulk generation: {test_count} pagan avatars in {total_time:.1f}ms")
        print(f"Average per avatar: {avg_time:.2f}ms")

        # Should average under 100ms per avatar (cache misses are still high, but cache hits are much faster)
        self.assertLess(
            avg_time, 100.0, f"Bulk generation too slow: {avg_time:.2f}ms avg"
        )

    def test_cache_hit_performance(self):
        """Test performance improvement with cache hits

        Uses time.perf_counter() instead of time.time() and clamps the
        denominator: a 0ms cache-hit measurement previously caused the
        speedup assertion (behind ``if second_time > 0:``) to be skipped
        silently.
        """
        digest = hashlib.md5(b"cache_perf@example.com").hexdigest()

        # First generation (cache miss)
        start = time.perf_counter()
        create_optimized_pagan(digest, 80)
        first_time = (time.perf_counter() - start) * 1000

        # Second generation (cache hit)
        start = time.perf_counter()
        create_optimized_pagan(digest, 80)
        second_time = (time.perf_counter() - start) * 1000

        print(f"First generation (miss): {first_time:.2f}ms")
        print(f"Second generation (hit): {second_time:.2f}ms")

        # Clamp so a sub-resolution measurement cannot skip the check
        improvement = first_time / max(second_time, 1e-6)
        print(f"Cache hit improvement: {improvement:.1f}x faster")

        # Cache hits should be much faster
        self.assertGreater(
            improvement, 5, "Cache hits should provide significant speedup"
        )
# Allow running this test module directly (outside the Django test runner).
if __name__ == "__main__":
    unittest.main()