Coverage for ivatar/test_robohash_cached.py: 97% (118 statements)
"""
Tests for consolidated robohash implementation
"""

import time
import unittest
from PIL import Image
from io import BytesIO
from django.test import TestCase

# Import our consolidated implementation
from .robohash import (
    OptimizedRobohash,
    create_robohash,
    get_robohash_cache_stats,
    clear_robohash_cache,
)


class TestConsolidatedRobohash(TestCase):
    """Test consolidated robohash functionality and performance"""

    def setUp(self):
        """Clear cache before each test"""
        clear_robohash_cache()

    def test_cache_functionality(self):
        """Test that caching works correctly"""
        # Create two identical robohashes
        digest = "test@example.com"

        robohash1 = OptimizedRobohash(digest)
        robohash1.assemble_optimized(sizex=300, sizey=300)

        robohash2 = OptimizedRobohash(digest)
        robohash2.assemble_optimized(sizex=300, sizey=300)

        # Images should be valid
        self.assertEqual(robohash1.img.size, robohash2.img.size)
        self.assertIsNotNone(robohash1.img)
        self.assertIsNotNone(robohash2.img)

    def test_cache_stats(self):
        """Test cache statistics tracking"""
        clear_robohash_cache()

        # Initial stats should be empty
        stats = get_robohash_cache_stats()
        self.assertEqual(stats["hits"], 0)
        self.assertEqual(stats["misses"], 0)

        # Generate a robohash (should create cache misses)
        digest = "cache-test@example.com"
        robohash = OptimizedRobohash(digest)
        robohash.assemble_optimized(sizex=300, sizey=300)

        stats_after = get_robohash_cache_stats()
        self.assertGreaterEqual(stats_after["misses"], 0)

        # Generate same robohash again (may create cache hits)
        robohash2 = OptimizedRobohash(digest)
        robohash2.assemble_optimized(sizex=300, sizey=300)

        stats_final = get_robohash_cache_stats()
        # Cache behavior may vary, just ensure stats are tracked
        self.assertGreaterEqual(stats_final["hits"] + stats_final["misses"], 0)

    def test_different_sizes_handled_correctly(self):
        """Test that different sizes work correctly"""
        digest = "size-test@example.com"

        # Generate 300x300
        robohash_300 = OptimizedRobohash(digest)
        robohash_300.assemble_optimized(sizex=300, sizey=300)

        # Generate 150x150
        robohash_150 = OptimizedRobohash(digest)
        robohash_150.assemble_optimized(sizex=150, sizey=150)

        # Sizes should be correct
        self.assertEqual(robohash_300.img.size, (300, 300))
        self.assertEqual(robohash_150.img.size, (150, 150))

    def test_create_robohash_function(self):
        """Test the convenience function"""
        digest = "function-test@example.com"

        # Test the convenience function
        data = create_robohash(digest, 300)

        self.assertIsInstance(data, BytesIO)

        # Should be able to load as image
        data.seek(0)
        img = Image.open(data)
        self.assertEqual(img.size, (300, 300))

    def test_performance_characteristics(self):
        """Test that robohash generation performs reasonably"""
        digest = "performance-test@example.com"

        # Clear cache to start fresh
        clear_robohash_cache()

        # Time first generation
        start_time = time.time()
        robohash1 = OptimizedRobohash(digest)
        robohash1.assemble_optimized(sizex=300, sizey=300)
        first_time = time.time() - start_time

        # Time second generation
        start_time = time.time()
        robohash2 = OptimizedRobohash(digest)
        robohash2.assemble_optimized(sizex=300, sizey=300)
        second_time = time.time() - start_time

        # Both should complete in reasonable time
        self.assertLess(first_time, 10.0)  # Should complete within 10 seconds
        self.assertLess(second_time, 10.0)  # Should complete within 10 seconds

        # Check that cache is working
        stats = get_robohash_cache_stats()
        self.assertGreaterEqual(stats["hits"] + stats["misses"], 0)

    def test_cache_size_management(self):
        """Test that cache manages size appropriately"""
        clear_robohash_cache()

        # Generate several robohashes
        for i in range(10):
            digest = f"cache-limit-test-{i}@example.com"
            robohash = OptimizedRobohash(digest)
            robohash.assemble_optimized(sizex=300, sizey=300)

        # Cache should be managed appropriately
        stats = get_robohash_cache_stats()
        self.assertGreaterEqual(stats["cache_size"], 0)
        self.assertLessEqual(stats["cache_size"], stats["max_cache_size"])

    def test_error_handling(self):
        """Test error handling in robohash implementation"""
        # Test with various inputs that might cause issues
        test_cases = ["", "invalid", "test@test.com"]

        for digest in test_cases:
            try:
                robohash = OptimizedRobohash(digest)
                robohash.assemble_optimized(sizex=300, sizey=300)

                # Should not crash, should produce some image
                self.assertIsNotNone(robohash.img)

            except Exception as e:
                self.fail(
                    f"Robohash should handle errors gracefully for '{digest}': {e}"
                )

    def test_different_robosets(self):
        """Test different robot sets work correctly"""
        digest = "roboset-test@example.com"

        robosets = ["any", "set1", "set2"]

        for roboset in robosets:
            with self.subTest(roboset=roboset):
                robohash = OptimizedRobohash(digest)
                robohash.assemble_optimized(roboset=roboset, sizex=256, sizey=256)

                self.assertIsNotNone(robohash.img)
                self.assertEqual(robohash.img.size, (256, 256))

    def test_create_function_with_different_parameters(self):
        """Test create_robohash function with different parameters"""
        digest = "params-test@example.com"

        # Test different sizes
        sizes = [64, 128, 256, 512]

        for size in sizes:
            with self.subTest(size=size):
                data = create_robohash(digest, size, "any")

                self.assertIsInstance(data, BytesIO)
                data.seek(0)
                img = Image.open(data)
                self.assertEqual(img.size, (size, size))


class TestRobohashPerformance(TestCase):
    """Performance tests for robohash"""

    def test_performance_comparison(self):
        """Test performance characteristics"""
        digest = "perf-comparison@example.com"
        iterations = 3

        # Clear cache and test performance
        clear_robohash_cache()
        times = []

        for i in range(iterations):
            start_time = time.time()
            robohash = OptimizedRobohash(digest)
            robohash.assemble_optimized(sizex=300, sizey=300)
            times.append(time.time() - start_time)

        avg_time = sum(times) / len(times)

        print("\nRobohash Performance:")
        print(f"Average time: {avg_time * 1000:.2f}ms")

        # Cache stats
        stats = get_robohash_cache_stats()
        print(f"Cache stats: {stats}")

        # Should complete in reasonable time
        self.assertLess(avg_time, 5.0)  # Should average less than 5 seconds
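
# NOTE: tests based on django.test.TestCase are normally run through Django's
# test runner (for example "./manage.py test ivatar.test_robohash_cached",
# assuming the usual project layout); calling unittest.main() below only works
# if Django settings are already configured in the environment.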

if __name__ == "__main__":
    # Run tests
    unittest.main()