Coverage report for ivatar/test_pagan_optimized.py: 98% of 177 statements — generated by coverage.py v7.11.0 at 2025-11-04 00:07 +0000.

1""" 

2Tests for optimized pagan avatar generator 

3""" 

4 

5import hashlib 

6import time 

7import unittest 

8from PIL import Image 

9from io import BytesIO 

10from django.test import TestCase 

11import pagan 

12 

13from .pagan_optimized import ( 

14 OptimizedPagan, 

15 create_optimized_pagan, 

16 get_pagan_cache_info, 

17 clear_pagan_cache, 

18) 

19 

20 

class TestOptimizedPagan(TestCase):
    """Tests for the optimized pagan avatar generator.

    Covers determinism, pixel-level compatibility with the original
    ``pagan`` library, caching behaviour, error handling and basic
    performance characteristics.
    """

    @staticmethod
    def _png_bytes(img):
        """Serialize a PIL image to PNG bytes for exact byte-level comparison."""
        buf = BytesIO()
        img.save(buf, format="PNG")
        return buf.getvalue()

    def setUp(self):
        """Clear the avatar cache so each test starts from a cold state."""
        clear_pagan_cache()

    def test_pagan_generation(self):
        """Basic generation returns an RGBA image of the requested size."""
        digest = hashlib.md5(b"test@example.com").hexdigest()

        img = OptimizedPagan.generate_optimized(digest, 80)

        self.assertIsNotNone(img)
        self.assertIsInstance(img, Image.Image)
        self.assertEqual(img.size, (80, 80))
        self.assertEqual(img.mode, "RGBA")

    def test_deterministic_generation(self):
        """The same digest must always produce a byte-identical image."""
        digest = hashlib.md5(b"deterministic@example.com").hexdigest()

        img1 = OptimizedPagan.generate_optimized(digest, 80)
        img2 = OptimizedPagan.generate_optimized(digest, 80)

        self.assertEqual(self._png_bytes(img1), self._png_bytes(img2))

    def test_different_digests_produce_different_images(self):
        """Different digests must produce different images."""
        digest1 = hashlib.md5(b"user1@example.com").hexdigest()
        digest2 = hashlib.md5(b"user2@example.com").hexdigest()

        img1 = OptimizedPagan.generate_optimized(digest1, 80)
        img2 = OptimizedPagan.generate_optimized(digest2, 80)

        self.assertNotEqual(self._png_bytes(img1), self._png_bytes(img2))

    def test_compatibility_with_original(self):
        """Optimized output is pixel-identical to the original pagan library."""
        digest = hashlib.md5(b"compatibility@example.com").hexdigest()

        # Generate with the original pagan library
        original_avatar = pagan.Avatar(digest)
        original_img = original_avatar.img.resize((80, 80), Image.LANCZOS)

        # Generate with the optimized implementation
        optimized_img = OptimizedPagan.generate_optimized(digest, 80)

        self.assertEqual(original_img.size, optimized_img.size)
        self.assertEqual(original_img.mode, optimized_img.mode)

        # Pixel-perfect comparison via PNG encoding
        self.assertEqual(
            self._png_bytes(original_img), self._png_bytes(optimized_img)
        )

    def test_caching_functionality(self):
        """Hit/miss counters track generations and cached results match."""
        digest = hashlib.md5(b"cache_test@example.com").hexdigest()

        # Clear cache and check initial stats
        clear_pagan_cache()
        initial_stats = get_pagan_cache_info()
        self.assertEqual(initial_stats["hits"], 0)
        self.assertEqual(initial_stats["misses"], 0)

        # First generation (should be a cache miss)
        img1 = OptimizedPagan.generate_optimized(digest, 80)
        stats_after_first = get_pagan_cache_info()
        self.assertEqual(stats_after_first["misses"], 1)
        self.assertEqual(stats_after_first["hits"], 0)

        # Second generation (should be a cache hit)
        img2 = OptimizedPagan.generate_optimized(digest, 80)
        stats_after_second = get_pagan_cache_info()
        self.assertEqual(stats_after_second["misses"], 1)
        self.assertEqual(stats_after_second["hits"], 1)

        # The cached image must be identical to the freshly generated one
        self.assertEqual(self._png_bytes(img1), self._png_bytes(img2))

    def test_different_sizes(self):
        """Generation honours the requested output size."""
        digest = hashlib.md5(b"sizes@example.com").hexdigest()

        for size in (40, 80, 120, 200):
            with self.subTest(size=size):
                img = OptimizedPagan.generate_optimized(digest, size)
                self.assertEqual(img.size, (size, size))

    def test_cache_size_limit(self):
        """The cache must never grow beyond its configured maximum."""
        # Temporarily shrink the cache so eviction is cheap to trigger
        original_size = OptimizedPagan._max_cache_size
        OptimizedPagan._max_cache_size = 3

        try:
            clear_pagan_cache()

            # Generate more avatars than the cache can hold
            for i in range(6):
                digest = hashlib.md5(
                    f"cache_limit_{i}@example.com".encode()
                ).hexdigest()
                OptimizedPagan.generate_optimized(digest, 80)

            # Cache size should not exceed the configured limit
            stats = get_pagan_cache_info()
            self.assertLessEqual(stats["size"], 3)

        finally:
            # Restore the configured cache size for other tests
            OptimizedPagan._max_cache_size = original_size

    def test_create_optimized_pagan_function(self):
        """The convenience function returns loadable PNG data in a BytesIO."""
        digest = hashlib.md5(b"function_test@example.com").hexdigest()

        data = create_optimized_pagan(digest, 80)

        self.assertIsInstance(data, BytesIO)

        # The payload must round-trip through PIL as an 80x80 RGBA image
        data.seek(0)
        img = Image.open(data)
        self.assertEqual(img.size, (80, 80))
        self.assertEqual(img.mode, "RGBA")

    def test_error_handling(self):
        """Invalid digests must not raise; None or a valid image is acceptable."""
        try:
            img = OptimizedPagan.generate_optimized("", 80)
            # Should either return None or a valid image
            if img is not None:
                self.assertIsInstance(img, Image.Image)
        except Exception:
            self.fail("Optimized pagan should handle errors gracefully")

    def test_performance_improvement(self):
        """Cached generation must be substantially faster than the original."""
        digest = hashlib.md5(b"performance@example.com").hexdigest()
        iterations = 5

        # Baseline: original pagan library.  perf_counter() is used instead
        # of time.time() because it is monotonic and high-resolution.
        original_times = []
        for _ in range(iterations):
            start_time = time.perf_counter()
            avatar = pagan.Avatar(digest)
            img = avatar.img.resize((80, 80), Image.LANCZOS)
            data = BytesIO()
            img.save(data, format="PNG")
            original_times.append((time.perf_counter() - start_time) * 1000)

        # Untimed warm-up after a cache clear: the first call is a cache
        # miss and populates the cache for the timed loop below.
        clear_pagan_cache()
        for _ in range(iterations):
            create_optimized_pagan(digest, 80)

        # Timed optimized runs (all cache hits)
        optimized_cached_times = []
        for _ in range(iterations):
            start_time = time.perf_counter()
            create_optimized_pagan(digest, 80)
            optimized_cached_times.append(
                (time.perf_counter() - start_time) * 1000
            )

        original_avg = sum(original_times) / len(original_times)
        optimized_cached_avg = sum(optimized_cached_times) / len(optimized_cached_times)

        print("\nPerformance Comparison:")
        print(f"Original average: {original_avg:.2f}ms")
        print(f"Optimized (cached): {optimized_cached_avg:.2f}ms")

        if optimized_cached_avg > 0:
            improvement = original_avg / optimized_cached_avg
            print(f"Improvement: {improvement:.1f}x faster")

            # Should be significantly faster with caching
            self.assertGreater(
                improvement, 10, "Optimization should provide significant improvement"
            )

    def test_cache_stats(self):
        """Statistics report misses, hits, size and a hit rate."""
        clear_pagan_cache()

        digest1 = hashlib.md5(b"stats1@example.com").hexdigest()
        digest2 = hashlib.md5(b"stats2@example.com").hexdigest()

        OptimizedPagan.generate_optimized(digest1, 80)  # cache miss
        OptimizedPagan.generate_optimized(digest2, 80)  # cache miss
        OptimizedPagan.generate_optimized(digest1, 80)  # cache hit

        stats = get_pagan_cache_info()

        self.assertEqual(stats["misses"], 2)
        self.assertEqual(stats["hits"], 1)
        self.assertEqual(stats["size"], 2)
        self.assertIn("hit_rate", stats)

257 

258 

class TestPaganPerformance(TestCase):
    """Performance-focused tests for the pagan optimization."""

    def test_bulk_generation_performance(self):
        """Average generation time over many avatars stays under 100ms."""
        clear_pagan_cache()

        # Generate multiple pagan avatars
        test_count = 20
        digests = [
            hashlib.md5(f"bulk{i}@example.com".encode()).hexdigest()
            for i in range(test_count)
        ]

        # perf_counter() is monotonic and high-resolution, unlike time.time()
        start_time = time.perf_counter()
        for digest in digests:
            create_optimized_pagan(digest, 80)
        total_time = (time.perf_counter() - start_time) * 1000  # ms
        avg_time = total_time / test_count

        print(f"Bulk generation: {test_count} pagan avatars in {total_time:.1f}ms")
        print(f"Average per avatar: {avg_time:.2f}ms")

        # Cache misses are still expensive, so the average bound is generous
        self.assertLess(
            avg_time, 100.0, f"Bulk generation too slow: {avg_time:.2f}ms avg"
        )

    def test_cache_hit_performance(self):
        """A cache hit must be markedly faster than the initial miss."""
        digest = hashlib.md5(b"cache_perf@example.com").hexdigest()

        # Start cold so the first call below is a genuine cache miss
        clear_pagan_cache()

        # First generation (cache miss)
        start_time = time.perf_counter()
        create_optimized_pagan(digest, 80)
        first_time = (time.perf_counter() - start_time) * 1000

        # Second generation (cache hit)
        start_time = time.perf_counter()
        create_optimized_pagan(digest, 80)
        second_time = (time.perf_counter() - start_time) * 1000

        print(f"First generation (miss): {first_time:.2f}ms")
        print(f"Second generation (hit): {second_time:.2f}ms")

        if second_time > 0:
            improvement = first_time / second_time
            print(f"Cache hit improvement: {improvement:.1f}x faster")

            # Cache hits should be much faster
            self.assertGreater(
                improvement, 5, "Cache hits should provide significant speedup"
            )

314 

315 

# Allow running this module directly with the stdlib test runner
# (outside Django's `manage.py test`).
if __name__ == "__main__":
    unittest.main()