Coverage for ivatar/test_robohash.py: 100%

109 statements  

coverage.py v7.12.0, created at 2025-12-03 00:09 +0000

1""" 

2Tests for robohash optimization functionality 

3""" 

4 

5import time 

6import hashlib 

7from io import BytesIO 

8 

9from django.test import TestCase 

10from PIL import Image 

11 

12from robohash import Robohash 

13from ivatar.robohash import OptimizedRobohash, create_robohash 

14from ivatar.utils import generate_random_email 

15 

16 
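# These tests exercise ivatar's OptimizedRobohash side by side with the
# upstream Robohash class, feeding both the same MD5 digests so that their
# output images and timings can be compared.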

class RobohashOptimizationTestCase(TestCase):
    """Test cases for robohash optimization"""

    def setUp(self):
        """Set up test data"""
        self.test_emails = [generate_random_email() for _ in range(5)]
        self.test_digests = [
            hashlib.md5(email.encode()).hexdigest() for email in self.test_emails
        ]
        self.test_digests.extend(
            [
                "5d41402abc4b2a76b9719d911017c592",
                "098f6bcd4621d373cade4e832627b4f6",
            ]
        )
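    # Note: the two fixed digests above are the MD5 hashes of "hello" and
    # "test", so every run covers at least two stable inputs in addition to
    # the randomly generated email addresses.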

    def test_optimized_robohash_functionality(self):
        """Test that optimized robohash functionality works correctly"""
        digest = self.test_digests[0]
        optimized = OptimizedRobohash(digest)
        optimized.assemble_optimized(roboset="any", sizex=256, sizey=256)

        self.assertIsNotNone(optimized.img)
        self.assertEqual(optimized.img.size, (256, 256))
        self.assertIn(optimized.img.mode, ["RGBA", "RGB"])

        data = BytesIO()
        optimized.img.save(data, format="png")
        self.assertGreater(len(data.getvalue()), 1000)
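    # The 1000-byte lower bound used throughout this suite is only a loose
    # sanity check that a non-trivial PNG was produced, not an assertion
    # about exact file size.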

    def test_identical_results(self):
        """Test that original and optimized robohash both produce valid images"""
        digest = self.test_digests[0]

        original = Robohash(digest)
        original.assemble(roboset="any", sizex=256, sizey=256)
        orig_data = BytesIO()
        original.img.save(orig_data, format="png")
        orig_bytes = orig_data.getvalue()

        optimized = OptimizedRobohash(digest)
        optimized.assemble_optimized(roboset="any", sizex=256, sizey=256)
        opt_data = BytesIO()
        optimized.img.save(opt_data, format="png")
        opt_bytes = opt_data.getvalue()

        # Note: due to caching optimizations the results may differ slightly,
        # but both should produce valid robot images
        self.assertGreater(len(orig_bytes), 1000)
        self.assertGreater(len(opt_bytes), 1000)
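    # Hedged note: if byte-identical output between Robohash and
    # OptimizedRobohash were ever required, the check above could be
    # tightened to e.g. self.assertEqual(orig_bytes, opt_bytes). As written,
    # the caching layer is allowed to change encoding details.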

    def test_performance_improvement(self):
        """Test that optimized robohash shows performance characteristics"""
        digest = self.test_digests[0]

        start_time = time.time()
        original = Robohash(digest)
        original.assemble(roboset="any", sizex=256, sizey=256)
        original_time = (time.time() - start_time) * 1000

        start_time = time.time()
        optimized = OptimizedRobohash(digest)
        optimized.assemble_optimized(roboset="any", sizex=256, sizey=256)
        optimized_time = (time.time() - start_time) * 1000

        self.assertGreater(original_time, 0, "Original should take some time")
        self.assertGreater(optimized_time, 0, "Optimized should take some time")
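    # Hedged note: single-run wall-clock timings are too noisy to assert an
    # actual speed-up in CI, which is why the test above only checks that
    # both paths complete. A stricter benchmark could average repeated runs
    # with the standard-library timeit module, e.g.:
    #     avg_ms = timeit.timeit(
    #         lambda: OptimizedRobohash(digest).assemble_optimized(
    #             roboset="any", sizex=256, sizey=256
    #         ),
    #         number=10,
    #     ) / 10 * 1000
    # (Illustrative sketch only; not part of the current test.)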

    def test_integration_function(self):
        """Test the create_robohash integration function"""
        digest = self.test_digests[0]
        data = create_robohash(digest, 256, "any")

        self.assertIsInstance(data, BytesIO)
        png_bytes = data.getvalue()
        self.assertGreater(len(png_bytes), 1000)

        img = Image.open(BytesIO(png_bytes))
        self.assertEqual(img.size, (256, 256))
        self.assertEqual(img.format, "PNG")
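    # Hedged illustration: the BytesIO returned by create_robohash() could be
    # served directly from a Django view, e.g.:
    #     from django.http import HttpResponse
    #     return HttpResponse(data.getvalue(), content_type="image/png")
    # (Usage sketch only; this is not necessarily ivatar's actual view code.)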

    def test_cache_functionality(self):
        """Test that caching works correctly"""
        digest = self.test_digests[0]

        # Clear cache stats
        OptimizedRobohash.clear_cache()

        # First generation (cache miss)
        optimized1 = OptimizedRobohash(digest)
        optimized1.assemble_optimized(roboset="any", sizex=256, sizey=256)

        # Second generation (should hit cache)
        optimized2 = OptimizedRobohash(digest)
        optimized2.assemble_optimized(roboset="any", sizex=256, sizey=256)

        # Both should produce valid images
        self.assertIsNotNone(optimized1.img)
        self.assertIsNotNone(optimized2.img)
        self.assertEqual(optimized1.img.size, (256, 256))
        self.assertEqual(optimized2.img.size, (256, 256))
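    # Hedged note: cache hit/miss counters are not inspected here; the test
    # only verifies that assembling the same digest twice after
    # OptimizedRobohash.clear_cache() still yields valid, correctly sized images.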

    def test_multiple_random_emails_results(self):
        """Test results with multiple random email addresses"""
        # Test with multiple random email addresses
        for i, digest in enumerate(self.test_digests[:3]):
            with self.subTest(email_index=i, digest=digest[:8]):
                # Test with different configurations
                test_cases = [
                    {"roboset": "any", "size": 128},
                    {"roboset": "any", "size": 256},
                ]

                for case in test_cases:
                    with self.subTest(case=case):
                        # Generate optimized
                        optimized = OptimizedRobohash(digest)
                        optimized.assemble_optimized(
                            roboset=case["roboset"],
                            sizex=case["size"],
                            sizey=case["size"],
                        )

                        # Verify valid result
                        self.assertIsNotNone(optimized.img)
                        self.assertEqual(
                            optimized.img.size, (case["size"], case["size"])
                        )

                        opt_data = BytesIO()
                        optimized.img.save(opt_data, format="png")
                        opt_bytes = opt_data.getvalue()

                        self.assertGreater(
                            len(opt_bytes),
                            1000,
                            f"Image too small for email {i}, digest {digest[:8]}..., {case}",
                        )

    def test_performance_improvement_multiple_cases(self):
        """Test that optimized version performs reasonably across multiple cases"""
        performance_results = []

        # Test with multiple digests and configurations
        test_cases = [
            {"digest": self.test_digests[0], "roboset": "any", "size": 256},
            {"digest": self.test_digests[1], "roboset": "any", "size": 128},
            {"digest": self.test_digests[2], "roboset": "any", "size": 256},
        ]

        for case in test_cases:
            # Measure optimized
            start_time = time.time()
            optimized = OptimizedRobohash(case["digest"])
            optimized.assemble_optimized(
                roboset=case["roboset"], sizex=case["size"], sizey=case["size"]
            )
            optimized_time = (time.time() - start_time) * 1000

            performance_results.append(
                {
                    "optimized": optimized_time,
                }
            )

        # Verify all cases show reasonable performance
        for i, result in enumerate(performance_results):
            with self.subTest(case_index=i):
                self.assertGreater(
                    result["optimized"], 0, "Optimized should take measurable time"
                )
                # Allow for test environment variance - just ensure completion in reasonable time
                self.assertLess(
                    result["optimized"],
                    10000,
                    "Optimized should complete in reasonable time",
                )

    def test_random_email_generation_and_processing(self):
        """Test robohash with freshly generated random emails"""
        # Generate fresh random emails for this test
        fresh_emails = [generate_random_email() for _ in range(5)]
        fresh_digests = [
            hashlib.md5(email.encode()).hexdigest() for email in fresh_emails
        ]

        for i, (email, digest) in enumerate(zip(fresh_emails, fresh_digests)):
            with self.subTest(email=email, digest=digest[:8]):
                # Test that optimized can process this email
                optimized = OptimizedRobohash(digest)
                optimized.assemble_optimized(roboset="any", sizex=128, sizey=128)

                # Verify produces valid image
                self.assertIsNotNone(optimized.img)
                self.assertEqual(optimized.img.size, (128, 128))

                opt_data = BytesIO()
                optimized.img.save(opt_data, format="png")
                opt_bytes = opt_data.getvalue()

                self.assertGreater(
                    len(opt_bytes),
                    1000,
                    f"Random email {email} (digest {digest[:8]}...) produced invalid image",
                )
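# Illustrative sketch (not part of the test suite): a minimal manual smoke
# check showing how the pieces exercised above fit together. It relies only
# on the APIs used in this file: generate_random_email(), create_robohash()
# returning a BytesIO holding a PNG, and PIL for inspection.
if __name__ == "__main__":
    email = generate_random_email()
    digest = hashlib.md5(email.encode()).hexdigest()
    png = create_robohash(digest, 256, "any")
    img = Image.open(BytesIO(png.getvalue()))
    print(f"{email} -> {digest[:8]}... {img.size} {img.format}")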