@@ -115,9 +115,9 @@ def test_normalize_embeddings(self, test_inputs):
         for result in results:
             # Check if the embedding is normalized (L2 norm should be close to 1)
             norm = np.linalg.norm(result)
-            assert np.isclose(
-                norm, 1.0, atol=1e-6
-            ), f"Norm is {norm}, expected close to 1.0"
+            assert np.isclose(norm, 1.0, atol=1e-6), (
+                f"Norm is {norm}, expected close to 1.0"
+            )

     def test_normalize_embeddings_false(self, test_inputs):
         """
@@ -129,15 +129,15 @@ def test_normalize_embeddings_false(self, test_inputs):
         for result in results:
             # Check if the embedding is not normalized (L2 norm should not be close to 1)
             norm = np.linalg.norm(result)
-            assert not np.isclose(
-                norm, 1.0, atol=1e-6
-            ), f"Norm is {norm}, expected not close to 1.0"
+            assert not np.isclose(norm, 1.0, atol=1e-6), (
+                f"Norm is {norm}, expected not close to 1.0"
+            )

         # Additional check: ensure that at least one embedding has a norm significantly different from 1
         norms = [np.linalg.norm(result) for result in results]
-        assert any(
-            not np.isclose(norm, 1.0, atol=0.1) for norm in norms
-        ), "Expected at least one embedding with norm not close to 1.0"
+        assert any(not np.isclose(norm, 1.0, atol=0.1) for norm in norms), (
+            "Expected at least one embedding with norm not close to 1.0"
+        )

     def test_encode_batch(self) -> None:
         """
@@ -149,20 +149,20 @@ def test_encode_batch(self) -> None:
         inputs = [f"This is test sentence {i}" for i in range(batch_size)]
         results = self.embeddings.encode(inputs=inputs)

-        assert (
-            len(results) == batch_size
-        ), f"Expected {batch_size} results, got {len(results)}"
+        assert len(results) == batch_size, (
+            f"Expected {batch_size} results, got {len(results)}"
+        )
         for result in results:
-            assert (
-                len(result) == 384
-            ), f"Expected embedding dimension 384, got {len(result)}"
+            assert len(result) == 384, (
+                f"Expected embedding dimension 384, got {len(result)}"
+            )

         # Test with a large batch to ensure it doesn't cause issues
         large_batch = ["Large batch test" for _ in range(100)]
         large_results = self.embeddings.encode(inputs=large_batch)
-        assert (
-            len(large_results) == 100
-        ), f"Expected 100 results for large batch, got {len(large_results)}"
+        assert len(large_results) == 100, (
+            f"Expected 100 results for large batch, got {len(large_results)}"
+        )

     def test_encode_batch_consistency(self) -> None:
         """
@@ -180,6 +180,6 @@ def test_encode_batch_consistency(self) -> None:
         batch_result = self.embeddings.encode([input_text, "Another sentence"])[0]

         # Compare the embeddings
-        assert np.allclose(
-            single_result, batch_result, atol=1e-5
-        ), "Embeddings are not consistent between single and batch processing"
+        assert np.allclose(single_result, batch_result, atol=1e-5), (
+            "Embeddings are not consistent between single and batch processing"
+        )
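
For context, a minimal self-contained sketch of the normalization check reformatted above. The embedding values here are hypothetical stand-ins, since the real tests obtain them from self.embeddings.encode(...):

import numpy as np

# Hypothetical stand-in embeddings, pre-normalized to unit length the way
# the real encoder output is expected to be.
rng = np.random.default_rng(0)
results = [v / np.linalg.norm(v) for v in rng.random((3, 384))]

for result in results:
    norm = np.linalg.norm(result)
    # New assert layout: the failure message sits in its own parenthesized
    # block instead of the np.isclose() call being split across lines.
    assert np.isclose(norm, 1.0, atol=1e-6), (
        f"Norm is {norm}, expected close to 1.0"
    )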