@@ -8,6 +8,11 @@
 import upload


+@pytest.fixture
+def workflow_id():
+    return '1234567890'
+
+
 @pytest.fixture
 def setup_fake_bucket(monkeypatch):
     """Patch google storage functions to simulate a bucket."""
@@ -89,13 +94,6 @@ def fake_hash(path: Path):
     return _setup_hash


-@pytest.fixture
-def frozen_timestamp(monkeypatch):
-    timestamp = 20241327_090504
-    monkeypatch.setattr(upload, 'timestamp_build_number', mock.Mock(return_value=timestamp))
-    return timestamp
-
-
-def test_upload_external(setup_targets_dir, setup_fake_bucket):
+def test_upload_external(setup_targets_dir, setup_fake_bucket, workflow_id):
     wheels = {
         'external': [
@@ -114,7 +112,7 @@ def test_upload_external(setup_targets_dir, setup_fake_bucket):
     }
     bucket, uploads = setup_fake_bucket(bucket_files)

-    upload.upload(targets_dir)
+    upload.upload(targets_dir, workflow_id)

     bucket_files = [f.name for f in bucket.list_blobs()]
     assert 'external/all-new/all_new-2.31.0-py3-none-any.whl' in bucket_files
@@ -124,7 +122,7 @@ def test_upload_external(setup_targets_dir, setup_fake_bucket):
     assert {'all_new-2.31.0-py3-none-any.whl', 'updated_version-3.14.1-cp311-cp311-manylinux1_x86_64.whl'} <= uploads


-def test_upload_built_no_conflict(setup_targets_dir, setup_fake_bucket, frozen_timestamp):
+def test_upload_built_no_conflict(setup_targets_dir, setup_fake_bucket, workflow_id):
     wheels = {
         'built': [
             ('without_collision-3.14.1-cp311-cp311-manylinux2010_x86_64.whl', 'without-collision', '3.14.1', '>=3.7'),
@@ -134,11 +132,11 @@ def test_upload_built_no_conflict(setup_targets_dir, setup_fake_bucket, frozen_t

     bucket, uploads = setup_fake_bucket({})

-    upload.upload(targets_dir)
+    upload.upload(targets_dir, workflow_id)

     bucket_files = [f.name for f in bucket.list_blobs()]
     assert (
-        f'built/without-collision/without_collision-3.14.1-{frozen_timestamp}-cp311-cp311-manylinux2010_x86_64.whl'
+        f'built/without-collision/without_collision-3.14.1-{workflow_id}-cp311-cp311-manylinux2010_x86_64.whl'
         in bucket_files
     )

@@ -147,6 +145,7 @@ def test_upload_built_existing_sha_match_does_not_upload(
     setup_targets_dir,
     setup_fake_bucket,
     setup_fake_hash,
+    workflow_id,
 ):
     whl_hash = 'some-hash'

@@ -167,7 +166,7 @@ def test_upload_built_existing_sha_match_does_not_upload(
         'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl': whl_hash,
     })

-    upload.upload(targets_dir)
+    upload.upload(targets_dir, workflow_id)

     assert not uploads

@@ -176,7 +175,7 @@ def test_upload_built_existing_different_sha_does_upload(
     setup_targets_dir,
     setup_fake_bucket,
     setup_fake_hash,
-    frozen_timestamp,
+    workflow_id,
 ):
     original_hash = 'first-hash'
     new_hash = 'second-hash'
@@ -198,20 +197,21 @@ def test_upload_built_existing_different_sha_does_upload(
         'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl': new_hash,
     })

-    upload.upload(targets_dir)
+    upload.upload(targets_dir, workflow_id)

     uploads = {str(Path(f).name) for f in uploads}

     assert uploads == {'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl'}

     bucket_files = {f.name for f in bucket.list_blobs()}
-    assert f'built/existing/existing-1.1.1-{frozen_timestamp}-cp311-cp311-manylinux2010_x86_64.whl' in bucket_files
+    assert f'built/existing/existing-1.1.1-{workflow_id}-cp311-cp311-manylinux2010_x86_64.whl' in bucket_files


 def test_upload_built_existing_sha_match_does_not_upload_multiple_existing_builds(
     setup_targets_dir,
     setup_fake_bucket,
     setup_fake_hash,
+    workflow_id,
 ):
     matching_hash = 'some-hash'
     non_matching_hash = 'xxxx'
@@ -242,7 +242,7 @@ def test_upload_built_existing_sha_match_does_not_upload_multiple_existing_build
         'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl': matching_hash,
     })

-    upload.upload(targets_dir)
+    upload.upload(targets_dir, workflow_id)

     assert not uploads

@@ -251,16 +251,55 @@ def test_upload_built_existing_different_sha_does_upload_multiple_existing_build
     setup_targets_dir,
     setup_fake_bucket,
     setup_fake_hash,
-    frozen_timestamp,
+    workflow_id,
+):
+    original_hash = 'first-hash'
+    new_hash = 'second-hash'
+
+    wheels = {
+        'built': [
+            ('existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl', 'existing', '1.1.1', '>=3.7'),
+        ]
+    }
+    targets_dir = setup_targets_dir(wheels)
+
+    bucket_files = {
+        'built/existing/existing-1.1.1-2024132600000-cp311-cp311-manylinux2010_x86_64.whl':
+            {'requires-python': '', 'sha256': 'b'},
+        'built/existing/existing-1.1.1-2024132700000-cp311-cp311-manylinux2010_x86_64.whl':
+            {'requires-python': '', 'sha256': original_hash},
+    }
+    bucket, uploads = setup_fake_bucket(bucket_files)
+
+    setup_fake_hash({
+        'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl': new_hash,
+    })
+
+    upload.upload(targets_dir, workflow_id)
+
+    uploads = {str(Path(f).name) for f in uploads}
+
+    assert uploads == {'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl'}
+
+    bucket_files = {f.name for f in bucket.list_blobs()}
+    assert f'built/existing/existing-1.1.1-{workflow_id}-cp311-cp311-manylinux2010_x86_64.whl' in bucket_files
+
+
+def test_build_tag_use_workflow_id(
+    setup_targets_dir,
+    setup_fake_bucket,
+    setup_fake_hash,
 ):
     original_hash = 'first-hash'
     new_hash = 'second-hash'
+    workflow_id = '1234567890'

     wheels = {
         'built': [
             ('existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl', 'existing', '1.1.1', '>=3.7'),
         ]
     }
+
     targets_dir = setup_targets_dir(wheels)

     bucket_files = {
@@ -269,17 +308,18 @@ def test_upload_built_existing_different_sha_does_upload_multiple_existing_build
         'built/existing/existing-1.1.1-2024132700000-cp311-cp311-manylinux2010_x86_64.whl':
            {'requires-python': '', 'sha256': original_hash},
     }
+
     bucket, uploads = setup_fake_bucket(bucket_files)

     setup_fake_hash({
         'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl': new_hash,
     })

-    upload.upload(targets_dir)
+    upload.upload(targets_dir, workflow_id)

     uploads = {str(Path(f).name) for f in uploads}

     assert uploads == {'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl'}

     bucket_files = {f.name for f in bucket.list_blobs()}
-    assert f'built/existing/existing-1.1.1-{frozen_timestamp}-cp311-cp311-manylinux2010_x86_64.whl' in bucket_files
+    assert f'built/existing/existing-1.1.1-{workflow_id}-cp311-cp311-manylinux2010_x86_64.whl' in bucket_files
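
For context, every call site above now passes the CI workflow run id through to upload.upload(targets_dir, workflow_id), where it replaces the old mocked timestamp_build_number() value as the wheel's build tag. A minimal sketch of the tagging step these assertions imply (the helper name build_tagged_name is hypothetical, not part of the module under test):

# Sketch only: assumes standard wheel filenames,
# {name}-{version}[-{build_tag}]-{python}-{abi}-{platform}.whl.
def build_tagged_name(wheel_name: str, workflow_id: str) -> str:
    """Insert the workflow id as the build tag of a wheel filename."""
    stem, ext = wheel_name.rsplit('.', 1)     # split off the '.whl' extension
    name, version, rest = stem.split('-', 2)  # build tag sits between version and python tag
    return f'{name}-{version}-{workflow_id}-{rest}.{ext}'

With workflow_id = '1234567890', this sketch turns existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl into existing-1.1.1-1234567890-cp311-cp311-manylinux2010_x86_64.whl, which is exactly the filename the assertions above expect under built/existing/.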