Commit d796e87

Use workflow ID in wheel upload

1 parent: bdf852d
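The net effect: the build tag embedded in uploaded wheel filenames now comes from a workflow ID passed in by the caller instead of an upload-time timestamp, presumably so each uploaded wheel can be tied to the CI run that built it. A minimal sketch of the naming change, using hypothetical tag values and a hypothetical run ID (not taken from a real run):

    # Before this commit: build tag = timestamp_build_number(), i.e. time.strftime('%Y%m%d%H%M%S')
    # After this commit:  build tag = the workflow_id argument to upload()
    name, version = 'existing', '1.1.1'
    python_tag = abi_tag = 'cp311'
    platform_tag = 'manylinux2010_x86_64'
    workflow_id = '1234567890'  # hypothetical CI run ID

    artifact_name = f'{name}-{version}-{workflow_id}-{python_tag}-{abi_tag}-{platform_tag}.whl'
    print(artifact_name)  # existing-1.1.1-1234567890-cp311-cp311-manylinux2010_x86_64.whl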

2 files changed (+63, -28 lines)

.builders/tests/test_upload.py (59 additions, 19 deletions)
@@ -8,6 +8,11 @@
 import upload


+@pytest.fixture
+def workflow_id():
+    return '1234567890'
+
+
 @pytest.fixture
 def setup_fake_bucket(monkeypatch):
     """Patch google storage functions to simulate a bucket."""
@@ -89,13 +94,6 @@ def fake_hash(path: Path):
     return _setup_hash


-@pytest.fixture
-def frozen_timestamp(monkeypatch):
-    timestamp = 20241327_090504
-    monkeypatch.setattr(upload, 'timestamp_build_number', mock.Mock(return_value=timestamp))
-    return timestamp
-
-
 def test_upload_external(setup_targets_dir, setup_fake_bucket):
     wheels = {
         'external': [
@@ -114,7 +112,7 @@ def test_upload_external(setup_targets_dir, setup_fake_bucket):
     }
     bucket, uploads = setup_fake_bucket(bucket_files)

-    upload.upload(targets_dir)
+    upload.upload(targets_dir, workflow_id)

     bucket_files = [f.name for f in bucket.list_blobs()]
     assert 'external/all-new/all_new-2.31.0-py3-none-any.whl' in bucket_files
@@ -124,7 +122,7 @@ def test_upload_external(setup_targets_dir, setup_fake_bucket):
     assert {'all_new-2.31.0-py3-none-any.whl', 'updated_version-3.14.1-cp311-cp311-manylinux1_x86_64.whl'} <= uploads


-def test_upload_built_no_conflict(setup_targets_dir, setup_fake_bucket, frozen_timestamp):
+def test_upload_built_no_conflict(setup_targets_dir, setup_fake_bucket, workflow_id):
     wheels = {
         'built': [
             ('without_collision-3.14.1-cp311-cp311-manylinux2010_x86_64.whl', 'without-collision', '3.14.1', '>=3.7'),
@@ -134,11 +132,11 @@ def test_upload_built_no_conflict(setup_targets_dir, setup_fake_bucket, frozen_timestamp):

     bucket, uploads = setup_fake_bucket({})

-    upload.upload(targets_dir)
+    upload.upload(targets_dir, workflow_id)

     bucket_files = [f.name for f in bucket.list_blobs()]
     assert (
-        f'built/without-collision/without_collision-3.14.1-{frozen_timestamp}-cp311-cp311-manylinux2010_x86_64.whl'
+        f'built/without-collision/without_collision-3.14.1-{workflow_id}-cp311-cp311-manylinux2010_x86_64.whl'
         in bucket_files
     )

@@ -147,6 +145,7 @@ def test_upload_built_existing_sha_match_does_not_upload(
     setup_targets_dir,
     setup_fake_bucket,
     setup_fake_hash,
+    workflow_id,
 ):
     whl_hash = 'some-hash'

@@ -167,7 +166,7 @@ def test_upload_built_existing_sha_match_does_not_upload(
         'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl': whl_hash,
     })

-    upload.upload(targets_dir)
+    upload.upload(targets_dir, workflow_id)

     assert not uploads

@@ -176,7 +175,7 @@ def test_upload_built_existing_different_sha_does_upload(
     setup_targets_dir,
     setup_fake_bucket,
     setup_fake_hash,
-    frozen_timestamp,
+    workflow_id,
 ):
     original_hash = 'first-hash'
     new_hash = 'second-hash'
@@ -198,20 +197,21 @@ def test_upload_built_existing_different_sha_does_upload(
         'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl': new_hash,
     })

-    upload.upload(targets_dir)
+    upload.upload(targets_dir, workflow_id)

     uploads = {str(Path(f).name) for f in uploads}

     assert uploads == {'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl'}

     bucket_files = {f.name for f in bucket.list_blobs()}
-    assert f'built/existing/existing-1.1.1-{frozen_timestamp}-cp311-cp311-manylinux2010_x86_64.whl' in bucket_files
+    assert f'built/existing/existing-1.1.1-{workflow_id}-cp311-cp311-manylinux2010_x86_64.whl' in bucket_files


 def test_upload_built_existing_sha_match_does_not_upload_multiple_existing_builds(
     setup_targets_dir,
     setup_fake_bucket,
     setup_fake_hash,
+    workflow_id,
 ):
     matching_hash = 'some-hash'
     non_matching_hash = 'xxxx'
@@ -242,7 +242,7 @@ def test_upload_built_existing_sha_match_does_not_upload_multiple_existing_builds(
         'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl': matching_hash,
     })

-    upload.upload(targets_dir)
+    upload.upload(targets_dir, workflow_id)

     assert not uploads

@@ -251,16 +251,55 @@ def test_upload_built_existing_different_sha_does_upload_multiple_existing_builds(
     setup_targets_dir,
     setup_fake_bucket,
     setup_fake_hash,
-    frozen_timestamp,
+    workflow_id,
+):
+    original_hash = 'first-hash'
+    new_hash = 'second-hash'
+
+    wheels = {
+        'built': [
+            ('existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl', 'existing', '1.1.1', '>=3.7'),
+        ]
+    }
+    targets_dir = setup_targets_dir(wheels)
+
+    bucket_files = {
+        'built/existing/existing-1.1.1-2024132600000-cp311-cp311-manylinux2010_x86_64.whl':
+            {'requires-python': '', 'sha256': 'b'},
+        'built/existing/existing-1.1.1-2024132700000-cp311-cp311-manylinux2010_x86_64.whl':
+            {'requires-python': '', 'sha256': original_hash},
+    }
+    bucket, uploads = setup_fake_bucket(bucket_files)
+
+    setup_fake_hash({
+        'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl': new_hash,
+    })
+
+    upload.upload(targets_dir, workflow_id)
+
+    uploads = {str(Path(f).name) for f in uploads}
+
+    assert uploads == {'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl'}
+
+    bucket_files = {f.name for f in bucket.list_blobs()}
+    assert f'built/existing/existing-1.1.1-{workflow_id}-cp311-cp311-manylinux2010_x86_64.whl' in bucket_files
+
+
+def test_build_tag_use_workflow_id(
+    setup_targets_dir,
+    setup_fake_bucket,
+    setup_fake_hash,
 ):
     original_hash = 'first-hash'
     new_hash = 'second-hash'
+    workflow_id = '1234567890'

     wheels = {
         'built': [
             ('existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl', 'existing', '1.1.1', '>=3.7'),
         ]
     }
+
     targets_dir = setup_targets_dir(wheels)

     bucket_files = {
@@ -269,17 +308,18 @@ def test_upload_built_existing_different_sha_does_upload_multiple_existing_builds(
         'built/existing/existing-1.1.1-2024132700000-cp311-cp311-manylinux2010_x86_64.whl':
            {'requires-python': '', 'sha256': original_hash},
     }
+
     bucket, uploads = setup_fake_bucket(bucket_files)

     setup_fake_hash({
         'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl': new_hash,
     })

-    upload.upload(targets_dir)
+    upload.upload(targets_dir, workflow_id)

     uploads = {str(Path(f).name) for f in uploads}

     assert uploads == {'existing-1.1.1-cp311-cp311-manylinux2010_x86_64.whl'}

     bucket_files = {f.name for f in bucket.list_blobs()}
-    assert f'built/existing/existing-1.1.1-{frozen_timestamp}-cp311-cp311-manylinux2010_x86_64.whl' in bucket_files
+    assert f'built/existing/existing-1.1.1-{workflow_id}-cp311-cp311-manylinux2010_x86_64.whl' in bucket_files

.builders/upload.py (4 additions, 9 deletions)
@@ -58,11 +58,6 @@ def display_message_block(message: str) -> None:
     print(divider)


-def timestamp_build_number() -> int:
-    """Produce a numeric timestamp to use as build numbers"""
-    return int(time.strftime('%Y%m%d%H%M%S'))
-
-
 def hash_file(path: Path) -> str:
     """Calculate the hash of the file pointed at by `path`"""
     with path.open('rb') as f:
@@ -95,7 +90,7 @@ def _build_number_of_wheel_blob(wheel_path: Blob) -> int:
     return int(build_number[0]) if build_number else -1


-def upload(targets_dir):
+def upload(targets_dir, workflow_id):
     client = storage.Client()
     bucket = client.bucket(BUCKET_NAME)
     artifact_types: set[str] = set()
@@ -148,8 +143,7 @@ def upload(targets_dir):
                   'with the same hash')
            continue

-        build_number = timestamp_build_number()
-        artifact_name = f'{name}-{version}-{build_number}-{python_tag}-{abi_tag}-{platform_tag}.whl'
+        artifact_name = f'{name}-{version}-{workflow_id}-{python_tag}-{abi_tag}-{platform_tag}.whl'
         artifact = bucket.blob(f'{artifact_type}/{project_name}/{artifact_name}')

         print(f'{padding}Artifact: {artifact_name}')
@@ -213,5 +207,6 @@ def upload(targets_dir):
 if __name__ == '__main__':
     parser = argparse.ArgumentParser(prog='builder', allow_abbrev=False)
     parser.add_argument('targets_dir')
+    parser.add_argument('workflow_id')
     args = parser.parse_args()
-    upload(args.targets_dir)
+    upload(args.targets_dir, args.workflow_id)
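With this change the script requires the workflow ID as a second positional argument, so callers (typically a CI job) must pass it alongside the targets directory. A minimal usage sketch, with illustrative placeholder values for both arguments:

    # Equivalent to running: python .builders/upload.py <targets_dir> <workflow_id>
    import upload

    # 'targets' and '1234567890' are hypothetical, not a real path or run ID.
    upload.upload('targets', '1234567890')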
