"""
Installs packages using PIP
"""
import sys
import re
import subprocess
import logging
from email.parser import FeedParser
from .compat import pip_import_string
from .compat import pip_no_compile_c_env_vars
from .compat import pip_no_compile_c_shim
from .utils import OSUtils
LOG = logging.getLogger(__name__)
# TODO update the wording here
MISSING_DEPENDENCIES_TEMPLATE = r"""
Could not install dependencies:
%s
You will have to build these yourself and vendor them in
the vendor folder.
"""
class PackagerError(Exception):
pass
class InvalidSourceDistributionNameError(PackagerError):
pass
class RequirementsFileNotFoundError(PackagerError):
def __init__(self, requirements_path):
super(RequirementsFileNotFoundError, self).__init__("Requirements file not found: %s" % requirements_path)
class MissingDependencyError(PackagerError):
"""Raised when some dependencies could not be packaged for any reason."""
def __init__(self, missing):
self.missing = missing
class NoSuchPackageError(PackagerError):
"""Raised when a package name or version could not be found."""
def __init__(self, package_name):
super(NoSuchPackageError, self).__init__("Could not satisfy the requirement: %s" % package_name)
class PackageDownloadError(PackagerError):
"""Generic networking error during a package download."""
pass
class UnsupportedPythonVersion(PackagerError):
"""Generic networking error during a package download."""
def __init__(self, version):
super(UnsupportedPythonVersion, self).__init__("'%s' version of python is not supported" % version)
def get_lambda_abi(runtime):
supported = {"python2.7": "cp27mu", "python3.6": "cp36m", "python3.7": "cp37m", "python3.8": "cp38"}
if runtime not in supported:
raise UnsupportedPythonVersion(runtime)
return supported[runtime]
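# Illustrative sketch of the mapping above (shown as comments, not executed):
#
#     get_lambda_abi("python3.8")   # -> "cp38"
#     get_lambda_abi("python3.9")   # raises UnsupportedPythonVersion
#
# Runtimes outside the supported table raise rather than guessing an ABI tag.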
class PythonPipDependencyBuilder(object):
def __init__(self, runtime, osutils=None, dependency_builder=None):
"""Initialize a PythonPipDependencyBuilder.
:type runtime: str
:param runtime: Python version to build dependencies for. This can
be python2.7, python3.6, python3.7 or python3.8. These are currently
the only supported values.
:type osutils: :class:`lambda_builders.utils.OSUtils`
:param osutils: A class used for all interactions with the
outside OS.
:type dependency_builder: :class:`DependencyBuilder`
:param dependency_builder: This class will be used to build the
dependencies of the project.
"""
self.osutils = osutils
if osutils is None:
self.osutils = OSUtils()
if dependency_builder is None:
dependency_builder = DependencyBuilder(self.osutils, runtime)
self._dependency_builder = dependency_builder
def build_dependencies(self, artifacts_dir_path, scratch_dir_path, requirements_path, ui=None, config=None):
"""Builds a python project's dependencies into an artifact directory.
:type artifacts_dir_path: str
:param artifacts_dir_path: Directory to write dependencies into.
:type scratch_dir_path: str
:param scratch_dir_path: Directory to write temp files into.
:type requirements_path: str
:param requirements_path: Path to a requirements.txt file to inspect
for a list of dependencies.
:type ui: :class:`lambda_builders.utils.UI` or None
:param ui: A class that traps all progress information such as status
and errors. If injected by the caller, it can be used to monitor
the status of the build process or forward this information
elsewhere.
:type config: :class:`lambda_builders.utils.Config` or None
:param config: To be determined. This is an optional config object
we can extend at a later date to add more options to how pip is
called.
"""
# TODO: The DependencyBuilder makes the assumption that it is running
# in a virtual environment that matches the runtime you want to use.
# Since otherwise there is no way to force pip to build wheels for the
# correct version of python. We need to enforce that assumption here
# by finding/creating a virtualenv of the correct version and when
# pip is called set the appropriate env vars.
if not self.osutils.file_exists(requirements_path):
raise RequirementsFileNotFoundError(requirements_path)
self._dependency_builder.build_site_packages(requirements_path, artifacts_dir_path, scratch_dir_path)
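# A minimal usage sketch of the builder above; the paths are hypothetical and
# would normally be supplied by the calling workflow:
#
#     builder = PythonPipDependencyBuilder(runtime="python3.8")
#     builder.build_dependencies(
#         artifacts_dir_path="/tmp/artifacts",
#         scratch_dir_path="/tmp/scratch",
#         requirements_path="requirements.txt",
#     )
#
# A missing requirements file raises RequirementsFileNotFoundError before any
# pip call is made.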
class DependencyBuilder(object):
"""Build site-packages by manually downloading and unpacking wheels.
Pip is used to download all the dependency sdists. Then wheels that are
compatible with lambda are downloaded. Any source packages that do not
have a matching wheel file are built into a wheel and that file is checked
for compatibility with the lambda python runtime environment.
All compatible wheels that are downloaded/built this way are unpacked
into a site-packages directory, to be included in the bundle by the
packager.
"""
_MANYLINUX_COMPATIBLE_PLATFORM = {
"any",
"linux_x86_64",
"manylinux1_x86_64",
"manylinux2010_x86_64",
"manylinux2014_x86_64",
}
_COMPATIBLE_PACKAGE_WHITELIST = {"sqlalchemy"}
def __init__(self, osutils, runtime, pip_runner=None):
"""Initialize a DependencyBuilder.
:type osutils: :class:`lambda_builders.utils.OSUtils`
:param osutils: A class used for all interactions with the
outside OS.
:type runtime: str
:param runtime: AWS Lambda Python runtime to build for
:type pip_runner: :class:`PipRunner`
:param pip_runner: This class is responsible for executing pip
on our behalf.
"""
self._osutils = osutils
if pip_runner is None:
pip_runner = PipRunner(python_exe=None, pip=SubprocessPip(osutils))
self._pip = pip_runner
self.runtime = runtime
def build_site_packages(self, requirements_filepath, target_directory, scratch_directory):
"""Build site-packages directory for a set of requiremetns.
:type requirements_filepath: str
:param requirement_filepath: The path to a requirements file to inspect
for a list of top-level requirements to install. This should be
equivilent to ``pip install -r requirements_filepath.txt`` in
theory.
:type target_directory: str
:param target_directory: The directory to build all dependencies into.
This directory should be on the PYTHON_PATH of whichever process
wants to use thse dependencies.
:type scratch_directory: str
:param scratch_directory: The directory to write temp files into.
:raises MissingDependencyError: This exception is raised if one or more
packages could not be installed. The complete list of missing
packages is included in the error object's ``missing`` property.
"""
if self._has_at_least_one_package(requirements_filepath):
wheels, packages_without_wheels = self._download_dependencies(scratch_directory, requirements_filepath)
self._install_wheels(scratch_directory, target_directory, wheels)
if packages_without_wheels:
raise MissingDependencyError(packages_without_wheels)
def _has_at_least_one_package(self, filename):
if not self._osutils.file_exists(filename):
return False
with open(filename, "r") as f:
# This is meant to be a best effort attempt.
# This can return True and still have no packages
# actually being specified, but those aren't common
# cases.
for line in f:
line = line.strip()
if line and not line.startswith("#"):
return True
return False
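# Rough sketch of what the check above accepts, using a hypothetical
# requirements file:
#
#     # comments and blank lines are skipped
#     requests==2.25.1        <- first real line, method returns True
#
# Any non-blank, non-comment line counts, including pip options, which is why
# this is only a best-effort check.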
def _download_dependencies(self, directory, requirements_filename):
# Download all dependencies we can, letting pip choose what to
# download.
# deps should represent the best effort we can make to gather all the
# dependencies.
deps = self._download_all_dependencies(requirements_filename, directory)
# Sort the downloaded packages into three categories:
# - sdists (Pip could not get a wheel so it gave us an sdist)
# - lambda compatible wheel files
# - lambda incompatible wheel files
# Pip will give us a wheel when it can, but some distributions do not
# ship with wheels at all in which case we will have an sdist for it.
# In some cases a platform-specific wheel file may be available, so pip
# will have downloaded that. If our platform does not match the
# platform lambda runs on (linux_x86_64/manylinux), then the downloaded
# wheel file may not be compatible with lambda. Pure python wheels
# still will be compatible because they have no platform dependencies.
compatible_wheels = set()
incompatible_wheels = set()
sdists = set()
for package in deps:
if package.dist_type == "sdist":
sdists.add(package)
else:
if self._is_compatible_wheel_filename(package.filename):
compatible_wheels.add(package)
else:
incompatible_wheels.add(package)
LOG.debug("initial compatible: %s", compatible_wheels)
LOG.debug("initial incompatible: %s", incompatible_wheels | sdists)
# Next we need to go through the downloaded packages and pick out any
# dependencies that do not have a compatible wheel file downloaded.
# For these packages we need to explicitly try to download a
# compatible wheel file.
missing_wheels = sdists | incompatible_wheels
self._download_binary_wheels(missing_wheels, directory)
# Re-count the wheel files after the second download pass. Anything
# that has an sdist but not a valid wheel file is still not going to
# work on lambda and we must now try and build the sdist into a wheel
# file ourselves.
compatible_wheels, incompatible_wheels = self._categorize_wheel_files(directory)
LOG.debug("compatible wheels after second download pass: %s", compatible_wheels)
missing_wheels = sdists - compatible_wheels
self._build_sdists(missing_wheels, directory, compile_c=True)
# There is still the case where the package had optional C dependencies
# for speedups. In this case the wheel file will have been built above with
# the C dependencies if it managed to find a C compiler. If we are on
# an incompatible architecture this means the wheel file generated will
# not be compatible. If we categorize our files once more and find that
# there are missing dependencies we can try our last ditch effort of
# building the package and trying to sever its ability to find a C
# compiler.
compatible_wheels, incompatible_wheels = self._categorize_wheel_files(directory)
LOG.debug("compatible after building wheels (no C compiling): %s", compatible_wheels)
missing_wheels = sdists - compatible_wheels
self._build_sdists(missing_wheels, directory, compile_c=False)
# Final pass to find the compatible wheel files and see if there are
# any unmet dependencies left over. At this point there is nothing we
# can do about any missing wheel files. We tried downloading a
# compatible version directly and building from source.
compatible_wheels, incompatible_wheels = self._categorize_wheel_files(directory)
LOG.debug("compatible after building wheels (C compiling): %s", compatible_wheels)
# Now there is still the case left over where the setup.py has been
# made in such a way as to be incompatible with python's setuptools,
# causing it to lie about its compatibility. To fix this we have a
# manually curated whitelist of packages that will work, despite
# claiming otherwise.
compatible_wheels, incompatible_wheels = self._apply_wheel_whitelist(compatible_wheels, incompatible_wheels)
missing_wheels = deps - compatible_wheels
LOG.debug("Final compatible: %s", compatible_wheels)
LOG.debug("Final incompatible: %s", incompatible_wheels)
LOG.debug("Final missing wheels: %s", missing_wheels)
return compatible_wheels, missing_wheels
def _download_all_dependencies(self, requirements_filename, directory):
# Download dependencies preferring wheel files but falling back to
# raw source dependencies to get the transitive closure over
# the dependency graph. Return the set of all package objects
# which will serve as the master list of dependencies needed to deploy
# successfully.
self._pip.download_all_dependencies(requirements_filename, directory)
deps = {Package(directory, filename) for filename in self._osutils.get_directory_contents(directory)}
LOG.debug("Full dependency closure: %s", deps)
return deps
def _download_binary_wheels(self, packages, directory):
# Try to get binary wheels for each package that isn't compatible.
LOG.debug("Downloading missing wheels: %s", packages)
lambda_abi = get_lambda_abi(self.runtime)
self._pip.download_manylinux_wheels([pkg.identifier for pkg in packages], directory, lambda_abi)
def _build_sdists(self, sdists, directory, compile_c=True):
LOG.debug("Build missing wheels from sdists " "(C compiling %s): %s", compile_c, sdists)
for sdist in sdists:
path_to_sdist = self._osutils.joinpath(directory, sdist.filename)
self._pip.build_wheel(path_to_sdist, directory, compile_c)
def _categorize_wheel_files(self, directory):
final_wheels = [
Package(directory, filename)
for filename in self._osutils.get_directory_contents(directory)
if filename.endswith(".whl")
]
compatible_wheels, incompatible_wheels = set(), set()
for wheel in final_wheels:
if self._is_compatible_wheel_filename(wheel.filename):
compatible_wheels.add(wheel)
else:
incompatible_wheels.add(wheel)
return compatible_wheels, incompatible_wheels
def _is_compatible_wheel_filename(self, filename):
wheel = filename[:-4]
implementation, abi, platform = wheel.split("-")[-3:]
# Verify platform is compatible
if platform not in self._MANYLINUX_COMPATIBLE_PLATFORM:
return False
lambda_runtime_abi = get_lambda_abi(self.runtime)
# Verify that the ABI is compatible with lambda: either none or the
# correct type for the python version (cp27mu for py27 and cp36m for
# py36).
if abi == "none":
return True
prefix_version = implementation[:3]
if prefix_version == "cp3":
# Deploying a python 3 function, which means we need a matching cp3x abi.
# We can also accept abi3, which is the CPython 3 stable ABI and
# will work on any version of python 3.
return abi == lambda_runtime_abi or abi == "abi3"
elif prefix_version == "cp2":
# Deploying a python 2 function, which means we need the cp27mu abi.
return abi == "cp27mu"
# Don't know what we have but it didn't pass compatibility tests.
return False
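# A worked example of the filename check above, assuming the python3.8
# runtime (lambda ABI "cp38"):
#
#     "numpy-1.21.6-cp38-cp38-manylinux2014_x86_64.whl"  -> compatible
#     "requests-2.25.1-py2.py3-none-any.whl"             -> compatible (abi "none")
#     "numpy-1.21.6-cp38-cp38-win_amd64.whl"             -> incompatible platform
#
# Only the last three dash-separated fields (implementation, abi, platform)
# are inspected; the name and version are ignored here.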
def _apply_wheel_whitelist(self, compatible_wheels, incompatible_wheels):
compatible_wheels = set(compatible_wheels)
actual_incompatible_wheels = set()
for missing_package in incompatible_wheels:
if missing_package.name in self._COMPATIBLE_PACKAGE_WHITELIST:
compatible_wheels.add(missing_package)
else:
actual_incompatible_wheels.add(missing_package)
return compatible_wheels, actual_incompatible_wheels
def _install_purelib_and_platlib(self, wheel, root):
# Take a wheel package and the directory it was just unpacked into and
# move the purelib/platlib directories, if they are present, into
# the parent directory. On some systems purelib and platlib need to
# be installed into separate locations; for lambda this is not the case,
# and both should be installed in site-packages.
data_dir = self._osutils.joinpath(root, wheel.data_dir)
if not self._osutils.directory_exists(data_dir):
return
unpack_dirs = {"purelib", "platlib"}
data_contents = self._osutils.get_directory_contents(data_dir)
for content_name in data_contents:
if content_name in unpack_dirs:
source = self._osutils.joinpath(data_dir, content_name)
self._osutils.copytree(source, root)
# No reason to keep the purelib/platlib source directory around
# so we delete it to conserve space in the package.
self._osutils.rmtree(source)
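# For reference, the wheel data directory handled above follows the wheel
# spec layout; for a hypothetical package "foo" at version 1.0:
#
#     foo-1.0.data/purelib/...   -> merged into the site-packages root
#     foo-1.0.data/platlib/...   -> merged into the site-packages root
#     foo-1.0.data/scripts/...   -> left as unpacked (not handled here)
#
# Only purelib and platlib are flattened; other data subdirectories are
# untouched.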
def _install_wheels(self, src_dir, dst_dir, wheels):
if self._osutils.directory_exists(dst_dir):
self._osutils.rmtree(dst_dir)
self._osutils.makedirs(dst_dir)
for wheel in wheels:
zipfile_path = self._osutils.joinpath(src_dir, wheel.filename)
self._osutils.extract_zipfile(zipfile_path, dst_dir)
self._install_purelib_and_platlib(wheel, dst_dir)
class Package(object):
"""A class to represent a package downloaded but not yet installed."""
def __init__(self, directory, filename, osutils=None):
self.dist_type = "wheel" if filename.endswith(".whl") else "sdist"
self._directory = directory
self.filename = filename
if osutils is None:
osutils = OSUtils()
self._osutils = osutils
self._name, self._version = self._calculate_name_and_version()
@property
def name(self):
return self._name
@property
def data_dir(self):
# The directory format is {distribution}-{version}.data
return "%s-%s.data" % (self._name, self._version)
def _normalize_name(self, name):
# Taken directly from PEP 503
return re.sub(r"[-_.]+", "-", name).lower()
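# PEP 503 normalization collapses runs of "-", "_" and "." into a single "-"
# and lowercases the result, for example:
#
#     "Flask_SQLAlchemy"  -> "flask-sqlalchemy"
#     "zope.interface"    -> "zope-interface"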
@property
def identifier(self):
return "%s==%s" % (self._name, self._version)
def __str__(self):
return "%s(%s)" % (self.identifier, self.dist_type)
def __repr__(self):
return str(self)
def __eq__(self, other):
if not isinstance(other, Package):
return False
return self.identifier == other.identifier
def __hash__(self):
return hash(self.identifier)
def _calculate_name_and_version(self):
if self.dist_type == "wheel":
# From the wheel spec (PEP 427)
# {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-
# {platform tag}.whl
name, version = self.filename.split("-")[:2]
else:
info_fetcher = SDistMetadataFetcher(osutils=self._osutils)
sdist_path = self._osutils.joinpath(self._directory, self.filename)
name, version = info_fetcher.get_package_name_and_version(sdist_path)
normalized_name = self._normalize_name(name)
return normalized_name, version
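# As a sketch of the wheel branch above, the name and version come straight
# from the PEP 427 filename fields:
#
#     "requests-2.25.1-py2.py3-none-any.whl".split("-")[:2]
#     # -> ["requests", "2.25.1"]
#
# For sdists the slower SDistMetadataFetcher path is used instead, since the
# archive filename is not a reliable source of metadata.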
class SDistMetadataFetcher(object):
"""This is the "correct" way to get name and version from an sdist."""
# https://git.io/vQkwV
_SETUPTOOLS_SHIM = (
"import setuptools, tokenize;__file__=%r;"
"f=getattr(tokenize, 'open', open)(__file__);"
"code=f.read().replace('\\r\\n', '\\n');"
"f.close();"
"exec(compile(code, __file__, 'exec'))"
)
def __init__(self, osutils=None):
if osutils is None:
osutils = OSUtils()
self._osutils = osutils
def _parse_pkg_info_file(self, filepath):
# The PKG-INFO generated by the egg-info command is in an email feed
# format, so we use an email feedparser here to extract the metadata
# from the PKG-INFO file.
data = self._osutils.get_file_contents(filepath, binary=False)
parser = FeedParser()
parser.feed(data)
return parser.close()
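# PKG-INFO uses RFC 822 style header fields, which is why an email parser
# works here. A minimal example of the fields this code relies on:
#
#     Metadata-Version: 1.0
#     Name: foo
#     Version: 1.0
#
# FeedParser.close() returns a Message object, so metadata["Name"] and
# metadata["Version"] below read these headers directly.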
def _generate_egg_info(self, package_dir):
setup_py = self._osutils.joinpath(package_dir, "setup.py")
script = self._SETUPTOOLS_SHIM % setup_py
cmd = [sys.executable, "-c", script, "--no-user-cfg", "egg_info", "--egg-base", "egg-info"]
egg_info_dir = self._osutils.joinpath(package_dir, "egg-info")
self._osutils.makedirs(egg_info_dir)
p = subprocess.Popen(cmd, cwd=package_dir, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
info_contents = self._osutils.get_directory_contents(egg_info_dir)
pkg_info_path = self._osutils.joinpath(egg_info_dir, info_contents[0], "PKG-INFO")
return pkg_info_path
def _unpack_sdist_into_dir(self, sdist_path, unpack_dir):
if sdist_path.endswith(".zip"):
self._osutils.extract_zipfile(sdist_path, unpack_dir)
elif sdist_path.endswith((".tar.gz", ".tar.bz2")):
self._osutils.extract_tarfile(sdist_path, unpack_dir)
else:
raise InvalidSourceDistributionNameError(sdist_path)
# There should only be one directory unpacked.
contents = self._osutils.get_directory_contents(unpack_dir)
return self._osutils.joinpath(unpack_dir, contents[0])
def get_package_name_and_version(self, sdist_path):
with self._osutils.tempdir() as tempdir:
package_dir = self._unpack_sdist_into_dir(sdist_path, tempdir)
pkg_info_filepath = self._generate_egg_info(package_dir)
metadata = self._parse_pkg_info_file(pkg_info_filepath)
name = metadata["Name"]
version = metadata["Version"]
return name, version
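# A minimal usage sketch, assuming a hypothetical sdist on disk:
#
#     fetcher = SDistMetadataFetcher()
#     fetcher.get_package_name_and_version("/tmp/foo-1.0.tar.gz")
#     # -> ("foo", "1.0")
#
# The archive is unpacked into a temporary directory and "setup.py egg_info"
# is run there, so a setup.py-based sdist is required.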
class SubprocessPip(object):
"""Wrapper around calling pip through a subprocess."""
def __init__(self, osutils=None, python_exe=None, import_string=None):
if osutils is None:
osutils = OSUtils()
self._osutils = osutils
self.python_exe = python_exe
if import_string is None:
import_string = pip_import_string(python_exe=self.python_exe)
self._import_string = import_string
def main(self, args, env_vars=None, shim=None):
if env_vars is None:
env_vars = self._osutils.environ()
if shim is None:
shim = ""
run_pip = ("import sys; %s; sys.exit(main(%s))") % (self._import_string, args)
exec_string = "%s%s" % (shim, run_pip)
invoke_pip = [self.python_exe, "-c", exec_string]
p = self._osutils.popen(invoke_pip, stdout=self._osutils.pipe, stderr=self._osutils.pipe, env=env_vars)
out, err = p.communicate()
rc = p.returncode
return rc, out, err
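# As a rough sketch, for args of ["download", "-r", "requirements.txt"] the
# code above runs something equivalent to:
#
#     <python_exe> -c "import sys; <import string>; \
#         sys.exit(main(['download', '-r', 'requirements.txt']))"
#
# where the import string comes from compat.pip_import_string and exposes
# pip's main() entry point.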
class PipRunner(object):
"""Wrapper around pip calls used by chalice."""
_LINK_IS_DIR_PATTERN = "Processing (.+?)\n" " Link is a directory," " ignoring download_dir"
def __init__(self, python_exe, pip, osutils=None):
if osutils is None:
osutils = OSUtils()
self.python_exe = python_exe
self._wrapped_pip = pip
self._osutils = osutils
def _execute(self, command, args, env_vars=None, shim=None):
"""Execute a pip command with the given arguments."""
main_args = [command] + args
LOG.debug("calling pip %s", " ".join(main_args))
rc, out, err = self._wrapped_pip.main(main_args, env_vars=env_vars, shim=shim)
return rc, out, err
def build_wheel(self, wheel, directory, compile_c=True):
"""Build an sdist into a wheel file."""
arguments = ["--no-deps", "--wheel-dir", directory, wheel]
env_vars = self._osutils.environ()
shim = ""
if not compile_c:
env_vars.update(pip_no_compile_c_env_vars)
shim = pip_no_compile_c_shim
# Ignore rc and stderr from this command since building the wheels
# may fail and we will find out when we categorize the files that were
# generated.
self._execute("wheel", arguments, env_vars=env_vars, shim=shim)
def download_all_dependencies(self, requirements_filename, directory):
"""Download all dependencies as sdist or wheel."""
arguments = ["-r", requirements_filename, "--dest", directory]
rc, out, err = self._execute("download", arguments)
# When downloading all dependencies we expect to get an rc of 0 back
# since we are casting a wide net here letting pip have options about
# what to download. If a package is not found it is likely because it
# does not exist and was misspelled. In this case we raise an error with
# the package name. Otherwise a nonzero rc results in a generic
# download error where we pass along the stderr.
if rc != 0:
if err is None:
err = b"Unknown error"
error = err.decode()
match = re.search(("Could not find a version that satisfies the " "requirement (.+?) "), error)
if match:
package_name = match.group(1)
raise NoSuchPackageError(str(package_name))
raise PackageDownloadError(error)
stdout = out.decode()
matches = re.finditer(self._LINK_IS_DIR_PATTERN, stdout)
for match in matches:
wheel_package_path = str(match.group(1))
# It may look odd that we do not check the error status of building the
# wheel here. We can assume this is a valid package path since
# we already passed the pip download stage. This stage would have
# thrown a PackageDownloadError if any of the listed packages were
# not valid.
# If it fails the actual build step, it will have the same behavior
# as any other package we fail to build a valid wheel for, and
# complain at deployment time.
self.build_wheel(wheel_package_path, directory)
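# The initial download pass above corresponds roughly to
#
#     pip download -r <requirements_filename> --dest <directory>
#
# and the "Link is a directory" scan picks up local directory requirements,
# which pip processes in place instead of copying into the download dir, so
# they are built into wheels here as well.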
def download_manylinux_wheels(self, packages, directory, lambda_abi):
"""Download wheel files for manylinux for all the given packages."""
# If any one of these dependencies fails pip will bail out. Since we
# are only interested in all the ones we can download, we need to feed
# each package to pip individually. The return code of pip doesn't
# matter here since we will inspect the working directory to see which
# wheels were downloaded. We are only interested in wheel files
# compatible with lambda, which means manylinux1_x86_64 platform and
# cpython implementation. The compatible abi depends on the python
# version and is checked later.
for package in packages:
arguments = [
"--only-binary=:all:",
"--no-deps",
"--platform",
"manylinux2014_x86_64",
"--implementation",
"cp",
"--abi",
lambda_abi,
"--dest",
directory,
package,
]
self._execute("download", arguments)