##############################################################################
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class PyFunctionalizer(PythonPackage):
    """Functionalizer - Spark functionalizer developed by Blue Brain Project, EPFL"""

    homepage = "https://github.com/BlueBrain/functionalizer"
    pypi = "functionalizer/functionalizer-1.0.0.tar.gz"

    version("1.0.0", sha256="c62754fcf41e29729386c23cefb0dd57b449ac27c0b47ba5e2e4b2776c517494")
depends_on("cmake", type="build")
depends_on("ninja", type="build")
depends_on("py-scikit-build-core+pyproject", type="build")
depends_on("py-setuptools-scm", type="build")
depends_on("spark+hadoop@3.0.0:", type="run")
depends_on("hadoop@3:", type="run")
depends_on("py-docopt", type=("build", "run"))
depends_on("py-future", type=("build", "run"))
depends_on("py-fz-td-recipe@0.2:", type=("build", "run"))
    # h5py was previously needed to read morphologies and still supplements
    # libSONATA, whose API lacks some functionality
    depends_on("py-h5py", type=("build", "run"))
depends_on("py-hdfs", type=("build", "run"))
depends_on("py-jprops", type=("build", "run"))
depends_on("py-libsonata@0.1.17:", type=("build", "run"))
depends_on("py-lxml", type=("build", "run"))
depends_on("py-morphio", type=("build", "run"))
depends_on("py-mpi4py", type=("build", "run"))
depends_on("py-numpy", type=("build", "run"))
depends_on("py-packaging", type=("build", "run"))
depends_on("py-pandas", type=("build", "run"))
depends_on("py-pyarrow+dataset+parquet@3.0.0:", type=("build", "run"))
depends_on("py-pyspark@3.0.0:", type=("build", "run"))

    def setup_run_environment(self, env):
        env.set("SPARK_HOME", self.spec["spark"].prefix)
        env.set("HADOOP_HOME", self.spec["hadoop"].prefix)