|
| 1 | +############################################################################## |
| 2 | +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other |
| 3 | +# Spack Project Developers. See the top-level COPYRIGHT file for details. |
| 4 | +# |
| 5 | +# SPDX-License-Identifier: (Apache-2.0 OR MIT) |
from spack.package import *


class PyFunctionalizer(PythonPackage):
    """Functionalizer - Spark functionalizer developed by Blue Brain Project, EPFL"""

    homepage = "https://github.com/BlueBrain/functionalizer"
    pypi = "functionalizer/functionalizer-1.0.0.tar.gz"

    version("1.0.0", sha256="c62754fcf41e29729386c23cefb0dd57b449ac27c0b47ba5e2e4b2776c517494")

    # Build toolchain: the project is built with scikit-build-core (CMake +
    # Ninja backend) and derives its version via setuptools-scm.
    depends_on("py-cmake", type="build")
    depends_on("py-ninja", type="build")
    depends_on("py-scikit-build-core+pyproject", type="build")
    depends_on("py-setuptools-scm", type="build")

    # Runtime services: a Hadoop-enabled Spark and a Hadoop install are
    # required at run time; their prefixes are exported in
    # setup_run_environment below.
    depends_on("spark+hadoop@3.0.0:", type="run")
    depends_on("hadoop@3:", type="run")

    depends_on("py-docopt", type=("build", "run"))
    depends_on("py-future", type=("build", "run"))
    depends_on("py-fz-td-recipe@0.2:", type=("build", "run"))
    # h5py needed for morphologies before, and to supplement libSONATA due
    # to missing API functionality
    depends_on("py-h5py", type=("build", "run"))
    depends_on("py-hdfs", type=("build", "run"))
    depends_on("py-jprops", type=("build", "run"))
    depends_on("py-libsonata@0.1.17:", type=("build", "run"))
    depends_on("py-lxml", type=("build", "run"))
    depends_on("py-morphio", type=("build", "run"))
    depends_on("py-mpi4py", type=("build", "run"))
    depends_on("py-numpy", type=("build", "run"))
    depends_on("py-packaging", type=("build", "run"))
    depends_on("py-pandas", type=("build", "run"))
    depends_on("py-pyarrow+dataset+parquet@3.0.0:", type=("build", "run"))
    depends_on("py-pyspark@3.0.0:", type=("build", "run"))

    def setup_run_environment(self, env):
        """Export SPARK_HOME and HADOOP_HOME so the installed tool can locate
        its Spark and Hadoop runtime dependencies.

        Args:
            env: the Spack environment-modifications object for the run
                environment; ``env.set`` records an environment variable.
        """
        env.set("SPARK_HOME", self.spec["spark"].prefix)
        env.set("HADOOP_HOME", self.spec["hadoop"].prefix)