Added pyspark version

parent 424d294997
commit 27b7683be9

Makefile | 17

@@ -9,13 +9,22 @@ UID := $(shell id -u)
 mobileinsight.sif: mobileinsight.def
 	"$(APPTAINER)" build --build-arg="NUM_THREADS=$(NUM_THREADS)" "$@" "$<"
 
-run: mobileinsight.sif
+mobileinsight-spark.sif: with-spark.def mobileinsight.sif
+	"$(APPTAINER)" build "$@" "$<"
+
+prepare:
 	@mkdir -p "$(OVERLAY_DIR)"
 	@lsmod | grep overlay > /dev/null || sudo modprobe overlay
+
+run: mobileinsight.sif prepare
 	@$(APPTAINER) run --overlay "$(OVERLAY_DIR)" \
 		-B "/run/user/$(UID)" mobileinsight.sif
 
-clean:
-	rm -f mobileinsight.sif $(OVERLAY_DIR)
+run-spark: mobileinsight-spark.sif prepare
+	@$(APPTAINER) run --overlay "$(OVERLAY_DIR)" \
+		-B "/run/user/$(UID)" mobileinsight-spark.sif
 
-.PHONY: run clean
+clean:
+	rm -rf *.sif $(OVERLAY_DIR)
+
+.PHONY: prepare run run-spark clean
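Taken together, the new targets split the workflow into build (`mobileinsight-spark.sif`), setup (`prepare`), run (`run-spark`), and cleanup (`clean`). A typical session, sketched here for orientation, would be:

```sh
# Start the Spark-enabled container; the prepare prerequisite creates the
# overlay directory and loads the overlay kernel module if necessary,
# and make builds any missing .sif images first
$ make run-spark

# Remove the generated .sif images and the overlay directory
$ make clean
```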
@@ -29,6 +29,11 @@ $ ./mobileinsight.sif python3 "$PYTHON_ARGS"
 
 # Start a bash shell inside the container
 $ ./mobileinsight.sif bash
+
+# Only available in spark version
+#
+# Start a pyspark interpreter
+$ ./mobileinsight-spark.sif pyspark
 ```
 
 Examples are stored in /opt/mobileinsight-examples.
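As a quick way to confirm the Spark installation works, the example job bundled with the upstream Spark distribution can be submitted through the image; the script path below is an assumption based on the stock `spark-3.5.1-bin-hadoop3` layout that the definition file further down unpacks into `/opt/spark`:

```sh
# Interactive PySpark shell
$ ./mobileinsight-spark.sif pyspark

# Non-interactive smoke test using Spark's bundled Pi example
# (path assumed from the upstream binary distribution)
$ ./mobileinsight-spark.sif spark-submit /opt/spark/examples/src/main/python/pi.py 10
```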
@@ -36,4 +41,6 @@ Examples are stored in /opt/mobileinsight-examples.
 Build
 -----
 
-Run `make mobileinsight.sif` to build the apptainer image.
+Run `make mobileinsight.sif` or `make mobileinsight-spark.sif` to build the
+apptainer image. The spark version includes both `mobileinsight` along with
+`pyspark`.
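Because the Makefile lists `mobileinsight.sif` as a prerequisite of `mobileinsight-spark.sif`, a single command is enough to produce both images:

```sh
# Builds the base image first if it does not exist, then the Spark layer
$ make mobileinsight-spark.sif
```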
with-spark.def (new file)
@@ -0,0 +1,32 @@
+Bootstrap: localimage
+From: mobileinsight.sif
+
+%arguments
+    SPARK_URL="https://dlcdn.apache.org/spark/spark-3.5.1/spark-3.5.1-bin-hadoop3.tgz"
+
+%environment
+    export SPARK_HOME="/opt/spark"
+    export PATH="/opt/spark/sbin:/opt/spark/bin:$PATH"
+    export PYSPARK_PYTHON="/usr/bin/python3"
+    export SPARK_LOCAL_IP="127.0.0.1"
+
+%post
+    # Update and install dependencies
+    mkdir -p /build
+    export DEBIAN_FRONTEND=noninteractive
+    apt-get update
+    apt-get upgrade -y
+    apt-get install -y \
+        openjdk-8-jre-headless \
+        scala
+
+    # Download Spark
+    SPARK_TGZ="/build/$(basename "{{ SPARK_URL }}")"
+    cd /build
+    wget "{{ SPARK_URL }}" -nv -O "$SPARK_TGZ"
+    tar -xf "$SPARK_TGZ"
+    mv "$(echo $SPARK_TGZ | sed 's/\.tgz$//')" /opt/spark
+
+    # Cleanup
+    apt-get autoclean -y
+    rm -rf /build