#!/bin/bash
#SBATCH --job-name=spark
# Exclusive mode is recommended for all Spark jobs
#SBATCH --exclusive
#SBATCH --nodes=1
#SBATCH --time=10

module load spark

# This syntax tells Spark to use all CPU cores on the node.
export MASTER="local[*]"

# This is a Scala example
run-example SparkPi

# This is a Python example.
# For production jobs, you'll probably want to have a Python module loaded.
# This will use the system Python if you don't have a Python module loaded.
spark-submit --master $MASTER $SPARK_HOME/examples/src/main/python/pi.py
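
# A minimal sketch of submitting and checking this job, assuming the script
# above is saved as spark.sbatch (the filename is an assumption, not part of
# the original):
#
#   sbatch spark.sbatch        # queue the job with SLURM
#   squeue -u $USER            # watch it while it is pending or running
#   cat slurm-<jobid>.out      # read the Pi output once it completes
#
# The bundled pi.py also accepts an optional partition count if you want more
# parallelism (100 here is an arbitrary illustrative value):
#
#   spark-submit --master $MASTER $SPARK_HOME/examples/src/main/python/pi.py 100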