Commit d9209842 authored by Stansberry, Dale's avatar Stansberry, Dale
Browse files

- More clean-up

parent 16353e6f
Pipeline #3120 skipped
#!/sw/rhea/python/2.7.9/rhel6.6_gnu4.4.7/bin/python
#!/usr/bin/env python
import os
import sys
......@@ -89,8 +89,6 @@ def generateConfiguration( hostname, master, nodeinfo ):
conf.write( 'SPARK_LOG_DIR=' + SPARK_LOCAL_DIR + '/logs\n' )
conf.write( 'SPARK_WORKER_DIR=' + SPARK_LOCAL_DIR + '\n' )
conf.write( 'SPARK_LOCAL_DIRS=' + SPARK_LOCAL_DIR + '/local\n' )
if python_support:
conf.write( 'SPARK_PYTHON_SUPPORT=1\n' )
conf.close()
......@@ -148,13 +146,7 @@ if __name__ == '__main__':
print 'Configuring spark on ' + hostname + ' at ' + SPARK_DEPLOY_DIR + ', master: ' + master
if "SPARK_PYTHON_SUPPORT" in os.environ:
python_support = True
else:
python_support = False
nodeinfo = collectNodeInfo()
generateConfiguration( hostname, master, nodeinfo )
os.environ["SPARK_CONF_DIR"] = SPARK_DEPLOY_DIR + "/nodes/" + hostname + "/conf"
......
#!/usr/bin/python
#!/usr/bin/env python
import os
import sys
......@@ -63,24 +63,22 @@ def generateScript():
outf = open( script_file, 'w' )
outf.write( "#!/bin/bash\n\n" )
outf.write( '#PBS -A ' + account + '\n' )
outf.write( '#PBS -l walltime=' + walltime + ',nodes=' + num_nodes + '\n' )
outf.write( '\n' )
outf.write( 'source ${MODULESHOME}/init/bash\n\n' )
outf.write( 'module load python\n' )
if python_support == True:
outf.write( 'export SPARK_PYTHON_SUPPORT=1\n' )
outf.write( '#PBS -l walltime=' + walltime + ',nodes=' + num_nodes + '\n\n' )
outf.write( 'export SPARK_HOME=' + spark_home + '\n' )
if deploy_dir != None:
outf.write( 'export SPARK_DEPLOY_DIR=' + deploy_dir + '\n\n' )
outf.write( 'export SPARK_DEPLOY_DIR=' + deploy_dir + '\n\n' )
outf.write( 'source ${MODULESHOME}/init/bash\n' )
outf.write( 'module load python\n' )
outf.write( 'export PYTHONPATH=$SPARK_HOME/python\n\n' )
outf.write( 'if [ -d "$SPARK_DEPLOY_DIR/nodes" ]; then\n' )
outf.write( ' rm -rf $SPARK_DEPLOY_DIR/nodes/*\n' )
outf.write( ' sync\n' )
outf.write( 'fi\n\n' )
outf.write( "echo =============== Running spark deployment ===============\n\n" )
outf.write( 'cd $WORKDIR\n' )
outf.write( 'mpirun -n ' + num_nodes + ' --npernode 1 $SPARK_HOME/sbin/spark_deploy.py $SPARK_HOME $SPARK_DEPLOY_DIR &\n\n' )
outf.write( "MASTERFILE=$SPARK_DEPLOY_DIR/nodes/master\n" )
outf.write( "timeout=" + deploy_timeout + "\n" )
......@@ -93,12 +91,17 @@ def generateScript():
outf.write( " exit 1\n" )
outf.write( " fi\n" )
outf.write( "done\n\n" )
outf.write( "MASTERNODE=$(<$MASTERFILE)\n" )
outf.write( "echo Master: $MASTERNODE\n\n" )
outf.write( 'SPARK_SUBMIT="$SPARK_HOME/bin/spark-submit --driver-memory '+driver_memory+' --executor-memory '+executor_memory+' --master spark://$MASTERNODE:7077"\n\n' )
outf.write( "echo =============== Running spark job ===============\n\n" )
outf.write( "# Edit/add the following line to with your Spark program and arguments specified...\n\n" )
outf.write( "# $SPARK_SUBMIT <application> [arguments...]\n")
outf.write( "# Edit/add the following line to with your Spark program and arguments specified...\n" )
outf.write( "# $SPARK_SUBMIT <application> [arguments...]\n\n")
outf.write( "echo =============== Spark job finished ===============\n\n" )
outf.close()
......@@ -128,11 +131,6 @@ def setupDeployDir():
conf.write( '\n# Support for modules:\n' )
conf.write( 'source /etc/profile\n' )
conf.write( 'source $MODULESHOME/init/bash\n' )
if python_support == True:
conf.write( 'module load python\n' )
conf.write( 'module load python_pip\n' )
conf.close()
# Append default (static) settings to spark-defaults.conf template
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment