Stansberry, Dale / spark_on_demand · Commits

Commit d9209842, authored May 11, 2016 by Stansberry, Dale
parent 16353e6f · Pipeline #3120 skipped · Changes: 2

    - More clean-up
spark_deploy.py
#!/sw/rhea/python/2.7.9/rhel6.6_gnu4.4.7/bin/python
#!/usr/bin/env python
import os
import sys
...
@@ -89,8 +89,6 @@ def generateConfiguration( hostname, master, nodeinfo ):
    conf.write( 'SPARK_LOG_DIR=' + SPARK_LOCAL_DIR + '/logs\n' )
    conf.write( 'SPARK_WORKER_DIR=' + SPARK_LOCAL_DIR + '\n' )
    conf.write( 'SPARK_LOCAL_DIRS=' + SPARK_LOCAL_DIR + '/local\n' )
    if python_support:
        conf.write( 'SPARK_PYTHON_SUPPORT=1\n' )
    conf.close()
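For reference, a minimal sketch of what the surviving writes produce; the SPARK_LOCAL_DIR value, the flag, and the output path below are illustrative assumptions, not values from the commit:

    # Sketch only: replays the writes above with assumed values.
    SPARK_LOCAL_DIR = '/tmp/spark'      # assumption for illustration
    python_support = True               # assumption for illustration
    conf = open('spark-env.sh', 'w')    # hypothetical output path
    conf.write( 'SPARK_LOG_DIR=' + SPARK_LOCAL_DIR + '/logs\n' )
    conf.write( 'SPARK_WORKER_DIR=' + SPARK_LOCAL_DIR + '\n' )
    conf.write( 'SPARK_LOCAL_DIRS=' + SPARK_LOCAL_DIR + '/local\n' )
    if python_support:
        conf.write( 'SPARK_PYTHON_SUPPORT=1\n' )
    conf.close()
    # Resulting spark-env.sh:
    #   SPARK_LOG_DIR=/tmp/spark/logs
    #   SPARK_WORKER_DIR=/tmp/spark
    #   SPARK_LOCAL_DIRS=/tmp/spark/local
    #   SPARK_PYTHON_SUPPORT=1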
...
@@ -148,13 +146,7 @@ if __name__ == '__main__':
    print 'Configuring spark on ' + hostname + ' at ' + SPARK_DEPLOY_DIR + ', master: ' + master
    if "SPARK_PYTHON_SUPPORT" in os.environ:
        python_support = True
    else:
        python_support = False
    nodeinfo = collectNodeInfo()
    generateConfiguration( hostname, master, nodeinfo )
    os.environ["SPARK_CONF_DIR"] = SPARK_DEPLOY_DIR + "/nodes/" + hostname + "/conf"
...

spark_setup.py
#!/usr/bin/python
#!/usr/bin/env python
import os
import sys
...
@@ -63,24 +63,22 @@ def generateScript():
    outf = open( script_file, 'w' )
    outf.write( "#!/bin/bash\n\n" )
    outf.write( '#PBS -A ' + account + '\n' )
    outf.write( '#PBS -l walltime=' + walltime + ',nodes=' + num_nodes + '\n' )
    outf.write( '\n' )
    outf.write( 'source ${MODULESHOME}/init/bash\n\n' )
    outf.write( 'module load python\n' )
    if python_support == True:
        outf.write( 'export SPARK_PYTHON_SUPPORT=1\n' )
    outf.write( '#PBS -l walltime=' + walltime + ',nodes=' + num_nodes + '\n\n' )
    outf.write( 'export SPARK_HOME=' + spark_home + '\n' )
    if deploy_dir != None:
        outf.write( 'export SPARK_DEPLOY_DIR=' + deploy_dir + '\n\n' )
    outf.write( 'export SPARK_DEPLOY_DIR=' + deploy_dir + '\n\n' )
    outf.write( 'source ${MODULESHOME}/init/bash\n' )
    outf.write( 'module load python\n' )
    outf.write( 'export PYTHONPATH=$SPARK_HOME/python\n\n' )
    outf.write( 'if [ -d "$SPARK_DEPLOY_DIR/nodes" ]; then\n' )
    outf.write( ' rm -rf $SPARK_DEPLOY_DIR/nodes/*\n' )
    outf.write( ' sync\n' )
    outf.write( 'fi\n\n' )
    outf.write( "echo =============== Running spark deployment ===============\n\n" )
    outf.write( 'cd $WORKDIR\n' )
    outf.write( 'mpirun -n ' + num_nodes + ' --npernode 1 $SPARK_HOME/sbin/spark_deploy.py $SPARK_HOME $SPARK_DEPLOY_DIR &\n\n' )
    outf.write( "MASTERFILE=$SPARK_DEPLOY_DIR/nodes/master\n" )
    outf.write( "timeout=" + deploy_timeout + "\n" )
...
@@ -93,12 +91,17 @@ def generateScript():
    outf.write( " exit 1\n" )
    outf.write( " fi\n" )
    outf.write( "done\n\n" )
    outf.write( "MASTERNODE=$(<$MASTERFILE)\n" )
    outf.write( "echo Master: $MASTERNODE\n\n" )
    outf.write( 'SPARK_SUBMIT="$SPARK_HOME/bin/spark-submit --driver-memory ' + driver_memory + ' --executor-memory ' + executor_memory + ' --master spark://$MASTERNODE:7077"\n\n' )
    outf.write( "echo =============== Running spark job ===============\n\n" )
    outf.write( "# Edit/add the following line with your Spark program and arguments specified...\n\n" )
    outf.write( "# $SPARK_SUBMIT <application> [arguments...]\n" )
    outf.write( "# Edit/add the following line with your Spark program and arguments specified...\n" )
    outf.write( "# $SPARK_SUBMIT <application> [arguments...]\n\n" )
    outf.write( "echo =============== Spark job finished ===============\n\n" )
    outf.close()
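The generated script's MASTERNODE=$(<$MASTERFILE) line is a bash idiom that reads a whole file into a variable; its Python 2 equivalent, with an assumed path, would be:

    # Sketch: Python 2 equivalent of MASTERNODE=$(<$MASTERFILE).
    masterfile = '/lustre/scratch/spark/nodes/master'   # assumed path for illustration
    f = open(masterfile)
    masternode = f.read().strip()
    f.close()
    print 'Master: ' + masternode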
...
@@ -128,11 +131,6 @@ def setupDeployDir():
    conf.write( '\n# Support for modules:\n' )
    conf.write( 'source /etc/profile\n' )
    conf.write( 'source $MODULESHOME/init/bash\n' )
    if python_support == True:
        conf.write( 'module load python\n' )
        conf.write( 'module load python_pip\n' )
    conf.close()
    # Append default (static) settings to spark-defaults.conf template
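One stylistic note on this hunk: comparing a boolean flag with == True is redundant; idiomatic Python tests the flag directly, as in this sketch:

    # Same behavior as the hunk above, without the == True comparison.
    if python_support:
        conf.write( 'module load python\n' )
        conf.write( 'module load python_pip\n' )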
...