Commit 3999abf3 authored by delcmo's avatar delcmo
Browse files

Renamed some of the files with a prefix to make them available in the navigation...

Renamed some of the files with a prefix to make them available in the navigation tab in Workbench, removed commented blocks in the input file, and added a README file describing the contents.
parent 8f5255f3
Loading
Loading
Loading
Loading
+14 −0
Original line number Diff line number Diff line
File names and descriptions:
* pipe_3840.drive: contains logic to generate the driver script used by Dakota to run the Nek4nuc
  application.
* pipe_3840.in: Dakota input file.
* pipe_3840.re2: Nek5000 mesh file containing the spectral element mesh of the pipe.
* pipe_3840.tmpl: Nek4nuc templated input file that will be used by the Dakota driver to
  generate a Nek4nuc input file.
* pipe_3840.txt: text file containing coordinates of probes for collecting instantaneous
  velocity fields and computing the FFT.
* pipe_3840_pipe_exp_data.txt: text file containing coordinates of probes for line plots 
  sub-block.
* pipe_3840_exp_xyz_coord.txt: contains experimental data of the time-averaged velocity
   field.
* pipe_3840_tabular_output.dat: data file with results from the Dakota-Nek4nuc study.

dakota_driver.py

deleted100644 → 0
+0 −88
Original line number Diff line number Diff line
#!/usr/bin/env python
"""Dakota analysis driver for the Nek4nuc pipe study.

Dakota executes this script as

    dakota_driver.py params.in results.out

dakota.interfacing pulls the two file-name arguments out of sys.argv
automatically.  The script:

1. reads the Dakota parameters file,
2. dumps the parameters to params.json for the template engine,
3. runs the external application via the ``*.drive`` file found in the
   current working directory (waspdrive),
4. extracts the response values and writes them back to Dakota, plus a
   results.json copy for later inspection.
"""
import glob
import json
import os
import sys

# Site-specific install locations for the project modules below.
sys.path.insert(0, "/home/lpswile/Workbench-Linux/wasppy")
sys.path.insert(0, "/projects/dakota/install/cts/intel/dakota-6.11.0.Linux.x86_64/share/dakota/Python/dakota") # dakota_install/share/dakota/Python/dakota
import waspdrive # Workbench Analysis Sequence Processor driver module
from interfacing import interfacing as di # Dakota's interface module

# ----------------------------
# Parse Dakota parameters file
# ----------------------------

params, results = di.read_parameters_file()

# Dump params to an external params.json file for later use by the
# template engine.  In the params object, _variables is an OrderedDict,
# so params.descriptors iterates it in the same order.
params_for_template_engine_file_path = "params.json"
with open(params_for_template_engine_file_path, 'w') as outfile:
    # json.dump returns None; no need to keep its result.
    json.dump(params._variables, outfile, default=lambda o: o.__dict__)

# -------------------------------
# Pre-processing
# Convert and send to application
# Or copy parameters into template to generate input file
# -------------------------------

# continuous_vars are defined in the Dakota .in file as cdv_descriptor.
continuous_vars = [params[k] for k in params.descriptors]

active_set_vector = 0

# Alternatively, the ASV can be accessed by index in
# function, gradient, hessian order:
#for i, bit in enumerate(results["obj_fn"].asv):
#    if bit:
#        active_set_vector += 1 << i

# Obtain the drive module's input.  This file contains the application
# information and the response-extraction logic.
driver_inputs = glob.glob("*.drive")
if not driver_inputs:
    raise ValueError("Unable to find drive file in " + os.getcwd() + "; did you forget to copy or link the file?")

driver_document = waspdrive.process_drive_input(driver_inputs[0])
rtncode = waspdrive.run_external_app(driver_document, params_for_template_engine_file_path)

# Follow the logic in the drive file to extract the required responses.
res_output = waspdrive.extract_results(driver_document)

retval = {'fns': res_output}

# ----------------------------
# Return the results to Dakota
# ----------------------------

# Insert extracted values into results.  The results iterator provides
# an index, response name, and response object.
try:
    for i, n, r in results:
        if r.asv.function:
            try:
                r.function = retval['fns'][i]
            except (IndexError, KeyError):
                # Fewer extracted values than responses: leave this
                # response unset rather than aborting the evaluation.
                pass
# Catch the Dakota 6.9 exception where the results interface changed to
# yield (name, response) pairs only:
#   ValueError: too many values to unpack
except ValueError:
    for i, (n, r) in enumerate(results.items()):
        r.function = retval['fns'][i]
results.write()

# Dump the full results object to an external results.json file.
with open('results.json', 'w') as outfile:
    json.dump(results, outfile, default=lambda o: o.__dict__)

multidim_parameter_study.in

deleted100644 → 0
+0 −51
Original line number Diff line number Diff line
# DAKOTA INPUT FILE: multidimensional parameter study
# Sweeps the Nek4nuc pipe model over a grid of filter/polynomial-order
# settings, running dakota_driver.py once per grid point.

# Record every evaluation in a tabular data file for post-processing.
environment
	tabular_graphics_data
	  tabular_graphics_file = 'multidim_param_study_tabular_output'

# Number of partitions per variable dimension (points = partitions + 1,
# per the Dakota multidim_parameter_study semantics).
method
        multidim_parameter_study
	partition 2 5 2 

# Two integer set variables (filterWeight with 3 values, lx1 with 6)
# and one real set variable (filterCutoffRatio with 3 values).
variables
	active all

        discrete_uncertain_set
	  integer 2
	    num_set_values 3 6 
	    set_values 5 10 15 4 6 8 10 12 14
	  descriptor 'filterWeight' 'lx1'
          real 1
            num_set_values 3 
            set_values   0.1 0.5 0.9
	  descriptor       'filterCutoffRatio'

# Fork interface: each evaluation runs the Python driver in its own
# tagged, saved work directory with the listed support files copied in.
# Up to 4 evaluations run concurrently.
interface
   analysis_drivers = 'python dakota_driver.py'
   fork
   work_directory
     named 'pipe_workdir'
   directory_tag
   directory_save
   copy_files = "pipe_3840.re2"
                "pipe_3840.tmpl"
                "pipe_3840_31000.fld"
                "dakota_driver.py"
                "pipe_3840.drive"
                "pipe_3840.txt"
                "test.py"
                "expPoints.txt"
                "longPipeRe31k.txt"
                "script-postprocess.py"
   replace
   parameters_file = 'params.in'
   results_file = 'results.out'
   file_save file_tag
   asynchronous evaluation_concurrency = 4 

# Two scalar response functions, no derivative information.
responses
	response_functions = 2 
        descriptors = 'L2_error_norm' 'Friction_velocity'
	no_gradients
	no_hessians
+16 −44
Original line number Diff line number Diff line
## Apollo
#application "/home/mxd/NEAMS+VisIt-rev67bd6a14e-Linux/rte/entry.sh /home/mxd/NEAMS+VisIt-rev67bd6a14e-Linux/rte/nek4nuc.py -i pipe_3840.parn -e /opt/openmpi-3.1.1-gcc-4.8.5/bin/mpirun -v 0 -nek '/home/mxd/Nek5000-v17'"
### Application block ###
application "/ascldap/users/lpswile/Workbench-Linux/rte/entry.sh /ascldap/users/lpswile/Workbench-Linux/rte/nek4nuc.py -i pipe_3840.parn -e srun -v 0 -nek '/ascldap/users/lpswile/Nek5000'"
## Panacea
#application "/home/mxd/NEAMS+VisIt-rev67bd6a14e-Linux/rte/entry.sh /home/mxd/NEAMS+VisIt-rev67bd6a14e-Linux/rte/nek4nuc.py -i pipe_3840.parn -e /software/tools/apps/openmpi/gcc4/1.10.2/bin/mpirun -v 0 -nek '/home/mxd/Nek5000-v17'"
## Local
#application "/Users/mxd/ORNL-Research/Workbench/Workbench-Darwin/rte/entry.sh /Users/mxd/ORNL-Research/Workbench/Workbench-Darwin/rte/nek4nuc.py -i rectangle.parn -e /usr/local/bin/mpirun -v 0 -nek '/Users/mxd/ORNL-Research/Nek5000-project/Nek5000'"
            input_file 'pipe_3840.parn'
            input_tmpl 'pipe_3840.tmpl'

#extract_from 'pipe_3840.dragLogic/pipe_3840.frctVel.txt'  find last_line # add optional integer N to grab the last N lines 
#column 1 delimiter ' '

extract_from 'pipe_3840.linePlots/pipe_3840.avg.txt' using "/ascldap/users/lpswile/Workbench-Linux/rte/entry.sh script-postprocess.py"
### Extract block ###
# Extract friction velocity
extract_from 'pipe_3840.dragLogic/pipe_3840.frctVel.txt' find last_line 
column 1 delimiter ' '
# Compute L2 error norm between Nek5000 data and experimental data
extract_from 'pipe_3840.linePlots/pipe_3840.avg.txt' using "/ascldap/users/lpswile/Workbench-Linux/rte/entry.sh pipe_3840_postprocess.py"

### Scheduler block ###
scheduler
    header = "#!/bin/bash"
             "#SBATCH --nodes=2"
@@ -23,27 +19,3 @@ scheduler
             "module purge"
             "module load sparc-dev/intel"
    submit_path = '/usr/bin/sbatch'

## Apollo
##scheduler
##    scheduler_header = "#!/bin/bash"
##                       "#PBS -m bea"
##                       "#PBS -M mxd@ornl.gov"
##                       "#PBS -l nodes=1:ppn=12"
##                       "#PBS -N DakNek"
##                       "#PBS -l walltime=00:30:00"
##    submit_path = "/opt/torque-6.1.3/bin/qsub"

## Panacea
#scheduler
#    scheduler_header = "#!/bin/bash"
#                       "#PBS -W group_list=cades-nsed"
#                       "#PBS -A nsed"
#                       "#PBS -l qos=std"
#                       "#PBS -m bea"
#                       "#PBS -M mxd@ornl.gov"
#                       "#PBS -l nodes=1:ppn=1"
#                       "#PBS -l walltime=00:10:00"
#                       "#PBS -N DakNek"
#                       "module load mpi/openmpi/gcc4/1.10.2"
#    submit_path = "/opt/torque/bin/qsub"

pipe_3840.in

0 → 100644
+49 −0
Original line number Diff line number Diff line
# DAKOTA INPUT FILE: multidimensional parameter study
# Renamed successor of multidim_parameter_study.in: sweeps the Nek4nuc
# pipe model over a grid of filter/polynomial-order settings, running
# dakota_driver.py once per grid point.

# Record every evaluation in a tabular data file for post-processing.
environment
  tabular_graphics_data
    tabular_graphics_file = 'pipe_3840_tabular_output.dat'

# Number of partitions per variable dimension (points = partitions + 1,
# per the Dakota multidim_parameter_study semantics).
method
  multidim_parameter_study
    partition 2 5 2

# Two integer set variables (filterWeight with 3 values, lx1 with 6)
# and one real set variable (filterCutoffRatio with 3 values).
variables
  active all
  discrete_uncertain_set
    integer 2
      num_set_values 3 6 
      set_values 5 10 15 4 6 8 10 12 14
	  descriptor 'filterWeight' 'lx1'
    real 1
      num_set_values 3 
      set_values 0.1 0.5 0.9
	  descriptor 'filterCutoffRatio'

# Fork interface: each evaluation runs the Python driver in its own
# tagged, saved work directory with the listed support files copied in.
# Up to 4 evaluations run concurrently.
# NOTE(review): 'pipe_3841_workdir' looks like a typo for
# 'pipe_3840_workdir' — confirm against the rest of the study.
interface
  analysis_drivers = 'python dakota_driver.py'
  fork
  work_directory
    named 'pipe_3841_workdir'
    directory_tag
    directory_save
  copy_files = "pipe_3840.re2"
              "pipe_3840.tmpl"
              "pipe_3840_31000.fld"
              "dakota_driver.py"
              "pipe_3840.drive"
              "pipe_3840.txt"
              "pipe_3840_exp_xyz_coord.txt"
              "pipe_3840_pipe_exp_data.txt"
              "pipe_3840_postprocess.py"
   replace
   parameters_file = 'params.in'
   results_file = 'results.out'
   file_save file_tag
   asynchronous evaluation_concurrency = 4

# Two scalar response functions, no derivative information.
responses
	response_functions = 2 
        descriptors = 'L2_error_norm' 'Friction_velocity'
	no_gradients
	no_hessians
Loading