# maestroex-gpu.ini

[main]
testTopDir = /raid/testing/maestroex-gpu
webTopDir = /raid/www/MAESTROeX/test-suite/gpu
sourceTree = C_Src
numMakeJobs = 20
suiteName = MAESTROeX-SBU
sub_title = gpu version
goUpLink = 1
reportActiveTestsOnly = 1
COMP = gnu
add_to_c_make_command = CUDA_ARCH=70 USE_CUDA=TRUE INTEGRATOR_DIR=VODE
use_ctools = 0
plot_file_name = maestro.plot_base_name
purge_output = 1
summary_job_info_field1 = EOS
summary_job_info_field2 = NETWORK
#globalAddToExecString = diffusion.use_mlmg_solver=1 gravity.use_mlmg_solver=1

# MPIcommand should use the placeholders:
# @host@ to indicate where to put the hostname to run on
# @nprocs@ to indicate where to put the number of processors
# @command@ to indicate where to put the command to run
#
# only tests with useMPI = 1 will run in parallel
# nprocs is problem dependent and specified in the individual problem
# sections.
MPIcommand = mpiexec -n @nprocs@ @command@
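# For illustration only (hypothetical example): a test that sets useMPI = 1
# and requests 4 processes would have its run command expanded to roughly
#   mpiexec -n 4 ./<test-executable> <test-inputs-file>
# with the executable and inputs file taken from that test's own section.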
default_branch = development

# email
sendEmailWhenFail = 0
emailTo = castro-development@googlegroups.com
emailBody = check http://groot.astro.sunysb.edu/MAESTROeX/test-suite/gpu/

# slack
slack_post = 1
slack_webhookfile = /home/zingale/.slack.webhook
slack_channel = "#maestro"
slack_username = "i am groot"

[AMReX]
dir = /raid/testing/maestroex-gpu/amrex
branch = development

[source]
dir = /raid/testing/maestroex-gpu/MAESTROeX
branch = development

[extra-Microphysics]
dir = /raid/testing/maestroex-gpu/Microphysics
branch = development
comp_string = MICROPHYSICS_HOME=@self@
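# Note (assumption about the test suite's placeholder handling): @self@ is
# presumably expanded to this repository's checkout directory, so the build
# points MICROPHYSICS_HOME at the Microphysics clone configured above.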

[RT]
buildDir = Exec/test_problems/rt
inputFile = inputs_2d_regression
dim = 2
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 0
useOMP = 0
keywords = hydro

[reacting_bubble-2d-amr]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_2d_regression.2levels
dim = 2
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 0
useOMP = 0
keywords = hydro
link1File = helm_table.dat
link2File = model.hse.cool.coulomb

[reacting_bubble-3d]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_3d_gpu_regression
dim = 3
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 0
useOMP = 0
keywords = hydro
link1File = helm_table.dat
link2File = model.hse.cool.coulomb

[wdconvect-amr]
buildDir = Exec/science/wdconvect
inputFile = inputs_files/inputs_3d_regression.2levels
dim = 3
doVis = 0
compileTest = 0
restartTest = 0
useMPI = 0
useOMP = 0
keywords = hydro
link1File = helm_table.dat
link2File = model_files/kepler_new_6.25e8.hybrid.hse.320