# MAESTROeX regression test configuration
# Read by the AMReX regression test suite (regtest.py, configparser-based).
# [main] holds suite-wide settings; [AMReX]/[source]/[extra-MICRO] name the
# repositories to build against; each remaining section defines one test.

[main]
testTopDir = /scratch/AMReX_RegTesting/rt-MAESTROeX/
webTopDir = /scratch/AMReX_RegTesting/rt-MAESTROeX/web

MAKE = make
sourceTree = C_Src
numMakeJobs = 8

COMP = g++
FCOMP = gfortran

add_to_c_make_command = TEST=TRUE USE_ASSERTION=TRUE

purge_output = 1

# suiteName is the name prepended to all output directories
suiteName = MAESTROeX

reportActiveTestsOnly = 1

# Add "GO UP" link at the top of the web page?
goUpLink = 1

# email
sendEmailWhenFail = 1
emailTo = ajnonaka@lbl.gov
emailBody = Check https://ccse.lbl.gov/pub/RegressionTesting2/MAESTROeX/ for more details.

# MPIcommand should use the placeholders:
#   @host@ to indicate where to put the hostname to run on
#   @nprocs@ to indicate where to put the number of processors
#   @command@ to indicate where to put the command to run
#
# only tests with useMPI = 1 will run in parallel
# nprocs is problem dependent and specified in the individual problem
# sections.

#MPIcommand = mpiexec -host @host@ -n @nprocs@ @command@
MPIcommand = mpiexec -n @nprocs@ @command@
MPIhost =

[AMReX]
dir = /scratch/AMReX_RegTesting/amrex/
branch = "development"

[source]
dir = /scratch/AMReX_RegTesting/MAESTROeX/
branch = "development"

[extra-MICRO]
dir = /scratch/AMReX_RegTesting/Microphysics/
branch = "development"

# individual problems follow

[reacting_bubble_2d]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_2d_regression
outputFile = reacting_bubble_2d_plt0000003
aux1File = model.hse.cool.coulomb
link1File = helm_table.dat
dim = 2
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 0
numthreads = 1
compileTest = 0
doVis = 0

[reacting_bubble_omp_2d]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_omp_2d_regression
outputFile = reacting_bubble_omp_2d_plt0000003
aux1File = model.hse.cool.coulomb
link1File = helm_table.dat
dim = 2
restartTest = 0
useMPI = 1
numprocs = 2
useOMP = 1
numthreads = 4
compileTest = 0
doVis = 0

[reacting_bubble_2d_amr]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_2d_amr_regression
outputFile = reacting_bubble_2d_amr_plt0000003
aux1File = model.hse.cool.coulomb
link1File = helm_table.dat
dim = 2
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 0
numthreads = 1
compileTest = 0
doVis = 0

[reacting_bubble_3d]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_3d_regression
outputFile = reacting_bubble_3d_plt0000003
aux1File = model.hse.cool.coulomb
link1File = helm_table.dat
dim = 3
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 0
numthreads = 1
compileTest = 0
doVis = 0

[reacting_bubble_omp_3d]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_omp_3d_regression
outputFile = reacting_bubble_omp_3d_plt0000003
aux1File = model.hse.cool.coulomb
link1File = helm_table.dat
dim = 3
restartTest = 0
useMPI = 1
numprocs = 2
useOMP = 1
numthreads = 4
compileTest = 0
doVis = 0

[reacting_bubble_3d_amr]
buildDir = Exec/test_problems/reacting_bubble
inputFile = inputs_3d_amr_regression
outputFile = reacting_bubble_3d_amr_plt0000003
aux1File = model.hse.cool.coulomb
link1File = helm_table.dat
dim = 3
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 0
numthreads = 1
compileTest = 0
doVis = 0

[wdconvect]
buildDir = Exec/science/wdconvect
inputFile = inputs_files/inputs_3d_regression
outputFile = wdconvect_plt0000003
aux1File = ./model_files/kepler_new_6.25e8.hybrid.hse.320
link1File = helm_table.dat
dim = 3
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 0
numthreads = 1
compileTest = 0
doVis = 0

[wdconvect_omp]
buildDir = Exec/science/wdconvect
inputFile = inputs_files/inputs_omp_3d_regression
outputFile = wdconvect_omp_plt0000003
aux1File = ./model_files/kepler_new_6.25e8.hybrid.hse.1280
link1File = helm_table.dat
dim = 3
restartTest = 0
useMPI = 1
numprocs = 2
useOMP = 1
numthreads = 4
compileTest = 0
doVis = 0

[wdconvect_amr]
buildDir = Exec/science/wdconvect
inputFile = inputs_files/inputs_3d_amr_regression
outputFile = wdconvect_amr_plt0000003
aux1File = ./model_files/kepler_new_6.25e8.hybrid.hse.1280
link1File = helm_table.dat
dim = 3
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 0
numthreads = 1
compileTest = 0
doVis = 0

[double_bubble_2d]
buildDir = Exec/test_problems/double_bubble
inputFile = inputs_2d_regression
outputFile = double_bubble_2d_plt0000005
dim = 2
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 0
numthreads = 1
compileTest = 0
doVis = 0

[incomp_shear_jet_2d]
buildDir = Exec/test_problems/incomp_shear_jet
inputFile = inputs_2d_regression
outputFile = incomp_shear_jet_2d_plt0000005
dim = 2
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 0
numthreads = 1
compileTest = 0
doVis = 0

[rt_2d]
buildDir = Exec/test_problems/rt
inputFile = inputs_2d_regression
outputFile = rt_2d_plt0000005
dim = 2
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 0
numthreads = 1
compileTest = 0
doVis = 0

[test_convect_2d]
buildDir = Exec/test_problems/test_convect
inputFile = inputs_2d_regression
outputFile = test_convect_2d_plt0000005
aux1File = model.hse
link1File = helm_table.dat
dim = 2
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 0
numthreads = 1
compileTest = 0
doVis = 0

[test_stability_2d]
buildDir = Exec/test_problems/test_stability
inputFile = inputs_2d_C_regression
outputFile = plt0000050
aux1File = 5peaks_const_grav.dat
link1File = helm_table.dat
dim = 2
restartTest = 0
useMPI = 1
numprocs = 4
useOMP = 0
numthreads = 1
compileTest = 0
doVis = 0