Commit 8cc68d7a authored by Christoph Knote

Add test chem blueprint

parent 32bd7274
Showing changed files with 69 additions and 323 deletions
-20
\ No newline at end of file
+22
\ No newline at end of file
blueprints/test_chemistry/WRF_DOMAIN_PLOT_COAWST.png (new image, 170 KiB)
@@ -19,7 +19,7 @@ WRFDir=${WRF_SRC_PATH}
# --- Input data settings ---
# path to geogrid input data
-geogDir=${WRF_GEOG_PATH}
+geogDir=/alcc/gpfs2/home/mbees/data/geog/
# meteo input
# Vtable for the chosen meteo input
@@ -28,7 +28,7 @@ metVtableFile=${WPS_SRC_PATH}/ungrib/Variable_Tables/Vtable.GFS
metInc=1
# full path to a met input file - you can use any "%<>" abbreviations known
# to the "date" command
metFilePattern="${WRF_GFS_METEO_PATH}/GF%Y%m%d%H"
metFilePattern="/alcc/gpfs2/home/mbees/data/meteo/GFS/GF%Y%m%d%H"
# example:
# "/glade/p/rda/data/ds083.2/grib2/%Y/%Y.%m/fnl_%Y%m%d_%H_00.grib2"
@@ -57,26 +57,6 @@ restartRootDir=${SCRATCH}/WRF/restart/
# remove run directory after run is finished?
removeRunDir=false
-# --- MPI settings ---
-mpiCommandPre="mpirun /usr/bin/time -v"
-mpiCommandMain="mpirun /usr/bin/time -v"
-mpiCommandReal=${mpiCommandPre}
-# --- Batch system ---
-# argument to submit a job in a held state
-batchHoldArgument="--hold"
-# command to release a held job
-batchReleaseCommand="scontrol release"
-# command to submit jobs to the queueing system
-batchSubmitCommand=sbatch
-# dependency argument for chaining runs upon submission
-batchDepArgument="--dependency=afterany:__id__"
-# sed command ("used as s/__command/\1/") to retrieve job run PID upon
-# submission with $batchSubmitCommand
-batchPidSedCommand="Submitted batch job \(.*\)"
# --- Chemistry ---
withChemistry=true
@@ -84,28 +64,28 @@ withChemistry=true
# WRF-Chem installation directory
WRFChemDir=${WRF_CHEM_SRC_PATH}
# megan_bio_emiss installation directory
WRFMEGANdir=${WRF_CHEM_MEGAN_BIO_EMISS_PATH}
# mozbc installation directory
WRFMOZARTdir=${WRF_CHEM_MOZBC_PATH}
# wesley/exocoldens installation directory
WRFmztoolsdir=${WRF_CHEM_WES_COLDENS_PATH}
# anthro_emiss installation directory
WRFanthrodir=${WRF_CHEM_ANTHRO_EMIS_PATH}
# fire_emis installation directory
WRFfiredir=${WRF_CHEM_FIRE_EMIS_PATH}
# path to utility executables
meganBioEmissBin=megan_bio_emiss
mozbcBin=mozbc
weselyBin=wesely
exo_coldensBin=exo_coldens
anthro_emisBin=anthro_emis
fire_emisBin=fire_emis
# path to Wesely and Exo_Coldens input data
wesColdensDataPath=${WESELY_EXO_COLDENS_DATA}
# path to MEGAN input data
MEGANdir=/alcc/gpfs2/home/mbees/data/emissions/biogenic/MEGAN
# use anthro_emiss or predefined files?
-emissUseAnthroEmiss=true
+emissUseAnthroEmiss=false
# raw emission input - the files you read in with anthro_emiss
-emissDir=/alcc/gpfs2/home/mbees/data/emissions/anthropogenic/EDGAR-HTAP/MOZART_MOSAIC
+emissDir=/alcc/gpfs2/home/mbees/data/emissions/anthropogenic/EDGARv5/MOZART_MOSAIC
# emission conversion script for anthro_emis - must match emissions in emissDir
-emissInpFile=emis_edgarhtap_mozmos.inp
+emissInpFile=emis_edgarv5_mozmos.inp
# year the emissions are valid for (for offset calculation)
-emissYear=2010
+emissYear=2015
# FINN fires
fireFilePattern="/alcc/gpfs2/home/mbees/data/emissions/fires/FINN/GLOB_MOZ4_%Y%j.txt"
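A hedged note on the emissYear change: the anthro_emiss/fire_emis control files pass __emissYearOffset__ to data_yrs_offset, which shifts the simulated dates onto the year the inventory is valid for, so switching from EDGAR-HTAP (2010) to EDGARv5 (2015) changes that offset for any given run. The exact sign convention of the offset is defined by wrfotron's driver scripts, not by this diff.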
......
SHELL=/bin/bash
MAILTO=christoph.knote@med.uni-augsburg.de
USER=knotechr
#
# ### daily WRF-Chem forecasts ###
#
00 02 * * * /bin/bash -l /alcc/gpfs2/home/u/knotechr/wrfotron/master.bash /alcc/gpfs2/home/u/knotechr/wrfotron/blueprints/operational_chemistry `/bin/date -u --date="1 days ago" "+\%Y \%m \%d 00"` 96 03
#
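Note the backslash-escaped \%Y \%m \%d in the crontab line above: cron treats an unescaped % as the end of the command (everything after it becomes stdin), so every percent sign passed to date must be written as \%. Typed in an interactive shell, the same date call would read:

/bin/date -u --date="1 days ago" "+%Y %m %d 00"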
@@ -11,7 +11,7 @@
cat_var_prefix = ' ',
serial_output = .true.,
start_output_time = '__startDate__',
-stop_output_time = '__endDate__',
+stop_output_time = '__startDate__',
output_interval = 3600,
data_yrs_offset = __emissYearOffset__,
emissions_zdim_stag = 1,
......
&CONTROL
domains = __domains__
anthro_dir = '__emissDir__'
src_file_prefix = 'edgar_v5_2015_'
src_file_suffix = '_0.1x0.1.nc'
src_names = 'CO(28)','NOx(30)','SO2(64)','NH3(17)','BC(12)','OC(12)','PM2.5(1)','PM10(1)','BENZENE(78)','BIGALK(72)','BIGENE(56)','C2H2(26)','C2H4(28)',
'C2H5OH(46)','C2H6(30)','C3H6(42)','C3H8(44)','CH2O(30)',
'CH3CHO(44)','CH3COCH3(58)','CH3OH(32)','CH3COOH(60)','HCOOH(46)',
'MEK(72)','TOLUENE(92)','XYLENES(106)'
sub_categories = 'TOTAL',
serial_output = .true.,
start_output_time = '__startDate__',
stop_output_time = '__startDate__',
output_interval = 3600,
data_yrs_offset = __emissYearOffset__,
emissions_zdim_stag = 1,
emis_map = 'CO->CO','NO->0.8*NOx','NO2->0.2*NOx','SO2->SO2','NH3->NH3','BIGALK->BIGALK','BIGENE->BIGENE',
'C2H4->C2H4','C2H5OH->C2H5OH','C2H6->C2H6','CH2O->CH2O','CH3CHO->CH3CHO',
'CH3COCH3->CH3COCH3','CH3OH->CH3OH','MEK->MEK','TOLUENE->TOLUENE',
'C3H6->C3H6','C3H8->C3H8','ISOP->0.0*CO','C10H16->0.0*CO',
'SULF->0.0*SO2','C2H2->0.00561790*CO','BENZENE->BENZENE','XYLENE->XYLENES',
'GLY->0.0*CO','MACR->0.0*CO','MGLY->0.0*CO','MVK->0.0*CO',
'HCOOH->0.0*CO','HONO->0.0*CO','VOCA->0.04*CO','VOCBB->0.0*CO',
'ECI(a)->0.1*BC','ECJ(a)->0.9*BC','ORGI(a)->0.1*OC','ORGJ(a)->0.9*OC','PM25I(a)->0.1*PM2.5',
'PM25J(a)->0.9*PM2.5','PM_10(a)->PM10 + -1.0*PM2.5','SO4I(a)->0.0*PM10','SO4J(a)->0.0*PM10','NO3I(a)->0.0*PM10',
'NO3J(a)->0.0*PM10','NH4I(a)->0.0*PM10','NH4J(a)->0.0*PM10','NAI(a)->0.0*PM10','NAJ(a)->0.0*PM10',
'CLI(a)->0.0*PM10','CLJ(a)->0.0*PM10','CO_A->CO','CO_BB->0.0*CO','ORGI_A(a)->0.0*PM10',
'ORGI_BB(a)->0.0*PM10','ORGJ_A(a)->0.0*PM10','ORGJ_BB(a)->0.0*PM10'
/
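Reading the emis_map above: each entry maps a WRF-Chem species to an expression over the inventory species, so 'NO->0.8*NOx' and 'NO2->0.2*NOx' split emitted NOx 80/20 between NO and NO2, 'PM_10(a)->PM10 + -1.0*PM2.5' derives coarse particulate mass as PM10 minus PM2.5, and pairs such as 'ECI(a)->0.1*BC'/'ECJ(a)->0.9*BC' split mass 10/90 between the two fine aerosol modes (I/J). The '(a)' suffix marks aerosol species, and mappings to '0.0*CO' or '0.0*PM10' register a species with zero emissions.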
+:h:0:alt
+:h:0:gsw
+:h:0:tke
+:h:0:SWDDIF
\ No newline at end of file
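These lines use WRF's iofields syntax, <+|->:<h|i>:<stream>:<field list>: '+:h:0:alt' adds the field alt to history stream 0, and a leading '-' would remove a field instead. Such a file takes effect when referenced via iofields_filename in the &time_control namelist.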
#!/bin/bash -l
#SBATCH --partition=alcc1,epyc
#SBATCH -o __runDir__/__mainJobName__.%j.%N.out
#SBATCH -D __runDir__
#SBATCH -J __mainJobName__
#SBATCH --nodes=2
#SBATCH --ntasks-per-node=28
#SBATCH --mem-per-cpu=2000
#SBATCH --mail-type=FAIL
#SBATCH --mail-user=christoph.knote@med.uni-augsburg.de
#SBATCH --time=12:00:00
#
# the variable $MACHINEFILE holds the name of the file to which the
# hostnames of the acquired nodes/cores are written, e.g.:
#
MACHINEFILE=__runDir__/slurm.hosts
#
# Generate a machinefile for Open MPI so that the hosts appear in the
# same order as if the job were launched via srun
#
srun hostname -s | sort -n > $MACHINEFILE
#!/bin/bash -l
#SBATCH --partition=alcc1
#SBATCH -o __runDir__/__postJobName__.%j.%N.out
#SBATCH -D __runDir__
#SBATCH -J __postJobName__
#SBATCH --ntasks=1
#SBATCH --mem=5G
#SBATCH --mail-type=FAIL
#SBATCH --mail-user=christoph.knote@med.uni-augsburg.de
#SBATCH --time=02:00:00
#
# the variable $MACHINEFILE holds the name of the file to which the
# hostnames of the acquired nodes/cores are written, e.g.:
#
MACHINEFILE=__runDir__/slurm.hosts
#
# Generate a machinefile for Open MPI so that the hosts appear in the
# same order as if the job were launched via srun
#
srun hostname -s | sort -n > $MACHINEFILE
#!/bin/bash -l
#SBATCH --partition=alcc1
#SBATCH -o __runDir__/__preJobName__.%j.%N.out
#SBATCH -D __runDir__
#SBATCH -J __preJobName__
#SBATCH --ntasks=1
#SBATCH --mem=10G
#SBATCH --mail-type=FAIL
#SBATCH --mail-user=christoph.knote@med.uni-augsburg.de
#SBATCH --time=03:00:00
#
# the variable $MACHINEFILE holds the name of the file to which the
# hostnames of the acquired nodes/cores are written, e.g.:
#
MACHINEFILE=__runDir__/slurm.hosts
#
# Generate a machinefile for Open MPI so that the hosts appear in the
# same order as if the job were launched via srun
#
srun hostname -s | sort -n > $MACHINEFILE
#!/bin/bash -l
#SBATCH --partition=alcc1
#SBATCH -o __runDir__/__stagingJobName__.%j.%N.out
#SBATCH -D __runDir__
#SBATCH -J __stagingJobName__
#SBATCH --ntasks=1
#SBATCH --mem=5G
#SBATCH --mail-type=FAIL
#SBATCH --mail-user=christoph.knote@med.uni-augsburg.de
#SBATCH --time=01:00:00
#
# the variable $MACHINEFILE holds the name of the file to which the
# hostnames of the acquired nodes/cores are written, e.g.:
#
MACHINEFILE=__runDir__/slurm.hosts
#
# Generate a machinefile for Open MPI so that the hosts appear in the
# same order as if the job were launched via srun
#
srun hostname -s | sort -n > $MACHINEFILE
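The slurm.hosts file written here is what the blueprint's mpiCommand* settings later consume via "-hostfile $MACHINEFILE" (see the MPI settings in the config below); sorting the srun output keeps the rank-to-node placement identical to what srun itself would produce.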
#!/bin/bash
# ------------------------------------------------------------------------------
# WRFOTRON v 2.0b
# Christoph Knote (LMU Munich, Germany)
# 06/2016
# christoph.knote@lmu.de
# ------------------------------------------------------------------------------
# path to the WRFotron installation
chainDir=${HOME}/wrfotron
# --- Executable locations ---
# WPS installation directory
WPSDir=${WPS_SRC_PATH}
# WRF installation directory
WRFDir=${WRF_SRC_PATH}
# --- Input data settings ---
# path to geogrid input data
geogDir=${WRF_GEOG_PATH}
# meteo input
# Vtable for the chosen meteo input
metVtableFile=${WPS_SRC_PATH}/ungrib/Variable_Tables/Vtable.GFS
# time increment in hours
metInc=1
# full path to a met input file - you can use any "%<>" abbreviations known
# to the "date" command
metFilePattern="${WRF_GFS_METEO_PATH}/GF%Y%m%d%H"
# example:
# "/glade/p/rda/data/ds083.2/grib2/%Y/%Y.%m/fnl_%Y%m%d_%H_00.grib2"
# --- Pre/Postprocessing settings ---
# preparation script
preScriptPath=NONEXISTENT.bash
# postprocessing scripts (arbitrary)
postScriptPath=NONEXISTENT.bash
# postprocessing scripts (actions for each wrfout file)
postPerFileScriptPath=NONEXISTENT.bash
# --- Working directories ---
# where the WRF will be run (some fast, large disk like "scratch" or similar)
workDir=${SCRATCH}/WRF/work/
# where the unprocessed WRF output will be stored
stagingRootDir=${SCRATCH}/WRF/staging/
# where the WRF output will be stored
archiveRootDir=${SCRATCH}/archive/WRF/
# where the WRF restart files will be stored
restartRootDir=${SCRATCH}/WRF/restart/
# remove run directory after run is finished?
removeRunDir=false
# --- MPI settings ---
# mental note for GNU:
#mpirun -mca pml ucx -x UCX_TLS=rc,shm -N $SLURM_NTASKS_PER_NODE -hostfile $MACHINEFILE
mpiCommandPre="mpirun -mca pml ucx -x UCX_TLS=rc,shm -hostfile $MACHINEFILE /usr/bin/time -v"
mpiCommandMain="mpirun -mca pml ucx -x UCX_TLS=rc,shm -N $SLURM_NTASKS_PER_NODE -hostfile $MACHINEFILE /usr/bin/time -v"
mpiCommandReal=${mpiCommandPre}
# mental note for INTEL:
#mpiCommandPre="srun /usr/bin/time -v"
#mpiCommandMain="srun /usr/bin/time -v"
#mpiCommandReal="srun /usr/bin/time -v"
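# for reference: "-mca pml ucx" selects Open MPI's UCX point-to-point layer,
# "-x UCX_TLS=rc,shm" restricts UCX to the InfiniBand RC and shared-memory
# transports, and "-N $SLURM_NTASKS_PER_NODE" launches at most that many
# ranks per host, matching the Slurm allocation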
# --- Batch system ---
# argument to submit a job in a held state
batchHoldArgument="--hold"
# command to release a held job
batchReleaseCommand="scontrol release"
# command to submit jobs to the queueing system
batchSubmitCommand=sbatch
# dependency argument for chaining runs upon submission
batchDepArgument="--dependency=afterany:__id__"
# sed pattern (applied as "s/<pattern>/\1/") to retrieve the job ID from
# the output of $batchSubmitCommand
batchPidSedCommand="Submitted batch job \(.*\)"
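# for illustration (assuming the pattern is applied as "s/<pattern>/\1/",
# as the comment above states), against typical sbatch output:
#   echo "Submitted batch job 123456" | sed "s/Submitted batch job \(.*\)/\1/"
# prints "123456"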
# --- Chemistry ---
withChemistry=true
# WRF-Chem installation directory
WRFChemDir=${WRF_CHEM_SRC_PATH}
# megan_bio_emiss installation directory
WRFMEGANdir=${WRF_CHEM_MEGAN_BIO_EMISS_PATH}
# mozbc installation directory
WRFMOZARTdir=${WRF_CHEM_MOZBC_PATH}
# wesley/exocoldens installation directory
WRFmztoolsdir=${WRF_CHEM_WES_COLDENS_PATH}
# anthro_emiss installation directory
WRFanthrodir=${WRF_CHEM_ANTHRO_EMIS_PATH}
# fire_emis installation directory
WRFfiredir=${WRF_CHEM_FIRE_EMIS_PATH}
# path to MEGAN input data
MEGANdir=/alcc/gpfs2/home/mbees/data/emissions/biogenic/MEGAN
# use anthro_emiss or predefined files?
emissUseAnthroEmiss=true
# raw emission input - the files you read in with anthro_emiss
emissDir=/alcc/gpfs2/home/mbees/data/emissions/anthropogenic/EDGAR-HTAP/MOZART_MOSAIC
# emission conversion script for anthro_emis - must match emissions in emissDir
emissInpFile=emis_edgarhtap_mozmos.inp
# year the emissions are valid for (for offset calculation)
emissYear=2010
# FINN fires
fireFilePattern="/alcc/gpfs2/home/mbees/data/emissions/fires/FINN/GLOB_MOZ4_%Y%j.txt"
fireInpFile=finn_fires.inp
# boundary condition input
chembcFilePattern="/alcc/gpfs2/home/mbees/data/chembc/WACCM/WACCM%Y%m%d"
chembcInpFile=waccm.inp
# TUV photolysis option 4 data file
TUVDataPath="/alcc/gpfs2/home/mbees/data/tuv/TUV.phot.bz2"
#!/bin/bash -l
#SBATCH --partition=alcc1,epyc
#SBATCH -o __runDir__/__mainJobName__.%j.%N.out
#SBATCH -D __runDir__
#SBATCH -J __mainJobName__
#SBATCH --nodes=2
#SBATCH --ntasks-per-node=28
#SBATCH --mem-per-cpu=2000
#SBATCH --mail-type=FAIL
#SBATCH --mail-user=christoph.knote@med.uni-augsburg.de
#SBATCH --time=12:00:00
#!/bin/bash -l
#SBATCH --partition=alcc1
#SBATCH -o __runDir__/__postJobName__.%j.%N.out
#SBATCH -D __runDir__
#SBATCH -J __postJobName__
#SBATCH --ntasks=1
#SBATCH --mem=5G
#SBATCH --mail-type=FAIL
#SBATCH --mail-user=christoph.knote@med.uni-augsburg.de
#SBATCH --time=02:00:00
#!/bin/bash -l
#SBATCH --partition=alcc1
#SBATCH -o __runDir__/__preJobName__.%j.%N.out
#SBATCH -D __runDir__
#SBATCH -J __preJobName__
#SBATCH --ntasks=1
#SBATCH --mem=10G
#SBATCH --mail-type=FAIL
#SBATCH --mail-user=christoph.knote@med.uni-augsburg.de
#SBATCH --time=03:00:00
#!/bin/bash -l
#SBATCH --partition=alcc1
#SBATCH -o __runDir__/__stagingJobName__.%j.%N.out
#SBATCH -D __runDir__
#SBATCH -J __stagingJobName__
#SBATCH --ntasks=1
#SBATCH --mem=5G
#SBATCH --mail-type=FAIL
#SBATCH --mail-user=christoph.knote@med.uni-augsburg.de
#SBATCH --time=01:00:00
@@ -22,8 +22,8 @@
cldchem_onoff = 0, 0,
vertmix_onoff = 1, 1,
chem_conv_tr = 1, 1,
-conv_tr_wetscav = 0, 0,
-conv_tr_aqchem = 0, 0,
+conv_tr_wetscav = 1, 1,
+conv_tr_aqchem = 1, 1,
seas_opt = 2,
dust_opt = 3,
dmsemis_opt = 1,
@@ -36,7 +36,7 @@
biomass_burn_opt = 5, 5,
plumerisefire_frq = 30, 30,
scale_fire_emiss = .true., .true.,
-n2o5_hetchem = 0,
+n2o5_hetchem = 1,
lnox_opt = 1, 1,
diagnostic_chem = 0, 0,
diagnostic_dep = 0, 0,
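In plain terms, this hunk switches on wet scavenging (conv_tr_wetscav) and aqueous-phase chemistry (conv_tr_aqchem) for convectively transported species in both domains, and enables heterogeneous N2O5 hydrolysis on aerosol (n2o5_hetchem = 1).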
......
&share
wrf_core = 'ARW',
-max_dom = 2,
+max_dom = 1,
start_date = '__startDate__','__startDate__','__startDate__'
end_date = '__endDate__','__startDate__','__startDate__',
interval_seconds = __metIncSec__,
@@ -10,14 +10,14 @@
&geogrid
parent_id = 1, 1, 2,
parent_grid_ratio = 1, 9, 5,
-i_parent_start = 1, 54, 40,
-j_parent_start = 1, 34, 40,
-e_we = 91, 82, 196,
-e_sn = 93, 73, 196,
+i_parent_start = 1, 96, 40,
+j_parent_start = 1, 77, 40,
+e_we = 46, 163, 196,
+e_sn = 47, 136, 196,
! geog_data_res = 'modis_fpar+modis_lai+modis_lakes+modis_30s+modis_15s+30s', 'modis_fpar+modis_lai+modis_lakes+modis_30s+modis_15s+30s',
! 'modis_fpar+modis_lai+modis_lakes+modis_30s+modis_15s+30s',
-dx = 20000,
-dy = 20000,
+dx = 80000,
+dy = 80000,
map_proj = 'lambert',
ref_lat = 50.0,
ref_lon = 7.5,
......
@@ -40,18 +40,18 @@
time_step = 120,
time_step_fract_num = 0,
time_step_fract_den = 1,
-max_dom = 2,
-e_we = 91, 82, 196,
-e_sn = 93, 73, 196,
+max_dom = 1,
+e_we = 46, 163, 196,
+e_sn = 47, 136, 196,
e_vert = 33, 33, 33,
num_metgrid_levels = 34,
num_metgrid_soil_levels = 4,
-dx = 20000, 2222, 400,
-dy = 20000, 2222, 400,
+dx = 80000, 2222, 400,
+dy = 80000, 2222, 400,
grid_id = 1, 2, 3,
parent_id = 0, 1, 2,
-i_parent_start = 1, 54, 40,
-j_parent_start = 1, 34, 40,
+i_parent_start = 1, 96, 40,
+j_parent_start = 1, 77, 40,
parent_grid_ratio = 1, 9, 5,
parent_time_step_ratio = 1, 9, 5,
feedback = 0,
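Net effect of the namelist changes: the single remaining domain is coarsened from 20 km to 80 km grid spacing while roughly doubling in extent, (46 - 1) x 80 km = 3600 km east-west versus the previous (91 - 1) x 20 km = 1800 km. The d02/d03 columns stay in the namelist but are inert with max_dom = 1; note the listed 2222 m nest spacing matches the old 20 km parent (20000 / 9), not the new 80 km one. The unchanged 120 s time step stays well under the usual 6 x dx(km) rule of thumb (480 s at 80 km).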
......
@@ -8,7 +8,7 @@
#SBATCH --mail-user=christoph.knote@med.uni-augsburg.de
#SBATCH --time=00:30:00
-module load gnu geos proj
+module load gnu gdal geos proj
scriptPath=/alcc/gpfs2/home/u/knotechr/wrfotron/tools/nc_2_geotiff.py
wrfDataPathPattern="/alcc/gpfs2/scratch/mbees/knotechr/archive/WRF/operational_chemistry/wrfout___domain___%Y-%m-%d_%H:%M:%S"
@@ -23,10 +23,10 @@ do
for anHour in $(seq 0 ${intervalHours} ${rangeHours})
do
aWRFDataPath=$(date -u --date="${dateToProcess} 0 UTC +${anHour} hours" "+${wrfDataPathPattern/__domain__/${domain}}")
aWRFDataPath=$(date -u --date="${firstDateToProcess} 0 UTC +${anHour} hours" "+${wrfDataPathPattern/__domain__/${domain}}")
aPlotDir=$(date -u --date="${dateToProcess} 0 UTC +${anHour} hours" "+${plotDirPattern/__domain__/${domain}}")
aPlotPath=$(date -u --date="${dateToProcess} 0 UTC +${anHour} hours" "+${plotPathPattern}")
aPlotDir=$(date -u --date="${firstDateToProcess} 0 UTC +${anHour} hours" "+${plotDirPattern/__domain__/${domain}}")
aPlotPath=$(date -u --date="${firstDateToProcess} 0 UTC +${anHour} hours" "+${plotPathPattern}")
if [ -f ${aWRFDataPath} ]
then
......
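A note on the ${wrfDataPathPattern/__domain__/${domain}} idiom in the hunk above: bash's ${var/pattern/replacement} expansion substitutes the first match before the string is handed to date as a format. A minimal standalone sketch (hypothetical date and domain):

pattern="wrfout___domain___%Y-%m-%d_%H:%M:%S"
domain=d01
date -u --date="2015-06-15 12:00" "+${pattern/__domain__/${domain}}"
# -> wrfout_d01_2015-06-15_12:00:00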