Commit fb21078d authored by Prashanth Dwarakanath

Release version 1.2.0

parent 725d27e1
......@@ -14,8 +14,7 @@ fi;
PROCESS_DIR=$1
SCRIPT_DIR=$2
MAPNAME=$3
expected_datasetcount=`cat ${SCRIPT_DIR}/mapfiles/${MAPNAME}|awk '{print $1}'|sort -u|wc -l`
expected_datasetcount=`awk '{print $1}' ${SCRIPT_DIR}/mapfiles/${MAPNAME}|sort -u|wc -l`
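# Note: this assumes the usual mapfile layout in which each line begins with the
# dataset ID, so counting unique first fields gives the expected number of datasets.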
echo "Expected datasetcount=$expected_datasetcount"
phase1=`ls -ltr ${PROCESS_DIR}/09_publish1_01_${MAPNAME}*|tail -1|awk '{print $9}'`
......
......@@ -20,19 +20,18 @@ timestamp="`date +${ESGF_PUBLISHING_PHASE_TIMESTAMP}`";
# Assuming phase script name `*_XX.sh` with `XX` being phase number
phase="`basename $0 .sh | awk -F_ '{print $NF}'`";
if [ -z $1 ]; then
echo "USAGE: $0 <staging_dir>"
if [ -z $2 ]; then
echo "USAGE: $0 <staging_dir> <publish_system>"
exit 1;
fi;
staging_dir=$1
publish_system=$2
# Check for staging directory
[ -d ${STAGING_DIR} ] || (
echo "ERROR: Staging dir ${STAGING_DIR} does not exist!";
exit 1;
);
if [ ! "${publish_system}" = "${HOSTNAME}" ]; then
echo "Cannot execute on this host as ESGF_PUBLISHING_NODE_HOST value in esgf_publish_settings.sh does not match the hostname of this machine.";
exit -1;
fi
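# Guard: the publication phases only run on the node named as the publish system,
# so an accidental invocation on the wrong host aborts here.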
echo "PHASE${phase}: Beginning staging setup @ ${timestamp}"; \
mkdir ${staging_dir};
echo "PHASE${phase}: Finished @ `date +${ESGF_PUBLISHING_PHASE_TIMESTAMP}`";
......
......@@ -51,7 +51,6 @@ done;
# Begin
echo "PHASE${phase}: Beginning node data integrity check @ ${timestamp}";
source ${ESGF_VENV_DIR}/bin/activate ${ESGF_VENV_NAME};
echo "PHASE${phase}: executing inside '${CONDA_DEFAULT_ENV}' venv";
cd ${datasetmgr_dir};
......@@ -68,8 +67,8 @@ echo "PHASE${phase}: check 01 PASSED";
if [ `grep path_padding_options ${datasetmgr_config} | grep -c ${data_dir}` -eq 0 ]; then
tmpfile=`date +%s`_datasetmgr.conf;
gawk -F, '{if ($1 ~ /^path_padding_options/) printf("%s,%s\n", $0, "dptemporary_r'${request_number}:${data_dir}'/"); else print;}' ${datasetmgr_config} > ${tmpfile};
cp ${datasetmgr_config} ${tmpfile}.bak;
mv -f ${tmpfile} ${datasetmgr_config};
cp `realpath ${datasetmgr_config}` ${tmpfile}.bak;
mv -f ${tmpfile} `realpath ${datasetmgr_config}`;
fi;
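# If ${data_dir} is not already covered by path_padding_options, the gawk step above
# appends a temporary "dptemporary_r<request>:<data_dir>/" entry to that line and keeps
# the original config as ${tmpfile}.bak; the checks below restore that backup whenever
# a check fails, and it is restored again once check 08 has passed.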
logfile="${process_dir}/${phase}_integrity_02_${timestamp}.log";
python3 datasetmgr.py --add-files ${checksum_file} --relative-path --dry-run > ${logfile};
......@@ -80,15 +79,19 @@ num_files=`find ${file_dir} -type f -regex '^.*\.nc$' | grep -v latest | \
grep -v files | wc -l | cut -d' ' -f1`;
[ ! ${files_pre_add_inspected} -eq 0 ] || (
echo "PHASE${phase}: ERROR - 02 inspected file number mismatch!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
[ ${files_pre_add_warnings} -eq 0 ] || (
echo "PHASE${phase}: ERROR - 02 warnings found for possibly duplicated files!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
[ ${files_pre_add_inspected} -eq ${num_files} ] || (
echo "PHASE${phase}: ERROR - 02 file number mismatch!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
[ ${files_pre_add_added} -eq 0 ] || (
echo "PHASE${phase}: ERROR - 02 added file number mismatch!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
echo "PHASE${phase}: check 02 PASSED";
logfile="${process_dir}/${phase}_integrity_03_${timestamp}.log";
......@@ -97,12 +100,15 @@ files_post_add_inspected=`grep "Inspected files" ${logfile} | head -n 1 | awk '{
files_post_add_added=`grep "Added files" ${logfile} | awk '{print $NF}'`;
[ ! ${files_post_add_inspected} -eq 0 ] || (
echo "PHASE${phase}: ERROR - 03 inspected file number mismatch!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
[ ${files_post_add_inspected} -eq ${num_files} ] || (
echo "PHASE${phase}: ERROR - 03 file number mismatch!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
[ ${files_post_add_added} -eq ${files_post_add_inspected} ] || (
echo "PHASE${phase}: ERROR - 03 added file number mismatch!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
echo "PHASE${phase}: check 03 PASSED";
logfile="${process_dir}/${phase}_integrity_04_${timestamp}.log"; \
......@@ -111,19 +117,23 @@ files_post_add_unchecked=`tail -n 2 ${logfile} | head -n 1 | rev | cut -d' ' -f1
files_post_add_prepub=` tail -n 1 ${logfile} | rev | cut -d' ' -f1 | rev`;
[ ! ${files_post_add_prepub} -eq 0 ] || (
echo "PHASE${phase}: ERROR - 04 prepub file number mismatch!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
[ ${files_post_add_unchecked} -eq ${num_files} ] || (
echo "PHASE${phase}: ERROR - 04 unchecked file number mismatch!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
echo "PHASE${phase}: check 04 PASSED";
logfile="${process_dir}/${phase}_integrity_05_${timestamp}.log";
python3 datasetmgr.py -c ${checksum_upstream_file} --relative-path > ${logfile};
[ `grep -c "${CHECK_CHECKSUMS_TALLY}" ${logfile}` -eq 1 ] || (
echo "PHASE${phase}: ERROR - 05 checksums mismatch!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
files_check_unchecked=`grep "Unchecked files" ${logfile} | awk '{print $NF}'`;
[ ${files_check_unchecked} -eq 0 ] || (
echo "PHASE${phase}: ERROR - 05 unchecked file number mismatch!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
echo "PHASE${phase}: check 05 PASSED";
logfile="${process_dir}/${phase}_integrity_06_${timestamp}.log";
......@@ -132,9 +142,11 @@ files_post_check_unchecked=`grep "Unchecked files" ${logfile} | awk '{print $NF}
files_post_check_prepub=`grep "Prepublication files" ${logfile} | awk '{print $NF}'`;
[ ${files_post_check_unchecked} -eq 0 ] || (
echo "PHASE${phase}: ERROR - 06 unchecked file number mismatch!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
[ ${files_post_check_prepub} -eq ${num_files} ] || (
echo "PHASE${phase}: ERROR - 06 prepub file number mismatch!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
echo "PHASE${phase}: check 06 PASSED";
logfile="${process_dir}/${phase}_integrity_07_${timestamp}.log";
......@@ -146,8 +158,10 @@ logfile="${process_dir}/${phase}_integrity_08_${timestamp}.log";
python3 datasetmgr.py -s > ${logfile};
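# check 08 (idempotency): a fresh status report must be byte-identical to the earlier
# one stored in ${first_logfile}; any diff output fails the check.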
[ `diff ${logfile} ${first_logfile} | wc -l | cut -d' ' -f1` -eq 0 ] || (
echo "PHASE${phase}: ERROR - 08 integrity failed idempotency check!";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
exit 1; );
echo "PHASE${phase}: check 08 PASSED";
mv -f ${tmpfile}.bak `realpath ${datasetmgr_config}`;
# End
echo "PHASE${phase}: Finished @ `date +${ESGF_PUBLISHING_PHASE_TIMESTAMP}`";
......
......@@ -100,7 +100,7 @@ files_check_unchecked=`grep "Unchecked files" ${logfile} | awk '{print $NF}'`;
echo "PHASE${phase}: ERROR - 04 unchecked file number mismatch!";
exit 1; );
echo "PHASE${phase}: check 04 PASSED";
logfile="${process_dir}/${phase}_move_05_${timestamp}.log";
logfile="${process_dir}/${phase}_move_05_datasetmgrout.log";
python3 datasetmgr.py -s > ${logfile};
files_post_check_unchecked=`grep "Unchecked files" ${logfile} | awk '{print $NF}'`;
files_post_check_prepub=`grep "Prepublication files" ${logfile} | awk '{print $NF}'`;
......
......@@ -21,8 +21,8 @@ timestamp="`date +${ESGF_PUBLISHING_PHASE_TIMESTAMP}`";
# Assuming phase script name `*_XX.sh` with `XX` being phase number
phase="`basename $0 .sh | awk -F_ '{print $NF}'`";
if [ -z $4 ]; then
echo "USAGE: $0 <process_dir> <script_dir> <datasetmgr_dir> <ckmapfilelist>";
if [ -z $5 ]; then
echo "USAGE: $0 <process_dir> <script_dir> <datasetmgr_dir> <ckmapfilelist> <phase7 datasetmgrout>";
exit 1;
fi;
......@@ -30,6 +30,7 @@ process_dir="$1";
script_dir="$2";
datasetmgr_dir="$3";
ckmapfilelist="$4";
phase7datasetmgrout="$5";
# Check for directories
for directory in ${process_dir} ${script_dir}; do
......@@ -39,12 +40,12 @@ for directory in ${process_dir} ${script_dir}; do
);
done;
## Check for files
#for filename in ${ckmapfile}; do
#[ -e ${filename} ] || (
#echo "PHASE${phase}: ERROR - file ${filename} does not exist!";
#exit 1;
#);
#done;
for filename in ${phase7datasetmgrout}; do
[ -e ${filename} ] || (
echo "PHASE${phase}: ERROR - file ${filename} does not exist!";
exit 1;
);
done;
# Begin
echo "PHASE${phase}: Beginning node post publication checks @ ${timestamp}";
......@@ -53,7 +54,14 @@ source ${ESGF_VENV_DIR}/bin/activate ${ESGF_VENV_NAME};
echo "PHASE${phase}: executing inside '${CONDA_DEFAULT_ENV}' venv";
cd ${script_dir};
echo "PHASE${phase}: working in '`pwd`'";
addedfiles=0
addedcatalogs=0
while read filename; do
numfiles=`wc -l ${filename}|awk '{print $1}'`
numdatasets=`awk '{print $1}' ${filename}|sort -u|wc -l`
addedfiles=`expr $addedfiles + $numfiles`
addedcatalogs=`expr $addedcatalogs + $numdatasets` # because the number of added catalogs equals the number of added datasets
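# e.g. a (hypothetical) mapfile with 120 lines spanning 3 distinct dataset IDs adds
# 120 to addedfiles and 3 to addedcatalogs.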
mapname=`basename $filename`;
logfile="${process_dir}/${phase}_postpub_01_${timestamp}_${mapname}.log";
bash ${ESGF_PUBLISHING_DIR}/esgf_publish_check_phasefiles.sh ${process_dir} ${script_dir} ${mapname} > ${logfile} 2>&1;
......@@ -66,21 +74,33 @@ done <${ckmapfilelist}
cd ${datasetmgr_dir};
echo "PHASE${phase}: working in '`pwd`'";
logfile="${process_dir}/${phase}_postpub_02_${timestamp}.log";
# Let's stop here for now
echo "Bye bye"
exit -1
python3 datasetmgr.py -i /esg/content/thredds/esgcet/ --new-only > ${logfile} 2>&1;
echo "PHASE${phase}: check 02 NOT IMPLEMENTED";
if [ $? -ne 0 ]; then exit 1; fi
echo "PHASE${phase}: check 02 PASSED";
logfile="${process_dir}/${phase}_postpub_03_${timestamp}.log";
python3 datasetmgr.py -s > ${logfile};
files_post_check_unchecked=`grep "Unchecked files" ${logfile} | awk '{print $NF}'`;
files_post_check_prepub=`grep "Prepublication files" ${logfile} | awk '{print $NF}'`;
files_post_check_catalog=`grep "Catalog files" ${logfile} | awk '{print $NF}'`;
files_post_check_pub=`grep "Published files" ${logfile} | awk '{print $NF}'`;
files_pre_check_catalog=`grep "Catalog files" ${phase7datasetmgrout} | awk '{print $NF}'`;
files_pre_check_pub=`grep "Published files" ${phase7datasetmgrout} | awk '{print $NF}'`;
expectedcatalogcount=`expr ${files_pre_check_catalog} + ${addedcatalogs}`
expectedpublishedcount=`expr ${files_pre_check_pub} + ${addedfiles}`
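# Expected totals after publication: the phase-07 baseline from ${phase7datasetmgrout}
# plus what this run added (one new catalog per added dataset, one published entry per
# added file).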
logfile="${process_dir}/${phase}_postpub_03_added_datasetcount.log";
echo ${addedcatalogs} >${logfile}
[ ${files_post_check_unchecked} -eq 0 ] || (
echo "PHASE${phase}: ERROR - 03 unchecked file number mismatch!";
exit 1; );
[ ${files_post_check_prepub} -eq 0 ] || (
echo "PHASE${phase}: ERROR - 03 prepub file number mismatch!";
exit 1; );
[ ${files_post_check_pub} -eq ${expectedpublishedcount} ] || (
echo "PHASE${phase}: ERROR - 03 published file count ${files_post_check_pub} does not match expected value ${expectedpublishedcount}!";
exit 1; );
[ ${files_post_check_catalog} -eq ${expectedcatalogcount} ] || (
echo "PHASE${phase}: ERROR - 03 catalog file count ${files_post_check_catalog} does not match expected value ${expectedcatalogcount}!";
exit 1; );
echo "PHASE${phase}: check 03 PASSED";
# End
......
......@@ -21,7 +21,7 @@ timestamp="`date +${ESGF_PUBLISHING_PHASE_TIMESTAMP}`";
# Assuming phase script name `*_XX.sh` with `XX` being phase number
phase="`basename $0 .sh | awk -F_ '{print $NF}'`";
if [ -z $4 ]; then
if [ -z $5 ]; then
echo "USAGE: $0 <process_dir> <nagios_dir> <publish_system> <experiment>";
exit 1;
fi;
......@@ -30,6 +30,7 @@ process_dir="$1";
nagios_dir="$2";
publish_system="$3";
experiment="`echo $4 | tr '[:lower:]' '[:upper:]'`";
phase12_addeddatasetcount="`cat $5`"
# Check for directories
for directory in ${process_dir} ${nagios_dir}; do
......@@ -38,19 +39,9 @@ for directory in ${process_dir} ${nagios_dir}; do
exit 1;
);
done;
## Check for files
#for filename in ${ckmapfile}; do
#[ -e ${filename} ] || (
#echo "PHASE${phase}: ERROR - file ${filename} does not exist!";
#exit 1;
#);
#done;
# Begin
echo "PHASE${phase}: Beginning node nagios update @ ${timestamp}";
source ${ESGF_VENV_DIR}/bin/activate ${ESGF_VENV_NAME};
echo "PHASE${phase}: executing inside '${CONDA_DEFAULT_ENV}' venv";
cd ${nagios_dir};
echo "PHASE${phase}: working in '`pwd`'";
logfile="${process_dir}/${phase}_nagios_01_${timestamp}.log";
......@@ -59,6 +50,13 @@ if bash runquery2.sh ${publish_system} > ${logfile} 2>&1; then
exit 1;
else
echo "PHASE${phase}: check 01 PASSED";
oldcount=`grep "Found" $logfile|cut -d ' ' -f2|head -1`
newcount=`grep "Found" $logfile|cut -d ' ' -f2|tail -1`
expectednewcount=`expr $oldcount + $phase12_addeddatasetcount`
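# Assumption: runquery2.sh prints at least two "Found <N> ..." lines, the first giving
# the pre-publication dataset count and the last the post-publication count.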
if [ $newcount -ne $expectednewcount ]; then
echo "PHASE${phase}: ERROR 01: Found $newcount datasets from $experiment published on ${publish_system} while expected dataset count was $expectednewcount";
exit 1;
fi
fi;
cp queryout.${publish_system}.${experiment} oldqueryout.${publish_system}.${experiment};
logfile="${process_dir}/${phase}_nagios_02_${timestamp}.log";
......
......@@ -21,13 +21,14 @@ timestamp="`date +${ESGF_PUBLISHING_PHASE_TIMESTAMP}`";
# Assuming phase script name `*_XX.sh` with `XX` being phase number
phase="`basename $0 .sh | awk -F_ '{print $NF}'`";
if [ -z $2 ]; then
if [ -z $3 ]; then
echo "USAGE: $0 <process_dir> <script_dir>";
exit 1;
fi;
process_dir="$1";
script_dir="$2";
publish_system="$3";
# Check for directories
for directory in ${process_dir} ${script_dir}; do
......@@ -44,7 +45,11 @@ source ${ESGF_VENV_DIR}/bin/activate ${ESGF_VENV_NAME};
echo "PHASE${phase}: executing inside '${CONDA_DEFAULT_ENV}' venv";
cd ${script_dir};
echo "PHASE${phase}: working in '`pwd`'";
bash backup_solr_plus_tds.sh;
if [ "$publish_system" = "esg-dn2.nsc.liu.se" ]; then
bash backup_tds.sh;
elif [ "$publish_system" = "esg-dn1.nsc.liu.se" ]; then
bash backup_solr_plus_tds.sh;
fi
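# Backup scope depends on the node: esg-dn2 backs up only the TDS content, while
# esg-dn1 backs up both the Solr index and the TDS content.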
# End
echo "PHASE${phase}: Finished @ `date +${ESGF_PUBLISHING_PHASE_TIMESTAMP}`";
......
......@@ -21,13 +21,19 @@ timestamp="`date +${ESGF_PUBLISHING_PHASE_TIMESTAMP}`";
# Assuming phase script name `*_XX.sh` with `XX` being phase number
phase="`basename $0 .sh | awk -F_ '{print $NF}'`";
if [ -z $2 ]; then
echo "USAGE: $0 <user> <node>";
if [ -z $1 ]; then
echo "USAGE: $0 <user>";
exit 1;
fi;
user="$1";
node="$2";
if [ "${HOSTNAME}" = "esg-dn1.nsc.liu.se" ]; then
node="esg-dn2.nsc.liu.se";
elif [ "${HOSTNAME}" = "esg-dn2.nsc.liu.se" ]; then
node="esg-dn1.nsc.liu.se";
else exit -1;
fi
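# The sync target is always the peer data node: runs on esg-dn1 sync to esg-dn2 and
# vice versa; any other host aborts.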
# Begin
echo "PHASE${phase}: Beginning node synchronization @ ${timestamp}";
......
......@@ -80,7 +80,7 @@ cat ${publication_list} | while read request client releasedir datadir checksums
else
variable_job=`${ESGF_PUBLISHING_DIR}/esgf_publish_cmip6_varcheck.sh ${releasedir} ${datadir} | grep "^[[:space:]]*sbatch" | awk '{print $NF}'`;
fi
workbook_job=`${ESGF_PUBLISHING_DIR}/esgf_publish_workbook_job.sh ${publication_email} ${request} ${client} ${checksums} ${receive_job} ${variable_job} ${mode}| grep "^[[:space:]]*sbatch" | awk '{print $NF}'`;
workbook_job=`${ESGF_PUBLISHING_DIR}/esgf_publish_workbook_job.sh ${publication_email} ${request} ${client} ${checksums} ${receive_job} ${variable_job} ${ESGF_AGENT} ${mode}| grep "^[[:space:]]*sbatch" | awk '{print $NF}'`;
echo "$receive_job"
echo "$variable_job"
echo "$workbook_job"
......
# Active NSC ESGF publishing agents
ESGF_AGENTS=(
"krishnaveni"
......@@ -6,12 +5,15 @@ ESGF_AGENTS=(
"robpi"
);
# Current NSC ESGF publisher (ensure that you set this before invoking esgf_publish_process_list.sh)
ESGF_AGENT="pchengi";
ESGF_PUBLISHING_EMAIL=pchengi@nsc.liu.se;
ESGF_PUBLISHING_DIR=`dirname $(which ${0})`;
ESGF_VARCHECKER=${ESGF_PUBLISHING_DIR}/20200319_exclvarcheck_cmip6.sh;
ESGF_VARCHECKER_LOCAL=${ESGF_PUBLISHING_DIR}/local_exclvarcheck_cmip6.sh;
ESGF_VARCHECKER_SKIP=0;
ESGF_MODULEFILE=${ESGF_PUBLISHING_DIR}/20200406_moduleloads_cmip6;
ESGF_PUBLISHING_EMAIL=pchengi@nsc.liu.se;
ESGF_PUBLISHING_LOG_DIR=${HOME}/esgf_publishing_logs;
ESGF_WORKBOOK_VENV=${ESGF_PUBLISHING_DIR}/workbook_venv;
ESGF_PUBLISHING_PIPE_DIR=${HOME}/esgf_pipes;
......
......@@ -9,7 +9,7 @@
#SBATCH --mail-user={EMAIL}
esgf_publish_workbook.sh {REQUEST_MAIL} {CLIENT} {CHECKSUMSFILE} {RECEIVE_JOB} {VARIABLE_JOB} {MODE}
esgf_publish_workbook.sh {REQUEST_MAIL} {CLIENT} {CHECKSUMSFILE} {RECEIVE_JOB} {VARIABLE_JOB} {AGENT} {MODE}
# ERROR codes
# 1 - unknown error
......
......@@ -17,7 +17,7 @@ fi;
source `dirname $(which ${0})`/esgf_publish_settings.sh
# Number of settings options
NUMSETTINGS=6;
NUMSETTINGS=7;
# If you require a target list, of minimum 1, otherwise NUMSETTINGS
let NUMREQUIRED=${NUMSETTINGS}+0;
......@@ -31,6 +31,7 @@ if [ $# -lt ${NUMREQUIRED} ]; then
echo " checksumfile - file containing the checksums";
echo " receive - recieve checks \`.job\` filename";
echo " variable - variable checks \`.job\` filename";
echo " agent ";
echo " --nobatch,--local (optional arguments)";
echo "";
echo " EXAMPLES:";
......@@ -50,7 +51,8 @@ client=$2;
checksumfile=$3;
receive_job=$4;
variable_job=$5;
mode="$6";
agent=$6
mode="$7";
nobatchmode=0
localmode=0
......@@ -77,15 +79,15 @@ short_today=`date +%Y%m%d`;
source ${ESGF_WORKBOOK_VENV}/bin/activate;
mkdir -p ${ESGF_PUBLISHING_LOG_DIR}
mkdir -p ${ESGF_PUBLISHING_PIPE_DIR}
workbookargs="--checksumfile ${checksumfile} --org ${client} -ml ${maplabel} -m ${mount_point} -rh ${host} -ph ${ESGF_PUBLISHING_NODE_HOST} -t ${ESGF_PUBLISHING_DIR}/esgf_template_cmip6_publication_workflow.jin -e ${email} -rn ${number} -rp ${release_dir} -lr ${receive_log} -lv ${variable_log} -lf ${variable_long_log}";
workbookargs="--agent ${agent} --checksumfile ${checksumfile} --org ${client} -ml ${maplabel} -m ${mount_point} -rh ${host} -ph ${ESGF_PUBLISHING_NODE_HOST} -t ${ESGF_PUBLISHING_DIR}/esgf_template_cmip6_publication_workflow.jin -e ${email} -rn ${number} -rp ${release_dir} -lr ${receive_log} -lv ${variable_log} -lf ${variable_long_log}";
workbookout="${ESGF_PUBLISHING_LOG_DIR}/${today}-CMIP6_data_publication_request_${serial}.md";
mkfileout="${ESGF_PUBLISHING_PIPE_DIR}/${short_today}_cmip6_${client}_req${serial}.mk";
if [ "$mode" = "--local" ]; then
mkfileargs="--checksumfile ${checksumfile} --org ${client} --mode local -ml ${maplabel} -m ${mount_point} -rh ${host} -ph ${ESGF_PUBLISHING_NODE_HOST} -t ${ESGF_PUBLISHING_DIR}/esgf_template_cmip6_publication_pipeline.mk -e ${email} -rn ${number} -rp ${release_dir} -lr ${receive_log} -lv ${variable_log} -lf ${variable_long_log}";
mkfileargs="--agent ${agent} --checksumfile ${checksumfile} --org ${client} --mode local -ml ${maplabel} -m ${mount_point} -rh ${host} -ph ${ESGF_PUBLISHING_NODE_HOST} -t ${ESGF_PUBLISHING_DIR}/esgf_template_cmip6_publication_pipeline.mk -e ${email} -rn ${number} -rp ${release_dir} -lr ${receive_log} -lv ${variable_log} -lf ${variable_long_log}";
elif [ "$mode" = "--nobatch" ]; then
mkfileargs="--checksumfile ${checksumfile} --org ${client} --mode nobatch -ml ${maplabel} -m ${mount_point} -rh ${host} -ph ${ESGF_PUBLISHING_NODE_HOST} -t ${ESGF_PUBLISHING_DIR}/esgf_template_cmip6_publication_pipeline.mk -e ${email} -rn ${number} -rp ${release_dir} -lr ${receive_log} -lv ${variable_log} -lf ${variable_long_log}";
mkfileargs="--agent ${agent} --checksumfile ${checksumfile} --org ${client} --mode nobatch -ml ${maplabel} -m ${mount_point} -rh ${host} -ph ${ESGF_PUBLISHING_NODE_HOST} -t ${ESGF_PUBLISHING_DIR}/esgf_template_cmip6_publication_pipeline.mk -e ${email} -rn ${number} -rp ${release_dir} -lr ${receive_log} -lv ${variable_log} -lf ${variable_long_log}";
else
mkfileargs="--checksumfile ${checksumfile} --org ${client} -ml ${maplabel} -m ${mount_point} -rh ${host} -ph ${ESGF_PUBLISHING_NODE_HOST} -t ${ESGF_PUBLISHING_DIR}/esgf_template_cmip6_publication_pipeline.mk -e ${email} -rn ${number} -rp ${release_dir} -lr ${receive_log} -lv ${variable_log} -lf ${variable_long_log}";
mkfileargs="--agent ${agent} --checksumfile ${checksumfile} --org ${client} -ml ${maplabel} -m ${mount_point} -rh ${host} -ph ${ESGF_PUBLISHING_NODE_HOST} -t ${ESGF_PUBLISHING_DIR}/esgf_template_cmip6_publication_pipeline.mk -e ${email} -rn ${number} -rp ${release_dir} -lr ${receive_log} -lv ${variable_log} -lf ${variable_long_log}";
fi
echo "esgf_publish_generate_workbook.py ${workbookargs} >${workbookout}";
esgf_publish_generate_workbook.py ${workbookargs} > ${workbookout}
......
......@@ -18,7 +18,7 @@ fi;
source `dirname $(which ${0})`/esgf_publish_settings.sh
# Number of settings options
NUMSETTINGS=7;
NUMSETTINGS=8;
# If you require a target list, of minimum 1, otherwise NUMSETTINGS
let NUMREQUIRED=${NUMSETTINGS}+0;
......@@ -28,15 +28,16 @@ fi
# I/O-check and help text
if [ $# -lt ${NUMREQUIRED} ]; then
echo "USAGE: [TIME=${TIME}] $0 <email> <request> <client> <checksumsfile> <receivejob> <variablejob> <mode>";
echo "USAGE: [TIME=${TIME}] $0 <email> <request> <client> <checksumsfile> <receivejob> <variablejob> <agent> <mode>";
echo "";
echo " OPTIONS:";
echo " email - Request email";
echo " request - Request number";
echo " client - Client (KNMI/SMHI etc)";
echo " checksumsfile - Checksums file
echo " checksumsfile - Checksums file"
echo " receivejob - .job file for the receive job";
echo " variablejob - .job file for the variable job";
echo " agent (user who executes publication)";
echo " --local/--nobatch - optional arguments";
echo "";
echo " ENVIRONMENT:";
......@@ -60,7 +61,8 @@ client=$3;
checksumsfile=`basename $4`;
receive_job=$5;
variable_job=$6;
mode=$7
agent=$7;
mode=$8
template=${ESGF_PUBLISHING_DIR}/esgf_publish_workbook.job
datetime=`date +%Y%m%d`
......@@ -81,7 +83,7 @@ fi;
# set lognames to the same format as jobname
slurmfile=${LOGDIR}/${jobname}.out
cat ${template} | sed 's/{NAME}/'${jobname}'/g' | sed 's/{TIME}/'${TIME}'/g' | sed 's/{EMAIL}/'${ESGF_PUBLISHING_EMAIL}'/g' | sed 's|{REQUEST_MAIL}|'${email}'|g' | sed 's|{RECEIVE_JOB}|'${receive_job}'|g' | sed 's|{VARIABLE_JOB}|'${variable_job}'|g' | sed 's|{REQUEST}|'${request}'|g' | sed 's|{SLURM}|'${slurmfile}'|g' | sed 's|{MODE}|'${mode}'|g' | sed 's|{CLIENT}|'${client}'|g' | sed 's|{CHECKSUMSFILE}|'${checksumsfile}'|g' > ${LOGDIR}/${jobname}.job
cat ${template} | sed 's/{NAME}/'${jobname}'/g' | sed 's/{TIME}/'${TIME}'/g' | sed 's/{EMAIL}/'${ESGF_PUBLISHING_EMAIL}'/g' | sed 's|{REQUEST_MAIL}|'${email}'|g' | sed 's|{RECEIVE_JOB}|'${receive_job}'|g' | sed 's|{VARIABLE_JOB}|'${variable_job}'|g' | sed 's|{REQUEST}|'${request}'|g' | sed 's|{SLURM}|'${slurmfile}'|g' | sed 's|{MODE}|'${mode}'|g' | sed 's|{CLIENT}|'${client}'|g' | sed 's|{CHECKSUMSFILE}|'${checksumsfile}'|g' |sed 's|{AGENT}|'${agent}'|g' > ${LOGDIR}/${jobname}.job
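# The added sed expression fills the new {AGENT} placeholder in the template, so the
# generated job passes the publishing agent through to esgf_publish_workbook.sh (its
# sixth positional argument).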
echo "# Created job ${jobname} . Submit with;"
echo "sbatch ${LOGDIR}/${jobname}.job"
......@@ -47,7 +47,7 @@ $(LOCKFILE): | $(PROCESS_DIR)
touch $@
$(PROCESS_DIR)/00_staging.done: | $(PROCESS_DIR) $(LOCKFILE)
esgf_publish_cmip6_phase_00.sh $(STAGING_DIR);
esgf_publish_cmip6_phase_00.sh $(STAGING_DIR) $(PUBLISH_SYSTEM);
touch $@;
$(PROCESS_DIR)/01_download.done: | $(PROCESS_DIR)/00_staging.done $(LOCKFILE)
......@@ -95,15 +95,15 @@ $(PROCESS_DIR)/11_publish3.done: | $(PROCESS_DIR)/10_publish2.done $(LOCKFILE)
touch $@;
$(PROCESS_DIR)/12_postpub.done: | $(PROCESS_DIR)/11_publish3.done $(LOCKFILE)
esgf_publish_cmip6_phase_12.sh $(PROCESS_DIR) $(SCRIPT_DIR) $(DATASETMGR_DIR) $(PROCESS_DIR)/08_mapfix_03_ckmaplist;
esgf_publish_cmip6_phase_12.sh $(PROCESS_DIR) $(SCRIPT_DIR) $(DATASETMGR_DIR) $(PROCESS_DIR)/08_mapfix_03_ckmaplist $(PROCESS_DIR)/07_move_05_datasetmgrout.log;
touch $@;
$(PROCESS_DIR)/13_nagios.done: | $(PROCESS_DIR)/12_postpub.done $(LOCKFILE)
esgf_publish_cmip6_phase_13.sh $(PROCESS_DIR) $(NAGIOS_DIR) $(PUBLISH_SYSTEM) $(EXPERIMENT);
esgf_publish_cmip6_phase_13.sh $(PROCESS_DIR) $(NAGIOS_DIR) $(PUBLISH_SYSTEM) $(EXPERIMENT) $(PROCESS_DIR)/12_postpub_03_added_datasetcount.log;
touch $@;
$(PROCESS_DIR)/14_backups.done: | $(PROCESS_DIR)/13_nagios.done $(LOCKFILE)
esgf_publish_cmip6_phase_14.sh $(PROCESS_DIR) $(SCRIPT_DIR);
esgf_publish_cmip6_phase_14.sh $(PROCESS_DIR) $(SCRIPT_DIR) $(PUBLISH_SYSTEM);
touch $@;
$(PROCESS_DIR)/15_commit.done: | $(PROCESS_DIR)/14_backups.done $(LOCKFILE)
......@@ -111,7 +111,7 @@ $(PROCESS_DIR)/15_commit.done: | $(PROCESS_DIR)/14_backups.done $(LOCKFILE)
touch $@;
$(PROCESS_DIR)/16_sync.done: | $(PROCESS_DIR)/15_commit.done $(LOCKFILE)
esgf_publish_cmip6_phase_16.sh root esg-dn2.nsc.liu.se;
esgf_publish_cmip6_phase_16.sh root
touch $@;
$(FINISH_FILE): | $(PROCESS_DIR)/16_sync.done
......@@ -122,6 +122,3 @@ createdir: $(addprefix $(PROCESS_DIR)/,00_staging.done)
downloadonly: $(addprefix $(PROCESS_DIR)/,00_staging.done 01_download.done)
prechecksum: $(addprefix $(PROCESS_DIR)/,00_staging.done 01_download.done 02_cdf2cim.done 03_nctcck.done)
uptochecksum: $(addprefix $(PROCESS_DIR)/,00_staging.done 01_download.done 02_cdf2cim.done 03_nctcck.done 04_checksums.done)
uptomapfilegen: $(addprefix $(PROCESS_DIR)/,00_staging.done 01_download.done 02_cdf2cim.done 03_nctcck.done 04_checksums.done 05_integrity.done 06_mapfile.done)
uptomapfixdone: $(addprefix $(PROCESS_DIR)/,00_staging.done 01_download.done 02_cdf2cim.done 03_nctcck.done 04_checksums.done 05_integrity.done 06_mapfile.done 07_move.done 08_mapfix.done)
prenagiossync: $(addprefix $(PROCESS_DIR)/,00_staging.done 01_download.done 02_cdf2cim.done 03_nctcck.done 04_checksums.done 05_integrity.done 06_mapfile.done 07_move.done 08_mapfix.done 09_publish1.done 10_publish2.done 11_publish3.done 12_postpub.done)
today=`date +%Y%m%d`
python3 -m venv ${HOME}/${today}_esgf_publishing_python_venv
source ${HOME}/${today}_esgf_publishing_python_venv/bin/activate
pip3 install --upgrade pip
pip3 install jinja2
deactivate
ln -s ${HOME}/${today}_esgf_publishing_python_venv ${HOME}/esgf_publishing/workbook_venv
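# Assuming ESGF_PUBLISHING_DIR resolves to ${HOME}/esgf_publishing, the symlink above is
# what ESGF_WORKBOOK_VENV in esgf_publish_settings.sh points at, so refreshing the venv
# only requires re-running these steps and updating the link.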