diff --git a/creation/web_base/glidein_startup.sh b/creation/web_base/glidein_startup.sh index 4ebd8604f..1b703b2b6 100644 --- a/creation/web_base/glidein_startup.sh +++ b/creation/web_base/glidein_startup.sh @@ -7,29 +7,6 @@ # Base script of the Glidein (pilot job) # This scripts runs all the others -# default IFS, to protect against unusual environment, better than "unset IFS" because works with restoring old one -IFS=$' \t\n' - -global_args="$*" -# GWMS_STARTUP_SCRIPT=$0 -GWMS_STARTUP_SCRIPT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/$(basename "${BASH_SOURCE[0]}")" -GWMS_PATH="" -# Relative to the work directory (GWMS_DIR, gwms_lib_dir, gwms_bin_dir and gwms_exec_dir will be the absolute paths) -# bin (utilities), lib (libraries), exec (aux scripts to be executed/sourced, e.g. pre-job) -GWMS_SUBDIR=".gwms.d" - -export LANG=C - -# General options -GWMS_MULTIUSER_GLIDEIN= -# Set GWMS_MULTIUSER_GLIDEIN if the Glidein may spawn processes (for jobs) as a different user. -# This will prepare the glidein, e.g. setting to 777 the permission of TEMP directories -# This should never happen only when using GlExec. Not in Singularity, not w/o sudo mechanisms. -# Comment the following line if GlExec or similar will not be used -#GWMS_MULTIUSER_GLIDEIN=true -# Default GWMS log server -GWMS_LOGSERVER_ADDRESS='https://fermicloud152.fnal.gov/log' - ############################## # Utility functions to allow the script to source functions and retrieve data stored as tarball at the end of the script itself @@ -87,7 +64,6 @@ on_die() { kill -s "$1" %1 } -GWMS_MULTIGLIDEIN_CHILDS= on_die_multi() { echo "Multi-Glidein received signal... shutting down child glideins (forwarding $1 signal to ${GWMS_MULTIGLIDEIN_CHILDS})" 1>&2 ON_DIE=1 @@ -110,16 +86,16 @@ logdebug() { # Functions to start multiple glideins copy_all() { - # 1:prefix (of the files to skip), 2:directory - # should it copy also hidden files? 
- mkdir -p "$2" - for f in *; do - [[ -e "${f}" ]] || break # protect for nullglob TODO: should this be a continue? - if [[ "${f}" = ${1}* ]]; then - continue - fi - cp -r "${f}" "$2"/ - done + # 1:prefix (of the files to skip), 2:directory + # should it copy also hidden files? + mkdir -p "$2" + for f in *; do + [[ -e "${f}" ]] || break # protect for nullglob TODO: should this be a continue? + if [[ "${f}" = ${1}* ]]; then + continue + fi + cp -r "${f}" "$2"/ + done } do_start_all() { @@ -192,61 +168,12 @@ usage() { exit 1 } - -# params will contain the full list of parameters -# -param_XXX YYY will become "XXX YYY" -# TODO: can use an array instead? -params="" - -while [ $# -gt 0 ] -do case "$1" in - -factory) glidein_factory="$2";; - -name) glidein_name="$2";; - -entry) glidein_entry="$2";; - -clientname) client_name="$2";; - -clientgroup) client_group="$2";; - -web) repository_url="$2";; - -proxy) proxy_url="$2";; - -dir) work_dir="$2";; - -sign) sign_id="$2";; - -signtype) sign_type="$2";; - -signentry) sign_entry_id="$2";; - -cluster) condorg_cluster="$2";; - -subcluster) condorg_subcluster="$2";; - -submitcredid) glidein_cred_id="$2";; - -schedd) condorg_schedd="$2";; - -descript) descript_file="$2";; - -descriptentry) descript_entry_file="$2";; - -clientweb) client_repository_url="$2";; - -clientwebgroup) client_repository_group_url="$2";; - -clientsign) client_sign_id="$2";; - -clientsigntype) client_sign_type="$2";; - -clientsigngroup) client_sign_group_id="$2";; - -clientdescript) client_descript_file="$2";; - -clientdescriptgroup) client_descript_group_file="$2";; - -slotslayout) slots_layout="$2";; - -v) operation_mode="$2";; - -multiglidein) multi_glidein="$2";; - -multirestart) multi_glidein_restart="$2";; - -param_*) params="$params $(echo "$1" | awk '{print substr($0,8)}') $2";; - *) (warn "Unknown option $1"; usage) 1>&2; exit 1 -esac -shift 2 -done - -# make sure we have a valid slots_layout -if (echo "x${slots_layout}" | grep -i fixed) >/dev/null 
2>&1 ; then - slots_layout="fixed" -else - slots_layout="partitionable" -fi - construct_xml() { - result="$1" + result="$1" - glidein_end_time="$(date +%s)" + glidein_end_time="$(date +%s)" - echo " + echo " ${start_dir} @@ -262,142 +189,139 @@ ${result} extract_parent_fname() { - exitcode=$1 - - if [ -s otrx_output.xml ]; then - # file exists and is not 0 size - last_result=$(cat otrx_output.xml) - - if [ "${exitcode}" -eq 0 ]; then - echo "SUCCESS" - else - last_script_name=$(echo "${last_result}" |awk '/" - echo " OK" - # propagate metrics as well - echo "${last_result}" | grep '" - else - last_script_name=$(echo "${last_result}" |awk '/" + echo " OK" + # propagate metrics as well + echo "${last_result}" | grep '" + else + last_script_name=$(echo "${last_result}" |awk '//{fr=0;}{if (fr==1) print $0}//{fr=1;}') - my_reason=" Validation failed in ${last_script_name}. + last_script_reason=$(echo "${last_result}" | awk 'BEGIN{fr=0;}/<[/]detail>/{fr=0;}{if (fr==1) print $0}//{fr=1;}') + my_reason=" Validation failed in ${last_script_name}. ${last_script_reason}" - echo " " - echo " ERROR + echo " " + echo " ERROR ${last_script_name}" - # propagate metrics as well (will include the failure metric) - echo "${last_result}" | grep '" - echo " + # propagate metrics as well (will include the failure metric) + echo "${last_result}" | grep '" + echo " ${my_reason} " - fi - else - # create a minimal XML file, else - echo " " - if [ "${exitcode}" -eq 0 ]; then - echo " OK" - else - echo " ERROR" - echo " Unknown" - fi - echo " + fi + else + # create a minimal XML file, else + echo " " + if [ "${exitcode}" -eq 0 ]; then + echo " OK" + else + echo " ERROR" + echo " Unknown" + fi + echo " No detail. Could not find source XML file. 
" - fi + fi } basexml2simplexml() { - final_result="$1" + final_result="$1" - # augment with node info - echo "${final_result}" | awk 'BEGIN{fr=1;}{if (fr==1) print $0}//{fr=0;}' + # augment with node info + echo "${final_result}" | awk 'BEGIN{fr=1;}{if (fr==1) print $0}//{fr=0;}' - echo " ${client_name}" - echo " ${client_group}" + echo " ${client_name}" + echo " ${client_group}" - echo " $(id -un)" - echo " $(uname -m)" - if [ -e '/etc/redhat-release' ]; then - echo " $(cat /etc/redhat-release)" - fi - echo " $(uname -n)" + echo " $(id -un)" + echo " $(uname -m)" + if [ -e '/etc/redhat-release' ]; then + echo " $(cat /etc/redhat-release)" + fi + echo " $(uname -n)" - echo "${final_result}" | awk 'BEGIN{fr=0;}{if (fr==1) print $0}//{fr=1;}' + echo "${final_result}" | awk 'BEGIN{fr=0;}{if (fr==1) print $0}//{fr=1;}' } simplexml2longxml() { - final_result_simple="$1" - global_result="$2" - - echo "${final_result_simple}" | awk 'BEGIN{fr=1;}{if (fr==1) print $0}/' - echo ' ' - echo "${global_result}" | awk '{print " " $0}' - echo ' ' - echo ' ' - fi - - echo "${final_result_simple}" | awk 'BEGIN{fr=0;}{if (fr==1) print $0}//{fr=0;}' - - echo " ${glidein_factory}" - echo " ${glidein_name}" - echo " ${glidein_entry}" - echo " ${condorg_cluster}" - echo " ${condorg_subcluster}" - echo " ${glidein_cred_id}" - echo " ${condorg_schedd}" - - echo "${final_result_simple}" | awk 'BEGIN{fr=0;}{if (fr==1) print $0}//{fr=1;}' + final_result_simple="$1" + global_result="$2" + + echo "${final_result_simple}" | awk 'BEGIN{fr=1;}{if (fr==1) print $0}/' + echo ' ' + echo "${global_result}" | awk '{print " " $0}' + echo ' ' + echo ' ' + fi + + echo "${final_result_simple}" | awk 'BEGIN{fr=0;}{if (fr==1) print $0}//{fr=0;}' + + echo " ${glidein_factory}" + echo " ${glidein_name}" + echo " ${glidein_entry}" + echo " ${condorg_cluster}" + echo " ${condorg_subcluster}" + echo " ${glidein_cred_id}" + echo " ${condorg_schedd}" + + echo "${final_result_simple}" | awk 'BEGIN{fr=0;}{if (fr==1) 
print $0}//{fr=1;}' } print_tail() { - exit_code=$1 - final_result_simple="$2" - final_result_long="$3" - - glidein_end_time=$(date +%s) - let total_time=${glidein_end_time}-${startup_time} - echo "=== Glidein ending $(date) (${glidein_end_time}) with code ${exit_code} after ${total_time} ===" - echo "" - echo "=== XML description of glidein activity ===" - echo "${final_result_simple}" | grep -v "" - echo "=== End XML description of glidein activity ===" - - echo "" 1>&2 - echo "=== Encoded XML description of glidein activity ===" 1>&2 - echo "${final_result_long}" | gzip --stdout - | b64uuencode 1>&2 - echo "=== End encoded XML description of glidein activity ===" 1>&2 + exit_code=$1 + final_result_simple="$2" + final_result_long="$3" + + glidein_end_time=$(date +%s) + let total_time=${glidein_end_time}-${startup_time} + echo "=== Glidein ending $(date) (${glidein_end_time}) with code ${exit_code} after ${total_time} ===" + echo "" + echo "=== XML description of glidein activity ===" + echo "${final_result_simple}" | grep -v "" + echo "=== End XML description of glidein activity ===" + + echo "" 1>&2 + echo "=== Encoded XML description of glidein activity ===" 1>&2 + echo "${final_result_long}" | gzip --stdout - | b64uuencode 1>&2 + echo "=== End encoded XML description of glidein activity ===" 1>&2 } #################################### # Cleaup, print out message and exit -work_dir_created=0 -glide_local_tmp_dir_created=0 - # Remove Glidein directories (work_dir, glide_local_tmp_dir) # 1 - exit code # Using GLIDEIN_DEBUG_OPTIONS, start_dir, work_dir_created, work_dir, @@ -426,169 +350,169 @@ glidien_cleanup() { # use this for early failures, when we cannot assume we can write to disk at all # too bad we end up with some repeated code, but difficult to do better early_glidein_failure() { - error_msg="$1" + error_msg="$1" - warn "${error_msg}" + warn "${error_msg}" - sleep "${sleep_time}" - # wait a bit in case of error, to reduce lost glideins + sleep 
"${sleep_time}" + # wait a bit in case of error, to reduce lost glideins - glidein_end_time="$(date +%s)" - result=" WN_RESOURCE + glidein_end_time="$(date +%s)" + result=" WN_RESOURCE ERROR ${error_msg} " - final_result="$(construct_xml "${result}")" - final_result_simple="$(basexml2simplexml "${final_result}")" - # have no global section - final_result_long="$(simplexml2longxml "${final_result_simple}" "")" + final_result="$(construct_xml "${result}")" + final_result_simple="$(basexml2simplexml "${final_result}")" + # have no global section + final_result_long="$(simplexml2longxml "${final_result_simple}" "")" - glidien_cleanup + glidien_cleanup - print_tail 1 "${final_result_simple}" "${final_result_long}" + print_tail 1 "${final_result_simple}" "${final_result_long}" - exit 1 + exit 1 } # use this one once the most basic ops have been done glidein_exit() { - # Removed lines about $lock_file (lock file for whole machine) not present elsewhere - - gwms_process_scripts "$GWMS_DIR" cleanup "${glidein_config}" - - global_result="" - if [ -f otr_outlist.list ]; then - global_result=$(cat otr_outlist.list) - chmod u+w otr_outlist.list - fi - - ge_last_script_name=$(extract_parent_fname "$1") - result=$(extract_parent_xml_detail "$1") - final_result=$(construct_xml "${result}") - - # augment with node info - final_result_simple=$(basexml2simplexml "${final_result}") - - # Create a richer version, too - final_result_long=$(simplexml2longxml "${final_result_simple}" "${global_result}") - - if [ "$1" -ne 0 ]; then - report_failed=$(gconfig_get GLIDEIN_Report_Failed "${glidein_config}" "-i") - - if [ -z "${report_failed}" ]; then - report_failed="NEVER" - fi - - factory_report_failed=$(gconfig_get GLIDEIN_Factory_Report_Failed "${glidein_config}" "-i") - - if [ -z "${factory_report_failed}" ]; then - factory_collector=$(gconfig_get GLIDEIN_Factory_Collector "${glidein_config}" "-i") - if [ -z "${factory_collector}" ]; then - # no point in enabling it if there are no 
collectors - factory_report_failed="NEVER" - else - factory_report_failed="ALIVEONLY" - fi - fi - - do_report=0 - if [ "${report_failed}" != "NEVER" ] || [ "${factory_report_failed}" != "NEVER" ]; then - do_report=1 - fi - - - # wait a bit in case of error, to reduce lost glideins - let "dl=$(date +%s) + ${sleep_time}" - dlf=$(date --date="@${dl}") - add_config_line "GLIDEIN_ADVERTISE_ONLY" "1" - add_config_line "GLIDEIN_Failed" "True" - add_config_line "GLIDEIN_EXIT_CODE" "$1" - add_config_line "GLIDEIN_ToDie" "${dl}" - add_config_line "GLIDEIN_Expire" "${dl}" - add_config_line "GLIDEIN_LAST_SCRIPT" "${ge_last_script_name}" - add_config_line "GLIDEIN_ADVERTISE_TYPE" "Retiring" - - add_config_line "GLIDEIN_FAILURE_REASON" "Glidein failed while running ${ge_last_script_name}. Keeping node busy until ${dl} (${dlf})." - - condor_vars_file=$(gconfig_get CONDOR_VARS_FILE "${glidein_config}" "-i") - if [ -n "${condor_vars_file}" ]; then - # if we are to advertise, this should be available... else, it does not matter anyhow - add_condor_vars_line "GLIDEIN_ADVERTISE_ONLY" "C" "True" "+" "Y" "Y" "-" - add_condor_vars_line "GLIDEIN_Failed" "C" "True" "+" "Y" "Y" "-" - add_condor_vars_line "GLIDEIN_EXIT_CODE" "I" "-" "+" "Y" "Y" "-" - add_condor_vars_line "GLIDEIN_ToDie" "I" "-" "+" "Y" "Y" "-" - add_condor_vars_line "GLIDEIN_Expire" "I" "-" "+" "Y" "Y" "-" - add_condor_vars_line "GLIDEIN_LAST_SCRIPT" "S" "-" "+" "Y" "Y" "-" - add_condor_vars_line "GLIDEIN_FAILURE_REASON" "S" "-" "+" "Y" "Y" "-" - fi - main_work_dir="$(get_work_dir main)" - - for ((t=$(date +%s); t < dl; t=$(date +%s))) - do - if [ -e "${main_work_dir}/${last_script}" ] && [ "${do_report}" = "1" ] ; then - # if the file exists, we should be able to talk to the collectors - # notify that things went badly and we are waiting + # Removed lines about $lock_file (lock file for whole machine) not present elsewhere + + gwms_process_scripts "$GWMS_DIR" cleanup "${glidein_config}" + + global_result="" + if [ -f 
otr_outlist.list ]; then + global_result=$(cat otr_outlist.list) + chmod u+w otr_outlist.list + fi + + ge_last_script_name=$(extract_parent_fname "$1") + result=$(extract_parent_xml_detail "$1") + final_result=$(construct_xml "${result}") + + # augment with node info + final_result_simple=$(basexml2simplexml "${final_result}") + + # Create a richer version, too + final_result_long=$(simplexml2longxml "${final_result_simple}" "${global_result}") + + if [ "$1" -ne 0 ]; then + report_failed=$(gconfig_get GLIDEIN_Report_Failed "${glidein_config}" "-i") + + if [ -z "${report_failed}" ]; then + report_failed="NEVER" + fi + + factory_report_failed=$(gconfig_get GLIDEIN_Factory_Report_Failed "${glidein_config}" "-i") + + if [ -z "${factory_report_failed}" ]; then + factory_collector=$(gconfig_get GLIDEIN_Factory_Collector "${glidein_config}" "-i") + if [ -z "${factory_collector}" ]; then + # no point in enabling it if there are no collectors + factory_report_failed="NEVER" + else + factory_report_failed="ALIVEONLY" + fi + fi + + do_report=0 + if [ "${report_failed}" != "NEVER" ] || [ "${factory_report_failed}" != "NEVER" ]; then + do_report=1 + fi + + + # wait a bit in case of error, to reduce lost glideins + let "dl=$(date +%s) + ${sleep_time}" + dlf=$(date --date="@${dl}") + add_config_line "GLIDEIN_ADVERTISE_ONLY" "1" + add_config_line "GLIDEIN_Failed" "True" + add_config_line "GLIDEIN_EXIT_CODE" "$1" + add_config_line "GLIDEIN_ToDie" "${dl}" + add_config_line "GLIDEIN_Expire" "${dl}" + add_config_line "GLIDEIN_LAST_SCRIPT" "${ge_last_script_name}" + add_config_line "GLIDEIN_ADVERTISE_TYPE" "Retiring" + + add_config_line "GLIDEIN_FAILURE_REASON" "Glidein failed while running ${ge_last_script_name}. Keeping node busy until ${dl} (${dlf})." + + condor_vars_file=$(gconfig_get CONDOR_VARS_FILE "${glidein_config}" "-i") + if [ -n "${condor_vars_file}" ]; then + # if we are to advertise, this should be available... 
else, it does not matter anyhow + add_condor_vars_line "GLIDEIN_ADVERTISE_ONLY" "C" "True" "+" "Y" "Y" "-" + add_condor_vars_line "GLIDEIN_Failed" "C" "True" "+" "Y" "Y" "-" + add_condor_vars_line "GLIDEIN_EXIT_CODE" "I" "-" "+" "Y" "Y" "-" + add_condor_vars_line "GLIDEIN_ToDie" "I" "-" "+" "Y" "Y" "-" + add_condor_vars_line "GLIDEIN_Expire" "I" "-" "+" "Y" "Y" "-" + add_condor_vars_line "GLIDEIN_LAST_SCRIPT" "S" "-" "+" "Y" "Y" "-" + add_condor_vars_line "GLIDEIN_FAILURE_REASON" "S" "-" "+" "Y" "Y" "-" + fi + main_work_dir="$(get_work_dir main)" + + for ((t=$(date +%s); t < dl; t=$(date +%s))) + do + if [ -e "${main_work_dir}/${last_script}" ] && [ "${do_report}" = "1" ] ; then + # if the file exists, we should be able to talk to the collectors + # notify that things went badly and we are waiting + if [ "${factory_report_failed}" != "NEVER" ]; then + add_config_line "GLIDEIN_ADVERTISE_DESTINATION" "Factory" + warn "Notifying Factory of error" + "${main_work_dir}/${last_script}" glidein_config + fi + if [ "${report_failed}" != "NEVER" ]; then + add_config_line "GLIDEIN_ADVERTISE_DESTINATION" "VO" + warn "Notifying VO of error" + "${main_work_dir}/${last_script}" glidein_config + fi + fi + + # sleep for about 5 mins... 
but randomize a bit + let "ds=250+${RANDOM}%100" + let "as=$(date +%s) + ${ds}" + if [ ${as} -gt ${dl} ]; then + # too long, shorten to the deadline + let "ds=${dl} - $(date +%s)" + fi + warn "Sleeping ${ds}" + sleep ${ds} + done + + if [ -e "${main_work_dir}/${last_script}" ] && [ "${do_report}" = "1" ]; then + # notify that things went badly and we are going away if [ "${factory_report_failed}" != "NEVER" ]; then add_config_line "GLIDEIN_ADVERTISE_DESTINATION" "Factory" - warn "Notifying Factory of error" + if [ "${factory_report_failed}" = "ALIVEONLY" ]; then + add_config_line "GLIDEIN_ADVERTISE_TYPE" "INVALIDATE" + else + add_config_line "GLIDEIN_ADVERTISE_TYPE" "Killing" + add_config_line "GLIDEIN_FAILURE_REASON" "Glidein failed while running ${ge_last_script_name}. Terminating now. (${dl}) (${dlf})" + fi "${main_work_dir}/${last_script}" glidein_config + warn "Last notification sent to Factory" fi if [ "${report_failed}" != "NEVER" ]; then add_config_line "GLIDEIN_ADVERTISE_DESTINATION" "VO" - warn "Notifying VO of error" + if [ "${report_failed}" = "ALIVEONLY" ]; then + add_config_line "GLIDEIN_ADVERTISE_TYPE" "INVALIDATE" + else + add_config_line "GLIDEIN_ADVERTISE_TYPE" "Killing" + add_config_line "GLIDEIN_FAILURE_REASON" "Glidein failed while running ${ge_last_script_name}. Terminating now. (${dl}) (${dlf})" + fi "${main_work_dir}/${last_script}" glidein_config + warn "Last notification sent to VO" fi fi + fi - # sleep for about 5 mins... 
but randomize a bit - let "ds=250+${RANDOM}%100" - let "as=$(date +%s) + ${ds}" - if [ ${as} -gt ${dl} ]; then - # too long, shorten to the deadline - let "ds=${dl} - $(date +%s)" - fi - warn "Sleeping ${ds}" - sleep ${ds} - done - - if [ -e "${main_work_dir}/${last_script}" ] && [ "${do_report}" = "1" ]; then - # notify that things went badly and we are going away - if [ "${factory_report_failed}" != "NEVER" ]; then - add_config_line "GLIDEIN_ADVERTISE_DESTINATION" "Factory" - if [ "${factory_report_failed}" = "ALIVEONLY" ]; then - add_config_line "GLIDEIN_ADVERTISE_TYPE" "INVALIDATE" - else - add_config_line "GLIDEIN_ADVERTISE_TYPE" "Killing" - add_config_line "GLIDEIN_FAILURE_REASON" "Glidein failed while running ${ge_last_script_name}. Terminating now. (${dl}) (${dlf})" - fi - "${main_work_dir}/${last_script}" glidein_config - warn "Last notification sent to Factory" - fi - if [ "${report_failed}" != "NEVER" ]; then - add_config_line "GLIDEIN_ADVERTISE_DESTINATION" "VO" - if [ "${report_failed}" = "ALIVEONLY" ]; then - add_config_line "GLIDEIN_ADVERTISE_TYPE" "INVALIDATE" - else - add_config_line "GLIDEIN_ADVERTISE_TYPE" "Killing" - add_config_line "GLIDEIN_FAILURE_REASON" "Glidein failed while running ${ge_last_script_name}. Terminating now. 
(${dl}) (${dlf})" - fi - "${main_work_dir}/${last_script}" glidein_config - warn "Last notification sent to VO" - fi - fi - fi - - log_write "glidein_startup.sh" "text" "glidein is about to exit with retcode $1" "info" - send_logs_to_remote - - glidien_cleanup - - print_tail "$1" "${final_result_simple}" "${final_result_long}" - - exit "$1" + log_write "glidein_startup.sh" "text" "glidein is about to exit with retcode $1" "info" + send_logs_to_remote + + glidien_cleanup + + print_tail "$1" "${final_result_simple}" "${final_result_long}" + + exit "$1" } #################################################### @@ -730,121 +654,6 @@ params2file() { return 0 } -################ -# Parse and verify arguments - -# allow some parameters to change arguments -# multiglidein GLIDEIN_MULTIGLIDEIN -> multi_glidein -tmp_par=$(params_get_simple GLIDEIN_MULTIGLIDEIN "${params}") -[ -n "${tmp_par}" ] && multi_glidein=${tmp_par} - -case "${operation_mode}" in - nodebug) - sleep_time=1199 - set_debug=0;; - fast) - sleep_time=150 - set_debug=1;; - check) - sleep_time=150 - set -x - set_debug=2;; - *) - sleep_time=1199 - set_debug=1;; -esac - -if [ -z "${descript_file}" ]; then - warn "Missing descript fname." - usage -fi - -if [ -z "${descript_entry_file}" ]; then - warn "Missing descript fname for entry." - usage -fi - -if [ -z "${glidein_name}" ]; then - warn "Missing gliden name." - usage -fi - -if [ -z "${glidein_entry}" ]; then - warn "Missing glidein entry name." - usage -fi - - -if [ -z "${repository_url}" ]; then - warn "Missing Web URL." 
- usage -fi - -repository_entry_url="${repository_url}/entry_${glidein_entry}" - -if [ -z "${proxy_url}" ]; then - proxy_url="None" -fi - -if [ "${proxy_url}" = "OSG" ]; then - if [ -z "${OSG_SQUID_LOCATION}" ]; then - # if OSG does not define a Squid, then don't use any - proxy_url="None" - warn "OSG_SQUID_LOCATION undefined, not using any Squid URL" 1>&2 - else - proxy_url="$(echo "${OSG_SQUID_LOCATION}" | awk -F ':' '{if ($2 =="") {print $1 ":3128"} else {print $0}}')" - fi -fi - -if [ -z "${sign_id}" ]; then - warn "Missing signature." - usage -fi - -if [ -z "${sign_entry_id}" ]; then - warn "Missing entry signature." - usage -fi - -if [ -z "${sign_type}" ]; then - sign_type="sha1" -fi - -if [ "${sign_type}" != "sha1" ]; then - warn "Unsupported signtype ${sign_type} found." - usage -fi - -if [ -n "${client_repository_url}" ]; then - # client data is optional, user url as a switch - if [ -z "${client_sign_type}" ]; then - client_sign_type="sha1" - fi - - if [ "${client_sign_type}" != "sha1" ]; then - warn "Unsupported clientsigntype ${client_sign_type} found." - usage - fi - - if [ -z "${client_descript_file}" ]; then - warn "Missing client descript fname." - usage - fi - - if [ -n "${client_repository_group_url}" ]; then - # client group data is optional, user url as a switch - if [ -z "${client_group}" ]; then - warn "Missing client group name." - usage - fi - - if [ -z "${client_descript_group_file}" ]; then - warn "Missing client descript fname for group." 
- usage - fi - fi -fi - md5wrapper() { # $1 - file name # $2 - option (quiet) @@ -891,467 +700,116 @@ dir_id() { echo "${dir_id}_" } -# Generate glidein UUID -if command -v uuidgen >/dev/null 2>&1; then - glidein_uuid="$(uuidgen)" -else - glidein_uuid="$(od -x -w32 -N32 /dev/urandom | awk 'NR==1{OFS="-"; print $2$3,$4,$5,$6,$7$8$9}')" -fi +set_proxy_fullpath() { + # Set the X509_USER_PROXY path to full path to the file + if fullpath="$(readlink -f "${X509_USER_PROXY}")"; then + echo "Setting X509_USER_PROXY ${X509_USER_PROXY} to canonical path ${fullpath}" 1>&2 + export X509_USER_PROXY="${fullpath}" + else + echo "Unable to get canonical path for X509_USER_PROXY, using ${X509_USER_PROXY}" 1>&2 + fi +} -startup_time="$(date +%s)" -echo "Starting glidein_startup.sh at $(date) (${startup_time})" +############################################ +# get the proper descript file based on id +# Arg: type (main/entry/client/client_group) +get_repository_url() { + case "$1" in + main) echo "${repository_url}";; + entry) echo "${repository_entry_url}";; + client) echo "${client_repository_url}";; + client_group) echo "${client_repository_group_url}";; + *) echo "[get_repository_url] Invalid id: $1" 1>&2 + return 1 + ;; + esac +} -echo "script_checksum = '$(md5wrapper "$0")'" -echo "debug_mode = '${operation_mode}'" -echo "condorg_cluster = '${condorg_cluster}'" -echo "condorg_subcluster= '${condorg_subcluster}'" -echo "condorg_schedd = '${condorg_schedd}'" -echo "glidein_uuid = '${glidein_uuid}'" -echo "glidein_credential_id = '${glidein_cred_id}'" -echo "glidein_factory = '${glidein_factory}'" -echo "glidein_name = '${glidein_name}'" -echo "glidein_entry = '${glidein_entry}'" -if [ -n "${client_name}" ]; then - # client name not required as it is not used for anything but debug info - echo "client_name = '${client_name}'" -fi -if [ -n "${client_group}" ]; then - echo "client_group = '${client_group}'" -fi -echo "multi_glidein/restart = 
'${multi_glidein}'/'${multi_glidein_restart}'" -echo "work_dir = '${work_dir}'" -echo "web_dir = '${repository_url}'" -echo "sign_type = '${sign_type}'" -echo "proxy_url = '${proxy_url}'" -echo "descript_fname = '${descript_file}'" -echo "descript_entry_fname = '${descript_entry_file}'" -echo "sign_id = '${sign_id}'" -echo "sign_entry_id = '${sign_entry_id}'" -if [ -n "${client_repository_url}" ]; then - echo "client_web_dir = '${client_repository_url}'" - echo "client_descript_fname = '${client_descript_file}'" - echo "client_sign_type = '${client_sign_type}'" - echo "client_sign_id = '${client_sign_id}'" - if [ -n "${client_repository_group_url}" ]; then - echo "client_web_group_dir = '${client_repository_group_url}'" - echo "client_descript_group_fname = '${client_descript_group_file}'" - echo "client_sign_group_id = '${client_sign_group_id}'" - fi -fi -echo -echo "Running on $(uname -n)" -echo "System: $(uname -a)" -if [ -e '/etc/redhat-release' ]; then - echo "Release: $(cat /etc/redhat-release 2>&1)" -fi -echo "As: $(id)" -echo "PID: $$" -echo - -if [ ${set_debug} -ne 0 ]; then - echo "------- Initial environment ---------------" 1>&2 - env 1>&2 - echo "------- =================== ---------------" 1>&2 -fi - -# Before anything else, spawn multiple glideins and wait, if asked to do so -if [[ -n "${multi_glidein}" ]] && [[ -z "${multi_glidein_restart}" ]] && [[ "${multi_glidein}" -gt 1 ]]; then - # start multiple glideins - ON_DIE=0 - trap 'ignore_signal' SIGHUP - trap_with_arg 'on_die_multi' SIGTERM SIGINT SIGQUIT - do_start_all "${multi_glidein}" - # Wait for all glideins and exit 0 - # TODO: Summarize exit codes and status from all child glideins - echo "------ Multi-glidein parent waiting for child processes (${GWMS_MULTIGLIDEIN_CHILDS}) ----------" 1>&2 - wait - echo "------ Exiting multi-glidein parent ----------" 1>&2 - exit 0 -fi - -######################################## -# make sure nobody else can write my files -# In the Grid world I cannot trust 
anybody -if ! umask 0022; then - early_glidein_failure "Failed in umask 0022" -fi +##################### +# Check signature +check_file_signature() { + cfs_id="$1" + cfs_fname="$2" -######################################## -# Setup OSG and/or Globus -if [ -r "${OSG_GRID}/setup.sh" ]; then - . "${OSG_GRID}/setup.sh" -else - if [ -r "${GLITE_LOCAL_CUSTOMIZATION_DIR}/cp_1.sh" ]; then - . "${GLITE_LOCAL_CUSTOMIZATION_DIR}/cp_1.sh" - fi -fi + cfs_work_dir="$(get_work_dir "${cfs_id}")" -if [ -z "${GLOBUS_PATH}" ]; then - if [ -z "${GLOBUS_LOCATION}" ]; then - # if GLOBUS_LOCATION not defined, try to guess it - if [ -r "/opt/globus/etc/globus-user-env.sh" ]; then - GLOBUS_LOCATION=/opt/globus - elif [ -r "/osgroot/osgcore/globus/etc/globus-user-env.sh" ]; then - GLOBUS_LOCATION=/osgroot/osgcore/globus - else - warn "GLOBUS_LOCATION not defined and could not guess it." - warn "Looked in:" - warn ' /opt/globus/etc/globus-user-env.sh' - warn ' /osgroot/osgcore/globus/etc/globus-user-env.sh' - warn 'Continuing like nothing happened' - fi - fi - - if [ -r "${GLOBUS_LOCATION}/etc/globus-user-env.sh" ]; then - . "${GLOBUS_LOCATION}/etc/globus-user-env.sh" - else - warn "GLOBUS_PATH not defined and ${GLOBUS_LOCATION}/etc/globus-user-env.sh does not exist." - warn 'Continuing like nothing happened' - fi -fi + cfs_desc_fname="${cfs_work_dir}/${cfs_fname}" + cfs_signature="${cfs_work_dir}/signature.sha1" -set_proxy_fullpath() { - # Set the X509_USER_PROXY path to full path to the file - if fullpath="$(readlink -f "${X509_USER_PROXY}")"; then - echo "Setting X509_USER_PROXY ${X509_USER_PROXY} to canonical path ${fullpath}" 1>&2 - export X509_USER_PROXY="${fullpath}" - else - echo "Unable to get canonical path for X509_USER_PROXY, using ${X509_USER_PROXY}" 1>&2 + if [ "${check_signature}" -gt 0 ]; then # check_signature is global for simplicity + tmp_signname="${cfs_signature}_$$_$(date +%s)_${RANDOM}" + if ! 
grep " ${cfs_fname}$" "${cfs_signature}" > "${tmp_signname}"; then + rm -f "${tmp_signname}" + echo "No signature for ${cfs_desc_fname}." 1>&2 + else + (cd "${cfs_work_dir}" && sha1sum -c "${tmp_signname}") 1>&2 + cfs_rc=$? + if [ ${cfs_rc} -ne 0 ]; then + "${main_dir}"/error_augment.sh -init + "${main_dir}"/error_gen.sh -error "check_file_signature" "Corruption" "File $cfs_desc_fname is corrupted." "file" "${cfs_desc_fname}" "source_type" "${cfs_id}" + "${main_dir}"/error_augment.sh -process ${cfs_rc} "check_file_signature" "${PWD}" "sha1sum -c ${tmp_signname}" "$(date +%s)" "(date +%s)" + "${main_dir}"/error_augment.sh -concat + warn "File ${cfs_desc_fname} is corrupted." + rm -f "${tmp_signname}" + return 1 + fi + rm -f "${tmp_signname}" + echo "Signature OK for ${cfs_id}:${cfs_fname}." 1>&2 + fi fi + return 0 } +##################### +# Untar support func -[ -n "${X509_USER_PROXY}" ] && set_proxy_fullpath - -num_gct=0 - - -######################################## -# prepare and move to the work directory - -# Replace known keywords: Condor, CONDOR, OSG, TMPDIR, AUTO, . -# Empty $work_dir means PWD (same as ".") -# A custom path could be provided (no "*)" in case) -tmp="${work_dir}" -if [ -z "${work_dir}" ]; then - work_dir="$(pwd)" -else - case "${work_dir}" in - Condor|CONDOR) work_dir="${_CONDOR_SCRATCH_DIR}";; - OSG) work_dir="${OSG_WN_TMP}";; - TMPDIR) work_dir="${TMPDIR}";; - AUTO) automatic_work_dir;; - .) work_dir="$(pwd)";; - esac -fi - -if [ -z "${work_dir}" ]; then - early_glidein_failure "Unable to identify Startup dir for the glidein ($tmp)." -fi - -if [ ! -e "${work_dir}" ]; then - early_glidein_failure "Startup dir '${work_dir}' ($tmp) does not exist." -fi +get_untar_subdir() { + gus_id="$1" + gus_fname="$2" -start_dir="$(pwd)" -echo "Started in '${start_dir}' ($tmp)" + gus_prefix="$(get_prefix "${gus_id}")" + gus_config_cfg="${gus_prefix}UNTAR_CFG_FILE" -work_dir_template="${work_dir}/glide_$(dir_id)XXXXXX" -if ! 
work_dir="$(mktemp -d "${work_dir_template}")"; then - early_glidein_failure "Cannot create word_dir '${work_dir_template}'" -else - if ! cd "${work_dir}"; then - early_glidein_failure "Work dir '${work_dir}' was created but cannot cd into it." - else - echo "Running in ${work_dir}" + gus_config_file="$(grep "^${gus_config_cfg} " glidein_config | cut -d ' ' -f 2-)" + if [ -z "${gus_config_file}" ]; then + warn "Error, cannot find '${gus_config_cfg}' in glidein_config." + glidein_exit 1 fi -fi -work_dir_created=1 - -# GWMS_SUBDIR defined on top -GWMS_DIR="${work_dir}/$GWMS_SUBDIR" -if ! mkdir "$GWMS_DIR" ; then - early_glidein_failure "Cannot create GWMS_DIR '$GWMS_DIR'" -fi -gwms_lib_dir="${GWMS_DIR}/lib" -if ! mkdir -p "$gwms_lib_dir" ; then - early_glidein_failure "Cannot create lib dir '$gwms_lib_dir'" -fi -gwms_bin_dir="${GWMS_DIR}/bin" -if ! mkdir -p "$gwms_bin_dir" ; then - early_glidein_failure "Cannot create bin dir '$gwms_bin_dir'" -fi -gwms_exec_dir="${GWMS_DIR}/exec" -if ! mkdir -p "$gwms_exec_dir" ; then - early_glidein_failure "Cannot create exec dir '$gwms_exec_dir'" -else - for i in setup prejob postjob cleanup setup_singularity ; do - mkdir -p "$gwms_exec_dir"/$i - done -fi -# mktemp makes it user readable by definition (ignores umask) -# TODO: MMSEC should this change to increase protection? Since GlExec is gone this should not be needed -if [ -n "${GWMS_MULTIUSER_GLIDEIN}" ]; then - if ! chmod a+rx "${work_dir}"; then - early_glidein_failure "Failed chmod '${work_dir}'" + gus_dir="$(grep -i "^${gus_fname} " "${gus_config_file}" | cut -s -f 2-)" + if [ -z "${gus_dir}" ]; then + warn "Error, untar dir for '${gus_fname}' cannot be empty." + glidein_exit 1 fi -fi - -glide_local_tmp_dir_template="/tmp/glide_$(dir_id)$(id -u -n)_XXXXXX" -if ! 
glide_local_tmp_dir="$(mktemp -d "${glide_local_tmp_dir_template}")"; then - early_glidein_failure "Cannot create temp '${glide_local_tmp_dir_template}'" -fi -glide_local_tmp_dir_created=1 - -glide_tmp_dir="${work_dir}/tmp" -if ! mkdir "${glide_tmp_dir}"; then - early_glidein_failure "Cannot create '${glide_tmp_dir}'" -fi -if [ -n "${GWMS_MULTIUSER_GLIDEIN}" ]; then - # TODO: MMSEC should this change to increase protection? Since GlExec is gone this should not be needed - # the tmpdirs should be world writable - # This way it will work even if the user spawned by the glidein is different than the glidein user - # This happened in GlExec, outside user stays the same in Singularity - if ! chmod 1777 "${glide_local_tmp_dir}"; then - early_glidein_failure "Failed chmod '${glide_local_tmp_dir}'" - fi + echo "${gus_dir}" + return 0 +} - if ! chmod 1777 "${glide_tmp_dir}"; then - early_glidein_failure "Failed chmod '${glide_tmp_dir}'" +##################### +# Periodic execution support function +add_periodic_script() { + # schedules a script for periodic execution using startd_cron + # parameters: wrapper full path, period, cwd, executable path (from cwd), + # config file path (from cwd), ID + # global variable: add_startd_cron_counter + #TODO: should it allow for variable number of parameters? + local include_fname=condor_config_startd_cron_include + local s_wrapper="$1" + local s_period_sec="${2}s" + local s_cwd="$3" + local s_fname="$4" + local s_config="$5" + local s_ffb_id="$6" + local s_cc_prefix="$7" + if [ ${add_startd_cron_counter} -eq 0 ]; then + # Make sure that no undesired file is there when called for first cron + rm -f ${include_fname} fi -fi - -short_main_dir=main -main_dir="${work_dir}/${short_main_dir}" -if ! mkdir "${main_dir}"; then - early_glidein_failure "Cannot create '${main_dir}'" -fi -short_entry_dir=entry_${glidein_entry} -entry_dir="${work_dir}/${short_entry_dir}" -if ! 
mkdir "${entry_dir}"; then - early_glidein_failure "Cannot create '${entry_dir}'" -fi - -if [ -n "${client_repository_url}" ]; then - short_client_dir=client - client_dir="${work_dir}/${short_client_dir}" - if ! mkdir "$client_dir"; then - early_glidein_failure "Cannot create '${client_dir}'" - fi - - if [ -n "${client_repository_group_url}" ]; then - short_client_group_dir=client_group_${client_group} - client_group_dir="${work_dir}/${short_client_group_dir}" - if ! mkdir "${client_group_dir}"; then - early_glidein_failure "Cannot create '${client_group_dir}'" - fi - fi -fi - -# Move the token files from condor to glidein workspace -# TODO: compare this w/ setup_x509.sh -# monitoring tokens, Should be using same credentials directory? -mv "${start_dir}/tokens.tgz" . -mv "${start_dir}/url_dirs.desc" . -# idtokens are handled in setup_x509.sh - TODO: remove once verified -#for idtk in ${start_dir}/*.idtoken; do -# if cp "${idtk}" . ; then -# echo "copied idtoken ${idtk} to $(pwd)" -# else -# echo "failed to copy idtoken ${idtk} to $(pwd)" 1>&2 -# fi -#done -#if [ -e "${GLIDEIN_CONDOR_TOKEN}" ]; then -# mkdir -p ticket -# tname="$(basename ${GLIDEIN_CONDOR_TOKEN})" -# cp "${GLIDEIN_CONDOR_TOKEN}" "ticket/${tname}" -# export GLIDEIN_CONDOR_TOKEN="$(pwd)/ticket/${tname}" -#fi - -# Extract and source all the data contained at the end of this script as tarball -extract_all_data - -wrapper_list="${PWD}/wrapper_list.lst" -touch "${wrapper_list}" - -# create glidein_config -glidein_config="${PWD}/glidein_config" -if ! echo > "${glidein_config}"; then - early_glidein_failure "Could not create '${glidein_config}'" -fi -if ! 
{ - echo "# --- glidein_startup vals ---" - echo "GLIDEIN_UUID ${glidein_uuid}" - echo "GLIDEIN_Factory ${glidein_factory}" - echo "GLIDEIN_Name ${glidein_name}" - echo "GLIDEIN_Entry_Name ${glidein_entry}" - - if [ -n "${client_name}" ]; then - # client name not required as it is not used for anything but debug info - echo "GLIDECLIENT_Name ${client_name}" - fi - if [ -n "${client_group}" ]; then - # client group not required as it is not used for anything but debug info - echo "GLIDECLIENT_Group ${client_group}" - fi - echo "GLIDEIN_CredentialIdentifier ${glidein_cred_id}" - echo "CONDORG_CLUSTER ${condorg_cluster}" - echo "CONDORG_SUBCLUSTER ${condorg_subcluster}" - echo "CONDORG_SCHEDD ${condorg_schedd}" - echo "DEBUG_MODE ${set_debug}" - echo "GLIDEIN_STARTUP_PID $$" - echo "GLIDEIN_START_DIR_ORIG ${start_dir}" - echo "GLIDEIN_WORKSPACE_ORIG $(pwd)" - echo "GLIDEIN_WORK_DIR ${main_dir}" - echo "GLIDEIN_ENTRY_WORK_DIR ${entry_dir}" - echo "TMP_DIR ${glide_tmp_dir}" - echo "GLIDEIN_LOCAL_TMP_DIR ${glide_local_tmp_dir}" - echo "PROXY_URL ${proxy_url}" - echo "DESCRIPTION_FILE ${descript_file}" - echo "DESCRIPTION_ENTRY_FILE ${descript_entry_file}" - echo "GLIDEIN_Signature ${sign_id}" - echo "GLIDEIN_Entry_Signature ${sign_entry_id}" - - if [ -n "${client_repository_url}" ]; then - echo "GLIDECLIENT_WORK_DIR ${client_dir}" - echo "GLIDECLIENT_DESCRIPTION_FILE ${client_descript_file}" - echo "GLIDECLIENT_Signature ${client_sign_id}" - if [ -n "${client_repository_group_url}" ]; then - echo "GLIDECLIENT_GROUP_WORK_DIR ${client_group_dir}" - echo "GLIDECLIENT_DESCRIPTION_GROUP_FILE ${client_descript_group_file}" - echo "GLIDECLIENT_Group_Signature ${client_sign_group_id}" - fi - fi - echo "B64UUENCODE_SOURCE ${PWD}/b64uuencode.source" - echo "ADD_CONFIG_LINE_SOURCE ${PWD}/add_config_line.source" - echo "GET_ID_SELECTORS_SOURCE ${PWD}/get_id_selectors.source" - echo "LOGGING_UTILS_SOURCE ${PWD}/logging_utils.source" - echo "GLIDEIN_PATHS_SOURCE 
${PWD}/glidein_paths.source" - echo "WRAPPER_LIST ${wrapper_list}" - echo "SLOTS_LAYOUT ${slots_layout}" - # Add a line saying we are still initializing... - echo "GLIDEIN_INITIALIZED 0" - # ...but be optimist, and leave advertise_only for the actual error handling script - echo "GLIDEIN_ADVERTISE_ONLY 0" - echo "GLIDEIN_CONDOR_TOKEN ${GLIDEIN_CONDOR_TOKEN}" - echo "# --- User Parameters ---" -} >> "${glidein_config}"; then - early_glidein_failure "Failed in updating '${glidein_config}'" -fi -# shellcheck disable=SC2086 -params2file ${params} - -############################################ -# Setup logging -log_init "${glidein_uuid}" "${work_dir}" -# Remove these files, if they are still there -rm -rf tokens.tgz url_dirs.desc tokens -log_setup "${glidein_config}" - -############################################ -# get the proper descript file based on id -# Arg: type (main/entry/client/client_group) -get_repository_url() { - case "$1" in - main) echo "${repository_url}";; - entry) echo "${repository_entry_url}";; - client) echo "${client_repository_url}";; - client_group) echo "${client_repository_group_url}";; - *) echo "[get_repository_url] Invalid id: $1" 1>&2 - return 1 - ;; - esac -} - -##################### -# Check signature -check_file_signature() { - cfs_id="$1" - cfs_fname="$2" - - cfs_work_dir="$(get_work_dir "${cfs_id}")" - - cfs_desc_fname="${cfs_work_dir}/${cfs_fname}" - cfs_signature="${cfs_work_dir}/signature.sha1" - - if [ "${check_signature}" -gt 0 ]; then # check_signature is global for simplicity - tmp_signname="${cfs_signature}_$$_$(date +%s)_${RANDOM}" - if ! grep " ${cfs_fname}$" "${cfs_signature}" > "${tmp_signname}"; then - rm -f "${tmp_signname}" - echo "No signature for ${cfs_desc_fname}." 1>&2 - else - (cd "${cfs_work_dir}" && sha1sum -c "${tmp_signname}") 1>&2 - cfs_rc=$? 
- if [ ${cfs_rc} -ne 0 ]; then - "${main_dir}"/error_augment.sh -init - "${main_dir}"/error_gen.sh -error "check_file_signature" "Corruption" "File $cfs_desc_fname is corrupted." "file" "${cfs_desc_fname}" "source_type" "${cfs_id}" - "${main_dir}"/error_augment.sh -process ${cfs_rc} "check_file_signature" "${PWD}" "sha1sum -c ${tmp_signname}" "$(date +%s)" "(date +%s)" - "${main_dir}"/error_augment.sh -concat - warn "File ${cfs_desc_fname} is corrupted." - rm -f "${tmp_signname}" - return 1 - fi - rm -f "${tmp_signname}" - echo "Signature OK for ${cfs_id}:${cfs_fname}." 1>&2 - fi - fi - return 0 -} - -##################### -# Untar support func - -get_untar_subdir() { - gus_id="$1" - gus_fname="$2" - - gus_prefix="$(get_prefix "${gus_id}")" - gus_config_cfg="${gus_prefix}UNTAR_CFG_FILE" - - gus_config_file="$(grep "^${gus_config_cfg} " glidein_config | cut -d ' ' -f 2-)" - if [ -z "${gus_config_file}" ]; then - warn "Error, cannot find '${gus_config_cfg}' in glidein_config." - glidein_exit 1 - fi - - gus_dir="$(grep -i "^${gus_fname} " "${gus_config_file}" | cut -s -f 2-)" - if [ -z "${gus_dir}" ]; then - warn "Error, untar dir for '${gus_fname}' cannot be empty." - glidein_exit 1 - fi - - echo "${gus_dir}" - return 0 -} - -##################### -# Periodic execution support function and global variable -add_startd_cron_counter=0 -add_periodic_script() { - # schedules a script for periodic execution using startd_cron - # parameters: wrapper full path, period, cwd, executable path (from cwd), - # config file path (from cwd), ID - # global variable: add_startd_cron_counter - #TODO: should it allow for variable number of parameters? 
- local include_fname=condor_config_startd_cron_include - local s_wrapper="$1" - local s_period_sec="${2}s" - local s_cwd="$3" - local s_fname="$4" - local s_config="$5" - local s_ffb_id="$6" - local s_cc_prefix="$7" - if [ ${add_startd_cron_counter} -eq 0 ]; then - # Make sure that no undesired file is there when called for first cron - rm -f ${include_fname} - fi - - let add_startd_cron_counter=add_startd_cron_counter+1 - local name_prefix=GLIDEIN_PS_ - local s_name="${name_prefix}${add_startd_cron_counter}" + let add_startd_cron_counter=add_startd_cron_counter+1 + local name_prefix=GLIDEIN_PS_ + local s_name="${name_prefix}${add_startd_cron_counter}" # Append the following to the startd configuration # Instead of Periodic and Kill wait for completion: @@ -1536,315 +994,864 @@ perform_wget() { rm -f otrb_output.xml chmod a-w otr_outlist.list fi - return ${wget_retval} -} + return ${wget_retval} +} + +perform_curl() { + curl_args=("$@") + arg_len="${#curl_args[@]}" + ffb_url="${curl_args[0]}" + ffb_repository="$(dirname "${ffb_url}")" + ffb_real_fname="$(basename "${ffb_url}")" + for ((i=0; i&1)" + curl_retval=$? + if [ ${curl_retval} -eq 0 ] && [ ! -e "${ffb_tmp_outname}" ] ; then + touch "${ffb_tmp_outname}" + fi + + + if [ "${curl_retval}" -ne 0 ]; then + curl_version="$(curl --version 2>&1 | head -1)" + warn "${curl_cmd} failed. version:${curl_version} exit code ${curl_retval} stderr: ${curl_resp} " + # cannot use error_*.sh helper functions + # may not have been loaded yet, and wget fails often + echo " + + ${PWD} + $(uname -a) + $(cat /etc/system-release) + ${curl_version} + + + ${curl_cmd} + $(date --date=@"${START}" +%Y-%m-%dT%H:%M:%S%:z) + $(date +%Y-%m-%dT%H:%M:%S%:z) + + + ERROR + Network + ${ffb_url} + ${proxy_url} + ${ffb_id} + + + Failed to load file '${ffb_real_fname}' from '${ffb_repository}' using proxy '${proxy_url}'. ${curl_resp} + +" > otrb_output.xml + warn "Failed to load file '${ffb_real_fname}' from '${ffb_repository}'." 
+ + if [ -f otr_outlist.list ]; then + chmod u+w otr_outlist.list + else + touch otr_outlist.list + fi + cat otrb_output.xml >> otr_outlist.list + echo "" > otrx_output.xml + cat otrb_output.xml >> otrx_output.xml + rm -f otrb_output.xml + chmod a-w otr_outlist.list + fi + return ${curl_retval} +} + +fetch_file_base() { + # Perform the file download and corresponding action (untar, execute, ...) + ffb_id="$1" + ffb_target_fname="$2" + ffb_real_fname="$3" + ffb_file_type="$4" + ffb_config_out="$5" + ffb_period=$6 + # condor cron prefix, used only for periodic executables + ffb_cc_prefix="$7" + + ffb_work_dir="$(get_work_dir "${ffb_id}")" + + ffb_repository="$(get_repository_url "${ffb_id}")" + + ffb_tmp_outname="${ffb_work_dir}/${ffb_real_fname}" + ffb_outname="${ffb_work_dir}/${ffb_target_fname}" + + # Create a dummy default in case something goes wrong + # cannot use error_*.sh helper functions + # may not have been loaded yet + have_dummy_otrx=1 + echo " + + + ${PWD} + + + Unknown + $(date +%Y-%m-%dT%H:%M:%S%:z) + $(date +%Y-%m-%dT%H:%M:%S%:z) + + + ERROR + Unknown + ${ffb_id} + + + An unknown error occurred. 
+ +" > otrx_output.xml + user_agent="glidein/${glidein_entry}/${condorg_schedd}/${condorg_cluster}.${condorg_subcluster}/${client_name}" + ffb_url="${ffb_repository}/${ffb_real_fname}" + curl_version=$(curl --version | head -1 ) + wget_version=$(wget --version | head -1 ) + #old wget command: + #wget --user-agent="wget/glidein/$glidein_entry/$condorg_schedd/$condorg_cluster.$condorg_subcluster/$client_name" "$ffb_nocache_str" -q -O "$ffb_tmp_outname" "$ffb_repository/$ffb_real_fname" + #equivalent to: + #wget ${ffb_url} --user-agent=${user_agent} -q -O "${ffb_tmp_outname}" "${ffb_nocache_str}" + #with env http_proxy=$proxy_url set if proxy_url != "None" + # + #construct curl equivalent so we can try either + + wget_args=("${ffb_url}" "--user-agent" "wget/${user_agent}" "--quiet" "--output-document" "${ffb_tmp_outname}" ) + curl_args=("${ffb_url}" "--user-agent" "curl/${user_agent}" "--silent" "--show-error" "--output" "${ffb_tmp_outname}") + + if [ "${ffb_file_type}" = "nocache" ]; then + if [ "${curl_version}" != "" ]; then + curl_args+=("--header") + curl_args+=("'Cache-Control: no-cache'") + fi + if [ "${wget_version}" != "" ]; then + if wget --help | grep -q "\-\-no-cache "; then + wget_args+=("--no-cache") + elif wget --help |grep -q "\-\-cache="; then + wget_args+=("--cache=off") + else + warn "wget ${wget_version} cannot disable caching" + fi + fi + fi + + if [ "${proxy_url}" != "None" ];then + if [ "${curl_version}" != "" ]; then + curl_args+=("--proxy") + curl_args+=("${proxy_url}") + fi + if [ "${wget_version}" != "" ]; then + #these two arguments have to be last as coded, put any future + #wget args earlier in wget_args array + wget_args+=("--proxy") + wget_args+=("${proxy_url}") + fi + fi + + fetch_completed=1 + if [ ${fetch_completed} -ne 0 ] && [ "${wget_version}" != "" ]; then + perform_wget "${wget_args[@]}" + fetch_completed=$? 
+ fi + if [ ${fetch_completed} -ne 0 ] && [ "${curl_version}" != "" ]; then + perform_curl "${curl_args[@]}" + fetch_completed=$? + fi + + if [ ${fetch_completed} -ne 0 ]; then + return ${fetch_completed} + fi + + # check signature + if ! check_file_signature "${ffb_id}" "${ffb_real_fname}"; then + # error already displayed inside the function + return 1 + fi + + # rename it to the correct final name, if needed + if [ "${ffb_tmp_outname}" != "${ffb_outname}" ]; then + if ! mv "${ffb_tmp_outname}" "${ffb_outname}"; then + warn "Failed to rename ${ffb_tmp_outname} into ${ffb_outname}" + return 1 + fi + fi + + # if executable, execute + if [[ "${ffb_file_type}" = "exec" || "${ffb_file_type}" = "exec:"* ]]; then + if ! chmod u+x "${ffb_outname}"; then + warn "Error making '${ffb_outname}' executable" + return 1 + fi + if [ "${ffb_id}" = "main" ] && [ "${ffb_target_fname}" = "${last_script}" ]; then # last_script global for simplicity + echo "Skipping last script ${last_script}" 1>&2 + elif [[ "${ffb_target_fname}" = "cvmfs_umount.sh" ]] || [[ -n "${cleanup_script}" && "${ffb_target_fname}" = "${cleanup_script}" ]]; then # cleanup_script global for simplicity + # TODO: temporary OR checking for cvmfs_umount.sh; to be removed after Bruno's ticket on cleanup [#25073] + echo "Skipping cleanup script ${ffb_outname} (${cleanup_script})" 1>&2 + cp "${ffb_outname}" "$gwms_exec_dir/cleanup/${ffb_target_fname}" + chmod a+x "${gwms_exec_dir}/cleanup/${ffb_target_fname}" + else + echo "Executing (flags:${ffb_file_type#exec}) ${ffb_outname}" + # have to do it here, as this will be run before any other script + chmod u+rx "${main_dir}"/error_augment.sh + + # the XML file will be overwritten now, and hopefully not an error situation + have_dummy_otrx=0 + "${main_dir}"/error_augment.sh -init + START=$(date +%s) + if [[ "${ffb_file_type}" = "exec:s" ]]; then + "${main_dir}/singularity_wrapper.sh" "${ffb_outname}" glidein_config "${ffb_id}" + else + "${ffb_outname}" glidein_config 
"${ffb_id}" + fi + ret=$? + END=$(date +%s) + "${main_dir}"/error_augment.sh -process ${ret} "${ffb_id}/${ffb_target_fname}" "${PWD}" "${ffb_outname} glidein_config" "${START}" "${END}" #generating test result document + "${main_dir}"/error_augment.sh -concat + if [ ${ret} -ne 0 ]; then + echo "=== Validation error in ${ffb_outname} ===" 1>&2 + warn "Error running '${ffb_outname}'" + < otrx_output.xml awk 'BEGIN{fr=0;}/<[/]detail>/{fr=0;}{if (fr==1) print $0}//{fr=1;}' 1>&2 + return 1 + else + # If ran successfully and periodic, schedule to execute with schedd_cron + echo "=== validation OK in ${ffb_outname} (${ffb_period}) ===" 1>&2 + if [ "${ffb_period}" -gt 0 ]; then + add_periodic_script "${main_dir}/script_wrapper.sh" "${ffb_period}" "${work_dir}" "${ffb_outname}" glidein_config "${ffb_id}" "${ffb_cc_prefix}" + fi + fi + fi + elif [ "${ffb_file_type}" = "wrapper" ]; then + echo "${ffb_outname}" >> "${wrapper_list}" + elif [ "${ffb_file_type}" = "untar" ]; then + ffb_short_untar_dir="$(get_untar_subdir "${ffb_id}" "${ffb_target_fname}")" + ffb_untar_dir="${ffb_work_dir}/${ffb_short_untar_dir}" + START=$(date +%s) + (mkdir "${ffb_untar_dir}" && cd "${ffb_untar_dir}" && tar -xmzf "${ffb_outname}") 1>&2 + ret=$? + if [ ${ret} -ne 0 ]; then + "${main_dir}"/error_augment.sh -init + "${main_dir}"/error_gen.sh -error "tar" "Corruption" "Error untarring '${ffb_outname}'" "file" "${ffb_outname}" "source_type" "${cfs_id}" + "${main_dir}"/error_augment.sh -process ${cfs_rc} "tar" "${PWD}" "mkdir ${ffb_untar_dir} && cd ${ffb_untar_dir} && tar -xmzf ${ffb_outname}" "${START}" "$(date +%s)" + "${main_dir}"/error_augment.sh -concat + warn "Error untarring '${ffb_outname}'" + return 1 + fi + fi + + if [ "${ffb_config_out}" != "FALSE" ]; then + ffb_prefix="$(get_prefix "${ffb_id}")" + if [ "${ffb_file_type}" = "untar" ]; then + # when untaring the original file is less interesting than the untar dir + if ! 
add_config_line "${ffb_prefix}${ffb_config_out}" "${ffb_untar_dir}"; then + glidein_exit 1 + fi + else + if ! add_config_line "${ffb_prefix}${ffb_config_out}" "${ffb_outname}"; then + glidein_exit 1 + fi + fi + fi + + if [ "${have_dummy_otrx}" -eq 1 ]; then + # no one should really look at this file, but just to avoid confusion + echo " + + + ${PWD} + + + Unknown + $(date +%Y-%m-%dT%H:%M:%S%:z) + $(date +%Y-%m-%dT%H:%M:%S%:z) + + + OK + +" > otrx_output.xml + fi + + return 0 +} + +# Adds $1 to GWMS_PATH and update PATH +add_to_path() { + logdebug "Adding to GWMS_PATH: $1" + local old_path=":${PATH%:}:" + old_path="${old_path//:$GWMS_PATH:/}" + local old_gwms_path=":${GWMS_PATH%:}:" + old_gwms_path="${old_gwms_path//:$1:/}" + old_gwms_path="${1%:}:${old_gwms_path#:}" + export GWMS_PATH="${old_gwms_path%:}" + old_path="${GWMS_PATH}:${old_path#:}" + export PATH="${old_path%:}" +} + +fixup_condor_dir() { + # All files in the native condor tarballs have a directory like condor-9.0.11-1-x86_64_CentOS7-stripped + # However the (not used anymore) gwms create_condor_tarball removes that dir + # Here we remove that dir as well to allow factory ops to use native condor tarballs + + # Check if the condor dir has only one subdir, the one like "condor-9.0.11-1-x86_64_CentOS7-stripped" + # See https://stackoverflow.com/questions/32429333/how-to-test-if-a-linux-directory-contain-only-one-subdirectory-and-no-other-file + if [ $(find "${gs_id_work_dir}/condor" -maxdepth 1 -type d -printf 1 | wc -m) -eq 2 ]; then + echo "Fixing directory structure of condor tarball" + mv "${gs_id_work_dir}"/condor/condor*/* "${gs_id_work_dir}"/condor > /dev/null + else + echo "Condor tarball does not need to be fixed" + fi +} + +##################################################################### +#################### Execution starts here.... 
###################### +##################################################################### + +# default IFS, to protect against unusual environment, better than "unset IFS" because works with restoring old one +IFS=$' \t\n' + +global_args="$*" +# GWMS_STARTUP_SCRIPT=$0 +GWMS_STARTUP_SCRIPT="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/$(basename "${BASH_SOURCE[0]}")" +GWMS_PATH="" +# Relative to the work directory (GWMS_DIR, gwms_lib_dir, gwms_bin_dir and gwms_exec_dir will be the absolute paths) +# bin (utilities), lib (libraries), exec (aux scripts to be executed/sourced, e.g. pre-job) +GWMS_SUBDIR=".gwms.d" + +export LANG=C + +# General options +GWMS_MULTIUSER_GLIDEIN= +# Set GWMS_MULTIUSER_GLIDEIN if the Glidein may spawn processes (for jobs) as a different user. +# This will prepare the glidein, e.g. setting to 777 the permission of TEMP directories +# This should never happen only when using GlExec. Not in Singularity, not w/o sudo mechanisms. +# Comment the following line if GlExec or similar will not be used +#GWMS_MULTIUSER_GLIDEIN=true +# Default GWMS log server +GWMS_LOGSERVER_ADDRESS='https://fermicloud152.fnal.gov/log' + +GWMS_MULTIGLIDEIN_CHILDS= + +# params will contain the full list of parameters +# -param_XXX YYY will become "XXX YYY" +# TODO: can use an array instead? 
+params="" + +while [ $# -gt 0 ] +do case "$1" in + -factory) glidein_factory="$2";; + -name) glidein_name="$2";; + -entry) glidein_entry="$2";; + -clientname) client_name="$2";; + -clientgroup) client_group="$2";; + -web) repository_url="$2";; + -proxy) proxy_url="$2";; + -dir) work_dir="$2";; + -sign) sign_id="$2";; + -signtype) sign_type="$2";; + -signentry) sign_entry_id="$2";; + -cluster) condorg_cluster="$2";; + -subcluster) condorg_subcluster="$2";; + -submitcredid) glidein_cred_id="$2";; + -schedd) condorg_schedd="$2";; + -descript) descript_file="$2";; + -descriptentry) descript_entry_file="$2";; + -clientweb) client_repository_url="$2";; + -clientwebgroup) client_repository_group_url="$2";; + -clientsign) client_sign_id="$2";; + -clientsigntype) client_sign_type="$2";; + -clientsigngroup) client_sign_group_id="$2";; + -clientdescript) client_descript_file="$2";; + -clientdescriptgroup) client_descript_group_file="$2";; + -slotslayout) slots_layout="$2";; + -v) operation_mode="$2";; + -multiglidein) multi_glidein="$2";; + -multirestart) multi_glidein_restart="$2";; + -param_*) params="$params $(echo "$1" | awk '{print substr($0,8)}') $2";; + *) (warn "Unknown option $1"; usage) 1>&2; exit 1 +esac +shift 2 +done + +# make sure we have a valid slots_layout +if (echo "x${slots_layout}" | grep -i fixed) >/dev/null 2>&1 ; then + slots_layout="fixed" +else + slots_layout="partitionable" +fi + +#################################### +# Cleaup, print out message and exit +work_dir_created=0 +glide_local_tmp_dir_created=0 + +################ +# Parse and verify arguments + +# allow some parameters to change arguments +# multiglidein GLIDEIN_MULTIGLIDEIN -> multi_glidein +tmp_par=$(params_get_simple GLIDEIN_MULTIGLIDEIN "${params}") +[ -n "${tmp_par}" ] && multi_glidein=${tmp_par} + +case "${operation_mode}" in + nodebug) + sleep_time=1199 + set_debug=0;; + fast) + sleep_time=150 + set_debug=1;; + check) + sleep_time=150 + set -x + set_debug=2;; + *) + sleep_time=1199 
+        set_debug=1;; +esac + +if [ -z "${descript_file}" ]; then + warn "Missing descript fname." + usage +fi + +if [ -z "${descript_entry_file}" ]; then + warn "Missing descript fname for entry." + usage +fi + +if [ -z "${glidein_name}" ]; then + warn "Missing glidein name." + usage +fi + +if [ -z "${glidein_entry}" ]; then + warn "Missing glidein entry name." + usage +fi + + +if [ -z "${repository_url}" ]; then + warn "Missing Web URL." + usage +fi + +repository_entry_url="${repository_url}/entry_${glidein_entry}" + +if [ -z "${proxy_url}" ]; then + proxy_url="None" +fi + +if [ "${proxy_url}" = "OSG" ]; then + if [ -z "${OSG_SQUID_LOCATION}" ]; then + # if OSG does not define a Squid, then don't use any + proxy_url="None" + warn "OSG_SQUID_LOCATION undefined, not using any Squid URL" 1>&2 + else + proxy_url="$(echo "${OSG_SQUID_LOCATION}" | awk -F ':' '{if ($2 =="") {print $1 ":3128"} else {print $0}}')" + fi +fi + +if [ -z "${sign_id}" ]; then + warn "Missing signature." + usage +fi + +if [ -z "${sign_entry_id}" ]; then + warn "Missing entry signature." + usage +fi + +if [ -z "${sign_type}" ]; then + sign_type="sha1" +fi + +if [ "${sign_type}" != "sha1" ]; then + warn "Unsupported signtype ${sign_type} found." + usage +fi + +if [ -n "${client_repository_url}" ]; then + # client data is optional, use url as a switch + if [ -z "${client_sign_type}" ]; then + client_sign_type="sha1" + fi + + if [ "${client_sign_type}" != "sha1" ]; then + warn "Unsupported clientsigntype ${client_sign_type} found." + usage + fi + + if [ -z "${client_descript_file}" ]; then + warn "Missing client descript fname." + usage + fi + + if [ -n "${client_repository_group_url}" ]; then + # client group data is optional, use url as a switch + if [ -z "${client_group}" ]; then + warn "Missing client group name." + usage + fi + + if [ -z "${client_descript_group_file}" ]; then + warn "Missing client descript fname for group." 
+ usage + fi + fi +fi + +# Generate glidein UUID +if command -v uuidgen >/dev/null 2>&1; then + glidein_uuid="$(uuidgen)" +else + glidein_uuid="$(od -x -w32 -N32 /dev/urandom | awk 'NR==1{OFS="-"; print $2$3,$4,$5,$6,$7$8$9}')" +fi + +startup_time="$(date +%s)" +echo "Starting glidein_startup.sh at $(date) (${startup_time})" + +echo "script_checksum = '$(md5wrapper "$0")'" +echo "debug_mode = '${operation_mode}'" +echo "condorg_cluster = '${condorg_cluster}'" +echo "condorg_subcluster= '${condorg_subcluster}'" +echo "condorg_schedd = '${condorg_schedd}'" +echo "glidein_uuid = '${glidein_uuid}'" +echo "glidein_credential_id = '${glidein_cred_id}'" +echo "glidein_factory = '${glidein_factory}'" +echo "glidein_name = '${glidein_name}'" +echo "glidein_entry = '${glidein_entry}'" +if [ -n "${client_name}" ]; then + # client name not required as it is not used for anything but debug info + echo "client_name = '${client_name}'" +fi +if [ -n "${client_group}" ]; then + echo "client_group = '${client_group}'" +fi +echo "multi_glidein/restart = '${multi_glidein}'/'${multi_glidein_restart}'" +echo "work_dir = '${work_dir}'" +echo "web_dir = '${repository_url}'" +echo "sign_type = '${sign_type}'" +echo "proxy_url = '${proxy_url}'" +echo "descript_fname = '${descript_file}'" +echo "descript_entry_fname = '${descript_entry_file}'" +echo "sign_id = '${sign_id}'" +echo "sign_entry_id = '${sign_entry_id}'" +if [ -n "${client_repository_url}" ]; then + echo "client_web_dir = '${client_repository_url}'" + echo "client_descript_fname = '${client_descript_file}'" + echo "client_sign_type = '${client_sign_type}'" + echo "client_sign_id = '${client_sign_id}'" + if [ -n "${client_repository_group_url}" ]; then + echo "client_web_group_dir = '${client_repository_group_url}'" + echo "client_descript_group_fname = '${client_descript_group_file}'" + echo "client_sign_group_id = '${client_sign_group_id}'" + fi +fi +echo +echo "Running on $(uname -n)" +echo "System: $(uname -a)" +if [ -e 
'/etc/redhat-release' ]; then + echo "Release: $(cat /etc/redhat-release 2>&1)" +fi +echo "As: $(id)" +echo "PID: $$" +echo + +if [ ${set_debug} -ne 0 ]; then + echo "------- Initial environment ---------------" 1>&2 + env 1>&2 + echo "------- =================== ---------------" 1>&2 +fi + +# Before anything else, spawn multiple glideins and wait, if asked to do so +if [[ -n "${multi_glidein}" ]] && [[ -z "${multi_glidein_restart}" ]] && [[ "${multi_glidein}" -gt 1 ]]; then + # start multiple glideins + ON_DIE=0 + trap 'ignore_signal' SIGHUP + trap_with_arg 'on_die_multi' SIGTERM SIGINT SIGQUIT + do_start_all "${multi_glidein}" + # Wait for all glideins and exit 0 + # TODO: Summarize exit codes and status from all child glideins + echo "------ Multi-glidein parent waiting for child processes (${GWMS_MULTIGLIDEIN_CHILDS}) ----------" 1>&2 + wait + echo "------ Exiting multi-glidein parent ----------" 1>&2 + exit 0 +fi + +######################################## +# make sure nobody else can write my files +# In the Grid world I cannot trust anybody +if ! umask 0022; then + early_glidein_failure "Failed in umask 0022" +fi + +######################################## +# Setup OSG and/or Globus +if [ -r "${OSG_GRID}/setup.sh" ]; then + . "${OSG_GRID}/setup.sh" +else + if [ -r "${GLITE_LOCAL_CUSTOMIZATION_DIR}/cp_1.sh" ]; then + . "${GLITE_LOCAL_CUSTOMIZATION_DIR}/cp_1.sh" + fi +fi -perform_curl() { - curl_args=("$@") - arg_len="${#curl_args[@]}" - ffb_url="${curl_args[0]}" - ffb_repository="$(dirname "${ffb_url}")" - ffb_real_fname="$(basename "${ffb_url}")" - for ((i=0; i&1)" - curl_retval=$? - if [ ${curl_retval} -eq 0 ] && [ ! -e "${ffb_tmp_outname}" ] ; then - touch "${ffb_tmp_outname}" fi + if [ -r "${GLOBUS_LOCATION}/etc/globus-user-env.sh" ]; then + . "${GLOBUS_LOCATION}/etc/globus-user-env.sh" + else + warn "GLOBUS_PATH not defined and ${GLOBUS_LOCATION}/etc/globus-user-env.sh does not exist." 
+ warn 'Continuing like nothing happened' + fi +fi - if [ "${curl_retval}" -ne 0 ]; then - curl_version="$(curl --version 2>&1 | head -1)" - warn "${curl_cmd} failed. version:${curl_version} exit code ${curl_retval} stderr: ${curl_resp} " - # cannot use error_*.sh helper functions - # may not have been loaded yet, and wget fails often - echo " - - ${PWD} - $(uname -a) - $(cat /etc/system-release) - ${curl_version} - - - ${curl_cmd} - $(date --date=@"${START}" +%Y-%m-%dT%H:%M:%S%:z) - $(date +%Y-%m-%dT%H:%M:%S%:z) - - - ERROR - Network - ${ffb_url} - ${proxy_url} - ${ffb_id} - - - Failed to load file '${ffb_real_fname}' from '${ffb_repository}' using proxy '${proxy_url}'. ${curl_resp} - -" > otrb_output.xml - warn "Failed to load file '${ffb_real_fname}' from '${ffb_repository}'." +[ -n "${X509_USER_PROXY}" ] && set_proxy_fullpath - if [ -f otr_outlist.list ]; then - chmod u+w otr_outlist.list - else - touch otr_outlist.list - fi - cat otrb_output.xml >> otr_outlist.list - echo "" > otrx_output.xml - cat otrb_output.xml >> otrx_output.xml - rm -f otrb_output.xml - chmod a-w otr_outlist.list - fi - return ${curl_retval} -} +num_gct=0 -fetch_file_base() { - # Perform the file download and corresponding action (untar, execute, ...) - ffb_id="$1" - ffb_target_fname="$2" - ffb_real_fname="$3" - ffb_file_type="$4" - ffb_config_out="$5" - ffb_period=$6 - # condor cron prefix, used only for periodic executables - ffb_cc_prefix="$7" - ffb_work_dir="$(get_work_dir "${ffb_id}")" +######################################## +# prepare and move to the work directory - ffb_repository="$(get_repository_url "${ffb_id}")" +# Replace known keywords: Condor, CONDOR, OSG, TMPDIR, AUTO, . 
+# Empty $work_dir means PWD (same as ".") +# A custom path could be provided (no "*)" in case) +tmp="${work_dir}" +if [ -z "${work_dir}" ]; then + work_dir="$(pwd)" +else + case "${work_dir}" in + Condor|CONDOR) work_dir="${_CONDOR_SCRATCH_DIR}";; + OSG) work_dir="${OSG_WN_TMP}";; + TMPDIR) work_dir="${TMPDIR}";; + AUTO) automatic_work_dir;; + .) work_dir="$(pwd)";; + esac +fi - ffb_tmp_outname="${ffb_work_dir}/${ffb_real_fname}" - ffb_outname="${ffb_work_dir}/${ffb_target_fname}" +if [ -z "${work_dir}" ]; then + early_glidein_failure "Unable to identify Startup dir for the glidein ($tmp)." +fi - # Create a dummy default in case something goes wrong - # cannot use error_*.sh helper functions - # may not have been loaded yet - have_dummy_otrx=1 - echo " - - - ${PWD} - - - Unknown - $(date +%Y-%m-%dT%H:%M:%S%:z) - $(date +%Y-%m-%dT%H:%M:%S%:z) - - - ERROR - Unknown - ${ffb_id} - - - An unknown error occurred. - -" > otrx_output.xml - user_agent="glidein/${glidein_entry}/${condorg_schedd}/${condorg_cluster}.${condorg_subcluster}/${client_name}" - ffb_url="${ffb_repository}/${ffb_real_fname}" - curl_version=$(curl --version | head -1 ) - wget_version=$(wget --version | head -1 ) - #old wget command: - #wget --user-agent="wget/glidein/$glidein_entry/$condorg_schedd/$condorg_cluster.$condorg_subcluster/$client_name" "$ffb_nocache_str" -q -O "$ffb_tmp_outname" "$ffb_repository/$ffb_real_fname" - #equivalent to: - #wget ${ffb_url} --user-agent=${user_agent} -q -O "${ffb_tmp_outname}" "${ffb_nocache_str}" - #with env http_proxy=$proxy_url set if proxy_url != "None" - # - #construct curl equivalent so we can try either +if [ ! -e "${work_dir}" ]; then + early_glidein_failure "Startup dir '${work_dir}' ($tmp) does not exist." 
+fi

- wget_args=("${ffb_url}" "--user-agent" "wget/${user_agent}" "--quiet" "--output-document" "${ffb_tmp_outname}" )
- curl_args=("${ffb_url}" "--user-agent" "curl/${user_agent}" "--silent" "--show-error" "--output" "${ffb_tmp_outname}")

+start_dir="$(pwd)"
+echo "Started in '${start_dir}' ($tmp)"

- if [ "${ffb_file_type}" = "nocache" ]; then
- if [ "${curl_version}" != "" ]; then
- curl_args+=("--header")
- curl_args+=("'Cache-Control: no-cache'")
- fi
- if [ "${wget_version}" != "" ]; then
- if wget --help | grep -q "\-\-no-cache "; then
- wget_args+=("--no-cache")
- elif wget --help |grep -q "\-\-cache="; then
- wget_args+=("--cache=off")
- else
- warn "wget ${wget_version} cannot disable caching"
- fi
- fi

+work_dir_template="${work_dir}/glide_$(dir_id)XXXXXX"
+if ! work_dir="$(mktemp -d "${work_dir_template}")"; then
+ early_glidein_failure "Cannot create work_dir '${work_dir_template}'"
+else
+ if ! cd "${work_dir}"; then
+ early_glidein_failure "Work dir '${work_dir}' was created but cannot cd into it."
+ else
+ echo "Running in ${work_dir}"
 fi
+fi
+work_dir_created=1

- if [ "${proxy_url}" != "None" ];then
- if [ "${curl_version}" != "" ]; then
- curl_args+=("--proxy")
- curl_args+=("${proxy_url}")
- fi
- if [ "${wget_version}" != "" ]; then
- #these two arguments have to be last as coded, put any future
- #wget args earlier in wget_args array
- wget_args+=("--proxy")
- wget_args+=("${proxy_url}")
- fi
- fi

+# GWMS_SUBDIR defined on top
+GWMS_DIR="${work_dir}/$GWMS_SUBDIR"
+if ! mkdir "$GWMS_DIR" ; then
+ early_glidein_failure "Cannot create GWMS_DIR '$GWMS_DIR'"
+fi
+gwms_lib_dir="${GWMS_DIR}/lib"
+if ! mkdir -p "$gwms_lib_dir" ; then
+ early_glidein_failure "Cannot create lib dir '$gwms_lib_dir'"
+fi
+gwms_bin_dir="${GWMS_DIR}/bin"
+if ! mkdir -p "$gwms_bin_dir" ; then
+ early_glidein_failure "Cannot create bin dir '$gwms_bin_dir'"
+fi
+gwms_exec_dir="${GWMS_DIR}/exec"
+if ! 
mkdir -p "$gwms_exec_dir" ; then + early_glidein_failure "Cannot create exec dir '$gwms_exec_dir'" +else + for i in setup prejob postjob cleanup setup_singularity ; do + mkdir -p "$gwms_exec_dir"/$i + done +fi - fetch_completed=1 - if [ ${fetch_completed} -ne 0 ] && [ "${wget_version}" != "" ]; then - perform_wget "${wget_args[@]}" - fetch_completed=$? - fi - if [ ${fetch_completed} -ne 0 ] && [ "${curl_version}" != "" ]; then - perform_curl "${curl_args[@]}" - fetch_completed=$? +# mktemp makes it user readable by definition (ignores umask) +# TODO: MMSEC should this change to increase protection? Since GlExec is gone this should not be needed +if [ -n "${GWMS_MULTIUSER_GLIDEIN}" ]; then + if ! chmod a+rx "${work_dir}"; then + early_glidein_failure "Failed chmod '${work_dir}'" fi +fi - if [ ${fetch_completed} -ne 0 ]; then - return ${fetch_completed} - fi +glide_local_tmp_dir_template="/tmp/glide_$(dir_id)$(id -u -n)_XXXXXX" +if ! glide_local_tmp_dir="$(mktemp -d "${glide_local_tmp_dir_template}")"; then + early_glidein_failure "Cannot create temp '${glide_local_tmp_dir_template}'" +fi +glide_local_tmp_dir_created=1 - # check signature - if ! check_file_signature "${ffb_id}" "${ffb_real_fname}"; then - # error already displayed inside the function - return 1 +glide_tmp_dir="${work_dir}/tmp" +if ! mkdir "${glide_tmp_dir}"; then + early_glidein_failure "Cannot create '${glide_tmp_dir}'" +fi + +if [ -n "${GWMS_MULTIUSER_GLIDEIN}" ]; then + # TODO: MMSEC should this change to increase protection? Since GlExec is gone this should not be needed + # the tmpdirs should be world writable + # This way it will work even if the user spawned by the glidein is different than the glidein user + # This happened in GlExec, outside user stays the same in Singularity + if ! 
chmod 1777 "${glide_local_tmp_dir}"; then + early_glidein_failure "Failed chmod '${glide_local_tmp_dir}'" fi - # rename it to the correct final name, if needed - if [ "${ffb_tmp_outname}" != "${ffb_outname}" ]; then - if ! mv "${ffb_tmp_outname}" "${ffb_outname}"; then - warn "Failed to rename ${ffb_tmp_outname} into ${ffb_outname}" - return 1 - fi + if ! chmod 1777 "${glide_tmp_dir}"; then + early_glidein_failure "Failed chmod '${glide_tmp_dir}'" fi +fi + +short_main_dir=main +main_dir="${work_dir}/${short_main_dir}" +if ! mkdir "${main_dir}"; then + early_glidein_failure "Cannot create '${main_dir}'" +fi - # if executable, execute - if [[ "${ffb_file_type}" = "exec" || "${ffb_file_type}" = "exec:"* ]]; then - if ! chmod u+x "${ffb_outname}"; then - warn "Error making '${ffb_outname}' executable" - return 1 - fi - if [ "${ffb_id}" = "main" ] && [ "${ffb_target_fname}" = "${last_script}" ]; then # last_script global for simplicity - echo "Skipping last script ${last_script}" 1>&2 - elif [[ "${ffb_target_fname}" = "cvmfs_umount.sh" ]] || [[ -n "${cleanup_script}" && "${ffb_target_fname}" = "${cleanup_script}" ]]; then # cleanup_script global for simplicity - # TODO: temporary OR checking for cvmfs_umount.sh; to be removed after Bruno's ticket on cleanup [#25073] - echo "Skipping cleanup script ${ffb_outname} (${cleanup_script})" 1>&2 - cp "${ffb_outname}" "$gwms_exec_dir/cleanup/${ffb_target_fname}" - chmod a+x "${gwms_exec_dir}/cleanup/${ffb_target_fname}" - else - echo "Executing (flags:${ffb_file_type#exec}) ${ffb_outname}" - # have to do it here, as this will be run before any other script - chmod u+rx "${main_dir}"/error_augment.sh +short_entry_dir=entry_${glidein_entry} +entry_dir="${work_dir}/${short_entry_dir}" +if ! 
mkdir "${entry_dir}"; then + early_glidein_failure "Cannot create '${entry_dir}'" +fi - # the XML file will be overwritten now, and hopefully not an error situation - have_dummy_otrx=0 - "${main_dir}"/error_augment.sh -init - START=$(date +%s) - if [[ "${ffb_file_type}" = "exec:s" ]]; then - "${main_dir}/singularity_wrapper.sh" "${ffb_outname}" glidein_config "${ffb_id}" - else - "${ffb_outname}" glidein_config "${ffb_id}" - fi - ret=$? - END=$(date +%s) - "${main_dir}"/error_augment.sh -process ${ret} "${ffb_id}/${ffb_target_fname}" "${PWD}" "${ffb_outname} glidein_config" "${START}" "${END}" #generating test result document - "${main_dir}"/error_augment.sh -concat - if [ ${ret} -ne 0 ]; then - echo "=== Validation error in ${ffb_outname} ===" 1>&2 - warn "Error running '${ffb_outname}'" - < otrx_output.xml awk 'BEGIN{fr=0;}/<[/]detail>/{fr=0;}{if (fr==1) print $0}//{fr=1;}' 1>&2 - return 1 - else - # If ran successfully and periodic, schedule to execute with schedd_cron - echo "=== validation OK in ${ffb_outname} (${ffb_period}) ===" 1>&2 - if [ "${ffb_period}" -gt 0 ]; then - add_periodic_script "${main_dir}/script_wrapper.sh" "${ffb_period}" "${work_dir}" "${ffb_outname}" glidein_config "${ffb_id}" "${ffb_cc_prefix}" - fi - fi - fi - elif [ "${ffb_file_type}" = "wrapper" ]; then - echo "${ffb_outname}" >> "${wrapper_list}" - elif [ "${ffb_file_type}" = "untar" ]; then - ffb_short_untar_dir="$(get_untar_subdir "${ffb_id}" "${ffb_target_fname}")" - ffb_untar_dir="${ffb_work_dir}/${ffb_short_untar_dir}" - START=$(date +%s) - (mkdir "${ffb_untar_dir}" && cd "${ffb_untar_dir}" && tar -xmzf "${ffb_outname}") 1>&2 - ret=$? 
- if [ ${ret} -ne 0 ]; then - "${main_dir}"/error_augment.sh -init - "${main_dir}"/error_gen.sh -error "tar" "Corruption" "Error untarring '${ffb_outname}'" "file" "${ffb_outname}" "source_type" "${cfs_id}" - "${main_dir}"/error_augment.sh -process ${cfs_rc} "tar" "${PWD}" "mkdir ${ffb_untar_dir} && cd ${ffb_untar_dir} && tar -xmzf ${ffb_outname}" "${START}" "$(date +%s)" - "${main_dir}"/error_augment.sh -concat - warn "Error untarring '${ffb_outname}'" - return 1 - fi +if [ -n "${client_repository_url}" ]; then + short_client_dir=client + client_dir="${work_dir}/${short_client_dir}" + if ! mkdir "$client_dir"; then + early_glidein_failure "Cannot create '${client_dir}'" fi - if [ "${ffb_config_out}" != "FALSE" ]; then - ffb_prefix="$(get_prefix "${ffb_id}")" - if [ "${ffb_file_type}" = "untar" ]; then - # when untaring the original file is less interesting than the untar dir - if ! add_config_line "${ffb_prefix}${ffb_config_out}" "${ffb_untar_dir}"; then - glidein_exit 1 - fi - else - if ! add_config_line "${ffb_prefix}${ffb_config_out}" "${ffb_outname}"; then - glidein_exit 1 - fi + if [ -n "${client_repository_group_url}" ]; then + short_client_group_dir=client_group_${client_group} + client_group_dir="${work_dir}/${short_client_group_dir}" + if ! mkdir "${client_group_dir}"; then + early_glidein_failure "Cannot create '${client_group_dir}'" fi fi +fi - if [ "${have_dummy_otrx}" -eq 1 ]; then - # no one should really look at this file, but just to avoid confusion - echo " - - - ${PWD} - - - Unknown - $(date +%Y-%m-%dT%H:%M:%S%:z) - $(date +%Y-%m-%dT%H:%M:%S%:z) - - - OK - -" > otrx_output.xml - fi +# Move the token files from condor to glidein workspace +# TODO: compare this w/ setup_x509.sh +# monitoring tokens, Should be using same credentials directory? +mv "${start_dir}/tokens.tgz" . +mv "${start_dir}/url_dirs.desc" . +# idtokens are handled in setup_x509.sh - TODO: remove once verified +#for idtk in ${start_dir}/*.idtoken; do +# if cp "${idtk}" . 
; then +# echo "copied idtoken ${idtk} to $(pwd)" +# else +# echo "failed to copy idtoken ${idtk} to $(pwd)" 1>&2 +# fi +#done +#if [ -e "${GLIDEIN_CONDOR_TOKEN}" ]; then +# mkdir -p ticket +# tname="$(basename ${GLIDEIN_CONDOR_TOKEN})" +# cp "${GLIDEIN_CONDOR_TOKEN}" "ticket/${tname}" +# export GLIDEIN_CONDOR_TOKEN="$(pwd)/ticket/${tname}" +#fi - return 0 -} +# Extract and source all the data contained at the end of this script as tarball +extract_all_data -# Adds $1 to GWMS_PATH and update PATH -add_to_path() { - logdebug "Adding to GWMS_PATH: $1" - local old_path=":${PATH%:}:" - old_path="${old_path//:$GWMS_PATH:/}" - local old_gwms_path=":${GWMS_PATH%:}:" - old_gwms_path="${old_gwms_path//:$1:/}" - old_gwms_path="${1%:}:${old_gwms_path#:}" - export GWMS_PATH="${old_gwms_path%:}" - old_path="${GWMS_PATH}:${old_path#:}" - export PATH="${old_path%:}" -} +wrapper_list="${PWD}/wrapper_list.lst" +touch "${wrapper_list}" -fixup_condor_dir() { - # All files in the native condor tarballs have a directory like condor-9.0.11-1-x86_64_CentOS7-stripped - # However the (not used anymore) gwms create_condor_tarball removes that dir - # Here we remove that dir as well to allow factory ops to use native condor tarballs +# create glidein_config +glidein_config="${PWD}/glidein_config" +if ! echo > "${glidein_config}"; then + early_glidein_failure "Could not create '${glidein_config}'" +fi +if ! 
{ + echo "# --- glidein_startup vals ---" + echo "GLIDEIN_UUID ${glidein_uuid}" + echo "GLIDEIN_Factory ${glidein_factory}" + echo "GLIDEIN_Name ${glidein_name}" + echo "GLIDEIN_Entry_Name ${glidein_entry}" - # Check if the condor dir has only one subdir, the one like "condor-9.0.11-1-x86_64_CentOS7-stripped" - # See https://stackoverflow.com/questions/32429333/how-to-test-if-a-linux-directory-contain-only-one-subdirectory-and-no-other-file - if [ $(find "${gs_id_work_dir}/condor" -maxdepth 1 -type d -printf 1 | wc -m) -eq 2 ]; then - echo "Fixing directory structure of condor tarball" - mv "${gs_id_work_dir}"/condor/condor*/* "${gs_id_work_dir}"/condor > /dev/null - else - echo "Condor tarball does not need to be fixed" + if [ -n "${client_name}" ]; then + # client name not required as it is not used for anything but debug info + echo "GLIDECLIENT_Name ${client_name}" fi -} + if [ -n "${client_group}" ]; then + # client group not required as it is not used for anything but debug info + echo "GLIDECLIENT_Group ${client_group}" + fi + echo "GLIDEIN_CredentialIdentifier ${glidein_cred_id}" + echo "CONDORG_CLUSTER ${condorg_cluster}" + echo "CONDORG_SUBCLUSTER ${condorg_subcluster}" + echo "CONDORG_SCHEDD ${condorg_schedd}" + echo "DEBUG_MODE ${set_debug}" + echo "GLIDEIN_STARTUP_PID $$" + echo "GLIDEIN_START_DIR_ORIG ${start_dir}" + echo "GLIDEIN_WORKSPACE_ORIG $(pwd)" + echo "GLIDEIN_WORK_DIR ${main_dir}" + echo "GLIDEIN_ENTRY_WORK_DIR ${entry_dir}" + echo "TMP_DIR ${glide_tmp_dir}" + echo "GLIDEIN_LOCAL_TMP_DIR ${glide_local_tmp_dir}" + echo "PROXY_URL ${proxy_url}" + echo "DESCRIPTION_FILE ${descript_file}" + echo "DESCRIPTION_ENTRY_FILE ${descript_entry_file}" + echo "GLIDEIN_Signature ${sign_id}" + echo "GLIDEIN_Entry_Signature ${sign_entry_id}" + + if [ -n "${client_repository_url}" ]; then + echo "GLIDECLIENT_WORK_DIR ${client_dir}" + echo "GLIDECLIENT_DESCRIPTION_FILE ${client_descript_file}" + echo "GLIDECLIENT_Signature ${client_sign_id}" + if [ -n 
"${client_repository_group_url}" ]; then + echo "GLIDECLIENT_GROUP_WORK_DIR ${client_group_dir}" + echo "GLIDECLIENT_DESCRIPTION_GROUP_FILE ${client_descript_group_file}" + echo "GLIDECLIENT_Group_Signature ${client_sign_group_id}" + fi + fi + echo "B64UUENCODE_SOURCE ${PWD}/b64uuencode.source" + echo "ADD_CONFIG_LINE_SOURCE ${PWD}/add_config_line.source" + echo "GET_ID_SELECTORS_SOURCE ${PWD}/get_id_selectors.source" + echo "LOGGING_UTILS_SOURCE ${PWD}/logging_utils.source" + echo "GLIDEIN_PATHS_SOURCE ${PWD}/glidein_paths.source" + echo "WRAPPER_LIST ${wrapper_list}" + echo "SLOTS_LAYOUT ${slots_layout}" + # Add a line saying we are still initializing... + echo "GLIDEIN_INITIALIZED 0" + # ...but be optimist, and leave advertise_only for the actual error handling script + echo "GLIDEIN_ADVERTISE_ONLY 0" + echo "GLIDEIN_CONDOR_TOKEN ${GLIDEIN_CONDOR_TOKEN}" + echo "# --- User Parameters ---" +} >> "${glidein_config}"; then + early_glidein_failure "Failed in updating '${glidein_config}'" +fi +# shellcheck disable=SC2086 +params2file ${params} + +############################################ +# Setup logging +log_init "${glidein_uuid}" "${work_dir}" +# Remove these files, if they are still there +rm -rf tokens.tgz url_dirs.desc tokens +log_setup "${glidein_config}" + +# Global variable for periodic execution support +add_startd_cron_counter=0 echo "Downloading files from Factory and Frontend" log_write "glidein_startup.sh" "text" "Downloading file from Factory and Frontend" "debug" @@ -1858,40 +1865,40 @@ check_signature=0 for gs_id in main entry client client_group do - if [ -z "${client_repository_url}" ]; then - if [ "${gs_id}" = "client" ]; then - # no client file when no cilent_repository - continue - fi - fi - if [ -z "${client_repository_group_url}" ]; then - if [ "${gs_id}" = "client_group" ]; then - # no client group file when no cilent_repository_group - continue - fi - fi - - gs_id_work_dir="$(get_work_dir ${gs_id})" - - # Fetch description file - 
gs_id_descript_file="$(get_descript_file ${gs_id})"
- fetch_file_regular "${gs_id}" "${gs_id_descript_file}"
- if ! signature_file_line="$(grep "^signature " "${gs_id_work_dir}/${gs_id_descript_file}")"; then
- warn "No signature in description file ${gs_id_work_dir}/${gs_id_descript_file} (wc: $(wc < "${gs_id_work_dir}/${gs_id_descript_file}" 2>/dev/null))."
- glidein_exit 1
- fi
- signature_file=$(echo "${signature_file_line}" | cut -s -f 2-)
-
- # Fetch signature file
- gs_id_signature="$(get_signature ${gs_id})"
- fetch_file_regular "${gs_id}" "${signature_file}"
- echo "${gs_id_signature} ${signature_file}" > "${gs_id_work_dir}/signature.sha1.test"
- if ! (cd "${gs_id_work_dir}" && sha1sum -c signature.sha1.test) 1>&2 ; then
- warn "Corrupted signature file '${gs_id_work_dir}/${signature_file}'."
- glidein_exit 1
- fi
- # for simplicity use a fixed name for signature file
- mv "${gs_id_work_dir}/${signature_file}" "${gs_id_work_dir}/signature.sha1"
+ if [ -z "${client_repository_url}" ]; then
+ if [ "${gs_id}" = "client" ]; then
+ # no client file when no client_repository
+ continue
+ fi
+ fi
+ if [ -z "${client_repository_group_url}" ]; then
+ if [ "${gs_id}" = "client_group" ]; then
+ # no client group file when no client_repository_group
+ continue
+ fi
+ fi
+
+ gs_id_work_dir="$(get_work_dir ${gs_id})"
+
+ # Fetch description file
+ gs_id_descript_file="$(get_descript_file ${gs_id})"
+ fetch_file_regular "${gs_id}" "${gs_id_descript_file}"
+ if ! signature_file_line="$(grep "^signature " "${gs_id_work_dir}/${gs_id_descript_file}")"; then
+ warn "No signature in description file ${gs_id_work_dir}/${gs_id_descript_file} (wc: $(wc < "${gs_id_work_dir}/${gs_id_descript_file}" 2>/dev/null))." 
+ glidein_exit 1
+ fi
+ signature_file=$(echo "${signature_file_line}" | cut -s -f 2-)
+
+ # Fetch signature file
+ gs_id_signature="$(get_signature ${gs_id})"
+ fetch_file_regular "${gs_id}" "${signature_file}"
+ echo "${gs_id_signature} ${signature_file}" > "${gs_id_work_dir}/signature.sha1.test"
+ if ! (cd "${gs_id_work_dir}" && sha1sum -c signature.sha1.test) 1>&2 ; then
+ warn "Corrupted signature file '${gs_id_work_dir}/${signature_file}'."
+ glidein_exit 1
+ fi
+ # for simplicity use a fixed name for signature file
+ mv "${gs_id_work_dir}/${signature_file}" "${gs_id_work_dir}/signature.sha1"
 done

 # re-enable for everything else
@@ -1903,25 +1910,25 @@ check_signature=1
 # the description file
 for gs_id in main entry client client_group
 do
- if [ -z "${client_repository_url}" ]; then
- if [ "${gs_id}" = "client" ]; then
- # no client file when no cilent_repository
- continue
- fi
- fi
- if [ -z "${client_repository_group_url}" ]; then
- if [ "${gs_id}" = "client_group" ]; then
- # no client group file when no cilent_repository_group
- continue
- fi
- fi
-
- gs_id_descript_file="$(get_descript_file ${gs_id})"
- if ! check_file_signature "${gs_id}" "${gs_id_descript_file}"; then
- gs_id_work_dir="$(get_work_dir ${gs_id})"
- warn "Corrupted description file ${gs_id_work_dir}/${gs_id_descript_file}."
- glidein_exit 1
- fi
+ if [ -z "${client_repository_url}" ]; then
+ if [ "${gs_id}" = "client" ]; then
+ # no client file when no client_repository
+ continue
+ fi
+ fi
+ if [ -z "${client_repository_group_url}" ]; then
+ if [ "${gs_id}" = "client_group" ]; then
+ # no client group file when no client_repository_group
+ continue
+ fi
+ fi
+
+ gs_id_descript_file="$(get_descript_file ${gs_id})"
+ if ! check_file_signature "${gs_id}" "${gs_id_descript_file}"; then
+ gs_id_work_dir="$(get_work_dir ${gs_id})"
+ warn "Corrupted description file ${gs_id_work_dir}/${gs_id_descript_file}." 
+ glidein_exit 1 + fi done ################################################### @@ -1969,8 +1976,8 @@ do continue fi fi - warn "No '${gs_file_list_id}' in description file ${gs_id_work_dir}/${gs_id_descript_file}." - glidein_exit 1 + warn "No '${gs_file_list_id}' in description file ${gs_id_work_dir}/${gs_id_descript_file}." + glidein_exit 1 fi # space+tab separated file with multiple elements (was: awk '{print $2}', not safe for spaces in file name) gs_file_list="$(echo "${gs_file_list_line}" | cut -s -f 2 | sed -e 's/[[:space:]]*$//')"