Skip to content
Snippets Groups Projects
Commit 699d5a66 authored by Daniel Nemergut's avatar Daniel Nemergut
Browse files

Update stage was also missing from the restore and finish scripts; also corrected the query formatting.
parent 6ab3a8f6
No related branches found
No related tags found
2 merge requests: !1607 (merge 2.8.3 to main), !1588 (Corrected pims_split templates)
Pipeline #14533 passed
......@@ -16,49 +16,91 @@ down_revision = '36591dc3f14d'
branch_labels = None
depends_on = None
wf_name = 'pims_split'
# Handle the proper encoding from text -> bytea to not lose any characters
def set_wf_content(filename: str) -> bytes:
    """
    Read a workflow template file and return its content as bytes.

    Looks the file up under versions/templates/<wf_name>/ relative to the
    current working directory (the module-level wf_name constant selects the
    workflow's template directory), so the result can be written into the
    bytea `content` column of workflow_templates.

    :param filename: name of the template file to read
    :return: the file's text, encoded to bytes (default UTF-8)
    """
    return (Path.cwd() / "versions" / "templates" / wf_name / filename).read_text().encode()
def upgrade():
    """
    Install the corrected 2.8.3 pims_split workflow templates.

    Re-installs split.condor with the corrected quoting, and puts back the
    update_stage calls (missing since 2.8.1) in restore.sh, split.sh, and
    write_finished_file.sh. Each UPDATE is scoped to this workflow via
    workflow_name so same-named templates of other workflows are untouched.
    """
    conn = op.get_bind()

    # Remove quotes around string variables that have already been quoted
    conn.execute(
        f"""
        UPDATE workflow_templates
        SET content = %s
        WHERE filename = 'split.condor' and workflow_name = E'{wf_name}'
        """,
        set_wf_content("split_condor_2.8.3.txt"),
    )

    # Put back in the update_stage calls that disappeared since 2.8.1
    conn.execute(
        f"""
        UPDATE workflow_templates
        SET content = %s
        WHERE filename = 'restore.sh' and workflow_name = E'{wf_name}'
        """,
        set_wf_content("restore_sh_2.8.3.txt"),
    )
    conn.execute(
        f"""
        UPDATE workflow_templates
        SET content = %s
        WHERE filename = 'split.sh' and workflow_name = E'{wf_name}'
        """,
        set_wf_content("split_sh_2.8.3.txt"),
    )
    conn.execute(
        f"""
        UPDATE workflow_templates
        SET content = %s
        WHERE filename = 'write_finished_file.sh' and workflow_name = E'{wf_name}'
        """,
        set_wf_content("finish_sh_2.8.3.txt"),
    )
def downgrade():
    """
    Revert the pims_split workflow templates to their 2.8.2 versions.

    Mirrors upgrade(): restores the previous split.condor, restore.sh,
    split.sh, and write_finished_file.sh contents, each UPDATE scoped to this
    workflow via workflow_name.
    """
    conn = op.get_bind()

    conn.execute(
        f"""
        UPDATE workflow_templates
        SET content = %s
        WHERE filename = 'split.condor' and workflow_name = E'{wf_name}'
        """,
        set_wf_content("split_condor_2.8.2.txt"),
    )
    conn.execute(
        f"""
        UPDATE workflow_templates
        SET content = %s
        WHERE filename = 'restore.sh' and workflow_name = E'{wf_name}'
        """,
        set_wf_content("restore_sh_2.8.2.txt"),
    )
    conn.execute(
        f"""
        UPDATE workflow_templates
        SET content = %s
        WHERE filename = 'split.sh' and workflow_name = E'{wf_name}'
        """,
        set_wf_content("split_sh_2.8.2.txt"),
    )
    conn.execute(
        f"""
        UPDATE workflow_templates
        SET content = %s
        WHERE filename = 'write_finished_file.sh' and workflow_name = E'{wf_name}'
        """,
        set_wf_content("finish_sh_2.8.2.txt"),
    )
#!/bin/sh
# pims_split write_finished_file template (2.8.2): gathers job metadata with
# pims_analyzer and emails it to the VLASS analyst, then writes a "finished"
# timestamp file in the data location.
# NOTE(review): [[ ]] and += are bashisms despite the sh shebang -- assumes
# /bin/sh is bash on the cluster hosts; confirm before changing the shebang.
cd {{data_location}}

# Set up for emails
ADDRESS_CAPO_PROPERTY="edu.nrao.workspaces.NotificationSettings.vlassAnalystEmail"
ADDRESS=$(/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin/pycapo ${ADDRESS_CAPO_PROPERTY} | cut -d '"' -f 2)
NOTIFICATION_CAPO_PROPERTY="edu.nrao.workspaces.NotificationSettings.serviceUrl"
NOTIFICATION_URL=$(/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin/pycapo ${NOTIFICATION_CAPO_PROPERTY} | cut -d '"' -f 2)/notify/pims_notification/send

ANALYZER_JSON=$(/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin/pims_analyzer --id {{request_id}} 2> analyzer_call.err)

# The analyzer call failed ($? holds pims_analyzer's exit status via the assignment)
if [[ $? -ne 0 ]] ; then
    # stderr was redirected to analyzer_call.err above, so point the analyst there
    FAIL_MESSAGE="Error getting metadata for pims job, check {{data_location}}/analyzer_call.err for more information"
    FAIL_SUBJECT="Failure to analyze pims_split for {{vlass_product}}"
    # Escape the inner quotes so the JSON survives shell expansion; the previous
    # unescaped quotes ended the assignment at the first space and broke the payload
    FAIL_JSON="{\"destination_email\": \"$ADDRESS\", \"subject\": \"$FAIL_SUBJECT\", \"message\": \"$FAIL_MESSAGE\"}"
    FAIL_NOTIFICATION_URL=$(/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin/pycapo ${NOTIFICATION_CAPO_PROPERTY} | cut -d '"' -f 2)/email/send
    /bin/curl --location --request POST $FAIL_NOTIFICATION_URL --header 'Content-Type: application/json' --data "$FAIL_JSON"
    exit 1
fi

# Append address information to the analyzer JSON (strip the trailing '}' and re-close)
JSON="${ANALYZER_JSON%\}}"
JSON+=",\"destination_email\":\"$ADDRESS\"}"

# Send the email
/bin/curl --location --request POST $NOTIFICATION_URL --header 'Content-Type: application/json' --data "$JSON"

/bin/date > finished
#!/bin/sh
# pims_split write_finished_file template (2.8.3): marks the FINISH stage,
# gathers job metadata with pims_analyzer and emails it to the VLASS analyst,
# writes a "finished" timestamp file, then marks the FINISH stage complete.
# NOTE(review): [[ ]] and += are bashisms despite the sh shebang -- assumes
# /bin/sh is bash on the cluster hosts; confirm before changing the shebang.
SBIN_PATH=/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin
${SBIN_PATH}/update_stage FINISH
cd {{data_location}}

# Set up for emails
ADDRESS_CAPO_PROPERTY="edu.nrao.workspaces.NotificationSettings.vlassAnalystEmail"
ADDRESS=$(/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin/pycapo ${ADDRESS_CAPO_PROPERTY} | cut -d '"' -f 2)
NOTIFICATION_CAPO_PROPERTY="edu.nrao.workspaces.NotificationSettings.serviceUrl"
NOTIFICATION_URL=$(/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin/pycapo ${NOTIFICATION_CAPO_PROPERTY} | cut -d '"' -f 2)/notify/pims_notification/send

ANALYZER_JSON=$(/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin/pims_analyzer --id {{request_id}} 2> analyzer_call.err)

# The analyzer call failed ($? holds pims_analyzer's exit status via the assignment)
if [[ $? -ne 0 ]] ; then
    # stderr was redirected to analyzer_call.err above, so point the analyst there
    FAIL_MESSAGE="Error getting metadata for pims job, check {{data_location}}/analyzer_call.err for more information"
    FAIL_SUBJECT="Failure to analyze pims_split for {{vlass_product}}"
    # Escape the inner quotes so the JSON survives shell expansion; the previous
    # unescaped quotes ended the assignment at the first space and broke the payload
    FAIL_JSON="{\"destination_email\": \"$ADDRESS\", \"subject\": \"$FAIL_SUBJECT\", \"message\": \"$FAIL_MESSAGE\"}"
    FAIL_NOTIFICATION_URL=$(/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin/pycapo ${NOTIFICATION_CAPO_PROPERTY} | cut -d '"' -f 2)/email/send
    /bin/curl --location --request POST $FAIL_NOTIFICATION_URL --header 'Content-Type: application/json' --data "$FAIL_JSON"
    exit 1
fi

# Append address information to the analyzer JSON (strip the trailing '}' and re-close)
JSON="${ANALYZER_JSON%\}}"
JSON+=",\"destination_email\":\"$ADDRESS\"}"

# Send the email
/bin/curl --location --request POST $NOTIFICATION_URL --header 'Content-Type: application/json' --data "$JSON"

/bin/date > finished
${SBIN_PATH}/update_stage FINISH --complete
#!/bin/sh
# pims_split restore template (2.8.2, mustache-rendered): fetches the raw data
# and runs the CASA restore, unless an existing restore is being reused.
export HOME={{spool_dir}}
SBIN_PATH=/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin
cd {{spool_dir}}
{{^existing_restore}}
# No prior restore: fetch both products ($1 and $2 are product locators) and
# run the CASA restore driven by metadata.json / PPR.xml
chmod 770 .
cd rawdata/
$SBIN_PATH/productfetcher --product-locator $1 --product-locator $2
cd ../
$SBIN_PATH/casa_envoy --restore -c metadata.json PPR.xml
{{/existing_restore}}
{{#existing_restore}}
# Reusing an existing restore -- presumably a no-op placeholder so the DAG
# node still runs; confirm what the `null` binary does
$SBIN_PATH/null -n
{{/existing_restore}}
#!/bin/sh
# pims_split restore template (2.8.3, mustache-rendered): marks the RESTORE
# stage, fetches the raw data and runs the CASA restore (unless an existing
# restore is being reused), then marks the RESTORE stage complete.
export HOME={{spool_dir}}
SBIN_PATH=/lustre/aoc/cluster/pipeline/$CAPO_PROFILE/workspaces/sbin
${SBIN_PATH}/update_stage RESTORE
cd {{spool_dir}}
{{^existing_restore}}
# No prior restore: fetch both products ($1 and $2 are product locators) and
# run the CASA restore driven by metadata.json / PPR.xml
chmod 770 .
cd rawdata/
$SBIN_PATH/productfetcher --product-locator $1 --product-locator $2
cd ../
$SBIN_PATH/casa_envoy --restore -c metadata.json PPR.xml
{{/existing_restore}}
{{#existing_restore}}
# Reusing an existing restore -- presumably a no-op placeholder so the DAG
# node still runs; confirm what the `null` binary does
$SBIN_PATH/null -n
{{/existing_restore}}
${SBIN_PATH}/update_stage RESTORE --complete
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment