Skip to content
Snippets Groups Projects
Commit 8900acde authored by Nathan Bockisch's avatar Nathan Bockisch
Browse files

Hopefully fixed DAG, null call, and redundant VLASS_DIR variable

parent 809dc3af
No related branches found
No related tags found
No related merge requests found
This commit is part of merge request !1076. Comments created here will be created in the context of that merge request.
......@@ -18,15 +18,15 @@ depends_on = None
# Main DAG for the workflow
pims_split_dag = """JOB RESTORE restore.condor
{#splits}
JOB {{.}} split_workflow.condor
{{#splits}}
JOB {{.}} split.condor
VARS {{.}} jobname="$(JOB)" split_dir="$({{.}})"
{/splits}
{{/splits}}
JOB FINISH write_finished_file.condor
PARENT RESTORE CHILD SPLIT
PARENT SPLIT CHILD FINISH
PARENT RESTORE CHILD {{#splits}}{{.}} {{/splits}}
PARENT {{#splits}}{{.}} {{/splits}}CHILD FINISH
"""
# Restore job
......@@ -39,10 +39,9 @@ log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
SPOOL_DIR = {{spool_dir}}
VLASS_DIR = {{data_location}}
should_transfer_files = yes
{{^existingRestore}}
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/pycapo, nraosync://$(VLASS_DIR)/rawdata, nraosync://$(VLASS_DIR)/products, nraosync://$(VLASS_DIR)/working, nraosync://$(VLASS_DIR)/metadata.json, nraosync://$(VLASS_DIR)/PPR.xml, nraorsync://$(SBIN_PATH)/productfetcher, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SPOOL_DIR)/rawdata, nraorsync://$(SPOOL_DIR)/products, nraorsync://$(SPOOL_DIR)/working, nraorsync://$(SPOOL_DIR)/metadata.json, nraorsync://$(SPOOL_DIR)/PPR.xml, nraorsync://$(SBIN_PATH)/productfetcher, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela
transfer_output_files = .job.ad
+nrao_output_files = "rawdata working products"
......@@ -72,21 +71,20 @@ cd ../
./casa_envoy --restore -c metadata.json PPR.xml
{{/existingRestore}}
{{#existingRestore}}
./null $*
./null -g
{{/existingRestore}}
"""
# Workflow run on each split
split_workflow_condor = """executable = split_workflow.sh
split_condor = """executable = split.sh
arguments = "$(split_dir)"
SPLIT_DIR = "$(split_dir)"
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
VLASS_DIR = {{data_location}}
SPOOL_DIR = {{spool_dir}}
PIMS_PATH = /lustre/aoc/cluster/pipeline/vlass_{{vlass_env}}/workspaces/bin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(PIMS_PATH)/pimscache, nraosync://$(VLASS_DIR)/metadata.json, nraosync://$(VLASS_DIR)/tiles/$(SPLIT_DIR)
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(PIMS_PATH)/pimscache, nraorsync://$(SPOOL_DIR)/metadata.json, nraorsync://$(SPOOL_DIR)/tiles/$(SPLIT_DIR)
transfer_output_files = .job.ad
output_destination = nraorsync://$(SPOOL_DIR)
+WantIOProxy = True
......@@ -100,7 +98,7 @@ queue
"""
split_workflow_sh = """#!/bin/sh
split_sh = """#!/bin/sh
set -o errexit
# Something to link measurement set to rawdata directory
......@@ -193,14 +191,14 @@ def upgrade():
op.execute(
f"""
INSERT INTO workflow_templates (filename, content, workflow_name)
VALUES ('split_workflow.condor', E'{split_workflow_condor}', 'pims_split')
VALUES ('split.condor', E'{split_condor}', 'pims_split')
"""
)
op.execute(
f"""
INSERT INTO workflow_templates (filename, content, workflow_name)
VALUES ('split_workflow.sh', E'{split_workflow_sh}', 'pims_split')
VALUES ('split.sh', E'{split_sh}', 'pims_split')
"""
)
......
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment