Skip to content
Snippets Groups Projects
Commit b8daf3ce authored by Nathan Bockisch's avatar Nathan Bockisch
Browse files

WS-1094: Removed rawdata transfer back from workflow envoy steps

parent abdbb132
No related branches found
No related tags found
1 merge request!1085WS-1094: Removed rawdata transfer back from workflow envoy steps
Pipeline #6872 passed
"""remove unnecessary rawdata output
Revision ID: 637935ee1583
Revises: d15c553334fe
Create Date: 2022-10-03 10:17:37.525120
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '637935ee1583'
down_revision = 'd15c553334fe'
branch_labels = None
depends_on = None
# Pre-migration restore_cms envoy Condor submit template: note "rawdata" in
# +nrao_output_files, meaning the rawdata directory is transferred back to
# the spool dir when the job exits.  Kept verbatim for downgrade().
old_restore_cms_envoy = """executable = restore_cms_envoy.sh
arguments = metadata.json PPR.xml
output = restore_envoy.out
error = restore_envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
SPOOL_DIR = {{spool_dir}}
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SPOOL_DIR)/working, nraorsync://$(SPOOL_DIR)/rawdata, nraorsync://$(SPOOL_DIR)/products{{#remote}}, nraorsync://$(SPOOL_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(SPOOL_DIR)/PPR.xml, nraorsync://$(SPOOL_DIR)/metadata.json{{files_to_transfer}}
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working rawdata products"
output_destination = nraorsync://$(SPOOL_DIR)
+WantIOProxy = True
request_memory = {{ramInGb}}
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = HasLustre == True
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
# Post-migration restore_cms envoy Condor submit template: identical to the
# old template except "rawdata" is dropped from +nrao_output_files, so raw
# data is no longer transferred back from the execute node (WS-1094).
new_restore_cms_envoy = """executable = restore_cms_envoy.sh
arguments = metadata.json PPR.xml
output = restore_envoy.out
error = restore_envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
SPOOL_DIR = {{spool_dir}}
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SPOOL_DIR)/working, nraorsync://$(SPOOL_DIR)/rawdata, nraorsync://$(SPOOL_DIR)/products{{#remote}}, nraorsync://$(SPOOL_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(SPOOL_DIR)/PPR.xml, nraorsync://$(SPOOL_DIR)/metadata.json{{files_to_transfer}}
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products"
output_destination = nraorsync://$(SPOOL_DIR)
+WantIOProxy = True
request_memory = {{ramInGb}}
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = HasLustre == True
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
# Pre-migration std_calibration envoy Condor submit template: still lists
# "rawdata" in +nrao_output_files.  Kept verbatim for downgrade().
old_std_calibration_envoy = """executable = std_calibration_envoy.sh
arguments = metadata.json PPR.xml
output = calibration_envoy.out
error = calibration_envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
SPOOL_DIR = {{spool_dir}}
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SPOOL_DIR)/working, nraorsync://$(SPOOL_DIR)/rawdata, nraorsync://$(SPOOL_DIR)/products{{#remote}}, nraorsync://$(SPOOL_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(SPOOL_DIR)/PPR.xml, nraorsync://$(SPOOL_DIR)/metadata.json{{files_to_transfer}}
transfer_output_files = .job.ad
+nrao_output_files = "working rawdata products"
when_to_transfer_output = ON_EXIT
output_destination = nraorsync://$(SPOOL_DIR)
+WantIOProxy = True
request_memory = {{ramInGb}}
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = HasLustre == True
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
# Post-migration std_calibration envoy Condor submit template: "rawdata"
# removed from +nrao_output_files so it is not transferred back (WS-1094).
new_std_calibration_envoy = """executable = std_calibration_envoy.sh
arguments = metadata.json PPR.xml
output = calibration_envoy.out
error = calibration_envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
SPOOL_DIR = {{spool_dir}}
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SPOOL_DIR)/working, nraorsync://$(SPOOL_DIR)/rawdata, nraorsync://$(SPOOL_DIR)/products{{#remote}}, nraorsync://$(SPOOL_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(SPOOL_DIR)/PPR.xml, nraorsync://$(SPOOL_DIR)/metadata.json{{files_to_transfer}}
transfer_output_files = .job.ad
+nrao_output_files = "working products"
when_to_transfer_output = ON_EXIT
output_destination = nraorsync://$(SPOOL_DIR)
+WantIOProxy = True
request_memory = {{ramInGb}}
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = HasLustre == True
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
# Pre-migration std_cms_imaging envoy Condor submit template: still lists
# "rawdata" in +nrao_output_files.  Kept verbatim for downgrade().
# NOTE(review): "when_to_transfer_files" looks like a typo for HTCondor's
# "when_to_transfer_output" — it appears in both old and new templates, so
# it is preserved byte-for-byte here; confirm separately whether it matters.
old_std_cms_imaging_envoy = """executable = std_cms_imaging_envoy.sh
arguments = metadata.json PPR.xml
output = imaging_envoy.out
error = imaging_envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
CMS_PATH = {{cms_path}}
SPOOL_DIR = {{spool_dir}}
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/framework.sh, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela{{#remote}}, nraorsync://$(SPOOL_DIR){{profile}}.properties{{/remote}}, nraorsync://$(SPOOL_DIR)/PPR.xml, nraorsync://$(SPOOL_DIR)/metadata.json, nraorsync://$(CMS_PATH)/{{sdmId}}.ms{{files_to_transfer}}
when_to_transfer_files = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working rawdata products"
output_destination = nraorsync://$(SPOOL_DIR)
+WantIOProxy = True
request_memory = {{ramInGb}}
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = HasLustre == True
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
# Post-migration std_cms_imaging envoy Condor submit template: "rawdata"
# removed from +nrao_output_files so it is not transferred back (WS-1094).
new_std_cms_imaging_envoy = """executable = std_cms_imaging_envoy.sh
arguments = metadata.json PPR.xml
output = imaging_envoy.out
error = imaging_envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
CMS_PATH = {{cms_path}}
SPOOL_DIR = {{spool_dir}}
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/framework.sh, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela{{#remote}}, nraorsync://$(SPOOL_DIR){{profile}}.properties{{/remote}}, nraorsync://$(SPOOL_DIR)/PPR.xml, nraorsync://$(SPOOL_DIR)/metadata.json, nraorsync://$(CMS_PATH)/{{sdmId}}.ms{{files_to_transfer}}
when_to_transfer_files = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products"
output_destination = nraorsync://$(SPOOL_DIR)
+WantIOProxy = True
request_memory = {{ramInGb}}
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = HasLustre == True
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
# Pre-migration std_restore_imaging envoy Condor submit template: still
# lists "rawdata" in +nrao_output_files.  Kept verbatim for downgrade().
old_std_restore_imaging_envoy = """executable = std_restore_imaging_envoy.sh
arguments = metadata.json PPR.xml
output = restore_img_envoy.out
error = restore_img_envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
SPOOL_DIR = {{spool_dir}}
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/framework.sh, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SPOOL_DIR)/working, nraorsync://$(SPOOL_DIR)/rawdata, nraorsync://$(SPOOL_DIR)/products{{#remote}}, nraorsync://$(SPOOL_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(SPOOL_DIR)/PPR.xml, nraorsync://$(SPOOL_DIR)/metadata.json{{files_to_transfer}}
when_to_transfer_files = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working rawdata products"
output_destination = nraorsync://$(SPOOL_DIR)
+WantIOProxy = True
request_memory = {{ramInGb}}
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = HasLustre == True
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
# Post-migration std_restore_imaging envoy Condor submit template: "rawdata"
# removed from +nrao_output_files so it is not transferred back (WS-1094).
new_std_restore_imaging_envoy = """executable = std_restore_imaging_envoy.sh
arguments = metadata.json PPR.xml
output = restore_img_envoy.out
error = restore_img_envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
SPOOL_DIR = {{spool_dir}}
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/framework.sh, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SPOOL_DIR)/working, nraorsync://$(SPOOL_DIR)/rawdata, nraorsync://$(SPOOL_DIR)/products{{#remote}}, nraorsync://$(SPOOL_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(SPOOL_DIR)/PPR.xml, nraorsync://$(SPOOL_DIR)/metadata.json{{files_to_transfer}}
when_to_transfer_files = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products"
output_destination = nraorsync://$(SPOOL_DIR)
+WantIOProxy = True
request_memory = {{ramInGb}}
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = HasLustre == True
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
def upgrade():
    """Install the new envoy Condor templates (rawdata transfer removed).

    Updates the ``content`` column of ``workflow_templates`` for the four
    affected workflow/template pairs, replacing each template with its
    ``new_*`` counterpart (identical except ``rawdata`` is dropped from
    ``+nrao_output_files``).
    """
    # (workflow_name, filename, replacement content) for each affected row.
    updates = [
        ("restore_cms", "restore_cms_envoy.condor", new_restore_cms_envoy),
        ("std_calibration", "std_calibration_envoy.condor", new_std_calibration_envoy),
        ("std_cms_imaging", "std_cms_imaging_envoy.condor", new_std_cms_imaging_envoy),
        ("std_restore_imaging", "std_restore_imaging_envoy.condor", new_std_restore_imaging_envoy),
    ]
    for workflow_name, filename, content in updates:
        # Bind the template as a parameter rather than f-string-interpolating
        # it into an E'...' literal: the multi-line template is then stored
        # exactly as written, and a future single quote or backslash in a
        # template cannot corrupt or break the SQL statement.
        op.execute(
            sa.text(
                "UPDATE workflow_templates SET content = :content "
                "WHERE workflow_name = :workflow_name AND filename = :filename"
            ).bindparams(content=content, workflow_name=workflow_name, filename=filename)
        )
def downgrade():
    """Restore the previous envoy Condor templates (rawdata transfer back).

    Reverses :func:`upgrade` by writing the ``old_*`` templates — which still
    list ``rawdata`` in ``+nrao_output_files`` — back into the ``content``
    column of ``workflow_templates`` for the four affected rows.
    """
    # (workflow_name, filename, original content) for each affected row.
    reverts = [
        ("restore_cms", "restore_cms_envoy.condor", old_restore_cms_envoy),
        ("std_calibration", "std_calibration_envoy.condor", old_std_calibration_envoy),
        ("std_cms_imaging", "std_cms_imaging_envoy.condor", old_std_cms_imaging_envoy),
        ("std_restore_imaging", "std_restore_imaging_envoy.condor", old_std_restore_imaging_envoy),
    ]
    for workflow_name, filename, content in reverts:
        # Bound parameters instead of f-string interpolation into an E'...'
        # literal: stores the template byte-for-byte and is immune to quoting
        # problems if a template ever gains a single quote or backslash.
        op.execute(
            sa.text(
                "UPDATE workflow_templates SET content = :content "
                "WHERE workflow_name = :workflow_name AND filename = :filename"
            ).bindparams(content=content, workflow_name=workflow_name, filename=filename)
        )
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment