Skip to content
Snippets Groups Projects
Commit a38fc632 authored by Daniel Nemergut's avatar Daniel Nemergut
Browse files

Alembic script to add '*flagtemplate.txt' to the list of files to be transferred in the VLASS condor workflow templates

parent 20e36516
No related branches found
No related tags found
2 merge requests!1607merge 2.8.3 to main,!1602WS-1975 VLASS imaging flagtemplate in WS workflows
"""vlass image flagtemplate

Swaps the condor submit templates stored in the workflow_templates table so
that '*flagtemplate.txt' is included in transfer_input_files for the VLASS
imaging workflows (quicklook, seci, coarse).

Revision ID: ce2cca9f1220
Revises: d992b379d8a7
Create Date: 2024-03-11 10:35:18.654783
"""
from alembic import op
from pathlib import Path
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ce2cca9f1220'
down_revision = 'd992b379d8a7'
branch_labels = None
depends_on = None
# NOTE(review): binding at import time only works because Alembic imports this
# script inside an active migration context; the documented pattern is to call
# op.get_bind() inside upgrade()/downgrade() — TODO confirm and refactor.
conn = op.get_bind()
# Template filenames for the previous (2.8.2) and new (2.8.3) condor files;
# the 2.8.3 versions add '*flagtemplate.txt' to transfer_input_files.
old_condor = "envoy_condor_2.8.2.txt"
new_condor = "envoy_condor_2.8.3.txt"
def set_wf_content(wf_name: str, filename: str) -> bytes:
    """Return the bytes of a bundled workflow template file.

    Reads versions/templates/<wf_name>/<filename> relative to the current
    working directory (presumably the directory Alembic is invoked from —
    verify against the deployment layout) and returns its text re-encoded
    as UTF-8 bytes for the workflow_templates.content column.

    :param wf_name: template subdirectory, e.g. "vlass_quicklook"
    :param filename: template file name, e.g. "envoy_condor_2.8.3.txt"
    :return: file content as UTF-8 bytes (newlines normalized by read_text)
    """
    template = Path.cwd() / "versions" / "templates" / wf_name / filename
    # Pin the encoding: plain read_text() decodes with the locale default,
    # which can mis-decode the template on non-UTF-8 systems before the
    # UTF-8 .encode() round-trip.
    return template.read_text(encoding="utf-8").encode()
def upgrade():
    """Install the 2.8.3 condor templates (which transfer '*flagtemplate.txt')
    for the three VLASS imaging workflows.
    """
    # (workflow_templates.filename, template directory under versions/templates)
    targets = (
        ("vlass_ql_envoy.condor", "vlass_quicklook"),
        ("vlass_seci_envoy.condor", "vlass_seci"),
        ("vlass_coarse_envoy.condor", "vlass_coarse"),
    )
    for db_filename, wf_dir in targets:
        # Plain (non-f) SQL string: the originals were f-strings with no
        # placeholders. Both values are bound as DBAPI parameters.
        conn.execute(
            """
            UPDATE workflow_templates
            SET content = %s
            WHERE filename = %s
            """,
            (set_wf_content(wf_dir, new_condor), db_filename),
        )
def downgrade():
    """Restore the previous 2.8.2 condor templates (without
    '*flagtemplate.txt') for the three VLASS imaging workflows.
    """
    # (workflow_templates.filename, template directory under versions/templates)
    targets = (
        ("vlass_ql_envoy.condor", "vlass_quicklook"),
        ("vlass_seci_envoy.condor", "vlass_seci"),
        ("vlass_coarse_envoy.condor", "vlass_coarse"),
    )
    for db_filename, wf_dir in targets:
        # Plain (non-f) SQL string: the originals were f-strings with no
        # placeholders. Both values are bound as DBAPI parameters.
        conn.execute(
            """
            UPDATE workflow_templates
            SET content = %s
            WHERE filename = %s
            """,
            (set_wf_content(wf_dir, old_condor), db_filename),
        )
executable = vlass_coarse_envoy.sh
arguments = metadata.json PPR.xml {{requested_parallel}}
output = envoy.out
error = envoy.err
log = condor.log
VLASS_DIR = {{data_location}}
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
VLASS_BIN = /lustre/aoc/cluster/pipeline/vlass_{{vlass_env}}/workspaces/bin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/update_stage, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(VLASS_BIN)/planescraper, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products planes.json"
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_cpus = {{requested_parallel}}
request_memory = 150G
request_disk = 950G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True) && (HasLustre =!= True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) && (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
executable = vlass_coarse_envoy.sh
arguments = metadata.json PPR.xml {{requested_parallel}}
output = envoy.out
error = envoy.err
log = condor.log
VLASS_DIR = {{data_location}}
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
VLASS_BIN = /lustre/aoc/cluster/pipeline/vlass_{{vlass_env}}/workspaces/bin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/update_stage, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(VLASS_BIN)/planescraper, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/*flagtemplate.txt, nraorsync://$(VLASS_DIR)/metadata.json
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products planes.json"
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_cpus = {{requested_parallel}}
request_memory = 150G
request_disk = 950G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True) && (HasLustre =!= True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) && (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
executable = vlass_ql_envoy.sh
arguments = metadata.json PPR.xml {{request_id}}
output = envoy.out
error = envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
VLASS_DIR = {{data_location}}
should_transfer_files = yes
transfer_input_files = {{#radial}}$ENV(HOME)/.ssh/condor_ssh_config, {{/radial}}$ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/update_stage, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json{{files_to_transfer}}
transfer_output_files = .job.ad
+nrao_output_files = "working products"
when_to_transfer_output = ON_EXIT
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
{{#radial}}
universe = grid
grid_resource = condor radialhead.nrao.radial.local radialhead.nrao.radial.local
+remote_jobuniverse = 5
+remote_requirements = True
+remote_ShouldTransferFiles = "YES"
+remote_WhenToTransferOutput = "ON_EXIT"
{{/radial}}
request_memory = 31G
request_disk = 100G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True) && (HasLustre =!= True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) && (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
executable = vlass_ql_envoy.sh
arguments = metadata.json PPR.xml {{request_id}}
output = envoy.out
error = envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
VLASS_DIR = {{data_location}}
should_transfer_files = yes
transfer_input_files = {{#radial}}$ENV(HOME)/.ssh/condor_ssh_config, {{/radial}}$ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/update_stage, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/*flagtemplate.txt, nraorsync://$(VLASS_DIR)/metadata.json{{files_to_transfer}}
transfer_output_files = .job.ad
+nrao_output_files = "working products"
when_to_transfer_output = ON_EXIT
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
{{#radial}}
universe = grid
grid_resource = condor radialhead.nrao.radial.local radialhead.nrao.radial.local
+remote_jobuniverse = 5
+remote_requirements = True
+remote_ShouldTransferFiles = "YES"
+remote_WhenToTransferOutput = "ON_EXIT"
{{/radial}}
request_memory = 31G
request_disk = 100G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True) && (HasLustre =!= True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) && (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
executable = vlass_seci_envoy.sh
arguments = metadata.json PPR.xml
output = envoy.out
error = envoy.err
log = condor.log
VLASS_DIR = {{data_location}}
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/update_stage, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products"
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_memory = 50G
request_disk = 200G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True) && (HasLustre =!= True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) && (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
executable = vlass_seci_envoy.sh
arguments = metadata.json PPR.xml
output = envoy.out
error = envoy.err
log = condor.log
VLASS_DIR = {{data_location}}
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/update_stage, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/*flagtemplate.txt, nraorsync://$(VLASS_DIR)/metadata.json
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products"
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_memory = 50G
request_disk = 200G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True) && (HasLustre =!= True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) && (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment