Skip to content
Snippets Groups Projects

fix remote vs. local for vlass

Merged Charlotte Hausman requested to merge fix_remote_vs_local_cube_templates into main
2 files
+ 357
1
Compare changes
  • Side-by-side
  • Inline
Files
2
"""fix remote vs local cube templates
Revision ID: 55e5b37d1ccf
Revises: 5dd6938d2307
Create Date: 2022-09-12 14:31:55.228979
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = "55e5b37d1ccf"
down_revision = "5dd6938d2307"
branch_labels = None
depends_on = None
seci_condor = """executable = vlass_seci.sh
arguments = metadata.json PPR.xml
output = seci.out
error = seci.err
log = condor.log
VLASS_DIR = {{data_location}}
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products"
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_memory = 50G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
old_seci_condor = """executable = vlass_seci.sh
arguments = metadata.json PPR.xml
output = seci.out
error = seci.err
log = condor.log
VLASS_DIR = {{data_location}}
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/framework.sh, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products, nraorsync://$(VLASS_DIR)/{{profile}}.properties, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "rawdata working products"
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_memory = 50G
getenv = True
environment = "CAPO_PATH=."
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
seci_script = """#!/bin/sh
export HOME=$TMPDIR
{{#remote}}
export CAPO_PATH=.
export CAPO_PROFILE={{profile}}
{{/remote}}
set -o errexit
./casa_envoy --vlass-img $1 $2
"""
old_seci_script = """#!/bin/sh
export HOME=$TMPDIR
set -o errexit
./casa_envoy --vlass-img $1 $2
"""
cube_condor = """executable = vlass_coarse_envoy.sh
arguments = metadata.json PPR.xml {{requested_parallel}}
output = vlass_coarse_envoy.out
error = vlass_coarse_envoy.err
log = condor.log
VLASS_DIR = {{data_location}}
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products"
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_cpus = {{requested_parallel}}
request_memory = 150G
request_disk = 850G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
old_cube_condor = """executable = vlass_coarse_envoy.sh
arguments = metadata.json PPR.xml {{requested_parallel}}
output = vlass_coarse_envoy.out
error = vlass_coarse_envoy.err
log = condor.log
VLASS_DIR = {{data_location}}
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/framework.sh, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products, nraorsync://$(VLASS_DIR)/{{profile}}.properties, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "rawdata working products"
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_cpus = {{requested_parallel}}
request_memory = 150G
request_disk = 850G
getenv = True
environment = "CAPO_PATH=."
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
cube_script = """#!/bin/sh
export HOME=$TMPDIR
{{#remote}}
export CAPO_PATH=.
export CAPO_PROFILE={{profile}}
{{/remote}}
set -o errexit
./casa_envoy --vlass-img $1 $2 --parallel $3
"""
old_cube_script = """#!/bin/sh
export HOME=$TMPDIR
set -o errexit
./casa_envoy --vlass-img $1 $2 --parallel $3
"""
ql_condor = """executable = vlass_ql_envoy.sh
arguments = metadata.json PPR.xml {{request_id}}
output = ql_envoy.out
error = ql_envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
VLASS_DIR = {{data_location}}
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json{{files_to_transfer}}
transfer_output_files = .job.ad
+nrao_output_files = "working products"
when_to_transfer_output = ON_EXIT
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_memory = {{ramInGb}}
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
old_ql_condor = """executable = vlass_ql_envoy.sh
arguments = metadata.json PPR.xml {{request_id}}
output = ql_envoy.out
error = ql_envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
VLASS_DIR = {{data_location}}
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json{{files_to_transfer}}
transfer_output_files = .job.ad
+nrao_output_files = "working rawdata products"
when_to_transfer_output = ON_EXIT
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_memory = {{ramInGb}}
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = HasLustre == True
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
ql_script = """#!/bin/sh
export HOME=$TMPDIR
{{#remote}}
export CAPO_PATH=.
export CAPO_PROFILE={{profile}}
{{/remote}}
set -o errexit
./casa_envoy --vlass-img $1 $2
"""
old_ql_script = """#!/bin/sh
export HOME=$TMPDIR
set -o errexit
./casa_envoy --vlass-img $1 $2
"""
def upgrade():
    """Install the remote-aware versions of the VLASS workflow templates.

    Replaces the stored HTCondor submit descriptions and wrapper scripts
    for the SECI, coarse-cube, and quicklook imaging workflows with the
    versions whose CAPO configuration and file transfers depend on the
    {{remote}} Mustache flag.
    """
    # filename in workflow_templates -> new template body. Driving the six
    # UPDATEs from one table keeps each filename paired with its content
    # exactly once instead of six copy-pasted statements.
    new_templates = {
        "vlass_seci.condor": seci_condor,
        "vlass_seci.sh": seci_script,
        "vlass_coarse_envoy.condor": cube_condor,
        "vlass_coarse_envoy.sh": cube_script,
        "vlass_ql_envoy.condor": ql_condor,
        "vlass_ql_envoy.sh": ql_script,
    }
    for filename, content in new_templates.items():
        # E'...' escape-string syntax assumes PostgreSQL. Embedding the
        # template directly in the literal is safe here because the bodies
        # are trusted module constants containing no single quotes or
        # backslashes.
        op.execute(
            f"""
            UPDATE workflow_templates
            SET content=E'{content}' WHERE filename='{filename}'
            """
        )
def downgrade():
    """Restore the previous versions of the VLASS workflow templates.

    Writes the pre-migration HTCondor submit descriptions and wrapper
    scripts (the ``old_*`` module constants) back into the
    workflow_templates table, reverting upgrade().
    """
    # filename in workflow_templates -> prior template body; mirrors the
    # mapping used in upgrade() so the pairing lives in one place.
    prior_templates = {
        "vlass_seci.condor": old_seci_condor,
        "vlass_seci.sh": old_seci_script,
        "vlass_coarse_envoy.condor": old_cube_condor,
        "vlass_coarse_envoy.sh": old_cube_script,
        "vlass_ql_envoy.condor": old_ql_condor,
        "vlass_ql_envoy.sh": old_ql_script,
    }
    for filename, content in prior_templates.items():
        # E'...' escape-string syntax assumes PostgreSQL; template bodies
        # are trusted constants with no single quotes or backslashes.
        op.execute(
            f"""
            UPDATE workflow_templates
            SET content=E'{content}' WHERE filename='{filename}'
            """
        )
Loading