Commit fff2a745 authored by Daniel Nemergut, committed by Charlotte Hausman

VLASS coarse cube migration script

parent b68bc3b4
1 merge request: !1110 VLASS coarse cube migration script
Pipeline #7043 passed
"""VLASS coarse cube conversion
Revision ID: af13ebf9e040
Revises: 1aacf11f8ba4
Create Date: 2022-10-18 11:12:10.659251
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'af13ebf9e040'
down_revision = '1aacf11f8ba4'
branch_labels = None
depends_on = None
old_fetch_condor = """executable = vlass_coarse_fetch.sh
arguments = {{reimaging_locator}} {{parameter_locator}}
output = vlass_coarse_fetch.out
error = vlass_coarse_fetch.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
SPOOL_DIR = {{data_location}}
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/productfetcher, nraorsync://$(SPOOL_DIR)/rawdata
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "rawdata"
output_destination = nraorsync://$(SPOOL_DIR)
+WantIOProxy = True
getenv = True
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
queue
"""
new_fetch_condor = """executable = vlass_coarse_fetch.sh
arguments = {{calibration_name}} {{phase_center}} {{data_location}}/rawdata {{reimaging_locator}} {{parameter_locator}}
output = vlass_coarse_fetch.out
error = vlass_coarse_fetch.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
PIMS_PATH = /lustre/aoc/cluster/pipeline/vlass_{{vlass_env}}/workspaces/bin
SPOOL_DIR = {{data_location}}
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(PIMS_PATH)/pimscache, nraorsync://$(SBIN_PATH)/productfetcher, nraorsync://$(SPOOL_DIR)/rawdata
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "rawdata"
output_destination = nraorsync://$(SPOOL_DIR)
+WantIOProxy = True
getenv = True
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
queue
"""
old_fetch_sh = """#!/bin/sh
set -o errexit
export HOME=$TMPDIR
chmod 770 .
cd rawdata/
../productfetcher --product-locator $1 --product-locator $2
cd ../
"""
new_fetch_sh = """#!/bin/sh
set -o errexit
export HOME=$TMPDIR
chmod 770 .
cd rawdata/
../pimscache ln -c $1 -p $2 $3
../productfetcher --product-locator $4 --product-locator $5
cd ../
"""
old_dag = """JOB FETCH vlass_coarse_fetch.condor
VARS FETCH jobname="$(JOB)"
JOB ENVOY vlass_coarse_envoy.condor
VARS ENVOY jobname="$(JOB)"
PARENT FETCH CHILD ENVOY
"""
new_dag = """JOB FETCH vlass_coarse_fetch.condor
VARS FETCH jobname="$(JOB)"
JOB ENVOY vlass_coarse_envoy.condor
VARS ENVOY jobname="$(JOB)"
JOB POST vlass_coarse_post.condor
VARS POST jobname="$(JOB)"
PARENT FETCH CHILD ENVOY
PARENT ENVOY CHILD POST
"""
new_post = """executable = vlass_coarse_post.sh
arguments = metadata.json {{manager_job_id}}
output = post.out
error = post.err
log = condor.log
should_transfer_files = NO
getenv = True
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
queue
"""
new_post_vlass = """#!/bin/sh
set -o errexit
export CASAVERS=$(/lustre/aoc/cluster/pipeline/dsoc-{{vlass_env}}/workspaces/sbin/pycapo -q edu.nrao.workspaces.ProcessingSettings.CasaVersion.vlass)/bin/casa
/lustre/aoc/cluster/pipeline/dsoc-{{vlass_env}}/workspaces/sbin/conveyor --vlass $1
source /lustre/aoc/cluster/pipeline/vlass_{{vlass_env}}/workspaces/bin/activate
export CAPO_PROFILE=vlass.{{vlass_env}}
JOB_ID=$2
printf "JOB_ID = %s
" $JOB_ID
cd working/
CASA_LOG_FILE=`ls casa-*.log`
../../../../workspaces/bin/annotateQaSECube.py --jobid $JOB_ID $CASA_LOG_FILE
deactivate
cd ../products
/lustre/aoc/cluster/pipeline/vlass_{{vlass_env}}/workspaces/bin/create-cube-component-list.sh $JOB_ID
"""
def upgrade():
    op.execute(f"""update workflow_templates set content=E'{new_fetch_condor}' where filename='vlass_coarse_fetch.condor'""")
    op.execute(f"""update workflow_templates set content=E'{new_fetch_sh}' where filename='vlass_coarse_fetch.sh'""")
    op.execute(f"""update workflow_templates set content=E'{new_dag}' where filename='vlass_coarse.dag'""")
    op.execute(f"""insert into workflow_templates (filename, content, workflow_name) VALUES ('vlass_coarse_post.condor', E'{new_post}', 'vlass_coarse')""")
    op.execute(f"""insert into workflow_templates (filename, content, workflow_name) VALUES ('vlass_coarse_post.sh', E'{new_post_vlass}', 'vlass_coarse')""")


def downgrade():
    op.execute(f"""update workflow_templates set content=E'{old_fetch_condor}' where filename='vlass_coarse_fetch.condor'""")
    op.execute(f"""update workflow_templates set content=E'{old_fetch_sh}' where filename='vlass_coarse_fetch.sh'""")
    op.execute(f"""update workflow_templates set content=E'{old_dag}' where filename='vlass_coarse.dag'""")
    op.execute(f"""delete from workflow_templates where filename='vlass_coarse_post.condor'""")
    op.execute(f"""delete from workflow_templates where filename='vlass_coarse_post.sh'""")
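A minimal sketch of how this revision would typically be applied or rolled back from the Alembic command line (assuming the project's alembic.ini and database connection are already configured):

    alembic upgrade af13ebf9e040      # install the new fetch/DAG templates and add the post-processing step
    alembic downgrade 1aacf11f8ba4    # restore the previous templates and remove the post-processing rows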