Commit abdbb132 authored by Charlotte Hausman

update the pims templates post testing

parent 6392ba48
Tags: end-of-sprint-46
1 merge request: !1084 update the pims templates post testing
Pipeline #6859 passed

@@ -10,23 +10,26 @@ import sqlalchemy as sa
 # revision identifiers, used by Alembic.
-revision = 'e8e6d54d8444'
-down_revision = '55e5b37d1ccf'
+revision = "e8e6d54d8444"
+down_revision = "55e5b37d1ccf"
 branch_labels = None
 depends_on = None
 # Main DAG for the workflow
 pims_split_dag = """JOB RESTORE restore.condor
 VARS RESTORE jobname="$(JOB)"
 {{#splits}}
-JOB {{.}} split.condor
-VARS {{.}} jobname="$(JOB)" split_dir="$({{.}})"
+JOB {{#make_condor_jobname}}{{.}}{{/make_condor_jobname}} split.condor
+VARS {{#make_condor_jobname}}{{.}}{{/make_condor_jobname}} jobname="$(JOB)" split_dir="{{.}}"
 {{/splits}}
 JOB FINISH write_finished_file.condor
 VARS FINISH jobname="$(JOB)"
-PARENT RESTORE CHILD {{#splits}}{{.}} {{/splits}}
-PARENT {{#splits}}{{.}} {{/splits}} CHILD FINISH
+PARENT RESTORE CHILD {{#splits}}{{#make_condor_jobname}}{{.}}{{/make_condor_jobname}} {{/splits}}
+PARENT {{#splits}}{{#make_condor_jobname}}{{.}}{{/make_condor_jobname}} {{/splits}} CHILD FINISH
 """
 # Restore job
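Note on the DAG change above: split directory values are tile/phase-center paths containing "/", which is not usable in a DAGMan JOB name (the old template even tried to dereference the split name as a submit macro via split_dir="$({{.}})"). The new template pipes each value through a make_condor_jobname Mustache lambda for the JOB/VARS/PARENT node names while passing the raw path through as split_dir. The lambda itself is defined on the workflow-service side and is not part of this diff; a minimal sketch of what it plausibly does:

    def make_condor_jobname(split_dir: str) -> str:
        """Turn a split directory such as "T10t01/J0000-00.000" (illustrative
        value) into a string legal as an HTCondor DAGMan node name."""
        # Hypothetical implementation; the real lambda may sanitize differently.
        return split_dir.replace("/", "_")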
@@ -40,20 +43,23 @@ log = condor.log
 SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
 SPOOL_DIR = {{spool_dir}}
 should_transfer_files = yes
-{{^existingRestore}}
-transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/pycapo, nraosync://$(SPOOL_DIR)/rawdata, nraosync://$(SPOOL_DIR)/products, nraosync://$(SPOOL_DIR)/working, nraosync://$(SPOOL_DIR)/metadata.json, nraosync://$(SPOOL_DIR)/PPR.xml, nraorsync://$(SBIN_PATH)/productfetcher, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela
+{{^existing_restore}}
+transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SPOOL_DIR)/rawdata, nraorsync://$(SPOOL_DIR)/products, nraorsync://$(SPOOL_DIR)/working, nraorsync://$(SPOOL_DIR)/metadata.json, nraorsync://$(SPOOL_DIR)/PPR.xml, nraorsync://$(SBIN_PATH)/productfetcher, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela
 when_to_transfer_output = ON_EXIT
 transfer_output_files = .job.ad
 +nrao_output_files = "rawdata working products"
 output_destination = nraorsync://$(SPOOL_DIR)
 +WantIOProxy = True
-{{/existingRestore}}
-{{#existingRestore}}
-transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraosync://$(SBIN_PATH)/null
-{{/existingRestore}}
+{{/existing_restore}}
+{{#existing_restore}}
+transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/null
++WantIOProxy = True
+{{/existing_restore}}
 getenv = True
 environment = "CAPO_PATH=/home/casa/capo"
 request_memory = 62G
 requirements = HasLustre == True
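The hunk above renames the camelCase existingRestore flag to existing_restore and fixes the nraosync:// typos to nraorsync://. For readers unfamiliar with the Mustache section syntax used here: {{^key}} is an inverted section (rendered when the key is falsy or missing) and {{#key}} a normal section (rendered when truthy), so exactly one of the two transfer_input_files branches survives rendering. A sketch of that behavior; the use of pystache and all values below are assumptions for illustration, not taken from this diff:

    import pystache  # assuming a standard Mustache renderer

    snippet = (
        "{{^existing_restore}}run the full restore{{/existing_restore}}"
        "{{#existing_restore}}reuse {{existing_restore}}{{/existing_restore}}"
    )
    # Falsy/missing key -> the inverted {{^...}} section renders:
    print(pystache.render(snippet, {"existing_restore": None}))
    # -> run the full restore
    # Truthy value -> the {{#...}} section renders instead:
    print(pystache.render(snippet, {"existing_restore": "/path/to/existing.ms"}))
    # -> reuse /path/to/existing.ms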
@@ -63,34 +69,42 @@ queue
 restore_sh = """#!/bin/sh
 set -o errexit
-{{^existingRestore}}
+export HOME=$TMPDIR
+{{^existing_restore}}
 chmod 770 .
 cd rawdata/
 ../productfetcher --product-locator $1 --product-locator $2
 cd ../
 ./casa_envoy --restore -c metadata.json PPR.xml
-{{/existingRestore}}
-{{#existingRestore}}
-./null -g
-{{/existingRestore}}
+{{/existing_restore}}
+{{#existing_restore}}
+./null -n
+{{/existing_restore}}
 """
 # Workflow run on each split
 split_condor = """executable = split.sh
 arguments = "$(split_dir)"
 output = tiles/$(split_dir)/split.out
 error = tiles/$(split_dir)/split.err
 log = condor.log
 SPLIT_DIR = "$(split_dir)"
 SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
 SPOOL_DIR = {{spool_dir}}
 PIMS_PATH = /lustre/aoc/cluster/pipeline/vlass_{{vlass_env}}/workspaces/bin
 should_transfer_files = yes
-transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(PIMS_PATH)/pimscache, nraosync://$(SPOOL_DIR)/metadata.json, nraosync://$(SPOOL_DIR)/tiles/$(SPLIT_DIR)
+transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(PIMS_PATH)/pimscache, nraorsync://$(SPOOL_DIR)/metadata.json, nraorsync://$(SPOOL_DIR)/tiles/$(SPLIT_DIR)/PPR.xml, nraorsync://$(SPOOL_DIR)/tiles/$(SPLIT_DIR)/working, nraorsync://$(SPOOL_DIR)/tiles/$(SPLIT_DIR)/rawdata, nraorsync://$(SPOOL_DIR)/tiles/$(SPLIT_DIR)/products
 when_to_transfer_output = ON_EXIT
 transfer_output_files = .job.ad
 output_destination = nraorsync://$(SPOOL_DIR)
 +WantIOProxy = True
 getenv = True
 environment = "CAPO_PATH=/home/casa/capo"
 request_memory = 24G
 requirements = HasLustre == True
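The split submit file now enumerates the per-split inputs (PPR.xml, working, rawdata, products under tiles/$(SPLIT_DIR)) instead of transferring the split directory wholesale, again with the nraorsync:// scheme corrected. For orientation, a rough sketch of the kind of context the workflow side would supply when rendering these templates; every value below is a made-up example and the renderer choice is an assumption:

    import pystache  # assumption: standard Mustache renderer

    context = {
        "spool_dir": "/lustre/aoc/cluster/pipeline/dev/workspaces/spool/tmp_example",  # made up
        "vlass_env": "dev",                                         # made up
        "vlass_product": "VLASS2.1",                                # made up
        "data_location": "/lustre/example/data",                    # made up
        "splits": ["T10t01/J0000-00.000", "T10t01/J0008-00.000"],   # made up
        "existing_restore": None,  # or the path of a previously restored MS
    }
    rendered_split_condor = pystache.render(split_condor, context)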
@@ -100,26 +114,26 @@ queue
 split_sh = """#!/bin/sh
 set -o errexit
-TILE=$(echo $1 | cut -d \"/\" -f 1)
-PHCENTER=$(echo $1 | cut -d \"/\" -f 2)
+export HOME=$TMPDIR
+TILE=$(echo $1 | cut -d "/" -f 1)
+PHCENTER=$(echo $1 | cut -d "/" -f 2)
 # Get the measurement set path
-{{^existingRestore}}
+{{^existing_restore}}
 MS={{data_location}}/working/*.ms
-{{/existingRestore}}
-{{#existingRestore}}
-MS={{existingRestore}}
-{{/existingRestore}}
+{{/existing_restore}}
+{{#existing_restore}}
+MS={{existing_restore}}
+{{/existing_restore}}
 # Link it in the splits rawdata
-ln -s $MS $1/rawdata/
+ln -s $MS rawdata/
 # Run CASA
-./casa_envoy --split metadata.json $1/PPR.xml
+./casa_envoy --split metadata.json PPR.xml
 # Populate cache
-./pimscache cp -c {{vlass_product}} -t $TILE -p $PHCENTER $MS
+./pimscache cp -c {{vlass_product}} -t $TILE -p $PHCENTER working/*_split.ms
 """
@@ -156,6 +170,7 @@ cd {{data_location}}
 """
 def upgrade():
     op.execute(
         """
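The tail of the migration (truncated in this view) writes the updated template text back to the database via op.execute. A minimal sketch of the shape of such an upgrade step; the table and column names are assumptions for illustration, since the real SQL is cut off above:

    import sqlalchemy as sa
    from alembic import op

    def upgrade():
        # Hypothetical table/column names; the actual statement is truncated above.
        op.get_bind().execute(
            sa.text("UPDATE workflow_templates SET content = :content WHERE filename = :name"),
            {"content": pims_split_dag, "name": "pims_split.dag"},
        )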