Compare revisions: ssa/workspaces
Commits on Source (7)
......@@ -228,16 +228,16 @@ class IngestionManifestBuilder:
def __init__(
self,
staging_source_dir: Path | None, # partial curation has no source path
manifest_source_dir: Path | None, # partial curation has no source path
sp_type: str,
telescope: str,
locator: Optional[str] = None, # initial observation ingestion, and full curation, have no locator
product_group: Optional[int] = None, # used for full curation
additional_metadata: Optional[AbstractTextFile] = None,
filename: Optional[str] = None,
# (curator_type, curation_source, target_list)
curate: Optional[tuple[CuratorType, Optional[str], Optional[List[str]]]] = None,
# Curation doesn't need a curation_source, but it does need a place to stick the manifest
# (curator_type, target_list)
curate: Optional[tuple[CuratorType, Optional[List[str]]]] = None,
# If None, self.manifest_destination_dir = manifest_source_dir
manifest_destination_dir: Optional[Path] = None,
# alternative to staging_source_dir and curation_source for specifying source files
file_list: Optional[list[str]] = None,
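For reference, a minimal sketch of a caller under the new signature, mirroring the launcher and test call sites shown further down. CuratorType, ScienceProductType, Telescope and IngestionManifestBuilder are assumed to be imported from the same workspaces modules as in those call sites; the locator, filename, and paths here are placeholders, not values from this change:

# Hypothetical partial-curation call under the new signature (placeholder values).
from pathlib import Path

builder = IngestionManifestBuilder(
    manifest_source_dir=None,  # partial curation may have no local source files
    sp_type=ScienceProductType.EXEC_BLOCK.value,
    telescope=Telescope.EVLA.value,
    locator="uid://evla/execblock/placeholder-locator",  # placeholder SPL
    filename="placeholder_eb_name",  # placeholder science product name
    curate=(CuratorType.PARTIAL, ["subscans.dec"]),  # (curator_type, target_list)
    manifest_destination_dir=Path("/tmp/manifest_out"),  # placeholder destination
)
manifest, manifest_file = builder.build()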
......@@ -249,7 +249,7 @@ class IngestionManifestBuilder:
raise ValueError("telescope is required.")
# get the directory containing the files to be ingested
self.staging_source_dir = staging_source_dir
self.manifest_source_dir = manifest_source_dir
# get the additional metadata, if any (EVLA CAL ingestion will have none)
self.additional_metadata = additional_metadata
......@@ -274,8 +274,7 @@ class IngestionManifestBuilder:
raise NotImplementedError(
f"Don't know how to build a {self.curation_type.value} curation {self.sp_type} manifest"
)
self.curation_source = Path(curate[1]) if curate and curate[1] else None
self.target_list = curate[2]
self.target_list = curate[1]
if self.curation_type == CuratorType.FULL and self.target_list:
raise IngestionManifestException(
f"Expected targets=None for full curation, got targets={self.target_list}"
......@@ -292,10 +291,10 @@ class IngestionManifestBuilder:
# directory to put the manifest in
self.manifest_destination_dir = manifest_destination_dir
if self.manifest_destination_dir is None:
self.manifest_destination_dir = self.staging_source_dir
self.manifest_destination_dir = manifest_source_dir
if self.manifest_destination_dir is None:
raise IngestionManifestException(
f"IngestionManifestBuilder: Expected a directory to house the manifest, found instead staging_source_dir={self.staging_source_dir}, manifest_destination_dir={self.manifest_destination_dir}"
f"IngestionManifestBuilder: Expected a directory to house the manifest, found instead staging_source_dir={self.manifest_source_dir}, manifest_destination_dir={self.manifest_destination_dir}"
)
if file_list:
......@@ -303,6 +302,8 @@ class IngestionManifestBuilder:
elif self.manifest_source_dir is not None:
# we are not running partial curation, look for files
self.files_found = [file for file in self.manifest_source_dir.iterdir()]
if len(self.files_found) == 0:
raise IngestionManifestException(f"No ingestion files found at {manifest_source_dir}")
# Partial curation doesn't require source files, it can fetch them from productfetcher via their SPLs
self.filename = filename
......@@ -333,7 +334,7 @@ class IngestionManifestBuilder:
"""
manifest = IngestionManifest(
telescope=self.telescope,
staging_source_dir=self.curation_source,
staging_source_dir=self.manifest_source_dir,
locator=self.locator,
sp_type=self.sp_type,
reingest_group=self._build_reingest_group(),
......@@ -352,7 +353,7 @@ class IngestionManifestBuilder:
telescope=self.telescope,
locator=self.locator,
sp_type=self.sp_type,
staging_source_dir=self.staging_source_dir,
staging_source_dir=self.manifest_source_dir,
reingest_group=None,
input_group=self._build_input_group(),
output_group=self._build_evla_cal_output_group(),
......@@ -483,7 +484,7 @@ class IngestionManifestBuilder:
"""
# find science product (we expect just one for this SP type)
tars_found = find_output_tars(self.files_found, self.staging_source_dir)
tars_found = find_output_tars(self.files_found, self.manifest_source_dir)
sci_prod = None
for file in tars_found:
......@@ -491,14 +492,14 @@ class IngestionManifestBuilder:
break
if sci_prod:
weblog_files = find_weblogs(self.files_found, self.staging_source_dir)
weblog_files = find_weblogs(self.files_found, self.manifest_source_dir)
weblogs = []
for file in weblog_files:
ap = AncillaryProduct(type=AncillaryProductType.PIPELINE_WEBLOG, filename=file.name)
if ap not in weblogs:
weblogs.append(ap)
else:
raise NoScienceProductException(f">>> NO SCIENCE PRODUCT FOUND in {self.staging_source_dir}")
raise NoScienceProductException(f">>> NO SCIENCE PRODUCT FOUND in {self.manifest_source_dir}")
return OutputGroup(science_products=[sci_prod], ancillary_products=weblogs)
......@@ -580,10 +581,6 @@ class IngestionManifestBuilder:
return ingestion_artifacts_tar
@property
def manifest_source_dir(self) -> Path | None:
return self.staging_source_dir if self.staging_source_dir is not None else self.curation_source
def format_timestamp(datetime: arrow.Arrow) -> str:
"""
......
......@@ -338,11 +338,11 @@ class CuratorLauncher(LauncherIF):
input_group_locator = self.parameters["input_group_locator"]
IngestionManifestBuilder(
staging_source_dir=None,
manifest_source_dir=curation_source,
sp_type=self.sci_product_type,
locator=spl,
telescope=telescope,
curate=(self.curator_type, curation_source, target_list),
curate=(self.curator_type, target_list),
manifest_destination_dir=self.manifest_destination_dir,
file_list=file_list,
product_group=product_group,
......
......@@ -77,9 +77,11 @@ def test_manifest_full_curation_eb_manifest(
eb_dir = eb_files[0]
actual_manifest, actual_manifest_file = IngestionManifestBuilder(
telescope=Telescope.EVLA.value,
staging_source_dir=None,
manifest_source_dir=eb_dir,
sp_type=ScienceProductType.EXEC_BLOCK.value,
curate=(CuratorType.FULL, str(eb_dir), None),
locator=EVLA_EB_LOCATOR,
filename=EVLA_EB_NAME,
curate=(CuratorType.FULL, None),
manifest_destination_dir=manifest_destination_dir,
product_group=EVLA_EB_PRODUCT_GROUP,
).build()
......@@ -160,7 +162,7 @@ def test_manifest_partial_curation_eb(has_curation_source: bool, ingest_path: Pa
assert len(eb_files) == 1
eb_dir = eb_files[0]
curation_source = str(eb_dir)
curation_source = eb_dir
expected_manifest_name = "partial_curation_evla_eb_manifest"
if not has_curation_source:
expected_manifest_name = "partial_curation_evla_eb_manifest_no_curation_source"
......@@ -168,10 +170,11 @@ def test_manifest_partial_curation_eb(has_curation_source: bool, ingest_path: Pa
actual_manifest, actual_manifest_file = IngestionManifestBuilder(
telescope=Telescope.EVLA.value,
staging_source_dir=None,
manifest_source_dir=curation_source,
sp_type=ScienceProductType.EXEC_BLOCK.value,
locator=EVLA_EB_LOCATOR,
curate=(CuratorType.PARTIAL, curation_source, ["subscans.dec"]),
filename=EVLA_EB_NAME,
curate=(CuratorType.PARTIAL, ["subscans.dec"]),
manifest_destination_dir=ingest_path,
).build()
......@@ -180,7 +183,7 @@ def test_manifest_partial_curation_eb(has_curation_source: bool, ingest_path: Pa
assert params.ngas_ingest is False
assert params.telescope == Telescope.EVLA
if has_curation_source:
assert params.staging_source_dir == Path(curation_source)
assert params.staging_source_dir == curation_source
else:
assert params.staging_source_dir is None
assert not actual_manifest.input_group
......@@ -221,11 +224,11 @@ def test_curation_manifest_bad_no_manifest_destination_directory(is_full_curatio
with pytest.raises(IngestionManifestException, match="directory to house the manifest"):
IngestionManifestBuilder(
telescope=Telescope.EVLA.value,
staging_source_dir=None,
sp_type=ScienceProductType.EXEC_BLOCK.value,
locator=EVLA_EB_LOCATOR,
filename=EVLA_EB_NAME,
curate=(curator_type, ingest_path, target_list),
curate=(curator_type, target_list),
manifest_source_dir=None,
manifest_destination_dir=None,
)
......@@ -237,7 +240,7 @@ def test_manifest_full_curation_image(use_file_list: bool, ingest_path: Path, al
"""
image_paths = populate_fake_tmpx_ratuqh_ingest_path(ingest_path, is_for_curation_test=True)
curation_source = str(ingest_path)
curation_source = ingest_path
file_list = None
manifest_destination_dir = ingest_path
if use_file_list:
......@@ -247,14 +250,15 @@ def test_manifest_full_curation_image(use_file_list: bool, ingest_path: Path, al
curation_source = None
actual_manifest, actual_manifest_file = IngestionManifestBuilder(
staging_source_dir=None,
telescope=Telescope.EVLA.value,
sp_type=ScienceProductType.IMAGE.value,
curate=(CuratorType.FULL, curation_source, None),
file_list=file_list,
manifest_destination_dir=manifest_destination_dir,
product_group=IMAGE_PRODUCT_GROUP,
input_group_locator=IMAGE_LOCATOR,
manifest_source_dir=curation_source,
telescope=Telescope.EVLA.value,
sp_type=ScienceProductType.IMAGE.value,
locator=IMAGE_LOCATOR,
curate=(CuratorType.FULL, None),
).build()
assert actual_manifest_file.name == CURATOR_MANIFEST_FILENAME
......
......@@ -80,7 +80,7 @@ def test_filters_cal_input_files(ingest_path: Path):
locator = "uid://evla/calibration/twinkle-twinkle-little-quasar"
manifest, _ = IngestionManifestBuilder(
telescope=Telescope.EVLA.value,
staging_source_dir=ingest_path,
manifest_source_dir=ingest_path,
sp_type=ScienceProductType.EVLA_CAL.value,
locator=locator,
).build()
......@@ -120,7 +120,7 @@ def test_writes_expected_evla_cal_output_files(ingest_path: Path):
populate_fake_evla_cal_ingest_path(ingest_path)
manifest_file, manifest = IngestionManifestBuilder(
telescope=Telescope.EVLA.value,
staging_source_dir=ingest_path,
manifest_source_dir=ingest_path,
locator="uid://evla/calibration/fee-fi-fo-fum-acdf23",
sp_type=ScienceProductType.EVLA_CAL.value,
).build()
......@@ -259,7 +259,7 @@ def test_ingestion_artifacts_tar_filename_built_just_once(ingest_path: Path):
) as mock:
# build the manifest
IngestionManifestBuilder(
staging_source_dir=ingest_path,
manifest_source_dir=ingest_path,
sp_type=ScienceProductType.EVLA_CAL.value,
locator="uid://evla/calibration/are-we-there-yet",
telescope=Telescope.EVLA.value,
......@@ -281,7 +281,7 @@ def test_ingestion_artifacts_tar_correct(ingest_path: Path):
locator = "uid://evla/calibration/3dfa528b-9870-46c9-a200-131dbac701cc"
# if you build it, they will come
IngestionManifestBuilder(
staging_source_dir=ingest_path,
manifest_source_dir=ingest_path,
sp_type=ScienceProductType.EVLA_CAL.value,
locator=locator,
telescope=Telescope.EVLA.value,
......@@ -318,7 +318,7 @@ def test_evla_cal_manifest_matches_example(ingest_path: Path):
populate_fake_evla_cal_ingest_path(ingest_path)
builder = IngestionManifestBuilder(
staging_source_dir=ingest_path,
manifest_source_dir=ingest_path,
telescope=Telescope.EVLA.value,
sp_type=ScienceProductType.EVLA_CAL.value,
locator="uid://evla/execblock/i-just-play-an-eb-on-teevee",
......@@ -373,7 +373,7 @@ def test_evla_cal_final_manifest_finds_init_weblog(ingest_path: Path):
assert len(weblogs) == 2
builder = IngestionManifestBuilder(
staging_source_dir=ingest_path,
manifest_source_dir=ingest_path,
telescope=Telescope.EVLA.value,
sp_type=ScienceProductType.EVLA_CAL.value,
locator="uid://evla/execblock/50bb85af-ce52-49d8-b9d8-9221bfce939d",
......@@ -416,7 +416,7 @@ def test_evla_cal_final_manifest_matches_example(ingest_path: Path):
populate_fake_final_evla_cal_ingest_path(ingest_path)
builder = IngestionManifestBuilder(
staging_source_dir=ingest_path,
manifest_source_dir=ingest_path,
telescope=Telescope.EVLA.value,
sp_type=ScienceProductType.EVLA_CAL.value,
locator="uid://evla/execblock/some-nonsense-not-in-db",
......
......@@ -151,7 +151,7 @@ def test_catches_invalid_sp_type():
# we expect this to work fine
IngestionManifestBuilder(
staging_source_dir=Path(),
manifest_source_dir=Path(),
telescope=Telescope.EVLA.value,
sp_type=ScienceProductType.IMAGE.value,
locator="somewhere-over-the-rainbow",
......@@ -160,7 +160,7 @@ def test_catches_invalid_sp_type():
# this, however, should fail
with pytest.raises(ValueError):
IngestionManifestBuilder(
staging_source_dir=Path(),
manifest_source_dir=Path(),
telescope=Telescope.EVLA.value,
sp_type="model_built_out_of_playdoh",
locator="somewhere-over-the-rainbow",
......@@ -180,7 +180,7 @@ def test_catches_invalid_telescope():
# we expect this to work fine
IngestionManifestBuilder(
staging_source_dir=Path(),
manifest_source_dir=Path(),
telescope=Telescope.EVLA.value,
sp_type=ScienceProductType.IMAGE.value,
locator="ill-have-what-shes-having",
......@@ -189,7 +189,7 @@ def test_catches_invalid_telescope():
# this, however, should go belly-up
with pytest.raises(ValueError):
IngestionManifestBuilder(
staging_source_dir=Path(),
manifest_source_dir=Path(),
telescope="nathans_mars_telescope",
sp_type=ScienceProductType.IMAGE.value,
locator="ill-have-what-shes-having",
......@@ -212,7 +212,7 @@ def build_tmpx_ratuqh_image_manifest(staging_source_dir: Path):
additional_metadata = AbstractTextFile(filename=ADDITIONAL_METADATA_FILENAME, content="")
return IngestionManifestBuilder(
staging_source_dir=staging_source_dir,
manifest_source_dir=staging_source_dir,
additional_metadata=additional_metadata,
telescope=Telescope.EVLA.value,
sp_type=ScienceProductType.IMAGE.value,
......
......@@ -60,7 +60,7 @@ def test_entry_point_for_evla_cal(ingest_path: Path):
telescope=Telescope.EVLA.value,
locator=locator,
sp_type=ScienceProductType.EVLA_CAL.value,
staging_source_dir=ingest_path,
manifest_source_dir=ingest_path,
)
builder.build()
ingestion_files = [file for file in ingest_path.iterdir()]
......@@ -107,7 +107,7 @@ def test_entry_point_for_image(ingest_path: Path):
telescope=Telescope.EVLA.value,
locator=locator,
sp_type=ScienceProductType.IMAGE.value,
staging_source_dir=ingest_path,
manifest_source_dir=ingest_path,
).build()
# there should be one ingestion manifest....
......
"""empty message
Revision ID: 008590dd66fd
Revises: f2a76d224984, c9d8e14ae603
Create Date: 2024-02-14 14:16:25.168901
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '008590dd66fd'
down_revision = ('f2a76d224984', 'c9d8e14ae603')
branch_labels = None
depends_on = None
def upgrade():
pass
def downgrade():
pass
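This revision is an Alembic merge point: upgrade() and downgrade() are deliberate no-ops, and the tuple down_revision joins the two parent heads f2a76d224984 and c9d8e14ae603 back into a single head. A file like this is normally generated rather than written by hand; a rough sketch of the equivalent call, assuming an alembic.ini at the migration root (path and message are illustrative):

# Illustrative only: programmatic equivalent of
# "alembic merge -m 'empty message' f2a76d224984 c9d8e14ae603"
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed location of the Alembic configuration
command.merge(cfg, revisions=["f2a76d224984", "c9d8e14ae603"], message="empty message")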
# Copyright (C) 2023 Associated Universities, Inc. Washington DC, USA.
#
# This file is part of NRAO Workspaces.
#
# Workspaces is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Workspaces is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Workspaces. If not, see <https://www.gnu.org/licenses/>.
#
"""fix vlass_seci_envoy.condor
Revision ID: 12d0499d232d
Revises: 1c435b5d7a8d
Create Date: 2024-02-07 11:25:02.570523
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "12d0499d232d"
down_revision = "1c435b5d7a8d"
branch_labels = None
depends_on = None
old_vlass_seci_envoy_condor = """executable = vlass_seci_envoy.sh
arguments = metadata.json PPR.xml
output = envoy.out
error = envoy.err
log = condor.log
VLASS_DIR = {{data_location}}
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/update_stage, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products"
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_memory = 50G
request_disk = 200G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True) + (HasLustre =!= True)
{{/remote}}
queue
"""
new_vlass_seci_envoy_condor = """executable = vlass_seci_envoy.sh
arguments = metadata.json PPR.xml
output = envoy.out
error = envoy.err
log = condor.log
VLASS_DIR = {{data_location}}
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/update_stage, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products"
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_memory = 50G
request_disk = 200G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True) && (HasLustre =!= True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) && (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
old_vlass_ql_envoy_condor = """executable = vlass_ql_envoy.sh
arguments = metadata.json PPR.xml {{request_id}}
output = envoy.out
error = envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
VLASS_DIR = {{data_location}}
should_transfer_files = yes
transfer_input_files = {{#radial}}$ENV(HOME)/.ssh/condor_ssh_config, {{/radial}}$ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/update_stage, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json{{files_to_transfer}}
transfer_output_files = .job.ad
+nrao_output_files = "working products"
when_to_transfer_output = ON_EXIT
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
{{#radial}}
universe = grid
grid_resource = condor radialhead.nrao.radial.local radialhead.nrao.radial.local
+remote_jobuniverse = 5
+remote_requirements = True
+remote_ShouldTransferFiles = "YES"
+remote_WhenToTransferOutput = "ON_EXIT"
{{/radial}}
request_memory = 31G
request_disk = 100G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True) + (HasLustre =!= True)
{{/remote}}
queue
"""
new_vlass_ql_envoy_condor = """executable = vlass_ql_envoy.sh
arguments = metadata.json PPR.xml {{request_id}}
output = envoy.out
error = envoy.err
log = condor.log
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
VLASS_DIR = {{data_location}}
should_transfer_files = yes
transfer_input_files = {{#radial}}$ENV(HOME)/.ssh/condor_ssh_config, {{/radial}}$ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/update_stage, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json{{files_to_transfer}}
transfer_output_files = .job.ad
+nrao_output_files = "working products"
when_to_transfer_output = ON_EXIT
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
{{#radial}}
universe = grid
grid_resource = condor radialhead.nrao.radial.local radialhead.nrao.radial.local
+remote_jobuniverse = 5
+remote_requirements = True
+remote_ShouldTransferFiles = "YES"
+remote_WhenToTransferOutput = "ON_EXIT"
{{/radial}}
request_memory = 31G
request_disk = 100G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True) && (HasLustre =!= True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) && (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
old_vlass_coarse_envoy_condor = """executable = vlass_coarse_envoy.sh
arguments = metadata.json PPR.xml {{requested_parallel}}
output = envoy.out
error = envoy.err
log = condor.log
VLASS_DIR = {{data_location}}
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
VLASS_BIN = /lustre/aoc/cluster/pipeline/vlass_{{vlass_env}}/workspaces/bin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/update_stage, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(VLASS_BIN)/planescraper, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products planes.json"
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_cpus = {{requested_parallel}}
request_memory = 150G
request_disk = 950G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) + (TARGET.VLASSTEST =!= True) + (HasLustre =!= True)
{{/remote}}
queue
"""
new_vlass_coarse_envoy_condor = """executable = vlass_coarse_envoy.sh
arguments = metadata.json PPR.xml {{requested_parallel}}
output = envoy.out
error = envoy.err
log = condor.log
VLASS_DIR = {{data_location}}
SBIN_PATH = /lustre/aoc/cluster/pipeline/$ENV(CAPO_PROFILE)/workspaces/sbin
VLASS_BIN = /lustre/aoc/cluster/pipeline/vlass_{{vlass_env}}/workspaces/bin
should_transfer_files = yes
transfer_input_files = $ENV(HOME)/.ssh/condor_transfer, nraorsync://$(SBIN_PATH)/.matplotlib, nraorsync://$(SBIN_PATH)/pycapo, nraorsync://$(SBIN_PATH)/update_stage, nraorsync://$(SBIN_PATH)/vela, nraorsync://$(SBIN_PATH)/casa_envoy, nraorsync://$(VLASS_BIN)/planescraper, nraorsync://$(VLASS_DIR)/working, nraorsync://$(VLASS_DIR)/rawdata, nraorsync://$(VLASS_DIR)/products{{#remote}}, nraorsync://$(VLASS_DIR)/{{profile}}.properties{{/remote}}, nraorsync://$(VLASS_DIR)/PPR.xml, nraorsync://$(VLASS_DIR)/metadata.json
when_to_transfer_output = ON_EXIT
transfer_output_files = .job.ad
+nrao_output_files = "working products planes.json"
output_destination = nraorsync://$(VLASS_DIR)
+WantIOProxy = True
request_cpus = {{requested_parallel}}
request_memory = 150G
request_disk = 950G
getenv = True
{{^remote}}
environment = "CAPO_PATH=/home/casa/capo"
requirements = (VLASS == True) && (HasLustre == True)
+partition = "VLASS"
{{/remote}}
{{#remote}}
requirements = (VLASS == True) && (HasLustre =!= True)
+partition = "VLASS"
Rank = (TARGET.VLASS == True) && (TARGET.VLASSTEST =!= True)
{{/remote}}
queue
"""
def upgrade():
op.execute(
f"""
UPDATE workflow_templates
SET content=E'{new_vlass_seci_envoy_condor}'
WHERE filename='vlass_seci_envoy.condor'
"""
)
op.execute(
f"""
UPDATE workflow_templates
SET content=E'{new_vlass_ql_envoy_condor}'
WHERE filename='vlass_ql_envoy.condor'
"""
)
op.execute(
f"""
UPDATE workflow_templates
SET content=E'{new_vlass_coarse_envoy_condor}'
WHERE filename='vlass_coarse_envoy.condor'
"""
)
def downgrade():
op.execute(
f"""
UPDATE workflow_templates
SET content=E'{old_vlass_seci_envoy_condor}'
WHERE filename='vlass_seci_envoy.condor'
"""
)
op.execute(
f"""
UPDATE workflow_templates
SET content=E'{old_vlass_ql_envoy_condor}'
WHERE filename='vlass_ql_envoy.condor'
"""
)
op.execute(
f"""
UPDATE workflow_templates
SET content=E'{old_vlass_coarse_envoy_condor}'
WHERE filename='vlass_coarse_envoy.condor'
"""
)
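Between each old/new template pair above, the only change is in the {{#remote}} section: requirements gains && (HasLustre =!= True), and Rank becomes a strict conjunction instead of a sum of boolean terms. The {{#remote}} and {{^remote}} markers are Mustache-style sections, rendered when the remote flag is truthy or falsy respectively. As a rough sketch of how such a template gets filled in, assuming a Mustache renderer such as chevron (the renderer the workspaces system actually uses is not shown in this diff, and the values below are placeholders):

# Illustrative only: render the remote variant of the SECI template with placeholder values.
import chevron  # assumed Mustache renderer; not necessarily what workspaces uses

rendered = chevron.render(
    new_vlass_seci_envoy_condor,
    {"data_location": "/tmp/vlass_seci_job", "remote": True, "profile": "local"},
)
print(rendered)  # the {{^remote}} block is dropped, the {{#remote}} block is kept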